diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b16c8d9e13..cb03c08398 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -9,7 +9,7 @@ on: jobs: lint: - runs-on: [self-hosted, CI-worker] + runs-on: [self-hosted, ci-runner] steps: - uses: actions/checkout@v2 @@ -37,7 +37,7 @@ jobs: ci_run zk lint --check unit-tests: - runs-on: [self-hosted, CI-worker] + runs-on: [self-hosted, ci-runner] steps: - uses: actions/checkout@v2 @@ -78,7 +78,7 @@ jobs: run: ci_run zk test server-rust integration: - runs-on: [self-hosted, FAST] + runs-on: [self-hosted, ci-runner] steps: - uses: actions/checkout@v2 @@ -138,7 +138,7 @@ jobs: ci_run cat dummy_prover.log circuit-tests: - runs-on: [self-hosted, CI-worker] + runs-on: [self-hosted, ci-runner] steps: - uses: actions/checkout@v2 @@ -147,6 +147,13 @@ jobs: run: | echo ZKSYNC_HOME=$(pwd) >> $GITHUB_ENV echo $(pwd)/bin >> $GITHUB_PATH + + - name: start-services + run: | + docker-compose -f docker-compose-runner.yml down + docker-compose -f docker-compose-runner.yml pull + docker-compose -f docker-compose-runner.yml up --build -d geth postgres zk + ci_run sccache --start-server - name: init run: | @@ -155,10 +162,10 @@ jobs: ci_run zk run verify-keys unpack - name: circuit-tests - run: ci_run zk test crypto-rust + run: ci_run zk test circuit 20 testkit: - runs-on: [self-hosted, CI-worker] + runs-on: [self-hosted, ci-runner] steps: - uses: actions/checkout@v2 @@ -187,12 +194,13 @@ jobs: run: ci_run zk test integration testkit --offline notify: - if: failure() + if: always() name: Notify on failures runs-on: ubuntu-latest needs: [lint, unit-tests, integration, circuit-tests, testkit] steps: - + if: failure() name: Notify to Mattermost (on incidents) uses: tferreira/matterfy@releases/v1 with: diff --git a/.github/workflows/npm.publish-packages.yml b/.github/workflows/npm.publish-packages.yml index 55ad50e369..b9cdec3aca 100644 --- a/.github/workflows/npm.publish-packages.yml +++ b/.github/workflows/npm.publish-packages.yml @@ -10,7 +10,7 @@ on: jobs: publish-zksync: name: Publish zksync.js - uses: matter-labs/zksync-dev/.github/workflows/npm.publish.yml@devops/publish-npm-zksync + uses: matter-labs/zksync-dev/.github/workflows/npm.publish.yml@dev with: working-directory: sdk/zksync.js build-command: | @@ -19,3 +19,4 @@ jobs: ref: ${{ github.event.inputs.ref }} secrets: notify-webhook: ${{ secrets.MATTERMOST_WEBHOOK }} + token: ${{ secrets.NPM_TOKEN }} diff --git a/.github/workflows/npm.publish.yml b/.github/workflows/npm.publish.yml index 4530de3536..abed9bc0f0 100644 --- a/.github/workflows/npm.publish.yml +++ b/.github/workflows/npm.publish.yml @@ -20,7 +20,9 @@ on: notify-webhook: description: Chat notification webhook required: true - + token: + description: NPM token + required: true jobs: local-call-publish: name: Build NPM package @@ -53,7 +55,7 @@ jobs: run: | npm publish env: - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} + NODE_AUTH_TOKEN: ${{ secrets.token }} - if: failure() name: Notify to Mattermost (on incidents) diff --git a/Cargo.lock b/Cargo.lock index 62170ed2c9..46ab86acf1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1560,6 +1560,14 @@ dependencies = [ "static_assertions", ] +[[package]] +name = "flamegraph_target" +version = "0.1.0" +dependencies = [ + "structopt", + "zksync_crypto", +] + [[package]] name = "flate2" version = "1.0.22" @@ -3860,21 +3868,6 @@ dependencies = [ "winapi 0.3.9", ] -[[package]] -name = "remove_outstanding_tx_filters" -version = "0.1.0" -dependencies = [ - "anyhow", - "ethabi", - 
"structopt", - "tokio", - "web3", - "zksync_config", - "zksync_eth_client", - "zksync_storage", - "zksync_types", -] - [[package]] name = "remove_proofs" version = "1.0.0" @@ -5269,18 +5262,6 @@ version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" -[[package]] -name = "update_sequnce_number" -version = "1.0.0" -dependencies = [ - "anyhow", - "structopt", - "tokio", - "zksync_config", - "zksync_storage", - "zksync_types", -] - [[package]] name = "url" version = "1.7.2" diff --git a/Cargo.toml b/Cargo.toml index a1f6d3d71e..f92957afbc 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -8,9 +8,7 @@ members = [ "core/bin/parse_pub_data", "core/bin/block_revert", "core/bin/remove_proofs", - "core/bin/update_sequnce_number", - "core/bin/remove_outstanding_tx_filters", # Server micro-services "core/bin/zksync_api", "core/bin/zksync_core", @@ -41,6 +39,7 @@ members = [ "core/lib/balancer", # Test infrastructure + "core/tests/flamegraph_target", "core/tests/test_account", "core/tests/testkit", "core/tests/loadnext", @@ -48,3 +47,7 @@ members = [ # SDK section "sdk/zksync-rs" ] + +[profile.release.package.flamegraph_target] +# We need both performance and debug info to analyze. +debug = true diff --git a/core/bin/remove_outstanding_tx_filters/Cargo.toml b/core/bin/remove_outstanding_tx_filters/Cargo.toml deleted file mode 100644 index c1fcbd9e03..0000000000 --- a/core/bin/remove_outstanding_tx_filters/Cargo.toml +++ /dev/null @@ -1,26 +0,0 @@ -[package] -name = "remove_outstanding_tx_filters" -version = "0.1.0" -edition = "2021" -authors = ["The Matter Labs Team "] -homepage = "https://zksync.io/" -repository = "https://github.com/matter-labs/zksync" -license = "Apache-2.0" -keywords = ["blockchain", "zksync"] -categories = ["cryptography"] -publish = false # We don't want to publish our binaries. 
- -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -# TODO remove it ZKS-931 -[dependencies] -zksync_types = { path = "../../lib/types", version = "1.0" } -zksync_storage = { path = "../../lib/storage", version = "1.0" } -zksync_eth_client = { path = "../../lib/eth_client", version = "1.0" } -zksync_config = { path = "../../lib/config", version = "1.0" } - -tokio = { version = "1", features = ["full"] } -ethabi = "14.0.0" -anyhow = "1.0" -web3 = "0.16.0" -structopt = "0.3.20" diff --git a/core/bin/remove_outstanding_tx_filters/src/main.rs b/core/bin/remove_outstanding_tx_filters/src/main.rs deleted file mode 100644 index 9dc33b6449..0000000000 --- a/core/bin/remove_outstanding_tx_filters/src/main.rs +++ /dev/null @@ -1,12 +0,0 @@ -use zksync_storage::StorageProcessor; - -#[tokio::main] -async fn main() -> anyhow::Result<()> { - let mut storage = StorageProcessor::establish_connection().await?; - storage - .chain() - .operations_schema() - .remove_outstanding_tx_filters() - .await?; - Ok(()) -} diff --git a/core/bin/update_sequnce_number/Cargo.toml b/core/bin/update_sequnce_number/Cargo.toml deleted file mode 100644 index 1b1b215664..0000000000 --- a/core/bin/update_sequnce_number/Cargo.toml +++ /dev/null @@ -1,24 +0,0 @@ -[package] -name = "update_sequnce_number" -version = "1.0.0" -edition = "2018" -authors = ["The Matter Labs Team "] -homepage = "https://zksync.io/" -repository = "https://github.com/matter-labs/zksync" -license = "Apache-2.0" -keywords = ["blockchain", "zksync"] -categories = ["cryptography"] -publish = false # We don't want to publish our binaries. -#TODO remove it ZKS-931 - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - - -[dependencies] -zksync_types = { path = "../../lib/types", version = "1.0" } -zksync_storage = { path = "../../lib/storage", version = "1.0" } -zksync_config = { path = "../../lib/config", version = "1.0" } - -tokio = { version = "1", features = ["full"] } -anyhow = "1.0" -structopt = "0.3.20" diff --git a/core/bin/update_sequnce_number/src/main.rs b/core/bin/update_sequnce_number/src/main.rs deleted file mode 100644 index 6670d1177f..0000000000 --- a/core/bin/update_sequnce_number/src/main.rs +++ /dev/null @@ -1,42 +0,0 @@ -use std::cmp; -use structopt::StructOpt; -use zksync_storage::StorageProcessor; - -#[derive(Debug, StructOpt)] -struct Opt { - #[structopt(long)] - last_block_for_update: i64, -} - -#[tokio::main] -async fn main() -> anyhow::Result<()> { - let opt = Opt::from_args(); - - let mut storage = StorageProcessor::establish_connection().await?; - let block_range = 200; - let mut start_block = storage - .chain() - .operations_ext_schema() - .min_block_for_update_sequence() - .await; - - let mut to_block = cmp::min(start_block + block_range, opt.last_block_for_update); - let mut last_known_sequence_number = None; - while to_block <= opt.last_block_for_update { - println!("Start updating from {:?} to {:?}", start_block, to_block); - last_known_sequence_number = Some( - storage - .chain() - .operations_ext_schema() - .update_sequence_number_for_blocks( - start_block, - to_block, - last_known_sequence_number, - ) - .await, - ); - start_block = to_block + 1; - to_block = cmp::min(start_block + block_range, opt.last_block_for_update); - } - Ok(()) -} diff --git a/core/bin/zksync_api/src/api_server/helpers.rs b/core/bin/zksync_api/src/api_server/helpers.rs index 46766e1926..039bb3dca3 100644 --- a/core/bin/zksync_api/src/api_server/helpers.rs 
+++ b/core/bin/zksync_api/src/api_server/helpers.rs @@ -32,7 +32,7 @@ pub fn try_parse_hash(query: &str) -> Result { Ok(H256::from_slice(&slice)) } -async fn depositing_from_pending_ops( +pub(crate) async fn depositing_from_pending_ops( storage: &mut StorageProcessor<'_>, tokens: &TokenDBCache, pending_ops: OngoingDepositsResp, @@ -72,7 +72,7 @@ async fn depositing_from_pending_ops( Ok(DepositingAccountBalances { balances }) } -async fn get_pending_ops( +pub(crate) async fn get_pending_ops( core_api_client: &CoreApiClient, address: Address, ) -> Result { diff --git a/core/bin/zksync_api/src/api_server/rest/v02/account.rs b/core/bin/zksync_api/src/api_server/rest/v02/account.rs index cd9660fa0f..8d67bb6306 100644 --- a/core/bin/zksync_api/src/api_server/rest/v02/account.rs +++ b/core/bin/zksync_api/src/api_server/rest/v02/account.rs @@ -26,8 +26,8 @@ use super::{ response::ApiResult, }; use crate::{ - api_server::helpers::get_depositing, api_try, core_api_client::CoreApiClient, - fee_ticker::PriceError, utils::token_db_cache::TokenDBCache, + api_try, core_api_client::CoreApiClient, fee_ticker::PriceError, + utils::token_db_cache::TokenDBCache, }; /// Shared data between `api/v02/accounts` endpoints. @@ -242,17 +242,23 @@ impl ApiAccountData { address: Address, account_id: Option, ) -> Result { + // **Important**: We should get ongoing deposits *before* acquiring connection to the database. + // Otherwise we can starve the pool. + let pending_ops = + crate::api_server::helpers::get_pending_ops(&self.core_api_client, address).await?; // TODO: add timeout. + + // Only acquire connection *after* we got info on deposits. let mut storage = self.pool.access_storage().await.map_err(Error::storage)?; let mut transaction = storage.start_transaction().await.map_err(Error::storage)?; - let depositing = get_depositing( + let depositing = crate::api_server::helpers::depositing_from_pending_ops( &mut transaction, - &self.core_api_client, &self.tokens, - address, + pending_ops, self.confirmations_for_eth_event, ) .await?; + let (committed, finalized) = if let Some(account_id) = account_id { let (finalized_state, committed_state) = transaction .chain() diff --git a/core/bin/zksync_api/src/api_server/rest/v02/transaction.rs b/core/bin/zksync_api/src/api_server/rest/v02/transaction.rs index 938129a884..7204a1f383 100644 --- a/core/bin/zksync_api/src/api_server/rest/v02/transaction.rs +++ b/core/bin/zksync_api/src/api_server/rest/v02/transaction.rs @@ -35,6 +35,7 @@ impl ApiTransactionData { } async fn tx_status(&self, tx_hash: TxHash) -> Result, Error> { + // Try to find in the DB. let mut storage = self .tx_sender .pool @@ -48,23 +49,31 @@ impl ApiTransactionData { .await .map_err(Error::storage)? { - Ok(Some(receipt)) - } else if let Some(op) = self + return Ok(Some(receipt)); + } + + // DB lookup failed. Important to drop the connection, so it returns to the pool. + // Otherwise, if remote call takes too long, the connection will not be available for the whole time. + drop(storage); + + // Try to get pending op. + if let Some(op) = self .tx_sender .core_api_client .get_unconfirmed_op(PriorityOpLookupQuery::ByAnyHash(tx_hash)) .await .map_err(Error::core_api)? { - Ok(Some(Receipt::L1(L1Receipt { + return Ok(Some(Receipt::L1(L1Receipt { status: TxInBlockStatus::Queued, eth_block: EthBlockId(op.eth_block), rollup_block: None, id: op.serial_id, - }))) - } else { - Ok(None) + }))); } + + // Nothing. 
+ Ok(None) } async fn tx_data(&self, tx_hash: TxHash) -> Result, Error> { diff --git a/core/bin/zksync_core/src/state_keeper/mod.rs b/core/bin/zksync_core/src/state_keeper/mod.rs index 86628dacb9..772f89f5db 100644 --- a/core/bin/zksync_core/src/state_keeper/mod.rs +++ b/core/bin/zksync_core/src/state_keeper/mod.rs @@ -387,6 +387,8 @@ impl ZkSyncStateKeeper { let non_executed_op = self .state .priority_op_to_zksync_op(priority_op.data.clone()); + + #[allow(clippy::question_mark)] // False positive, we aren't returning `Result`. if self .pending_block .gas_counter diff --git a/core/bin/zksync_core/src/state_keeper/root_hash_calculator/mod.rs b/core/bin/zksync_core/src/state_keeper/root_hash_calculator/mod.rs index 9b6e7c69e0..46e037727b 100644 --- a/core/bin/zksync_core/src/state_keeper/root_hash_calculator/mod.rs +++ b/core/bin/zksync_core/src/state_keeper/root_hash_calculator/mod.rs @@ -102,6 +102,16 @@ impl RootHashCalculator { job.block.0 as f64, "stage" => "root_hash_calculator" ); + self.report_memory_stats(); + } + + fn report_memory_stats(&self) { + let memory_stats = self.state.tree_memory_stats(); + metrics::histogram!("tree_memory_usage", memory_stats.allocated_total as f64, "type" => "total"); + metrics::histogram!("tree_memory_usage", memory_stats.items as f64, "type" => "items"); + metrics::histogram!("tree_memory_usage", memory_stats.nodes as f64, "type" => "nodes"); + metrics::histogram!("tree_memory_usage", memory_stats.prehashed as f64, "type" => "prehashed"); + metrics::histogram!("tree_memory_usage", memory_stats.cache as f64, "type" => "cache"); } } diff --git a/core/bin/zksync_witness_generator/src/witness_generator.rs b/core/bin/zksync_witness_generator/src/witness_generator.rs index 577da0b363..63d0e47ae8 100644 --- a/core/bin/zksync_witness_generator/src/witness_generator.rs +++ b/core/bin/zksync_witness_generator/src/witness_generator.rs @@ -1,3 +1,4 @@ +use std::time::Instant; // Built-in use std::{thread, time}; // External @@ -74,6 +75,7 @@ impl WitnessGenerator { &self, block_number: BlockNumber, ) -> Result { + let start = Instant::now(); let mut storage = self.database.acquire_connection().await?; let mut transaction = storage.start_transaction().await?; let block = self @@ -94,6 +96,7 @@ impl WitnessGenerator { BlockInfo::NotReadyBlock }; transaction.commit().await?; + metrics::histogram!("witness_generator", start.elapsed(), "stage" => "should_work_on_block"); Ok(block_info) } @@ -101,13 +104,17 @@ impl WitnessGenerator { &self, block: BlockNumber, ) -> Result { - let start = time::Instant::now(); + let fn_start = Instant::now(); + let mut storage = self.database.acquire_connection().await?; + + let start = Instant::now(); let mut circuit_account_tree = CircuitAccountTree::new(account_tree_depth()); + let cache = self.database.load_account_tree_cache(&mut storage).await?; + metrics::histogram!("witness_generator", start.elapsed(), "stage" => "load_cache"); - if let Some((cached_block, account_tree_cache)) = - self.database.load_account_tree_cache(&mut storage).await? - { + let start = Instant::now(); + if let Some((cached_block, account_tree_cache)) = cache { let (_, accounts) = self .database .load_committed_state(&mut storage, Some(block)) @@ -117,6 +124,11 @@ impl WitnessGenerator { } circuit_account_tree.set_internals(serde_json::from_value(account_tree_cache)?); if block != cached_block { + // There is no relevant cache, so we have to use some outdated cache and update the tree. 
+ metrics::increment_counter!("witness_generator.cache_access", "type" => "miss"); + + vlog::info!("Reconstructing the cache for the block {} using the cached tree for the block {}", block, cached_block); + let (_, accounts) = self .database .load_committed_state(&mut storage, Some(block)) @@ -138,6 +150,9 @@ impl WitnessGenerator { } } circuit_account_tree.root_hash(); + metrics::histogram!("witness_generator", start.elapsed(), "stage" => "recreate_tree_from_cache"); + + let start = Instant::now(); let account_tree_cache = circuit_account_tree.get_internals(); self.database .store_account_tree_cache( @@ -146,8 +161,13 @@ impl WitnessGenerator { serde_json::to_value(account_tree_cache)?, ) .await?; + metrics::histogram!("witness_generator", start.elapsed(), "stage" => "store_cache"); + } else { + // There exists a cache for the block we are interested in. + metrics::increment_counter!("witness_generator.cache_access", "type" => "hit"); } } else { + // There are no caches at all. let (_, accounts) = self .database .load_committed_state(&mut storage, Some(block)) @@ -156,16 +176,21 @@ impl WitnessGenerator { circuit_account_tree.insert(*id, account.into()); } circuit_account_tree.root_hash(); - let account_tree_cache = circuit_account_tree.get_internals(); + + metrics::histogram!("witness_generator", start.elapsed(), "stage" => "recreate_tree_from_scratch"); + + let start = Instant::now(); + let tree_cache = serde_json::to_value(circuit_account_tree.get_internals())?; + metrics::histogram!("witness_generator", start.elapsed(), "stage" => "serialize_cache"); + + let start = Instant::now(); self.database - .store_account_tree_cache( - &mut storage, - block, - serde_json::to_value(account_tree_cache)?, - ) + .store_account_tree_cache(&mut storage, block, tree_cache) .await?; + metrics::histogram!("witness_generator", start.elapsed(), "stage" => "store_cache"); } + let start = Instant::now(); if block != BlockNumber(0) { let storage_block = self .database @@ -178,29 +203,25 @@ impl WitnessGenerator { "account tree root hash restored incorrectly" ); } + metrics::histogram!("witness_generator", start.elapsed(), "stage" => "ensure_root_hash"); - metrics::histogram!("witness_generator.load_account_tree", start.elapsed()); + metrics::histogram!("witness_generator", fn_start.elapsed(), "stage" => "load_account_tree"); Ok(circuit_account_tree) } async fn prepare_witness_and_save_it(&self, block: Block) -> anyhow::Result<()> { - let start = time::Instant::now(); - let timer = time::Instant::now(); + let fn_start = Instant::now(); let mut storage = self.database.acquire_connection().await?; + let start = Instant::now(); let mut circuit_account_tree = self.load_account_tree(block.block_number - 1).await?; - vlog::trace!( - "Witness generator loading circuit account tree {}s", - timer.elapsed().as_secs() - ); + metrics::histogram!("witness_generator", start.elapsed(), "stage" => "load_tree_full"); - let timer = time::Instant::now(); + let start = Instant::now(); let witness: ProverData = build_block_witness(&mut circuit_account_tree, &block)?.into(); - vlog::trace!( - "Witness generator witness build {}s", - timer.elapsed().as_secs() - ); + metrics::histogram!("witness_generator", start.elapsed(), "stage" => "build_witness"); + let start = Instant::now(); self.database .store_witness( &mut storage, @@ -208,11 +229,9 @@ impl WitnessGenerator { serde_json::to_value(witness).expect("Witness serialize to json"), ) .await?; + metrics::histogram!("witness_generator", start.elapsed(), "stage" => "store_witness"); 
- metrics::histogram!( - "witness_generator.prepare_witness_and_save_it", - start.elapsed() - ); + metrics::histogram!("witness_generator", fn_start.elapsed(), "stage" => "prepare_witness_and_save_it"); Ok(()) } diff --git a/core/lib/crypto/src/merkle_tree/mod.rs b/core/lib/crypto/src/merkle_tree/mod.rs index 386e626a10..209e82cf87 100644 --- a/core/lib/crypto/src/merkle_tree/mod.rs +++ b/core/lib/crypto/src/merkle_tree/mod.rs @@ -8,3 +8,25 @@ mod tests; pub type SparseMerkleTree = parallel_smt::SparseMerkleTree; /// Default hasher used in the zkSync network for state hash calculations. pub type RescueHasher = rescue_hasher::RescueHasher; + +/// Represents the amount of RAM consumed by the tree. +/// Only data allocated on the heap is counted. +/// +/// Field represent the amount of memory actually requested by the system. +/// For example, Rust `Vec`s allocate 2x previous amount on resize, so the `Vec` can +/// request up to 2x the amount of memory than is needed to fit all the elements. +/// +/// All the fields represent the memory amount in bytes. +#[derive(Debug, Clone, Copy)] +pub struct TreeMemoryUsage { + /// Memory used to store actual values in the tree. + pub items: usize, + /// Memory used to store hash nodes in the tree. + pub nodes: usize, + /// Memory used to store pre-calculated hashes for the "default" nodes. + pub prehashed: usize, + /// Memory used to store cache of calculated hashes for all the nodes in the tree. + pub cache: usize, + /// Total memory allocated by containers in the tree. + pub allocated_total: usize, +} diff --git a/core/lib/crypto/src/merkle_tree/parallel_smt.rs b/core/lib/crypto/src/merkle_tree/parallel_smt.rs index 69aa881b36..7ea90b90d1 100644 --- a/core/lib/crypto/src/merkle_tree/parallel_smt.rs +++ b/core/lib/crypto/src/merkle_tree/parallel_smt.rs @@ -1,5 +1,5 @@ /// Sparse Merkle tree with batch updates -use super::hasher::Hasher; +use super::{hasher::Hasher, TreeMemoryUsage}; use crate::{ ff::{PrimeField, PrimeFieldRepr}, primitives::GetBits, @@ -196,6 +196,28 @@ where root: 0, } } + + /// Roughly calculates the data on the RAM usage for this tree object. + /// See the [`TreeMemoryUsage`] doc-comments for details. + pub fn memory_stats(&self) -> TreeMemoryUsage { + use std::mem::size_of; + + // For hashmaps, we use both size of keys and values. 
+ let items = self.items.capacity() * (size_of::() + size_of::()); + let nodes = self.nodes.capacity() * size_of::(); + let prehashed = self.prehashed.capacity() * size_of::(); + let cache = + self.cache.read().unwrap().capacity() * (size_of::() + size_of::()); + let allocated_total = items + nodes + prehashed + cache; + + TreeMemoryUsage { + items, + nodes, + prehashed, + cache, + allocated_total, + } + } } impl SparseMerkleTree diff --git a/core/lib/state/src/state.rs b/core/lib/state/src/state.rs index f5222cbafd..70c9e86589 100644 --- a/core/lib/state/src/state.rs +++ b/core/lib/state/src/state.rs @@ -1,7 +1,7 @@ use num::BigUint; use std::collections::{HashMap, HashSet}; -use zksync_crypto::{params, params::NFT_STORAGE_ACCOUNT_ID, Fr}; +use zksync_crypto::{merkle_tree::TreeMemoryUsage, params, params::NFT_STORAGE_ACCOUNT_ID, Fr}; use zksync_types::{ helpers::reverse_updates, operations::{TransferOp, TransferToNewOp, ZkSyncOp}, @@ -111,6 +111,10 @@ impl ZkSyncState { } } + pub fn tree_memory_stats(&self) -> TreeMemoryUsage { + self.balance_tree.memory_stats() + } + pub fn get_accounts(&self) -> Vec<(u32, Account)> { self.balance_tree .items diff --git a/core/lib/storage/sqlx-data.json b/core/lib/storage/sqlx-data.json index 174fa38ca2..05a95948cf 100644 --- a/core/lib/storage/sqlx-data.json +++ b/core/lib/storage/sqlx-data.json @@ -199,26 +199,6 @@ "nullable": [] } }, - "0a5a4f5bee099e8a439f3cd4b1023f995bd82106a7e65417b0df4ca23168298a": { - "query": "SELECT sequence_number FROM executed_priority_operations\n WHERE tx_hash = $1 ORDER BY created_at DESC", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "sequence_number", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [ - "Bytea" - ] - }, - "nullable": [ - true - ] - } - }, "0bdd32081fc9c8fbfb63787696884617129c30915c400e5647d2a81f882c6d4d": { "query": "SELECT eth_op_id FROM eth_aggregated_ops_binding WHERE op_id = ANY($1)", "describe": { @@ -509,21 +489,24 @@ "nullable": [] } }, - "11decdd07f890f291a8526e42e110ed277ff894b1b5b52f319001c9d8311e954": { - "query": "\n WITH transactions AS (\n SELECT tx_hash\n FROM executed_transactions\n ), priority_ops AS (\n SELECT tx_hash\n FROM executed_priority_operations\n ), everything AS (\n SELECT * FROM transactions\n UNION ALL\n SELECT * FROM priority_ops\n )\n SELECT\n tx_hash as \"tx_hash!\"\n FROM everything", + "12c3c4b49198c469f5f411d86f40079d38e1cfd65da1d9721a895fa15e80df3c": { + "query": "SELECT sequence_number FROM executed_priority_operations\n WHERE tx_hash = $1 AND block_number = $2 ORDER BY sequence_number DESC", "describe": { "columns": [ { "ordinal": 0, - "name": "tx_hash!", - "type_info": "Bytea" + "name": "sequence_number", + "type_info": "Int8" } ], "parameters": { - "Left": [] + "Left": [ + "Bytea", + "Int8" + ] }, "nullable": [ - null + true ] } }, @@ -745,92 +728,6 @@ "nullable": [] } }, - "171cecab10a6667529dca3c7b89b035caf184916e1772a1a7aae778f4c3e0fe9": { - "query": "\n WITH aggr_comm AS (\n SELECT \n aggregate_operations.confirmed, \n commit_aggregated_blocks_binding.block_number \n FROM aggregate_operations\n INNER JOIN commit_aggregated_blocks_binding ON aggregate_operations.id = commit_aggregated_blocks_binding.op_id\n WHERE aggregate_operations.confirmed = true \n ), aggr_exec AS (\n SELECT \n aggregate_operations.confirmed, \n execute_aggregated_blocks_binding.block_number \n FROM aggregate_operations\n INNER JOIN execute_aggregated_blocks_binding ON aggregate_operations.id = execute_aggregated_blocks_binding.op_id\n WHERE 
aggregate_operations.confirmed = true \n ), tx_hashes AS (\n SELECT DISTINCT tx_hash FROM tx_filters\n WHERE address = $1\n ), transactions as (\n select\n *\n from (\n select\n concat_ws(',', block_number, block_index) as tx_id,\n tx,\n 'sync-tx:' || encode(executed_transactions.tx_hash, 'hex') as hash,\n null as pq_id,\n null as eth_block,\n success,\n fail_reason,\n block_number,\n created_at,\n batch_id\n from tx_hashes\n inner join executed_transactions\n on tx_hashes.tx_hash = executed_transactions.tx_hash\n where\n block_number BETWEEN $3 AND $4 or (block_number = $2 and block_index BETWEEN $5 AND $6)\n union all\n select\n concat_ws(',', block_number, block_index) as tx_id,\n operation as tx,\n '0x' || encode(eth_hash, 'hex') as hash,\n priority_op_serialid as pq_id,\n eth_block,\n true as success,\n null as fail_reason,\n block_number,\n created_at,\n Null::bigint as batch_id\n from \n executed_priority_operations\n where \n (\n from_account = $1\n or\n to_account = $1\n )\n and\n (block_number BETWEEN $3 AND $4 or (block_number = $2 and block_index BETWEEN $5 AND $6))\n ) t\n order by\n block_number desc, created_at desc\n limit \n $7\n )\n select\n tx_id as \"tx_id!\",\n hash as \"hash?\",\n eth_block as \"eth_block?\",\n pq_id as \"pq_id?\",\n tx as \"tx!\",\n success as \"success?\",\n fail_reason as \"fail_reason?\",\n true as \"commited!\",\n coalesce(verified.confirmed, false) as \"verified!\",\n created_at as \"created_at!\",\n batch_id as \"batch_id?\"\n from transactions\n left join aggr_comm committed on\n committed.block_number = transactions.block_number AND committed.confirmed = true\n left join aggr_exec verified on\n verified.block_number = transactions.block_number AND verified.confirmed = true\n order by transactions.block_number desc, created_at desc\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "tx_id!", - "type_info": "Text" - }, - { - "ordinal": 1, - "name": "hash?", - "type_info": "Text" - }, - { - "ordinal": 2, - "name": "eth_block?", - "type_info": "Int8" - }, - { - "ordinal": 3, - "name": "pq_id?", - "type_info": "Int8" - }, - { - "ordinal": 4, - "name": "tx!", - "type_info": "Jsonb" - }, - { - "ordinal": 5, - "name": "success?", - "type_info": "Bool" - }, - { - "ordinal": 6, - "name": "fail_reason?", - "type_info": "Text" - }, - { - "ordinal": 7, - "name": "commited!", - "type_info": "Bool" - }, - { - "ordinal": 8, - "name": "verified!", - "type_info": "Bool" - }, - { - "ordinal": 9, - "name": "created_at!", - "type_info": "Timestamptz" - }, - { - "ordinal": 10, - "name": "batch_id?", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [ - "Bytea", - "Int8", - "Int8", - "Int8", - "Int4", - "Int4", - "Int8" - ] - }, - "nullable": [ - null, - null, - null, - null, - null, - null, - null, - null, - null, - null, - null - ] - } - }, "17626aba706502252ba06108c8b1563732a3e85094f8d76ce55f1d3487fc605b": { "query": "\n select \n created_at as \"created_at!\"\n from (\n select\n created_at\n from\n executed_transactions\n where\n from_account = $1\n or\n to_account = $1\n or\n primary_account_address = $1\n union all\n select\n created_at\n from \n executed_priority_operations\n where \n from_account = $1\n or\n to_account = $1\n ) t\n order by\n created_at asc\n limit \n 1\n ", "describe": { @@ -1110,6 +1007,44 @@ ] } }, + "1f40ff1c67db96001b6169ffd904da734fb146527ecdfda9d413eae8958c9bae": { + "query": "\n SELECT tx_hash, created_at, success, block_number\n FROM executed_transactions\n INNER JOIN txs_batches_hashes\n ON 
txs_batches_hashes.batch_id = COALESCE(executed_transactions.batch_id, 0)\n WHERE batch_hash = $1\n ORDER BY sequence_number ASC\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "tx_hash", + "type_info": "Bytea" + }, + { + "ordinal": 1, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 2, + "name": "success", + "type_info": "Bool" + }, + { + "ordinal": 3, + "name": "block_number", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Bytea" + ] + }, + "nullable": [ + false, + false, + false, + false + ] + } + }, "1fbfd087b4c05dc6a682c0020bfae07b3eea537e3e96f0316a7ec3ed63df9f88": { "query": "DELETE FROM account_tree_cache WHERE block < $1", "describe": { @@ -1122,6 +1057,26 @@ "nullable": [] } }, + "202a566486f481a87129d38bc4168dfc9c9511df1005e862c60722ed160be1b5": { + "query": "SELECT sequence_number FROM executed_priority_operations\n WHERE tx_hash = $1 ORDER BY sequence_number DESC", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "sequence_number", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Bytea" + ] + }, + "nullable": [ + true + ] + } + }, "21d959769e02bf5c52b68e69732363716534dbbbf0638a500ef46152136d2cab": { "query": "\n SELECT id, address, decimals, kind as \"kind: _\", symbol FROM tokens\n WHERE address = $1\n LIMIT 1\n ", "describe": { @@ -1892,26 +1847,6 @@ "nullable": [] } }, - "362fd44171d332181078c5b7d2856df3ec60b370adde3b8af2c482abca02445f": { - "query": "\n WITH tx_hashes AS (\n SELECT DISTINCT tx_hash FROM tx_filters\n WHERE address = $1\n ), transactions AS (\n SELECT executed_transactions.tx_hash, created_at, block_index\n FROM tx_hashes\n INNER JOIN executed_transactions\n ON tx_hashes.tx_hash = executed_transactions.tx_hash\n ORDER BY created_at DESC, block_index DESC\n LIMIT 1\n ), priority_ops AS (\n SELECT executed_priority_operations.tx_hash, created_at, block_index\n FROM tx_hashes\n INNER JOIN executed_priority_operations\n ON tx_hashes.tx_hash = executed_priority_operations.tx_hash\n ORDER BY created_at DESC, block_index DESC\n LIMIT 1\n ), everything AS (\n SELECT * FROM transactions\n UNION ALL\n SELECT * FROM priority_ops\n )\n SELECT\n tx_hash as \"tx_hash!\"\n FROM everything\n ORDER BY created_at DESC, block_index DESC\n LIMIT 1\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "tx_hash!", - "type_info": "Bytea" - } - ], - "parameters": { - "Left": [ - "Bytea" - ] - }, - "nullable": [ - null - ] - } - }, "38e7464ba17d495fe87cf1412ffe10af8dc4b44f99dc2df1ec580f20f609b650": { "query": "SELECT max(number) FROM incomplete_blocks", "describe": { @@ -2039,24 +1974,6 @@ "nullable": [] } }, - "4082b12156cab7fb088fc5326d29e981cccf8644f72b02e4bc1ad0742d83a038": { - "query": "SELECT MIN(block_number) FROM executed_priority_operations WHERE sequence_number IS NULL", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "min", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - null - ] - } - }, "411ae4152496dfa80c3ba50ad99c5ad72cce7d072d47a9a9a2c88587bf021952": { "query": "LOCK TABLE prover_job_queue IN EXCLUSIVE MODE", "describe": { @@ -2171,26 +2088,6 @@ "nullable": [] } }, - "444fd4dfaf4e0fdf1675c857b17feb5cf1f02ea0eb3e1e7bb60deb093fadb19f": { - "query": "\n WITH transactions AS (\n SELECT tx_hash, created_at, block_index\n FROM executed_transactions\n WHERE block_number = $1\n ), priority_ops AS (\n SELECT tx_hash, created_at, block_index\n FROM executed_priority_operations\n WHERE block_number = $1\n ), everything AS (\n 
SELECT * FROM transactions\n UNION ALL\n SELECT * FROM priority_ops\n )\n SELECT\n tx_hash as \"tx_hash!\"\n FROM everything\n ORDER BY created_at DESC, block_index DESC\n LIMIT 1\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "tx_hash!", - "type_info": "Bytea" - } - ], - "parameters": { - "Left": [ - "Int8" - ] - }, - "nullable": [ - null - ] - } - }, "4469f85caafd8e489247f5a16d567910a113975fb5911622e40440b09eac7e4f": { "query": "DELETE FROM account_pubkey_updates WHERE block_number > $1", "describe": { @@ -2262,127 +2159,45 @@ ] } }, - "46f6716e4da5b4ce70ae9dfceb0f35d18908f4d7369da95f209a979e896db269": { - "query": "\n WITH aggr_exec AS (\n SELECT\n aggregate_operations.confirmed,\n execute_aggregated_blocks_binding.block_number\n FROM aggregate_operations\n INNER JOIN execute_aggregated_blocks_binding ON aggregate_operations.id = execute_aggregated_blocks_binding.op_id\n WHERE aggregate_operations.confirmed = true\n ), tx_hashes AS (\n SELECT DISTINCT tx_hash FROM tx_filters\n WHERE address = $1\n ), transactions AS (\n SELECT\n *\n FROM (\n SELECT\n concat_ws(',', block_number, block_index) AS tx_id,\n tx,\n 'sync-tx:' || encode(executed_transactions.tx_hash, 'hex') AS hash,\n null as pq_id,\n null as eth_block,\n success,\n fail_reason,\n block_number,\n created_at,\n batch_id\n FROM tx_hashes\n INNER JOIN executed_transactions\n ON tx_hashes.tx_hash = executed_transactions.tx_hash\n union all\n select\n concat_ws(',', block_number, block_index) as tx_id,\n operation as tx,\n '0x' || encode(eth_hash, 'hex') as hash,\n priority_op_serialid as pq_id,\n eth_block,\n true as success,\n null as fail_reason,\n block_number,\n created_at,\n Null::bigint as batch_id\n from\n executed_priority_operations\n where\n from_account = $1\n or\n to_account = $1) t\n order by\n block_number desc, created_at desc\n offset\n $2\n limit\n $3\n )\n select\n tx_id as \"tx_id!\",\n hash as \"hash?\",\n eth_block as \"eth_block?\",\n pq_id as \"pq_id?\",\n tx as \"tx!\",\n success as \"success?\",\n fail_reason as \"fail_reason?\",\n true as \"commited!\",\n coalesce(verified.confirmed, false) as \"verified!\",\n created_at as \"created_at!\",\n batch_id as \"batch_id?\"\n from transactions\n LEFT JOIN aggr_exec verified ON transactions.block_number = verified.block_number\n order by transactions.block_number desc, created_at desc\n ", + "47dd80567908f3b37161e4f92a97654e7af4a5e921145bdedbc446a653926b88": { + "query": "SELECT * FROM block_metadata WHERE block_number = $1", "describe": { "columns": [ { "ordinal": 0, - "name": "tx_id!", - "type_info": "Text" + "name": "block_number", + "type_info": "Int8" }, { "ordinal": 1, - "name": "hash?", - "type_info": "Text" + "name": "fast_processing", + "type_info": "Bool" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + false, + false + ] + } + }, + "4a0bc713a57201aa894b96acdb462c03d3ad63cf4fbc8a14b9ac5e2e02121207": { + "query": "\n SELECT * FROM ticker_market_volume\n WHERE token_id = $1\n LIMIT 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "token_id", + "type_info": "Int4" }, { - "ordinal": 2, - "name": "eth_block?", - "type_info": "Int8" - }, - { - "ordinal": 3, - "name": "pq_id?", - "type_info": "Int8" - }, - { - "ordinal": 4, - "name": "tx!", - "type_info": "Jsonb" - }, - { - "ordinal": 5, - "name": "success?", - "type_info": "Bool" - }, - { - "ordinal": 6, - "name": "fail_reason?", - "type_info": "Text" - }, - { - "ordinal": 7, - "name": "commited!", - "type_info": "Bool" - }, - { - "ordinal": 8, 
- "name": "verified!", - "type_info": "Bool" - }, - { - "ordinal": 9, - "name": "created_at!", - "type_info": "Timestamptz" - }, - { - "ordinal": 10, - "name": "batch_id?", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [ - "Bytea", - "Int8", - "Int8" - ] - }, - "nullable": [ - null, - null, - null, - null, - null, - null, - null, - null, - null, - null, - null - ] - } - }, - "47dd80567908f3b37161e4f92a97654e7af4a5e921145bdedbc446a653926b88": { - "query": "SELECT * FROM block_metadata WHERE block_number = $1", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "block_number", - "type_info": "Int8" - }, - { - "ordinal": 1, - "name": "fast_processing", - "type_info": "Bool" - } - ], - "parameters": { - "Left": [ - "Int8" - ] - }, - "nullable": [ - false, - false - ] - } - }, - "4a0bc713a57201aa894b96acdb462c03d3ad63cf4fbc8a14b9ac5e2e02121207": { - "query": "\n SELECT * FROM ticker_market_volume\n WHERE token_id = $1\n LIMIT 1\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "token_id", - "type_info": "Int4" - }, - { - "ordinal": 1, - "name": "market_volume", - "type_info": "Numeric" + "ordinal": 1, + "name": "market_volume", + "type_info": "Numeric" }, { "ordinal": 2, @@ -2675,27 +2490,6 @@ ] } }, - "5176e417ccde40f2676f02889241d003d81cd7f5ce8bbb9b8668347682306676": { - "query": "SELECT sequence_number FROM executed_priority_operations\n WHERE tx_hash = $1 AND block_number = $2 ORDER BY created_at DESC", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "sequence_number", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [ - "Bytea", - "Int8" - ] - }, - "nullable": [ - true - ] - } - }, "51edc4a74becb050ee8727c6fd24e6793254386e3403f36509fffc11ceff40a1": { "query": "\n WITH tx_hashes AS (\n SELECT DISTINCT tx_hash FROM tx_filters\n WHERE address = $1 AND ($2::boolean OR token = $3)\n INTERSECT\n SELECT DISTINCT tx_hash FROM tx_filters\n WHERE address = $4 AND ($2::boolean OR token = $3)\n )\n SELECT COUNT(*) as \"count!\" FROM tx_hashes\n ", "describe": { @@ -2792,45 +2586,6 @@ ] } }, - "56b9c06acc08f1468fd9c9e9c077cc8380dd7615dd75f4412d25364785bd8fa0": { - "query": "\n WITH transactions AS (\n SELECT block_number, created_at, Null::bigint as priority_op_serialid, tx_hash, block_index\n FROM executed_transactions\n WHERE block_number BETWEEN $1 AND $2\n ), priority_ops AS (\n SELECT block_number, created_at, priority_op_serialid, tx_hash, block_index\n FROM executed_priority_operations\n WHERE block_number BETWEEN $1 AND $2\n ), everything AS (\n SELECT * FROM transactions\n UNION ALL\n SELECT * FROM priority_ops\n )\n SELECT\n block_number, priority_op_serialid, tx_hash, block_index\n FROM everything\n ORDER BY created_at, block_index\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "block_number", - "type_info": "Int8" - }, - { - "ordinal": 1, - "name": "priority_op_serialid", - "type_info": "Int8" - }, - { - "ordinal": 2, - "name": "tx_hash", - "type_info": "Bytea" - }, - { - "ordinal": 3, - "name": "block_index", - "type_info": "Int4" - } - ], - "parameters": { - "Left": [ - "Int8", - "Int8" - ] - }, - "nullable": [ - null, - null, - null, - null - ] - } - }, "58b251c3fbdf9be9b62f669f8cdc2d98940026c831e02a53337474d36a5224f0": { "query": "UPDATE aggregate_operations\n SET confirmed = $1\n WHERE from_block >= $2 AND to_block <= $3 AND action_type = $4", "describe": { @@ -3043,6 +2798,82 @@ ] } }, + "6103793af701f7f80ffbdcc4bca194732add8f6ba41049d534f8a821287734f0": { + "query": "\n WITH transactions AS (\n SELECT\n 
sequence_number,\n tx_hash,\n tx as op,\n block_number,\n created_at,\n success,\n fail_reason,\n Null::bytea as eth_hash,\n Null::bigint as priority_op_serialid,\n block_index,\n batch_id\n FROM executed_transactions\n WHERE block_number = $1 AND sequence_number >= $2\n ), priority_ops AS (\n SELECT\n sequence_number,\n tx_hash,\n operation as op,\n block_number,\n created_at,\n true as success,\n Null as fail_reason,\n eth_hash,\n priority_op_serialid,\n block_index,\n Null::bigint as batch_id\n FROM executed_priority_operations\n WHERE block_number = $1 AND sequence_number >= $2\n ), everything AS (\n SELECT * FROM transactions\n UNION ALL\n SELECT * FROM priority_ops\n )\n SELECT\n sequence_number,\n tx_hash as \"tx_hash!\",\n block_number as \"block_number!\",\n op as \"op!\",\n created_at as \"created_at!\",\n success as \"success!\",\n fail_reason as \"fail_reason?\",\n eth_hash as \"eth_hash?\",\n priority_op_serialid as \"priority_op_serialid?\",\n batch_id as \"batch_id?\"\n FROM everything\n ORDER BY sequence_number ASC\n LIMIT $3\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "sequence_number", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "tx_hash!", + "type_info": "Bytea" + }, + { + "ordinal": 2, + "name": "block_number!", + "type_info": "Int8" + }, + { + "ordinal": 3, + "name": "op!", + "type_info": "Jsonb" + }, + { + "ordinal": 4, + "name": "created_at!", + "type_info": "Timestamptz" + }, + { + "ordinal": 5, + "name": "success!", + "type_info": "Bool" + }, + { + "ordinal": 6, + "name": "fail_reason?", + "type_info": "Text" + }, + { + "ordinal": 7, + "name": "eth_hash?", + "type_info": "Bytea" + }, + { + "ordinal": 8, + "name": "priority_op_serialid?", + "type_info": "Int8" + }, + { + "ordinal": 9, + "name": "batch_id?", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int8", + "Int8", + "Int8" + ] + }, + "nullable": [ + null, + null, + null, + null, + null, + null, + null, + null, + null, + null + ] + } + }, "62304acbc93efab5117766689c6413d152dc0104c49c6f305e26b245b6ff7cde": { "query": "SELECT * FROM executed_priority_operations WHERE eth_hash = $1", "describe": { @@ -3163,78 +2994,22 @@ ] } }, - "67836bc6188d6d95431142b1d5e6953ecef86456798a69addfff47b8835f3a91": { - "query": "\n WITH transactions AS (\n SELECT\n sequence_number,\n tx_hash,\n tx as op,\n block_number,\n created_at,\n success,\n fail_reason,\n Null::bytea as eth_hash,\n Null::bigint as priority_op_serialid,\n block_index,\n batch_id\n FROM executed_transactions\n WHERE block_number = $1 AND sequence_number <= $2\n ), priority_ops AS (\n SELECT\n sequence_number,\n tx_hash,\n operation as op,\n block_number,\n created_at,\n true as success,\n Null as fail_reason,\n eth_hash,\n priority_op_serialid,\n block_index,\n Null::bigint as batch_id\n FROM executed_priority_operations\n WHERE block_number = $1 AND sequence_number <= $2\n ), everything AS (\n SELECT * FROM transactions\n UNION ALL\n SELECT * FROM priority_ops\n )\n SELECT\n sequence_number,\n tx_hash as \"tx_hash!\",\n block_number as \"block_number!\",\n op as \"op!\",\n created_at as \"created_at!\",\n success as \"success!\",\n fail_reason as \"fail_reason?\",\n eth_hash as \"eth_hash?\",\n priority_op_serialid as \"priority_op_serialid?\",\n batch_id as \"batch_id?\"\n FROM everything\n ORDER BY created_at DESC, block_index DESC\n LIMIT $3\n ", + "67cad4f873f4a8b41b5574aae3c64a88795fe69b7ef353a785f9b12357f12913": { + "query": "\n WITH tx_hashes AS (\n SELECT DISTINCT tx_hash FROM tx_filters\n WHERE 
address = $1\n ), transactions AS (\n SELECT executed_transactions.tx_hash, sequence_number\n FROM tx_hashes\n INNER JOIN executed_transactions\n ON tx_hashes.tx_hash = executed_transactions.tx_hash\n ORDER BY sequence_number DESC\n LIMIT 1\n ), priority_ops AS (\n SELECT executed_priority_operations.tx_hash, executed_priority_operations.sequence_number\n FROM tx_hashes\n INNER JOIN executed_priority_operations\n ON tx_hashes.tx_hash = executed_priority_operations.tx_hash\n ORDER BY sequence_number DESC\n LIMIT 1\n ), everything AS (\n SELECT * FROM transactions\n UNION ALL\n SELECT * FROM priority_ops\n )\n SELECT\n tx_hash as \"tx_hash!\"\n FROM everything\n ORDER BY sequence_number DESC\n LIMIT 1\n ", "describe": { "columns": [ { "ordinal": 0, - "name": "sequence_number", - "type_info": "Int8" - }, - { - "ordinal": 1, "name": "tx_hash!", "type_info": "Bytea" - }, - { - "ordinal": 2, - "name": "block_number!", - "type_info": "Int8" - }, - { - "ordinal": 3, - "name": "op!", - "type_info": "Jsonb" - }, - { - "ordinal": 4, - "name": "created_at!", - "type_info": "Timestamptz" - }, - { - "ordinal": 5, - "name": "success!", - "type_info": "Bool" - }, - { - "ordinal": 6, - "name": "fail_reason?", - "type_info": "Text" - }, - { - "ordinal": 7, - "name": "eth_hash?", - "type_info": "Bytea" - }, - { - "ordinal": 8, - "name": "priority_op_serialid?", - "type_info": "Int8" - }, - { - "ordinal": 9, - "name": "batch_id?", - "type_info": "Int8" } ], "parameters": { "Left": [ - "Int8", - "Int8", - "Int8" + "Bytea" ] }, "nullable": [ - null, - null, - null, - null, - null, - null, - null, - null, - null, null ] } @@ -3278,26 +3053,6 @@ ] } }, - "6b247b0317557d281b886dd3bbf32414d67582d350780ba940d110ac9964b9a1": { - "query": "SELECT COUNT(*) as \"count!\" FROM executed_transactions WHERE block_number < $1", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "count!", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [ - "Int8" - ] - }, - "nullable": [ - null - ] - } - }, "6d676581f14d0935983aca496bc37b58206b90320058290809020a2604b11df3": { "query": "SELECT max(number) FROM blocks", "describe": { @@ -3431,82 +3186,6 @@ "nullable": [] } }, - "743e9af6a86e03205d3fa8889655ea3b3c84cde730f30be7bd9ab680e5aae3b3": { - "query": "\n WITH transactions AS (\n SELECT\n sequence_number,\n tx_hash,\n tx as op,\n block_number,\n created_at,\n success,\n fail_reason,\n Null::bytea as eth_hash,\n Null::bigint as priority_op_serialid,\n block_index,\n batch_id\n FROM executed_transactions\n WHERE block_number = $1 AND sequence_number >= $2\n ), priority_ops AS (\n SELECT\n sequence_number,\n tx_hash,\n operation as op,\n block_number,\n created_at,\n true as success,\n Null as fail_reason,\n eth_hash,\n priority_op_serialid,\n block_index,\n Null::bigint as batch_id\n FROM executed_priority_operations\n WHERE block_number = $1 AND sequence_number >= $2\n ), everything AS (\n SELECT * FROM transactions\n UNION ALL\n SELECT * FROM priority_ops\n )\n SELECT\n sequence_number,\n tx_hash as \"tx_hash!\",\n block_number as \"block_number!\",\n op as \"op!\",\n created_at as \"created_at!\",\n success as \"success!\",\n fail_reason as \"fail_reason?\",\n eth_hash as \"eth_hash?\",\n priority_op_serialid as \"priority_op_serialid?\",\n batch_id as \"batch_id?\"\n FROM everything\n ORDER BY created_at ASC, block_index ASC\n LIMIT $3\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "sequence_number", - "type_info": "Int8" - }, - { - "ordinal": 1, - "name": "tx_hash!", - "type_info": "Bytea" - }, - 
{ - "ordinal": 2, - "name": "block_number!", - "type_info": "Int8" - }, - { - "ordinal": 3, - "name": "op!", - "type_info": "Jsonb" - }, - { - "ordinal": 4, - "name": "created_at!", - "type_info": "Timestamptz" - }, - { - "ordinal": 5, - "name": "success!", - "type_info": "Bool" - }, - { - "ordinal": 6, - "name": "fail_reason?", - "type_info": "Text" - }, - { - "ordinal": 7, - "name": "eth_hash?", - "type_info": "Bytea" - }, - { - "ordinal": 8, - "name": "priority_op_serialid?", - "type_info": "Int8" - }, - { - "ordinal": 9, - "name": "batch_id?", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [ - "Int8", - "Int8", - "Int8" - ] - }, - "nullable": [ - null, - null, - null, - null, - null, - null, - null, - null, - null, - null - ] - } - }, "74a5cc4affa23433b5b7834df6dfa1a7a2c5a65f23289de3de5a4f1b93f89c06": { "query": "SELECT address FROM account_creates WHERE account_id = $1", "describe": { @@ -3844,44 +3523,6 @@ "nullable": [] } }, - "7d22d9facba43b954a6ffbccffaee54feab17317910247d7752e0d59dcf3af9a": { - "query": "\n SELECT tx_hash, created_at, success, block_number\n FROM executed_transactions\n INNER JOIN txs_batches_hashes\n ON txs_batches_hashes.batch_id = COALESCE(executed_transactions.batch_id, 0)\n WHERE batch_hash = $1\n ORDER BY created_at ASC, block_index ASC\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "tx_hash", - "type_info": "Bytea" - }, - { - "ordinal": 1, - "name": "created_at", - "type_info": "Timestamptz" - }, - { - "ordinal": 2, - "name": "success", - "type_info": "Bool" - }, - { - "ordinal": 3, - "name": "block_number", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [ - "Bytea" - ] - }, - "nullable": [ - false, - false, - false, - false - ] - } - }, "7dfa76c3e12c301dc3d7fbf820ecf0be45e0b1c5f01ce13f7cdc1a82880804c1": { "query": "\n SELECT * FROM forced_exit_requests\n WHERE id = $1\n LIMIT 1\n ", "describe": { @@ -4019,24 +3660,6 @@ ] } }, - "7fe39c62da8c820d95f98238a0f206970328e97a9eed6f532e97d2346074941e": { - "query": "SELECT DISTINCT tx_hash FROM tx_filters", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "tx_hash", - "type_info": "Bytea" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - false - ] - } - }, "7ff98a4fddc441ea83f72a4a75a7caf53b9661c37f26a90984a349bfa5aeab70": { "query": "INSERT INTO eth_aggregated_ops_binding (op_id, eth_op_id) VALUES ($1, $2)", "describe": { @@ -4072,32 +3695,118 @@ "type_info": "Int8" }, { - "ordinal": 1, - "name": "withdrawal_hash", - "type_info": "Bytea" + "ordinal": 1, + "name": "withdrawal_hash", + "type_info": "Bytea" + } + ], + "parameters": { + "Left": [ + "Bytea" + ] + }, + "nullable": [ + false, + false + ] + } + }, + "82486779f7f76a4a50c2a3d5cbc460dae08a2296ffcb9744dfde5c44e70d2a5d": { + "query": "TRUNCATE eth_unprocessed_aggregated_ops", + "describe": { + "columns": [], + "parameters": { + "Left": [] + }, + "nullable": [] + } + }, + "8271f6c777f2b4ebdbe313af873ff6e223fce9ea47cf7f5df076fe5da7633902": { + "query": "\n WITH aggr_comm AS (\n SELECT \n aggregate_operations.confirmed, \n commit_aggregated_blocks_binding.block_number \n FROM aggregate_operations\n INNER JOIN commit_aggregated_blocks_binding ON aggregate_operations.id = commit_aggregated_blocks_binding.op_id\n WHERE aggregate_operations.confirmed = true \n ), aggr_exec AS (\n SELECT \n aggregate_operations.confirmed, \n execute_aggregated_blocks_binding.block_number \n FROM aggregate_operations\n INNER JOIN execute_aggregated_blocks_binding ON aggregate_operations.id = 
execute_aggregated_blocks_binding.op_id\n WHERE aggregate_operations.confirmed = true \n ), tx_hashes AS (\n SELECT DISTINCT tx_hash FROM tx_filters\n WHERE address = $1\n ), transactions as (\n select\n *\n from (\n select\n concat_ws(',', block_number, block_index) as tx_id,\n tx,\n 'sync-tx:' || encode(executed_transactions.tx_hash, 'hex') as hash,\n null as pq_id,\n null as eth_block,\n success,\n fail_reason,\n block_number,\n created_at,\n sequence_number,\n batch_id\n from tx_hashes\n inner join executed_transactions\n on tx_hashes.tx_hash = executed_transactions.tx_hash\n where\n block_number BETWEEN $3 AND $4 or (block_number = $2 and block_index BETWEEN $5 AND $6)\n union all\n select\n concat_ws(',', block_number, block_index) as tx_id,\n operation as tx,\n '0x' || encode(eth_hash, 'hex') as hash,\n priority_op_serialid as pq_id,\n eth_block,\n true as success,\n null as fail_reason,\n block_number,\n created_at,\n sequence_number,\n Null::bigint as batch_id\n from \n executed_priority_operations\n where \n (\n from_account = $1\n or\n to_account = $1\n )\n and\n (block_number BETWEEN $3 AND $4 or (block_number = $2 and block_index BETWEEN $5 AND $6))\n ) t\n order by\n sequence_number desc\n limit \n $7\n )\n select\n tx_id as \"tx_id!\",\n hash as \"hash?\",\n eth_block as \"eth_block?\",\n pq_id as \"pq_id?\",\n tx as \"tx!\",\n success as \"success?\",\n fail_reason as \"fail_reason?\",\n true as \"commited!\",\n coalesce(verified.confirmed, false) as \"verified!\",\n created_at as \"created_at!\",\n batch_id as \"batch_id?\"\n from transactions\n left join aggr_comm committed on\n committed.block_number = transactions.block_number AND committed.confirmed = true\n left join aggr_exec verified on\n verified.block_number = transactions.block_number AND verified.confirmed = true\n order by transactions.sequence_number desc\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "tx_id!", + "type_info": "Text" + }, + { + "ordinal": 1, + "name": "hash?", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "eth_block?", + "type_info": "Int8" + }, + { + "ordinal": 3, + "name": "pq_id?", + "type_info": "Int8" + }, + { + "ordinal": 4, + "name": "tx!", + "type_info": "Jsonb" + }, + { + "ordinal": 5, + "name": "success?", + "type_info": "Bool" + }, + { + "ordinal": 6, + "name": "fail_reason?", + "type_info": "Text" + }, + { + "ordinal": 7, + "name": "commited!", + "type_info": "Bool" + }, + { + "ordinal": 8, + "name": "verified!", + "type_info": "Bool" + }, + { + "ordinal": 9, + "name": "created_at!", + "type_info": "Timestamptz" + }, + { + "ordinal": 10, + "name": "batch_id?", + "type_info": "Int8" } ], "parameters": { "Left": [ - "Bytea" + "Bytea", + "Int8", + "Int8", + "Int8", + "Int4", + "Int4", + "Int8" ] }, "nullable": [ - false, - false + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null ] } }, - "82486779f7f76a4a50c2a3d5cbc460dae08a2296ffcb9744dfde5c44e70d2a5d": { - "query": "TRUNCATE eth_unprocessed_aggregated_ops", - "describe": { - "columns": [], - "parameters": { - "Left": [] - }, - "nullable": [] - } - }, "839caf265f3e87a43a788d8fc321ec8d3ada6987d46ce1179683aefb0bb1e789": { "query": "SELECT COUNT(*) from mempool_txs\n WHERE tx_hash = $1", "describe": { @@ -4461,26 +4170,6 @@ "nullable": [] } }, - "8d1a348f249844a09f9be886074184f4fef1f0140230b8fd51d1d000681128f5": { - "query": "SELECT COUNT(*) as \"count!\" FROM executed_priority_operations WHERE block_number < $1", - "describe": { - "columns": [ - { - "ordinal": 0, - 
"name": "count!", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [ - "Int8" - ] - }, - "nullable": [ - null - ] - } - }, "8ead89cb48612f9415b7904aa1579be0eed225f14ee2628d55f56602cf3e4acc": { "query": "\n INSERT INTO tokens ( id, address, symbol, decimals, kind )\n VALUES ( $1, $2, $3, $4, $5 )\n ", "describe": { @@ -4675,26 +4364,6 @@ "nullable": [] } }, - "95a9be8b9c48e2a18c1bdb63ad4afa9d73fdaa0758f2cced759c32c069eb2bd3": { - "query": "\n WITH transactions AS (\n SELECT tx_hash, created_at, block_index\n FROM executed_transactions\n WHERE block_number = $1\n ), priority_ops AS (\n SELECT tx_hash, created_at, block_index\n FROM executed_priority_operations\n WHERE block_number = $1\n ), everything AS (\n SELECT * FROM transactions\n UNION ALL\n SELECT * FROM priority_ops\n )\n SELECT tx_hash as \"tx_hash!\"\n FROM everything\n ORDER BY created_at, block_index\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "tx_hash!", - "type_info": "Bytea" - } - ], - "parameters": { - "Left": [ - "Int8" - ] - }, - "nullable": [ - null - ] - } - }, "963cad1979935b50bc5c2bbe174f5d94fbd5c38ea752d304f987229c89e6070a": { "query": "\n DELETE FROM forced_exit_requests\n WHERE fulfilled_by IS NULL AND valid_until < $1\n ", "describe": { @@ -4822,19 +4491,6 @@ "nullable": [] } }, - "9f7d6ba7d22b619ca57063ce01af6ec4b96cc72f6ef8185402085392ee52a16f": { - "query": "UPDATE executed_priority_operations SET sequence_number = u.sequence_number FROM UNNEST ($1::bigint[], $2::bigint[])\n AS u(serial_id, sequence_number)\n WHERE executed_priority_operations.priority_op_serialid= u.serial_id", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8Array", - "Int8Array" - ] - }, - "nullable": [] - } - }, "9fbf3d0ae8610fb464ac74ff989860eb913f4bfb14790373021ef456b671ed96": { "query": "SELECT * FROM eth_tx_hashes\n WHERE eth_op_id = $1\n ORDER BY id ASC", "describe": { @@ -5291,6 +4947,62 @@ "nullable": [] } }, + "b4a6d5568ac2e4d27689277c120f1ca960cb210682bdb3f050cba82bc0168f53": { + "query": "\n WITH transactions AS (\n SELECT\n '0x' || encode(tx_hash, 'hex') as tx_hash,\n tx as op,\n block_number,\n success,\n fail_reason,\n created_at,\n batch_id,\n sequence_number\n FROM executed_transactions\n WHERE block_number = $1\n ), priority_ops AS (\n SELECT\n '0x' || encode(eth_hash, 'hex') as tx_hash,\n operation as op,\n block_number,\n true as success,\n Null as fail_reason,\n created_at,\n Null::bigint as batch_id,\n sequence_number\n FROM executed_priority_operations\n WHERE block_number = $1\n ), everything AS (\n SELECT * FROM transactions\n UNION ALL\n SELECT * FROM priority_ops\n )\n SELECT\n tx_hash as \"tx_hash!\",\n block_number as \"block_number!\",\n op as \"op!\",\n success as \"success!\",\n fail_reason as \"fail_reason?\",\n created_at as \"created_at!\",\n batch_id as \"batch_id?\"\n FROM everything\n ORDER BY sequence_number DESC\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "tx_hash!", + "type_info": "Text" + }, + { + "ordinal": 1, + "name": "block_number!", + "type_info": "Int8" + }, + { + "ordinal": 2, + "name": "op!", + "type_info": "Jsonb" + }, + { + "ordinal": 3, + "name": "success!", + "type_info": "Bool" + }, + { + "ordinal": 4, + "name": "fail_reason?", + "type_info": "Text" + }, + { + "ordinal": 5, + "name": "created_at!", + "type_info": "Timestamptz" + }, + { + "ordinal": 6, + "name": "batch_id?", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + null, + null, + null, + null, + null, + null, + 
null + ] + } + }, "b63daeea7fab180b5eba3721d26ad0a8f89193b9e459339e76e1a1bd87d9f37b": { "query": "SELECT * FROM mempool_txs\n ORDER BY batch_id DESC\n LIMIT 1", "describe": { @@ -5345,19 +5057,6 @@ ] } }, - "b73068eda52bc4b119edd3c2479106d64522e7f72915ed8ff9e89bade70491f5": { - "query": "UPDATE executed_transactions SET sequence_number = u.sequence_number FROM UNNEST ($1::bytea[], $2::bigint[])\n AS u(tx_hash, sequence_number)\n WHERE executed_transactions.tx_hash = u.tx_hash", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "ByteaArray", - "Int8Array" - ] - }, - "nullable": [] - } - }, "b89088c6516e2db2e01bfdf0afa5a8fdd7e20fde80183884a9769eae9b635010": { "query": "DELETE FROM executed_priority_operations WHERE block_number > $1", "describe": { @@ -5779,119 +5478,63 @@ }, { "ordinal": 3, - "name": "block_number", - "type_info": "Int8" - }, - { - "ordinal": 4, - "name": "old_pubkey_hash", - "type_info": "Bytea" - }, - { - "ordinal": 5, - "name": "new_pubkey_hash", - "type_info": "Bytea" - }, - { - "ordinal": 6, - "name": "old_nonce", - "type_info": "Int8" - }, - { - "ordinal": 7, - "name": "new_nonce", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [ - "Int8", - "Int8" - ] - }, - "nullable": [ - false, - false, - false, - false, - false, - false, - false, - false - ] - } - }, - "c23bc5ab7c6f6148d1e12d408d4c8842d80cca11e3eb539fd9153ae808a11f28": { - "query": "\n UPDATE accounts \n SET last_block = $1, nonce = $2, pubkey_hash = $3\n WHERE id = $4\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8", - "Int8", - "Bytea", - "Int8" - ] - }, - "nullable": [] - } - }, - "c28c3bba6a78ed3bbf24289d7e0f7ccf53c60bc8dc5ed38e85d02f016ef03b43": { - "query": "\n WITH transactions AS (\n SELECT\n '0x' || encode(tx_hash, 'hex') as tx_hash,\n tx as op,\n block_number,\n success,\n fail_reason,\n created_at,\n batch_id\n FROM executed_transactions\n WHERE block_number = $1\n ), priority_ops AS (\n SELECT\n '0x' || encode(eth_hash, 'hex') as tx_hash,\n operation as op,\n block_number,\n true as success,\n Null as fail_reason,\n created_at,\n Null::bigint as batch_id\n FROM executed_priority_operations\n WHERE block_number = $1\n ), everything AS (\n SELECT * FROM transactions\n UNION ALL\n SELECT * FROM priority_ops\n )\n SELECT\n tx_hash as \"tx_hash!\",\n block_number as \"block_number!\",\n op as \"op!\",\n success as \"success!\",\n fail_reason as \"fail_reason?\",\n created_at as \"created_at!\",\n batch_id as \"batch_id?\"\n FROM everything\n ORDER BY created_at DESC\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "tx_hash!", - "type_info": "Text" - }, - { - "ordinal": 1, - "name": "block_number!", - "type_info": "Int8" - }, - { - "ordinal": 2, - "name": "op!", - "type_info": "Jsonb" - }, - { - "ordinal": 3, - "name": "success!", - "type_info": "Bool" + "name": "block_number", + "type_info": "Int8" }, { "ordinal": 4, - "name": "fail_reason?", - "type_info": "Text" + "name": "old_pubkey_hash", + "type_info": "Bytea" }, { "ordinal": 5, - "name": "created_at!", - "type_info": "Timestamptz" + "name": "new_pubkey_hash", + "type_info": "Bytea" }, { "ordinal": 6, - "name": "batch_id?", + "name": "old_nonce", + "type_info": "Int8" + }, + { + "ordinal": 7, + "name": "new_nonce", "type_info": "Int8" } ], "parameters": { "Left": [ + "Int8", "Int8" ] }, "nullable": [ - null, - null, - null, - null, - null, - null, - null + false, + false, + false, + false, + false, + false, + false, + false ] } }, + 
"c23bc5ab7c6f6148d1e12d408d4c8842d80cca11e3eb539fd9153ae808a11f28": { + "query": "\n UPDATE accounts \n SET last_block = $1, nonce = $2, pubkey_hash = $3\n WHERE id = $4\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Int8", + "Bytea", + "Int8" + ] + }, + "nullable": [] + } + }, "c2b72cb3aeb4b448b240edef3988a1026577a82fb4ae1c416fcaf4622afa4ac0": { "query": "INSERT INTO aggregate_operations (action_type, arguments, from_block, to_block)\n VALUES ($1, $2, $3, $4)\n ON CONFLICT (id)\n DO NOTHING\n RETURNING id", "describe": { @@ -6206,24 +5849,6 @@ ] } }, - "c84ac9526b77dc31e60793216c122b48bc686661ef659b34d536bad828406d65": { - "query": "SELECT MIN(block_number) FROM executed_transactions WHERE sequence_number IS NULL", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "min", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - null - ] - } - }, "cb492484bab6e66f89a4d80649d3559566a681db153152a52449acf931a1d039": { "query": "SELECT * FROM block_witness WHERE block = $1", "describe": { @@ -6753,6 +6378,88 @@ ] } }, + "dc0b69a1138a4ec747b30ec443e3d1a434a68f464ab70c85589daca32d29a77a": { + "query": "\n WITH aggr_exec AS (\n SELECT\n aggregate_operations.confirmed,\n execute_aggregated_blocks_binding.block_number\n FROM aggregate_operations\n INNER JOIN execute_aggregated_blocks_binding ON aggregate_operations.id = execute_aggregated_blocks_binding.op_id\n WHERE aggregate_operations.confirmed = true\n ), tx_hashes AS (\n SELECT DISTINCT tx_hash FROM tx_filters\n WHERE address = $1\n ), transactions AS (\n SELECT\n *\n FROM (\n SELECT\n concat_ws(',', block_number, block_index) AS tx_id,\n tx,\n 'sync-tx:' || encode(executed_transactions.tx_hash, 'hex') AS hash,\n null as pq_id,\n null as eth_block,\n success,\n fail_reason,\n block_number,\n created_at,\n sequence_number,\n batch_id\n FROM tx_hashes\n INNER JOIN executed_transactions\n ON tx_hashes.tx_hash = executed_transactions.tx_hash\n union all\n select\n concat_ws(',', block_number, block_index) as tx_id,\n operation as tx,\n '0x' || encode(eth_hash, 'hex') as hash,\n priority_op_serialid as pq_id,\n eth_block,\n true as success,\n null as fail_reason,\n block_number,\n created_at,\n sequence_number,\n Null::bigint as batch_id\n from\n executed_priority_operations\n where\n from_account = $1\n or\n to_account = $1) t\n order by\n block_number desc, created_at desc\n offset\n $2\n limit\n $3\n )\n select\n tx_id as \"tx_id!\",\n hash as \"hash?\",\n eth_block as \"eth_block?\",\n pq_id as \"pq_id?\",\n tx as \"tx!\",\n success as \"success?\",\n fail_reason as \"fail_reason?\",\n true as \"commited!\",\n coalesce(verified.confirmed, false) as \"verified!\",\n created_at as \"created_at!\",\n batch_id as \"batch_id?\"\n from transactions\n LEFT JOIN aggr_exec verified ON transactions.block_number = verified.block_number\n order by transactions.block_number desc, sequence_number desc\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "tx_id!", + "type_info": "Text" + }, + { + "ordinal": 1, + "name": "hash?", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "eth_block?", + "type_info": "Int8" + }, + { + "ordinal": 3, + "name": "pq_id?", + "type_info": "Int8" + }, + { + "ordinal": 4, + "name": "tx!", + "type_info": "Jsonb" + }, + { + "ordinal": 5, + "name": "success?", + "type_info": "Bool" + }, + { + "ordinal": 6, + "name": "fail_reason?", + "type_info": "Text" + }, + { + "ordinal": 7, + "name": "commited!", + "type_info": "Bool" + }, + { + 
"ordinal": 8, + "name": "verified!", + "type_info": "Bool" + }, + { + "ordinal": 9, + "name": "created_at!", + "type_info": "Timestamptz" + }, + { + "ordinal": 10, + "name": "batch_id?", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Bytea", + "Int8", + "Int8" + ] + }, + "nullable": [ + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null + ] + } + }, "dcef2a0727cc074e66d5d5ac5c0d65e7581d0c4d635452950f1704859b06a94b": { "query": "DELETE FROM prover_job_queue WHERE first_block > $1", "describe": { @@ -6821,6 +6528,82 @@ ] } }, + "e0248634665596e30a0e27eaa4e55327e4141b28f809392adcc6b56903c5575f": { + "query": "\n WITH transactions AS (\n SELECT\n sequence_number,\n tx_hash,\n tx as op,\n block_number,\n created_at,\n success,\n fail_reason,\n Null::bytea as eth_hash,\n Null::bigint as priority_op_serialid,\n block_index,\n batch_id\n FROM executed_transactions\n WHERE block_number = $1 AND sequence_number <= $2\n ), priority_ops AS (\n SELECT\n sequence_number,\n tx_hash,\n operation as op,\n block_number,\n created_at,\n true as success,\n Null as fail_reason,\n eth_hash,\n priority_op_serialid,\n block_index,\n Null::bigint as batch_id\n FROM executed_priority_operations\n WHERE block_number = $1 AND sequence_number <= $2\n ), everything AS (\n SELECT * FROM transactions\n UNION ALL\n SELECT * FROM priority_ops\n )\n SELECT\n sequence_number,\n tx_hash as \"tx_hash!\",\n block_number as \"block_number!\",\n op as \"op!\",\n created_at as \"created_at!\",\n success as \"success!\",\n fail_reason as \"fail_reason?\",\n eth_hash as \"eth_hash?\",\n priority_op_serialid as \"priority_op_serialid?\",\n batch_id as \"batch_id?\"\n FROM everything\n ORDER BY sequence_number DESC \n LIMIT $3\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "sequence_number", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "tx_hash!", + "type_info": "Bytea" + }, + { + "ordinal": 2, + "name": "block_number!", + "type_info": "Int8" + }, + { + "ordinal": 3, + "name": "op!", + "type_info": "Jsonb" + }, + { + "ordinal": 4, + "name": "created_at!", + "type_info": "Timestamptz" + }, + { + "ordinal": 5, + "name": "success!", + "type_info": "Bool" + }, + { + "ordinal": 6, + "name": "fail_reason?", + "type_info": "Text" + }, + { + "ordinal": 7, + "name": "eth_hash?", + "type_info": "Bytea" + }, + { + "ordinal": 8, + "name": "priority_op_serialid?", + "type_info": "Int8" + }, + { + "ordinal": 9, + "name": "batch_id?", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int8", + "Int8", + "Int8" + ] + }, + "nullable": [ + null, + null, + null, + null, + null, + null, + null, + null, + null, + null + ] + } + }, "e10f37a3c41cf1446b91605ffdeef37da79d7d3a77d47fb3dfab764831509536": { "query": "\n DELETE FROM accounts\n WHERE id = $1\n ", "describe": { @@ -7008,6 +6791,26 @@ "nullable": [] } }, + "e5651ad3ff357ee85bc4411bc007db9bb9f61a54a6df0bed294be19e431257b9": { + "query": "\n WITH transactions AS (\n SELECT tx_hash, sequence_number\n FROM executed_transactions\n WHERE block_number = $1\n ), priority_ops AS (\n SELECT tx_hash, sequence_number\n FROM executed_priority_operations\n WHERE block_number = $1\n ), everything AS (\n SELECT * FROM transactions\n UNION ALL\n SELECT * FROM priority_ops\n )\n SELECT\n tx_hash as \"tx_hash!\"\n FROM everything\n ORDER BY sequence_number DESC\n LIMIT 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "tx_hash!", + "type_info": "Bytea" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, 
+ "nullable": [ + null + ] + } + }, "e56b7f4f240fe2ad368efb9cd845b0e7bca4a3c8f2f91b00a120a3e6dfc91a6e": { "query": "SELECT * FROM executed_transactions WHERE block_number BETWEEN $1 AND $2 AND success = true", "describe": { @@ -7906,6 +7709,26 @@ ] } }, + "f50d90aa1f82e4db1de9c84768d7fce4f20f7abbd8b817b6949730f444efb7a6": { + "query": "\n WITH transactions AS (\n SELECT tx_hash, sequence_number\n FROM executed_transactions\n WHERE block_number = $1\n ), priority_ops AS (\n SELECT tx_hash, sequence_number\n FROM executed_priority_operations\n WHERE block_number = $1\n ), everything AS (\n SELECT * FROM transactions\n UNION ALL\n SELECT * FROM priority_ops\n )\n SELECT tx_hash as \"tx_hash!\"\n FROM everything\n ORDER BY sequence_number\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "tx_hash!", + "type_info": "Bytea" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + null + ] + } + }, "f57d1b54785c52a96afa9a95c4f1a08808732c9aaa850c32ed7d1158fdf541c4": { "query": "\n SELECT tx_hash FROM executed_transactions \n WHERE success = false AND created_at < $1 LIMIT 1000", "describe": { diff --git a/core/lib/storage/src/chain/block/mod.rs b/core/lib/storage/src/chain/block/mod.rs index b7b9d1b6db..6ea15b058a 100644 --- a/core/lib/storage/src/chain/block/mod.rs +++ b/core/lib/storage/src/chain/block/mod.rs @@ -239,7 +239,8 @@ impl<'a, 'c> BlockSchema<'a, 'c> { success, fail_reason, created_at, - batch_id + batch_id, + sequence_number FROM executed_transactions WHERE block_number = $1 ), priority_ops AS ( @@ -250,7 +251,8 @@ impl<'a, 'c> BlockSchema<'a, 'c> { true as success, Null as fail_reason, created_at, - Null::bigint as batch_id + Null::bigint as batch_id, + sequence_number FROM executed_priority_operations WHERE block_number = $1 ), everything AS ( @@ -267,7 +269,7 @@ impl<'a, 'c> BlockSchema<'a, 'c> { created_at as "created_at!", batch_id as "batch_id?" FROM everything - ORDER BY created_at DESC + ORDER BY sequence_number DESC "#, i64::from(*block) ) @@ -1362,7 +1364,7 @@ impl<'a, 'c> BlockSchema<'a, 'c> { priority_op_serialid as "priority_op_serialid?", batch_id as "batch_id?" FROM everything - ORDER BY created_at ASC, block_index ASC + ORDER BY sequence_number ASC LIMIT $3 "#, i64::from(*query.from.block_number), @@ -1423,7 +1425,7 @@ impl<'a, 'c> BlockSchema<'a, 'c> { priority_op_serialid as "priority_op_serialid?", batch_id as "batch_id?" FROM everything - ORDER BY created_at DESC, block_index DESC + ORDER BY sequence_number DESC LIMIT $3 "#, i64::from(*query.from.block_number), @@ -1654,11 +1656,11 @@ impl<'a, 'c> BlockSchema<'a, 'c> { let records = sqlx::query!( r#" WITH transactions AS ( - SELECT tx_hash, created_at, block_index + SELECT tx_hash, sequence_number FROM executed_transactions WHERE block_number = $1 ), priority_ops AS ( - SELECT tx_hash, created_at, block_index + SELECT tx_hash, sequence_number FROM executed_priority_operations WHERE block_number = $1 ), everything AS ( @@ -1668,7 +1670,7 @@ impl<'a, 'c> BlockSchema<'a, 'c> { ) SELECT tx_hash as "tx_hash!" 
FROM everything - ORDER BY created_at, block_index + ORDER BY sequence_number "#, i64::from(*block_number) ) diff --git a/core/lib/storage/src/chain/operations/mod.rs b/core/lib/storage/src/chain/operations/mod.rs index 367cbea677..3548c4fc5d 100644 --- a/core/lib/storage/src/chain/operations/mod.rs +++ b/core/lib/storage/src/chain/operations/mod.rs @@ -1,4 +1,3 @@ -use std::collections::HashSet; // Built-in deps use std::time::Instant; // External imports @@ -306,82 +305,6 @@ impl<'a, 'c> OperationsSchema<'a, 'c> { Ok(()) } - // TODO remove it ZKS-931 - pub async fn remove_outstanding_tx_filters(&mut self) -> QueryResult<()> { - // We can do something like this, but this query will block tx_filter table for a long long time. - // So I have to rewrite this logic to rust - // sqlx::query!( - // r#"DELETE FROM tx_filters WHERE tx_hash NOT IN ( - // WITH transactions AS ( - // SELECT tx_hash - // FROM executed_transactions - // ), priority_ops AS ( - // SELECT tx_hash - // FROM executed_priority_operations - // ), everything AS ( - // SELECT * FROM transactions - // UNION ALL - // SELECT * FROM priority_ops - // ) - // SELECT - // tx_hash as "tx_hash!" - // FROM everything - // )"# - // ) - // .execute(self.0.conn()) - // .await?; - - let mut transaction = self.0.start_transaction().await?; - let tx_hashes: HashSet> = sqlx::query!( - r#" - WITH transactions AS ( - SELECT tx_hash - FROM executed_transactions - ), priority_ops AS ( - SELECT tx_hash - FROM executed_priority_operations - ), everything AS ( - SELECT * FROM transactions - UNION ALL - SELECT * FROM priority_ops - ) - SELECT - tx_hash as "tx_hash!" - FROM everything"# - ) - .fetch_all(transaction.conn()) - .await? - .into_iter() - .map(|value| value.tx_hash) - .collect(); - - println!("Txs len {:?}", tx_hashes.len()); - let tx_filter_hashes: HashSet> = - sqlx::query!("SELECT DISTINCT tx_hash FROM tx_filters") - .fetch_all(transaction.conn()) - .await? - .into_iter() - .map(|value| value.tx_hash) - .collect(); - println!("Filters len {:?}", tx_filter_hashes.len()); - - let difference: Vec> = tx_filter_hashes - .difference(&tx_hashes) - .into_iter() - .cloned() - .collect(); - - println!("Difference len {:?}", difference.len()); - for chunk in difference.chunks(100) { - sqlx::query!("DELETE FROM tx_filters WHERE tx_hash = ANY ($1)", chunk) - .execute(transaction.conn()) - .await?; - } - - transaction.commit().await?; - - Ok(()) - } /// Stores executed priority operation in database. /// /// This method is made public to fill the database for tests, do not use it for diff --git a/core/lib/storage/src/chain/operations_ext/mod.rs b/core/lib/storage/src/chain/operations_ext/mod.rs index cd556f3236..fcfc221e0d 100644 --- a/core/lib/storage/src/chain/operations_ext/mod.rs +++ b/core/lib/storage/src/chain/operations_ext/mod.rs @@ -1,4 +1,3 @@ -use std::cmp::min; // Built-in deps use std::time::Instant; @@ -669,6 +668,7 @@ impl<'a, 'c> OperationsExtSchema<'a, 'c> { fail_reason, block_number, created_at, + sequence_number, batch_id FROM tx_hashes INNER JOIN executed_transactions @@ -684,6 +684,7 @@ impl<'a, 'c> OperationsExtSchema<'a, 'c> { null as fail_reason, block_number, created_at, + sequence_number, Null::bigint as batch_id from executed_priority_operations @@ -712,7 +713,7 @@ impl<'a, 'c> OperationsExtSchema<'a, 'c> { batch_id as "batch_id?" 
from transactions LEFT JOIN aggr_exec verified ON transactions.block_number = verified.block_number - order by transactions.block_number desc, created_at desc + order by transactions.block_number desc, sequence_number desc "#, address.as_ref(), offset as i64, limit as i64 ).fetch_all(transaction.conn()) @@ -841,6 +842,7 @@ impl<'a, 'c> OperationsExtSchema<'a, 'c> { fail_reason, block_number, created_at, + sequence_number, batch_id from tx_hashes inner join executed_transactions @@ -858,6 +860,7 @@ impl<'a, 'c> OperationsExtSchema<'a, 'c> { null as fail_reason, block_number, created_at, + sequence_number, Null::bigint as batch_id from executed_priority_operations @@ -871,7 +874,7 @@ impl<'a, 'c> OperationsExtSchema<'a, 'c> { (block_number BETWEEN $3 AND $4 or (block_number = $2 and block_index BETWEEN $5 AND $6)) ) t order by - block_number desc, created_at desc + sequence_number desc limit $7 ) @@ -892,7 +895,7 @@ impl<'a, 'c> OperationsExtSchema<'a, 'c> { committed.block_number = transactions.block_number AND committed.confirmed = true left join aggr_exec verified on verified.block_number = transactions.block_number AND verified.confirmed = true - order by transactions.block_number desc, created_at desc + order by transactions.sequence_number desc "#, address.as_ref(), block_id as i64, @@ -1246,18 +1249,18 @@ impl<'a, 'c> OperationsExtSchema<'a, 'c> { SELECT DISTINCT tx_hash FROM tx_filters WHERE address = $1 ), transactions AS ( - SELECT executed_transactions.tx_hash, created_at, block_index + SELECT executed_transactions.tx_hash, sequence_number FROM tx_hashes INNER JOIN executed_transactions ON tx_hashes.tx_hash = executed_transactions.tx_hash - ORDER BY created_at DESC, block_index DESC + ORDER BY sequence_number DESC LIMIT 1 ), priority_ops AS ( - SELECT executed_priority_operations.tx_hash, created_at, block_index + SELECT executed_priority_operations.tx_hash, executed_priority_operations.sequence_number FROM tx_hashes INNER JOIN executed_priority_operations ON tx_hashes.tx_hash = executed_priority_operations.tx_hash - ORDER BY created_at DESC, block_index DESC + ORDER BY sequence_number DESC LIMIT 1 ), everything AS ( SELECT * FROM transactions @@ -1267,7 +1270,7 @@ impl<'a, 'c> OperationsExtSchema<'a, 'c> { SELECT tx_hash as "tx_hash!" FROM everything - ORDER BY created_at DESC, block_index DESC + ORDER BY sequence_number DESC LIMIT 1 "#, address.as_bytes() @@ -1291,11 +1294,11 @@ impl<'a, 'c> OperationsExtSchema<'a, 'c> { let record = sqlx::query!( r#" WITH transactions AS ( - SELECT tx_hash, created_at, block_index + SELECT tx_hash, sequence_number FROM executed_transactions WHERE block_number = $1 ), priority_ops AS ( - SELECT tx_hash, created_at, block_index + SELECT tx_hash, sequence_number FROM executed_priority_operations WHERE block_number = $1 ), everything AS ( @@ -1306,7 +1309,7 @@ impl<'a, 'c> OperationsExtSchema<'a, 'c> { SELECT tx_hash as "tx_hash!" 
FROM everything - ORDER BY created_at DESC, block_index DESC + ORDER BY sequence_number DESC LIMIT 1 "#, i64::from(*block_number) @@ -1402,7 +1405,7 @@ impl<'a, 'c> OperationsExtSchema<'a, 'c> { // because we are using this function for paginating starting from the latest transaction let result = sqlx::query!( r#"SELECT sequence_number FROM executed_priority_operations - WHERE tx_hash = $1 AND block_number = $2 ORDER BY created_at DESC"#, + WHERE tx_hash = $1 AND block_number = $2 ORDER BY sequence_number DESC"#, tx_hash.as_ref(), block_number.0 as i32 ) @@ -1435,7 +1438,7 @@ impl<'a, 'c> OperationsExtSchema<'a, 'c> { // because we are using this function for paginating starting from the latest transaction let record = sqlx::query!( r#"SELECT sequence_number FROM executed_priority_operations - WHERE tx_hash = $1 ORDER BY created_at DESC"#, + WHERE tx_hash = $1 ORDER BY sequence_number DESC"#, tx_hash.as_ref() ) .fetch_optional(transaction.conn()) @@ -1465,7 +1468,7 @@ impl<'a, 'c> OperationsExtSchema<'a, 'c> { INNER JOIN txs_batches_hashes ON txs_batches_hashes.batch_id = COALESCE(executed_transactions.batch_id, 0) WHERE batch_hash = $1 - ORDER BY created_at ASC, block_index ASC + ORDER BY sequence_number ASC "#, batch_hash.as_ref() ) @@ -1824,147 +1827,4 @@ impl<'a, 'c> OperationsExtSchema<'a, 'c> { .await?; Ok(()) } - - // TODO ZKS-931 Remove it after migration - pub async fn min_block_for_update_sequence(&mut self) -> i64 { - let executed_txs_block = sqlx::query!( - "SELECT MIN(block_number) FROM executed_transactions WHERE sequence_number IS NULL" - ) - .fetch_one(self.0.conn()) - .await - .unwrap() - .min - .unwrap_or(0); - let executed_pr_op_block = sqlx::query!( - "SELECT MIN(block_number) FROM executed_priority_operations WHERE sequence_number IS NULL" - ) - .fetch_one(self.0.conn()) - .await - .unwrap() - .min - .unwrap_or(0); - min(executed_txs_block, executed_pr_op_block) - } - - // TODO ZKS-931 Remove it after migration - pub async fn update_sequence_number_for_blocks( - &mut self, - from_block: i64, - to_block: i64, - known_last_sequence_number: Option, - ) -> i64 { - let mut transaction = self.0.start_transaction().await.unwrap(); - let mut sequence_number: i64 = if let Some(number) = known_last_sequence_number { - number - } else { - let executed_txs_count = sqlx::query!( - r#"SELECT COUNT(*) as "count!" FROM executed_transactions WHERE block_number < $1"#, - from_block - ) - .fetch_one(transaction.conn()) - .await - .unwrap() - .count; - let priority_ops_count = sqlx::query!( - r#"SELECT COUNT(*) as "count!" 
FROM executed_priority_operations WHERE block_number < $1"#, - from_block - ) - .fetch_one(transaction.conn()) - .await - .unwrap() - .count; - executed_txs_count + priority_ops_count - }; - println!("seq_no {:?}", sequence_number); - - let records = sqlx::query!( - r#" - WITH transactions AS ( - SELECT block_number, created_at, Null::bigint as priority_op_serialid, tx_hash, block_index - FROM executed_transactions - WHERE block_number BETWEEN $1 AND $2 - ), priority_ops AS ( - SELECT block_number, created_at, priority_op_serialid, tx_hash, block_index - FROM executed_priority_operations - WHERE block_number BETWEEN $1 AND $2 - ), everything AS ( - SELECT * FROM transactions - UNION ALL - SELECT * FROM priority_ops - ) - SELECT - block_number, priority_op_serialid, tx_hash, block_index - FROM everything - ORDER BY created_at, block_index - "#, - from_block, - to_block - ) - .fetch_all(transaction.conn()) - .await - .unwrap(); - - println!("Number of txs {}", records.len()); - let mut priority_ops_serial_ids = vec![]; - let mut priority_ops_seqeunce_numbers = vec![]; - - let mut executed_txs_hashes = vec![]; - let mut executed_txs_seqeunce_numbers = vec![]; - for record in records { - sequence_number += 1; - if let Some(serial_id) = record.priority_op_serialid { - priority_ops_serial_ids.push(serial_id); - priority_ops_seqeunce_numbers.push(sequence_number); - // sqlx::query!( - // "UPDATE executed_priority_operations SET sequence_number = $1 WHERE priority_op_serialid = $2", - // sequence_number, - // serial_id - // ) - // .execute(transaction.conn()) - // .await - // .unwrap(); - } else { - executed_txs_hashes.push(record.tx_hash.unwrap()); - executed_txs_seqeunce_numbers.push(sequence_number); - // sqlx::query!( - // "UPDATE executed_transactions SET sequence_number = $1 WHERE tx_hash = $2", - // sequence_number, - // record.tx_hash - // ) - // .execute(transaction.conn()) - // .await - // .unwrap(); - } - } - sqlx::query!( - "UPDATE executed_priority_operations SET sequence_number = u.sequence_number \ - FROM UNNEST ($1::bigint[], $2::bigint[]) - AS u(serial_id, sequence_number) - WHERE executed_priority_operations.priority_op_serialid= u.serial_id", - &priority_ops_serial_ids, - &priority_ops_seqeunce_numbers - ) - .execute(transaction.conn()) - .await - .unwrap(); - - println!( - "Update priority ops seq no {:?}", - &priority_ops_seqeunce_numbers - ); - sqlx::query!( - "UPDATE executed_transactions SET sequence_number = u.sequence_number \ - FROM UNNEST ($1::bytea[], $2::bigint[]) - AS u(tx_hash, sequence_number) - WHERE executed_transactions.tx_hash = u.tx_hash", - &executed_txs_hashes, - &executed_txs_seqeunce_numbers - ) - .execute(transaction.conn()) - .await - .unwrap(); - println!("Update txs seq no {:?}", &executed_txs_seqeunce_numbers); - transaction.commit().await.unwrap(); - sequence_number - } } diff --git a/core/lib/types/src/tx/swap.rs b/core/lib/types/src/tx/swap.rs index b8bed7e406..be04b0e803 100644 --- a/core/lib/types/src/tx/swap.rs +++ b/core/lib/types/src/tx/swap.rs @@ -116,8 +116,8 @@ impl Order { "Ratio: {sell}:{buy}\n\ Address: {recipient:?}\n\ Nonce: {nonce}", - sell = self.price.0.to_string(), - buy = self.price.1.to_string(), + sell = self.price.0, + buy = self.price.1, recipient = self.recipient_address, nonce = self.nonce ) diff --git a/core/lib/types/src/tx/tests.rs b/core/lib/types/src/tx/tests.rs index 273e0e0003..e91217d897 100644 --- a/core/lib/types/src/tx/tests.rs +++ b/core/lib/types/src/tx/tests.rs @@ -98,7 +98,7 @@ fn 
test_print_swap_for_protocol() { let print_signer = |name, key: PrivateKey| { println!("Signer ({}):", name); - println!("Private key: {}", key.0.to_string()); + println!("Private key: {}", key.0); let (pk_x, pk_y) = public_key_from_private(&key).0.into_xy(); println!("Public key: x: {}, y: {}\n", pk_x, pk_y); }; @@ -163,7 +163,7 @@ fn test_print_transfer_for_protocol() { ); println!("Signer:"); - println!("Private key: {}", key.0.to_string()); + println!("Private key: {}", key.0); let (pk_x, pk_y) = public_key_from_private(&key).0.into_xy(); println!("Public key: x: {}, y: {}\n", pk_x, pk_y); @@ -224,7 +224,7 @@ fn test_print_change_pub_key_for_protocol() { ); println!("Signer:"); - println!("Private key: {}", key.0.to_string()); + println!("Private key: {}", key.0); let (pk_x, pk_y) = public_key_from_private(&key).0.into_xy(); println!("Public key: x: {}, y: {}\n", pk_x, pk_y); @@ -283,7 +283,7 @@ fn test_print_withdraw_for_protocol() { ); println!("Signer:"); - println!("Private key: {}", key.0.to_string()); + println!("Private key: {}", key.0); let (pk_x, pk_y) = public_key_from_private(&key).0.into_xy(); println!("Public key: x: {}, y: {}\n", pk_x, pk_y); @@ -346,7 +346,7 @@ fn test_print_withdraw_nft_for_protocol() { ); println!("Signer:"); - println!("Private key: {}", key.0.to_string()); + println!("Private key: {}", key.0); let (pk_x, pk_y) = public_key_from_private(&key).0.into_xy(); println!("Public key: x: {}, y: {}\n", pk_x, pk_y); @@ -398,7 +398,7 @@ fn test_print_mint_nft_for_protocol() { ); println!("Signer:"); - println!("Private key: {}", key.0.to_string()); + println!("Private key: {}", key.0); let (pk_x, pk_y) = public_key_from_private(&key).0.into_xy(); println!("Public key: x: {}, y: {}\n", pk_x, pk_y); diff --git a/core/tests/flamegraph_target/Cargo.toml b/core/tests/flamegraph_target/Cargo.toml new file mode 100644 index 0000000000..e4c353ee7c --- /dev/null +++ b/core/tests/flamegraph_target/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "flamegraph_target" +version = "0.1.0" +edition = "2018" +authors = ["The Matter Labs Team "] +homepage = "https://zksync.io/" +repository = "https://github.com/matter-labs/zksync" +license = "Apache-2.0" +keywords = ["blockchain", "zksync"] +categories = ["cryptography"] +publish = false # We don't want to publish our tests. + +[dependencies] +zksync_crypto = { path = "../../lib/crypto", version = "1.0" } +structopt = "0.3.20" + +# Debug symbols are enabled for this binary even in release. +# The corresponding setting is set in the workspace root's `Cargo.toml`. diff --git a/core/tests/flamegraph_target/README.md b/core/tests/flamegraph_target/README.md new file mode 100644 index 0000000000..3848c701ba --- /dev/null +++ b/core/tests/flamegraph_target/README.md @@ -0,0 +1,24 @@ +# zkSync flamegraph target + +This binary is a playground for analyzing the performance of the zkSync components, using tools such as [perf] and +[cargo-flamegraph]. + +While analyzing the performance of the application overall may be useful, it doesn't really help when you want to +analyze one particular component when it takes ~100% of the runtime, in order to see weak points clearly. + +[perf]: https://perf.wiki.kernel.org/index.php/Main_Page +[cargo-flamegraph]: https://github.com/flamegraph-rs/flamegraph + +## Design notes + +Since the goal is to avoid _everything_ that is not related to the analysis of the particular component, try to avoid +"hidden" runtime logic such as async runtimes (e.g. `tokio::main`) or logging (e.g. `vlog`) in this crate. 
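+
+For reference, running the target under `cargo-flamegraph` might look roughly like the following (the binary name and
+the `tree` argument come from the `Cargo.toml` and the CLI defined below; the exact flags depend on the installed
+`cargo-flamegraph` version and on `perf` permissions on the host, so treat this as a sketch rather than a recipe):
+
+```sh
+# Assumes cargo-flamegraph is installed and perf events are accessible on this machine.
+cargo install flamegraph
+cargo flamegraph --bin flamegraph_target -- tree
+```
+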
If the code
+you are going to analyze requires `async` support, explicitly create the runtime for said code only.
+
+Target code should be written in a manner that stresses the component as much as possible. If your code needs
+initialization, separate it from the actual stressing logic, so that in the resulting flamegraph they will be easily
+distinguishable.
+
+That being said, the code is just a playground. It is expected to be adjusted to what you need, so it is OK to keep the
+code simple and to parameterize it with loops, constants, and whatever else helps you achieve representative performance
+charts.
diff --git a/core/tests/flamegraph_target/src/main.rs b/core/tests/flamegraph_target/src/main.rs
new file mode 100644
index 0000000000..790979b6e2
--- /dev/null
+++ b/core/tests/flamegraph_target/src/main.rs
@@ -0,0 +1,45 @@
+use std::str::FromStr;
+use structopt::StructOpt;
+
+mod tree_target;
+
+/// Target to analyze.
+#[derive(Debug)]
+enum Target {
+    /// Merkle tree.
+    Tree,
+}
+
+impl FromStr for Target {
+    type Err = &'static str;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        let target = match s {
+            "tree" | "merkle-tree" => Self::Tree,
+            _ => return Err("Unknown target. Available options are: tree"),
+        };
+
+        Ok(target)
+    }
+}
+
+#[derive(Debug, StructOpt)]
+#[structopt(
+    name = "flamegraph_target",
+    about = "Binary for stress-testing zkSync components"
+)]
+struct Options {
+    /// Name of the target to run.
+    target: Target,
+}
+
+fn main() {
+    let options = Options::from_args();
+
+    // Not much here currently, but we may want to add other targets in the future.
+    match options.target {
+        Target::Tree => {
+            tree_target::analyze_tree();
+        }
+    }
+}
diff --git a/core/tests/flamegraph_target/src/tree_target.rs b/core/tests/flamegraph_target/src/tree_target.rs
new file mode 100644
index 0000000000..2742b55ee9
--- /dev/null
+++ b/core/tests/flamegraph_target/src/tree_target.rs
@@ -0,0 +1,69 @@
+//! Target code to analyze tree performance.
+
+use zksync_crypto::{
+    merkle_tree::{RescueHasher, SparseMerkleTree},
+    params::ACCOUNT_TREE_DEPTH,
+    Engine, Fr,
+};
+
+type Value = u64;
+type Tree = SparseMerkleTree<Value, Fr, RescueHasher<Engine>>;
+
+// This binary is a playground, so feel free to change the params to achieve the behavior you want.
+const INITIALIZED_VALUES: usize = 100_000;
+const N_CYCLES: usize = 1_000;
+const VALUES_PER_CYCLE: usize = 1_000;
+
+/// An entry point for analysis.
+pub(crate) fn analyze_tree() {
+    let mut tree = Tree::new(ACCOUNT_TREE_DEPTH);
+
+    prepare(&mut tree);
+    stress_get(&mut tree);
+    stress_affected(&mut tree);
+    stress_new(&mut tree);
+    drop(tree);
+}
+
+fn prepare(tree: &mut Tree) {
+    for val in 0..INITIALIZED_VALUES {
+        tree.insert(val as u32, val as Value);
+    }
+    tree.root_hash();
+}
+
+fn stress_get(tree: &mut Tree) {
+    const START_FROM: usize = INITIALIZED_VALUES / 2;
+    for _ in 0..N_CYCLES {
+        for val in START_FROM..(START_FROM + VALUES_PER_CYCLE) {
+            // Read an existing value.
+            let got = tree.get(val as u32).copied();
+            // Prevent the compiler from optimizing the read away.
+            assert_eq!(got, Some(val as Value));
+        }
+    }
+}
+
+fn stress_affected(tree: &mut Tree) {
+    // Range contains affected accounts and rewrites them multiple times.
+    const START_FROM: usize = INITIALIZED_VALUES / 2;
+    for cycle in 0..N_CYCLES {
+        for val in START_FROM..(START_FROM + VALUES_PER_CYCLE) {
+            // Insert some new value.
+            tree.insert(val as u32, (val + cycle) as Value);
+        }
+        // Recalculate the root hash.
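+        // Doing this once per cycle (rather than after every insert) keeps the hash
+        // recomputation visible as a separate frame in the resulting flamegraph.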
+ tree.root_hash(); + } +} + +fn stress_new(tree: &mut Tree) { + // Range is outside of where we have been putting out elements. + // Each account is updated one time. + const START_FROM: usize = INITIALIZED_VALUES * 100; + for val in START_FROM..(START_FROM + INITIALIZED_VALUES) { + // Each ID is multiplied by 2, so that they are not neighbors and the access + // is somewhat sparse. + tree.insert((val * 2) as u32, val as Value); + } +} diff --git a/core/tests/ts-tests/tests/api.test.ts b/core/tests/ts-tests/tests/api.test.ts index e20920c784..0c80bf440f 100644 --- a/core/tests/ts-tests/tests/api.test.ts +++ b/core/tests/ts-tests/tests/api.test.ts @@ -254,7 +254,7 @@ describe('ZkSync REST API V0.2 tests', () => { .addTransfer({ to: bob.address(), token: 'ETH', amount: alice.provider.tokenSet.parseToken('ETH', '1') }) .addTransfer({ to: bob.address(), token: 'ETH', amount: alice.provider.tokenSet.parseToken('ETH', '1') }) .build('ETH'); - const submitBatchResponse = await provider.submitTxsBatchNew(batch.txs, [batch.signature]); + const submitBatchResponse = await provider.submitTxsBatchNew(batch.txs, [batch.signature!]); await provider.notifyAnyTransaction(submitBatchResponse.transactionHashes[0], 'COMMIT'); const batchInfo = await provider.getBatch(submitBatchResponse.batchHash); expect(batchInfo.batchHash).to.eql(submitBatchResponse.batchHash); @@ -294,7 +294,7 @@ describe('ZkSync web3 API tests', () => { tokenAddress = alice.provider.tokenSet.resolveTokenAddress(token); const erc20InterfacePath = path.join(process.env['ZKSYNC_HOME'] as string, 'etc', 'web3-abi', 'ERC20.json'); const erc20Interface = new ethers.utils.Interface(require(erc20InterfacePath)); - erc20Contract = new ethers.Contract(tokenAddress, erc20Interface, alice.ethSigner); + erc20Contract = new ethers.Contract(tokenAddress, erc20Interface, alice.ethSigner()); const zksyncProxyInterfacePath = path.join( process.env['ZKSYNC_HOME'] as string, @@ -303,7 +303,7 @@ describe('ZkSync web3 API tests', () => { 'ZkSyncProxy.json' ); const zksyncProxyInterface = new ethers.utils.Interface(require(zksyncProxyInterfacePath)); - zksyncProxyContract = new ethers.Contract(zksyncProxyAddress, zksyncProxyInterface, alice.ethSigner); + zksyncProxyContract = new ethers.Contract(zksyncProxyAddress, zksyncProxyInterface, alice.ethSigner()); const nftFactoryInterfacePath = path.join( process.env['ZKSYNC_HOME'] as string, @@ -312,7 +312,7 @@ describe('ZkSync web3 API tests', () => { 'NFTFactory.json' ); const nftFactoryInterface = new ethers.utils.Interface(require(nftFactoryInterfacePath)); - nftFactoryContract = new ethers.Contract(nftFactoryAddress, nftFactoryInterface, alice.ethSigner); + nftFactoryContract = new ethers.Contract(nftFactoryAddress, nftFactoryInterface, alice.ethSigner()); }); it('should check logs', async () => { diff --git a/core/tests/ts-tests/tests/suits/full-exit.ts b/core/tests/ts-tests/tests/suits/full-exit.ts index 25c6d121f7..dcc2783a3c 100644 --- a/core/tests/ts-tests/tests/suits/full-exit.ts +++ b/core/tests/ts-tests/tests/suits/full-exit.ts @@ -59,12 +59,12 @@ const FullExitTestSuite = (token: types.TokenSymbol) => // make a deposit so that wallet is assigned an accountId await tester.testDeposit(carl, token, DEPOSIT_AMOUNT, true); - const oldSigner = carl.ethSigner; - carl.ethSigner = tester.ethWallet; + const oldSigner = carl._ethSigner; + carl._ethSigner = tester.ethWallet; const [before, after] = await tester.testFullExit(carl, token); expect(before.eq(0), 'Balance before Full Exit must be 
non-zero').to.be.false; expect(before.eq(after), 'Balance after incorrect Full Exit should not change').to.be.true; - carl.ethSigner = oldSigner; + carl._ethSigner = oldSigner; }); step('should execute NFT full-exit', async () => { diff --git a/core/tests/ts-tests/tests/suits/no2fa.ts b/core/tests/ts-tests/tests/suits/no2fa.ts index 71f8d296c1..25aad1cf92 100644 --- a/core/tests/ts-tests/tests/suits/no2fa.ts +++ b/core/tests/ts-tests/tests/suits/no2fa.ts @@ -87,7 +87,7 @@ describe(`No2FA tests`, () => { // Making sure that the wallet has no Ethereum private key // but has wrong l2 private key hilda = await Wallet.fromSyncSigner( - new No2FAWalletSigner(hilda.address(), hilda.ethSigner.provider), + new No2FAWalletSigner(hilda.address(), hilda.ethSigner().provider), Signer.fromPrivateKey(zkPrivateKeyWith2FA), hilda.provider ); diff --git a/core/tests/ts-tests/tests/tester/batch-builder.ts b/core/tests/ts-tests/tests/tester/batch-builder.ts index c2d3b69b43..b800901431 100644 --- a/core/tests/ts-tests/tests/tester/batch-builder.ts +++ b/core/tests/ts-tests/tests/tester/batch-builder.ts @@ -85,7 +85,7 @@ Tester.prototype.testBatchBuilderChangePubKey = async function ( const totalFee = batch.totalFee.get(token)!; const balanceBefore = await sender.getBalance(token); - const handles = await wallet.submitSignedTransactionsBatch(sender.provider, batch.txs, [batch.signature]); + const handles = await wallet.submitSignedTransactionsBatch(sender.provider, batch.txs, [batch.signature!]); await Promise.all(handles.map((handle) => handle.awaitReceipt())); expect(await sender.isSigningKeySet(), 'ChangePubKey failed').to.be.true; const balanceAfter = await sender.getBalance(token); @@ -133,7 +133,7 @@ Tester.prototype.testBatchBuilderSignedChangePubKey = async function ( expect(fee.eq(totalFee), 'Wrong caclucated fee').to.be.true; const balanceBefore = await sender.getBalance(token); - const handles = await wallet.submitSignedTransactionsBatch(sender.provider, batch.txs, [batch.signature]); + const handles = await wallet.submitSignedTransactionsBatch(sender.provider, batch.txs, [batch.signature!]); await Promise.all(handles.map((handle) => handle.awaitReceipt())); expect(await sender.isSigningKeySet(), 'ChangePubKey failed').to.be.true; const balanceAfter = await sender.getBalance(token); @@ -157,7 +157,7 @@ Tester.prototype.testBatchBuilderTransfers = async function ( const senderBefore = await sender.getBalance(token); const receiverBefore = await receiver.getBalance(token); - const handles = await wallet.submitSignedTransactionsBatch(sender.provider, batch.txs, [batch.signature]); + const handles = await wallet.submitSignedTransactionsBatch(sender.provider, batch.txs, [batch.signature!]); await Promise.all(handles.map((handle) => handle.awaitReceipt())); const senderAfter = await sender.getBalance(token); const receiverAfter = await receiver.getBalance(token); @@ -215,7 +215,7 @@ Tester.prototype.testBatchBuilderPayInDifferentToken = async function ( const senderBeforeFeeToken = await sender.getBalance(feeToken); const senderBefore = await sender.getBalance(token); const receiverBefore = await receiver.getBalance(token); - const handles = await wallet.submitSignedTransactionsBatch(sender.provider, batch.txs, [batch.signature]); + const handles = await wallet.submitSignedTransactionsBatch(sender.provider, batch.txs, [batch.signature!]); await Promise.all(handles.map((handle) => handle.awaitReceipt())); const senderAfterFeeToken = await sender.getBalance(feeToken); const senderAfter = await 
sender.getBalance(token); @@ -244,7 +244,7 @@ Tester.prototype.testBatchBuilderGenericUsage = async function ( const senderBefore = await sender.getBalance(token); const receiverBefore = await receiver.getBalance(token); - const handles = await wallet.submitSignedTransactionsBatch(sender.provider, batch.txs, [batch.signature]); + const handles = await wallet.submitSignedTransactionsBatch(sender.provider, batch.txs, [batch.signature!]); await Promise.all(handles.map((handle) => handle.awaitReceipt())); const senderAfter = await sender.getBalance(token); const receiverAfter = await receiver.getBalance(token); @@ -266,7 +266,7 @@ Tester.prototype.testBatchBuilderNFT = async function (from: Wallet, to: Wallet, const totalMintFee = mint_batch.totalFee.get(feeToken)!; const mint_handles = await wallet.submitSignedTransactionsBatch(from.provider, mint_batch.txs, [ - mint_batch.signature + mint_batch.signature! ]); await Promise.all(mint_handles.map((handle) => handle.awaitVerifyReceipt())); @@ -290,7 +290,7 @@ Tester.prototype.testBatchBuilderNFT = async function (from: Wallet, to: Wallet, const totalWithdrawFee = withdraw_batch.totalFee.get(feeToken)!; const withdraw_handles = await wallet.submitSignedTransactionsBatch(to.provider, withdraw_batch.txs, [ - withdraw_batch.signature + withdraw_batch.signature! ]); await Promise.all(withdraw_handles.map((handle) => handle.awaitReceipt())); diff --git a/core/tests/ts-tests/tests/tester/forced-exit-requests.ts b/core/tests/ts-tests/tests/tester/forced-exit-requests.ts index 65bf098ef4..705cd7b6fb 100644 --- a/core/tests/ts-tests/tests/tester/forced-exit-requests.ts +++ b/core/tests/ts-tests/tests/tester/forced-exit-requests.ts @@ -62,7 +62,7 @@ Tester.prototype.testForcedExitRequestMultipleTokens = async function ( }); }); const batch = await batchBuilder.build('ETH'); - const handles = await wallet.submitSignedTransactionsBatch(from.provider, batch.txs, [batch.signature]); + const handles = await wallet.submitSignedTransactionsBatch(from.provider, batch.txs, [batch.signature!]); // Waiting only for the first tx since we send the transactions in batch await handles[0].awaitReceipt(); diff --git a/core/tests/ts-tests/tests/tester/misc.ts b/core/tests/ts-tests/tests/tester/misc.ts index 66ba749b2e..1ae069ec0c 100644 --- a/core/tests/ts-tests/tests/tester/misc.ts +++ b/core/tests/ts-tests/tests/tester/misc.ts @@ -2,7 +2,7 @@ import { Tester } from './tester'; import { expect } from 'chai'; import { Wallet, types, Create2WalletSigner } from 'zksync'; import { BigNumber, ethers } from 'ethers'; -import { SignedTransaction, TxEthSignature } from 'zksync/build/types'; +import { ChangePubKey, SignedTransaction, TxEthSignature } from 'zksync/build/types'; import { submitSignedTransactionsBatch } from 'zksync/build/wallet'; import { MAX_TIMESTAMP } from 'zksync/build/utils'; import { Transaction } from 'zksync'; @@ -192,7 +192,7 @@ Tester.prototype.testMultipleBatchSigners = async function (wallets: Wallet[], t validFrom: 0, validUntil: MAX_TIMESTAMP }; - const transfer = await sender.getTransfer(transferArgs); + const transfer = (await sender.signSyncTransfer(transferArgs)).tx; batch.push({ tx: transfer }); const messagePart = await sender.getTransferEthMessagePart(transferArgs); @@ -203,7 +203,7 @@ Tester.prototype.testMultipleBatchSigners = async function (wallets: Wallet[], t // For every sender there's corresponding signature, otherwise, batch verification would fail. 
const ethSignatures: TxEthSignature[] = []; for (let i = 0; i < wallets.length - 1; ++i) { - ethSignatures.push(await wallets[i].getEthMessageSignature(message)); + ethSignatures.push(await wallets[i].ethMessageSigner().getEthMessageSignature(message)); } const senderBefore = await batchSender.getBalance(token); @@ -246,15 +246,15 @@ Tester.prototype.testMultipleWalletsWrongSignature = async function ( validFrom: 0, validUntil: MAX_TIMESTAMP }; - const transfer1 = await from.getTransfer(_transfer1); - const transfer2 = await to.getTransfer(_transfer2); + const transfer1 = (await from.signSyncTransfer(_transfer1)).tx; + const transfer2 = (await to.signSyncTransfer(_transfer2)).tx; // transfer1 and transfer2 are from different wallets. const batch: SignedTransaction[] = [{ tx: transfer1 }, { tx: transfer2 }]; const message = `From: ${from.address().toLowerCase()}\n${from.getTransferEthMessagePart(_transfer1)}\nNonce: ${ _transfer1.nonce }\n\nFrom: ${to.address().toLowerCase()}\n${to.getTransferEthMessagePart(_transfer2)}\nNonce: ${_transfer1.nonce}`; - const ethSignature = await from.getEthMessageSignature(message); + const ethSignature = await from.ethMessageSigner().getEthMessageSignature(message); let thrown = true; try { @@ -295,7 +295,7 @@ Tester.prototype.testBackwardCompatibleEthMessages = async function ( validFrom: 0, validUntil: MAX_TIMESTAMP }; - const transfer = await from.getTransfer(_transfer); + const transfer = (await from.signSyncTransfer(_transfer)).tx as Transfer; // Resolve all the information needed for human-readable message. const stringAmount = from.provider.tokenSet.formatToken(_transfer.token, transfer.amount); let stringFee = from.provider.tokenSet.formatToken(_transfer.token, transfer.fee); @@ -306,7 +306,10 @@ Tester.prototype.testBackwardCompatibleEthMessages = async function ( `Nonce: ${transfer.nonce}\n` + `Fee: ${stringFee} ${stringToken}\n` + `Account Id: ${transfer.accountId}`; - const signedTransfer = { tx: transfer, ethereumSignature: await from.getEthMessageSignature(transferMessage) }; // Transfer + const signedTransfer = { + tx: transfer, + ethereumSignature: await from.ethMessageSigner().getEthMessageSignature(transferMessage) + }; // Transfer // Withdraw const nonce = await to.getNonce(); @@ -319,7 +322,7 @@ Tester.prototype.testBackwardCompatibleEthMessages = async function ( validFrom: 0, validUntil: MAX_TIMESTAMP }; - const withdraw = await to.getWithdrawFromSyncToEthereum(_withdraw); + const withdraw = (await to.signWithdrawFromSyncToEthereum(_withdraw)).tx as Withdraw; stringFee = from.provider.tokenSet.formatToken(_transfer.token, 0); const withdrawMessage = `Withdraw ${stringAmount} ${stringToken}\n` + @@ -327,7 +330,10 @@ Tester.prototype.testBackwardCompatibleEthMessages = async function ( `Nonce: ${withdraw.nonce}\n` + `Fee: ${stringFee} ${stringToken}\n` + `Account Id: ${withdraw.accountId}`; - const signedWithdraw = { tx: withdraw, ethereumSignature: await to.getEthMessageSignature(withdrawMessage) }; // Withdraw + const signedWithdraw = { + tx: withdraw, + ethereumSignature: await to.ethMessageSigner().getEthMessageSignature(withdrawMessage) + }; // Withdraw const batch = [signedTransfer, signedWithdraw]; @@ -340,7 +346,10 @@ Tester.prototype.testBackwardCompatibleEthMessages = async function ( const message = Uint8Array.from(Buffer.from(batchHash, 'hex')); // Both wallets sign it. 
- const ethSignatures = [await to.getEthMessageSignature(message), await from.getEthMessageSignature(message)]; + const ethSignatures = [ + await to.ethMessageSigner().getEthMessageSignature(message), + await from.ethMessageSigner().getEthMessageSignature(message) + ]; const handles = await submitSignedTransactionsBatch(to.provider, batch, ethSignatures); // We only expect that API doesn't reject this batch due to Eth signature error. @@ -404,18 +413,21 @@ Tester.prototype.testSubsidyForCREATE2ChangePubKey = async function (create2Wall ).to.be.true; // Now we submit the CREATE2 ChangePubKey - const create2data = (create2Wallet.ethSigner as Create2WalletSigner).create2WalletData; - const cpkTx = await create2Wallet.getChangePubKey({ - feeToken: token, - fee: subsidyYotalFee, - nonce: 0, - validFrom: 0, - validUntil: MAX_TIMESTAMP, - ethAuthData: { - type: 'CREATE2', - ...create2data - } - }); + const create2data = (create2Wallet.ethSigner() as Create2WalletSigner).create2WalletData; + const cpkTx = ( + await create2Wallet.signSetSigningKey({ + feeToken: token, + fee: subsidyYotalFee, + nonce: 0, + validFrom: 0, + validUntil: MAX_TIMESTAMP, + ethAuthType: 'CREATE2' + }) + ).tx as ChangePubKey; + cpkTx.ethAuthData = { + type: 'CREATE2', + ...create2data + }; await expect(transport.request('tx_submit', [cpkTx, null])).to.be.rejected; // The transaction is submitted successfully @@ -476,27 +488,32 @@ Tester.prototype.testSubsidyForBatch = async function (create2Wallet: Wallet, to .gte(5 * SUBSIDY_ACCEPTED_SCALED_DIFFERENCE) ).to.be.true; - const create2data = (create2Wallet.ethSigner as Create2WalletSigner).create2WalletData; - const cpkTx = await create2Wallet.getChangePubKey({ - feeToken: token, - fee: BigNumber.from(0), - nonce: 0, - validFrom: 0, - validUntil: MAX_TIMESTAMP, - ethAuthData: { - type: 'CREATE2', - ...create2data - } - }); - const transferTx = await create2Wallet.getTransfer({ - token, - fee: subsidyYotalFee, - nonce: 1, - validFrom: 0, - validUntil: MAX_TIMESTAMP, - amount: BigNumber.from('1'), - to: create2Wallet.address() - }); + const create2data = (create2Wallet.ethSigner() as Create2WalletSigner).create2WalletData; + const cpkTx = ( + await create2Wallet.signSetSigningKey({ + feeToken: token, + fee: BigNumber.from(0), + nonce: 0, + validFrom: 0, + validUntil: MAX_TIMESTAMP, + ethAuthType: 'CREATE2' + }) + ).tx as ChangePubKey; + cpkTx.ethAuthData = { + type: 'CREATE2', + ...create2data + }; + const transferTx = ( + await create2Wallet.signSyncTransfer({ + token, + fee: subsidyYotalFee, + nonce: 1, + validFrom: 0, + validUntil: MAX_TIMESTAMP, + amount: BigNumber.from('1'), + to: create2Wallet.address() + }) + ).tx; const txs = [{ tx: cpkTx }, { tx: transferTx }]; await expect(transport.request('submit_txs_batch', [txs, []]), 'Submitted transaction with the wrong fee').to.be diff --git a/core/tests/ts-tests/tests/tester/register-factory.ts b/core/tests/ts-tests/tests/tester/register-factory.ts index 82639fb2df..e93c012597 100644 --- a/core/tests/ts-tests/tests/tester/register-factory.ts +++ b/core/tests/ts-tests/tests/tester/register-factory.ts @@ -29,7 +29,7 @@ declare module './tester' { Tester.prototype.testRegisterFactory = async function (wallet: Wallet, feeToken: TokenLike) { const contractAddress = await wallet.provider.getContractAddress(); - const ethProxy = new ETHProxy(wallet.ethSigner.provider!, contractAddress); + const ethProxy = new ETHProxy(wallet.ethSigner().provider!, contractAddress); const defaultNFTFactoryAddress = (await 
ethProxy.getGovernanceContract().defaultFactory()).toLowerCase(); const type = 'MintNFT'; @@ -55,7 +55,7 @@ Tester.prototype.testRegisterFactory = async function (wallet: Wallet, feeToken: expect(nftInfo.withdrawnFactory, 'NFT info before withdrawing is wrong').to.be.null; const contract = await deployContract( - wallet.ethSigner, + wallet.ethSigner(), readFactoryCode(), [ 'TestFactory', diff --git a/core/tests/ts-tests/tests/tester/swap.ts b/core/tests/ts-tests/tests/tester/swap.ts index fdc3194385..e193800f37 100644 --- a/core/tests/ts-tests/tests/tester/swap.ts +++ b/core/tests/ts-tests/tests/tester/swap.ts @@ -42,7 +42,7 @@ Tester.prototype.testSwapMissingSignatures = async function ( ) { const { totalFee: fee } = await this.syncProvider.getTransactionFee('Swap', walletA.address(), tokenA); - const orderA = await walletA.getOrder({ + const orderA = await walletA.signOrder({ tokenSell: tokenA, tokenBuy: tokenB, amount, @@ -52,7 +52,7 @@ Tester.prototype.testSwapMissingSignatures = async function ( }) }); - const orderB = await walletB.getOrder({ + const orderB = await walletB.signOrder({ tokenSell: tokenB, tokenBuy: tokenA, amount: amount.mul(2), @@ -98,7 +98,7 @@ Tester.prototype.testSwapNFT = async function ( const { totalFee: fee } = await this.syncProvider.getTransactionFee('Swap', walletA.address(), token); expect(await walletB.getNFT(nft), 'wallet does not own an NFT').to.exist; - const orderA = await walletA.getOrder({ + const orderA = await walletA.signOrder({ tokenSell: token, tokenBuy: nft, amount, @@ -108,7 +108,7 @@ Tester.prototype.testSwapNFT = async function ( }) }); - const orderB = await walletB.getOrder({ + const orderB = await walletB.signOrder({ tokenSell: nft, tokenBuy: token, amount: 1, @@ -143,7 +143,7 @@ Tester.prototype.testSwap = async function ( const stateABefore = (await this.syncProvider.getState(walletA.address())).committed; const stateBBefore = (await this.syncProvider.getState(walletB.address())).committed; - const orderA = await walletA.getOrder({ + const orderA = await walletA.signOrder({ tokenSell: tokenA, tokenBuy: tokenB, amount, @@ -153,7 +153,7 @@ Tester.prototype.testSwap = async function ( }) }); - const orderB = await walletB.getOrder({ + const orderB = await walletB.signOrder({ tokenSell: tokenB, tokenBuy: tokenA, amount: amount.mul(2), @@ -207,7 +207,7 @@ Tester.prototype.testSwapBatch = async function ( const nonceBefore = await walletA.getNonce(); // these are limit orders, so they can be reused - const orderA = await walletA.getLimitOrder({ + const orderA = await walletA.signLimitOrder({ tokenSell: tokenA, tokenBuy: tokenB, ratio: utils.weiRatio({ @@ -216,7 +216,7 @@ Tester.prototype.testSwapBatch = async function ( }) }); - const orderB = await walletB.getLimitOrder({ + const orderB = await walletB.signLimitOrder({ tokenSell: tokenB, tokenBuy: tokenA, ratio: utils.weiRatio({ @@ -239,7 +239,7 @@ Tester.prototype.testSwapBatch = async function ( }) .build(tokenA); - const handles = await wallet.submitSignedTransactionsBatch(this.syncProvider, batch.txs, [batch.signature]); + const handles = await wallet.submitSignedTransactionsBatch(this.syncProvider, batch.txs, [batch.signature!]); await Promise.all(handles.map((handle) => handle.awaitReceipt())); const nonceAfter = await walletA.getNonce(); diff --git a/core/tests/ts-tests/tests/tester/withdrawal-helpers.ts b/core/tests/ts-tests/tests/tester/withdrawal-helpers.ts index 2bca9c9ee0..403f1d716a 100644 --- a/core/tests/ts-tests/tests/tester/withdrawal-helpers.ts +++ 
b/core/tests/ts-tests/tests/tester/withdrawal-helpers.ts @@ -77,7 +77,7 @@ async function setRevert( Tester.prototype.testRecoverETHWithdrawal = async function (from: Wallet, to: Address, amount: BigNumber) { // Make sure that the withdrawal will fail - await setRevert(from.ethSigner, this.syncProvider, to, 'ETH', true); + await setRevert(from.ethSigner(), this.syncProvider, to, 'ETH', true); const balanceBefore = await this.ethProvider.getBalance(to); const withdrawTx = await from.withdrawFromSyncToEthereum({ ethAddress: to, @@ -96,7 +96,7 @@ Tester.prototype.testRecoverETHWithdrawal = async function (from: Wallet, to: Ad expect(balanceBefore.eq(balanceAfter), 'The withdrawal did not fail the first time').to.be.true; // Make sure that the withdrawal will pass now - await setRevert(from.ethSigner, this.syncProvider, to, 'ETH', false); + await setRevert(from.ethSigner(), this.syncProvider, to, 'ETH', false); // Re-try const withdrawPendingTx = await from.withdrawPendingBalance(to, 'ETH'); @@ -115,10 +115,10 @@ Tester.prototype.testRecoverERC20Withdrawal = async function ( amount: BigNumber ) { // Make sure that the withdrawal will be reverted - await setRevert(from.ethSigner, from.provider, to, token, true); + await setRevert(from.ethSigner(), from.provider, to, token, true); const getToBalance = () => - utils.getEthereumBalance(from.ethSigner.provider as ethers.providers.Provider, from.provider, to, token); + utils.getEthereumBalance(from.ethSigner().provider as ethers.providers.Provider, from.provider, to, token); const balanceBefore = await getToBalance(); const withdrawTx = await from.withdrawFromSyncToEthereum({ @@ -139,7 +139,7 @@ Tester.prototype.testRecoverERC20Withdrawal = async function ( expect(balanceBefore.eq(balanceAfter), 'The withdrawal did not fail the first time').to.be.true; // Make sure that the withdrawal will pass now - await setRevert(from.ethSigner, from.provider, to, token, false); + await setRevert(from.ethSigner(), from.provider, to, token, false); // Re-try const withdrawPendingTx = await from.withdrawPendingBalance(to, token); @@ -165,7 +165,7 @@ Tester.prototype.testRecoverMultipleWithdrawals = async function ( // Make sure that all the withdrawal will fall for (let i = 0; i < to.length; i++) { - await setRevert(from.ethSigner, this.syncProvider, to[i], token[i], true); + await setRevert(from.ethSigner(), this.syncProvider, to[i], token[i], true); } // Send the withdrawals and wait until they are sent onchain @@ -194,7 +194,7 @@ Tester.prototype.testRecoverMultipleWithdrawals = async function ( // Make sure that all the withdrawal will pass now for (let i = 0; i < to.length; i++) { - await setRevert(from.ethSigner, this.syncProvider, to[i], token[i], false); + await setRevert(from.ethSigner(), this.syncProvider, to[i], token[i], false); } const handle = await from.withdrawPendingBalances(to, token, { diff --git a/core/tests/ts-tests/tests/withdrawal-helpers.test.ts b/core/tests/ts-tests/tests/withdrawal-helpers.test.ts index cfd80c26f0..9c1c8de53e 100644 --- a/core/tests/ts-tests/tests/withdrawal-helpers.test.ts +++ b/core/tests/ts-tests/tests/withdrawal-helpers.test.ts @@ -36,7 +36,7 @@ const TestSuite = (providerType: 'REST' | 'RPC') => } // This is needed to interact with blockchain - alice.ethSigner.connect(tester.ethProvider); + alice.ethSigner().connect(tester.ethProvider); }); after('disconnect tester', async () => { @@ -71,7 +71,7 @@ const TestSuite = (providerType: 'REST' | 'RPC') => it('forced_exit_request should recover mutiple tokens', async () => { 
await tester.testForcedExitRequestMultipleTokens( alice, - bob.ethSigner, + bob.ethSigner(), chuck.address(), ['ETH', erc20Token], [TX_AMOUNT, TX_AMOUNT.mul(2)] diff --git a/docker-compose-runner.yml b/docker-compose-runner.yml index 277654a6b1..851fbaca6c 100644 --- a/docker-compose-runner.yml +++ b/docker-compose-runner.yml @@ -34,8 +34,12 @@ services: - .:/usr/src/zksync - /usr/src/cache:/usr/src/cache - /usr/src/keys:/usr/src/keys + - /etc/sa_secret:/etc/sa_secret environment: - IN_DOCKER=true - CACHE_DIR=/usr/src/cache - SCCACHE_CACHE_SIZE=50g + - SCCACHE_GCS_BUCKET=sccache-zksync-ci + - SCCACHE_GCS_KEY_PATH=/etc/sa_secret/sa_key.json + - SCCACHE_GCS_RW_MODE=READ_WRITE - CI=1 diff --git a/docker/zk-environment/Dockerfile b/docker/zk-environment/Dockerfile index 0b1ab0c3fc..e61e056c27 100644 --- a/docker/zk-environment/Dockerfile +++ b/docker/zk-environment/Dockerfile @@ -34,4 +34,3 @@ ENV PATH="${ZKSYNC_HOME}/bin:${PATH}" ENV CI=1 RUN cargo install sccache ENV RUSTC_WRAPPER=/usr/local/cargo/bin/sccache -ENV SCCACHE_DIR=/usr/src/cache/sccache diff --git a/infrastructure/zk/src/test/test.ts b/infrastructure/zk/src/test/test.ts index bc1b22446b..93642b9829 100644 --- a/infrastructure/zk/src/test/test.ts +++ b/infrastructure/zk/src/test/test.ts @@ -75,7 +75,7 @@ export async function serverRust() { } export async function cryptoRust() { - await circuit(6); + await circuit(25); await rustCryptoTests(); } diff --git a/sdk/zksync.js/package.json b/sdk/zksync.js/package.json index 9501b4bd30..c5965189f3 100644 --- a/sdk/zksync.js/package.json +++ b/sdk/zksync.js/package.json @@ -1,6 +1,6 @@ { "name": "zksync", - "version": "0.11.6", + "version": "0.12.0-alpha.4", "license": "MIT", "main": "build/index.js", "types": "build/index.d.ts", diff --git a/sdk/zksync.js/src/abstract-wallet.ts b/sdk/zksync.js/src/abstract-wallet.ts new file mode 100644 index 0000000000..d5f8c0937d --- /dev/null +++ b/sdk/zksync.js/src/abstract-wallet.ts @@ -0,0 +1,720 @@ +import { BigNumber, BigNumberish, Contract, ContractTransaction, ethers } from 'ethers'; +import { ErrorCode as EthersErrorCode } from '@ethersproject/logger'; +import { EthMessageSigner } from './eth-message-signer'; +import { SyncProvider } from './provider-interface'; +import { BatchBuilder, BatchBuilderInternalTx } from './batch-builder'; +import { + AccountState, + Address, + ChangePubkeyTypes, + NFT, + Nonce, + Order, + PubKeyHash, + SignedTransaction, + TokenLike, + TxEthSignature, + TokenRatio, + WeiRatio, + Toggle2FARequest, + l1ChainId +} from './types'; +import { + ERC20_APPROVE_TRESHOLD, + ERC20_DEPOSIT_GAS_LIMIT, + ERC20_RECOMMENDED_DEPOSIT_GAS_LIMIT, + ETH_RECOMMENDED_DEPOSIT_GAS_LIMIT, + getEthereumBalance, + IERC20_INTERFACE, + isTokenETH, + MAX_ERC20_APPROVE_AMOUNT, + SYNC_MAIN_CONTRACT_INTERFACE, + getToggle2FAMessage +} from './utils'; +import { Transaction, ETHOperation } from './operations'; + +export abstract class AbstractWallet { + public provider: SyncProvider; + + protected constructor(public cachedAddress: Address, public accountId?: number) {} + + connect(provider: SyncProvider) { + this.provider = provider; + return this; + } + + // **************** + // Abstract getters + // + + /** + * Returns the current Ethereum signer connected to this wallet. + */ + abstract ethSigner(): ethers.Signer; + + /** + * Returns the current Ethereum **message** signer connected to this wallet. + * + * Ethereum message signer differs from common Ethereum signer in that message signer + * returns Ethereum signatures along with its type (e.g. 
ECDSA / EIP1271). + */ + abstract ethMessageSigner(): EthMessageSigner; + + /** + * Returns `true` if this wallet instance has a connected L2 signer. + */ + abstract syncSignerConnected(): boolean; + + /** + * Returns the PubKeyHash that current *signer* uses + * (as opposed to the one set in the account). + */ + abstract syncSignerPubKeyHash(): Promise; + + // ************* + // Basic getters + // + + address(): Address { + return this.cachedAddress; + } + + async getCurrentPubKeyHash(): Promise { + return (await this.provider.getState(this.address())).committed.pubKeyHash; + } + + async getNonce(nonce: Nonce = 'committed'): Promise { + if (nonce === 'committed') { + return (await this.provider.getState(this.address())).committed.nonce; + } else if (typeof nonce === 'number') { + return nonce; + } + } + + async getAccountId(): Promise { + return (await this.getAccountState()).id; + } + + async getAccountState(): Promise { + return await this.provider.getState(this.address()); + } + + async resolveAccountId(): Promise { + if (this.accountId !== undefined) { + return this.accountId; + } else { + const accountState = await this.getAccountState(); + if (!accountState.id) { + throw new Error("Can't resolve account id from the zkSync node"); + } + return accountState.id; + } + } + + async isCorrespondingSigningKeySet(): Promise { + if (!this.syncSignerConnected()) { + throw new Error('ZKSync signer is required for current pubkey calculation.'); + } + const currentPubKeyHash = await this.getCurrentPubKeyHash(); + const signerPubKeyHash = await this.syncSignerPubKeyHash(); + return currentPubKeyHash === signerPubKeyHash; + } + + async isSigningKeySet(): Promise { + if (!this.syncSignerConnected()) { + throw new Error('ZKSync signer is required for current pubkey calculation.'); + } + const currentPubKeyHash = await this.getCurrentPubKeyHash(); + const zeroPubKeyHash = 'sync:0000000000000000000000000000000000000000'; + return zeroPubKeyHash !== currentPubKeyHash; + } + + async getNFT(tokenId: number, type: 'committed' | 'verified' = 'committed'): Promise { + const accountState = await this.getAccountState(); + let token: NFT; + if (type === 'committed') { + token = accountState.committed.nfts[tokenId]; + } else { + token = accountState.verified.nfts[tokenId]; + } + return token; + } + + async getBalance(token: TokenLike, type: 'committed' | 'verified' = 'committed'): Promise { + const accountState = await this.getAccountState(); + const tokenSymbol = this.provider.tokenSet.resolveTokenSymbol(token); + let balance: BigNumberish; + if (type === 'committed') { + balance = accountState.committed.balances[tokenSymbol] || '0'; + } else { + balance = accountState.verified.balances[tokenSymbol] || '0'; + } + return BigNumber.from(balance); + } + + async getEthereumBalance(token: TokenLike): Promise { + try { + return await getEthereumBalance(this.ethSigner().provider, this.provider, this.cachedAddress, token); + } catch (e) { + this.modifyEthersError(e); + } + } + + // ********************* + // Batch builder methods + // + + /** + * Creates a batch builder instance. + * + * @param nonce Nonce that should be used as the nonce of the first transaction in the batch. + * @returns Batch builder object + */ + batchBuilder(nonce?: Nonce): BatchBuilder { + return BatchBuilder.fromWallet(this, nonce); + } + + /** + * Internal method used to process transactions created via batch builder. + * Should not be used directly. 
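As a quick orientation for the accessor-based API (the tests above now call `ethSigner()` instead of reading an `ethSigner` field), a minimal sketch of the shared getters. `getDefaultProvider` and `Wallet.fromEthSigner` are existing SDK entry points shown elsewhere in this diff; the 'rinkeby' network choice and the incoming ethers wallet are assumptions of the example.

import { ethers } from 'ethers';
import { Wallet, getDefaultProvider } from 'zksync';

async function inspectAccount(ethWallet: ethers.Wallet) {
    const syncProvider = await getDefaultProvider('rinkeby');
    const wallet = await Wallet.fromEthSigner(ethWallet, syncProvider);

    // The L1 signer is now reached through the ethSigner() accessor rather than a public field.
    console.log('L1 address:', await wallet.ethSigner().getAddress());

    // Committed L2 state read through the basic AbstractWallet getters.
    console.log('signing key set:', await wallet.isSigningKeySet());
    console.log('ETH balance:', (await wallet.getBalance('ETH')).toString());
    console.log('next nonce:', await wallet.getNonce('committed'));
}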
+ */ + abstract processBatchBuilderTransactions( + startNonce: Nonce, + txs: BatchBuilderInternalTx[] + ): Promise<{ txs: SignedTransaction[]; signature?: TxEthSignature }>; + + // ************* + // L2 operations + // + // Operations below each come in three signatures: + // - `getXXX`: get the full transaction with L2 signature. + // - `signXXX`: get the full transaction with both L2 and L1 signatures. + // - `XXX` or `syncXXX`: sign and send the transaction to zkSync. + // + // All these methods accept incomplete transaction data, and if they return signed transaction, this transaction will + // be "completed". "Incomplete transaction data" means that e.g. account IDs are not resolved or tokens are represented + // by their names/addresses rather than by their IDs in the zkSync network. + // + + // Transfer part + + abstract signSyncTransfer(transfer: { + to: Address; + token: TokenLike; + amount: BigNumberish; + fee: BigNumberish; + nonce: number; + validFrom?: number; + validUntil?: number; + }): Promise; + + abstract syncTransfer(transfer: { + to: Address; + token: TokenLike; + amount: BigNumberish; + fee?: BigNumberish; + nonce?: Nonce; + validFrom?: number; + validUntil?: number; + }): Promise; + + // ChangePubKey part + + abstract signSetSigningKey(changePubKey: { + feeToken: TokenLike; + fee: BigNumberish; + nonce: number; + ethAuthType: ChangePubkeyTypes; + batchHash?: string; + validFrom?: number; + validUntil?: number; + }): Promise; + + abstract setSigningKey(changePubKey: { + feeToken: TokenLike; + ethAuthType: ChangePubkeyTypes; + fee?: BigNumberish; + nonce?: Nonce; + validFrom?: number; + validUntil?: number; + }): Promise; + + // Withdraw part + + abstract signWithdrawFromSyncToEthereum(withdraw: { + ethAddress: string; + token: TokenLike; + amount: BigNumberish; + fee: BigNumberish; + nonce: number; + validFrom?: number; + validUntil?: number; + }): Promise; + + abstract withdrawFromSyncToEthereum(withdraw: { + ethAddress: string; + token: TokenLike; + amount: BigNumberish; + fee?: BigNumberish; + nonce?: Nonce; + fastProcessing?: boolean; + validFrom?: number; + validUntil?: number; + }): Promise; + + // Forced exit part + + abstract signSyncForcedExit(forcedExit: { + target: Address; + token: TokenLike; + fee: BigNumberish; + nonce: number; + validFrom?: number; + validUntil?: number; + }): Promise; + + abstract syncForcedExit(forcedExit: { + target: Address; + token: TokenLike; + fee?: BigNumberish; + nonce?: Nonce; + validFrom?: number; + validUntil?: number; + }): Promise; + + // Swap part + + async signLimitOrder(order: { + tokenSell: TokenLike; + tokenBuy: TokenLike; + ratio: TokenRatio | WeiRatio; + recipient?: Address; + nonce?: Nonce; + validFrom?: number; + validUntil?: number; + }): Promise { + return await this.signOrder({ + ...order, + amount: 0 + }); + } + + abstract signOrder(order: { + tokenSell: TokenLike; + tokenBuy: TokenLike; + ratio: TokenRatio | WeiRatio; + amount: BigNumberish; + recipient?: Address; + nonce?: Nonce; + validFrom?: number; + validUntil?: number; + }): Promise; + + abstract signSyncSwap(swap: { + orders: [Order, Order]; + feeToken: number; + amounts: [BigNumberish, BigNumberish]; + nonce: number; + fee: BigNumberish; + }): Promise; + + abstract syncSwap(swap: { + orders: [Order, Order]; + feeToken: TokenLike; + amounts?: [BigNumberish, BigNumberish]; + nonce?: number; + fee?: BigNumberish; + }): Promise; + + // Mint NFT part + + abstract signMintNFT(mintNFT: { + recipient: string; + contentHash: string; + feeToken: TokenLike; + 
fee: BigNumberish; + nonce: number; + }): Promise; + + abstract mintNFT(mintNFT: { + recipient: Address; + contentHash: ethers.BytesLike; + feeToken: TokenLike; + fee?: BigNumberish; + nonce?: Nonce; + }): Promise; + + // Withdraw NFT part + + abstract signWithdrawNFT(withdrawNFT: { + to: string; + token: number; + feeToken: TokenLike; + fee: BigNumberish; + nonce: number; + validFrom?: number; + validUntil?: number; + }): Promise; + + abstract withdrawNFT(withdrawNFT: { + to: string; + token: number; + feeToken: TokenLike; + fee?: BigNumberish; + nonce?: Nonce; + fastProcessing?: boolean; + validFrom?: number; + validUntil?: number; + }): Promise; + + // Transfer NFT part + + abstract syncTransferNFT(transfer: { + to: Address; + token: NFT; + feeToken: TokenLike; + fee?: BigNumberish; + nonce?: Nonce; + validFrom?: number; + validUntil?: number; + }): Promise; + + // Multi-transfer part + + // Note that in syncMultiTransfer, unlike in syncTransfer, + // users need to specify the fee for each transaction. + // The main reason is that multitransfer enables paying fees + // in multiple tokens, (as long as the total sum + // of fees is enough to cover up the fees for all of the transactions). + // That might bring an inattentive user in a trouble like the following: + // + // A user wants to submit transactions in multiple tokens and + // wants to pay the fees with only some of them. If the user forgets + // to set the fees' value to 0 for transactions with tokens + // he won't pay the fee with, then this user will overpay a lot. + // + // That's why we want the users to be explicit about fees in multitransfers. + abstract syncMultiTransfer( + transfers: { + to: Address; + token: TokenLike; + amount: BigNumberish; + fee: BigNumberish; + nonce?: Nonce; + validFrom?: number; + validUntil?: number; + }[] + ): Promise; + + // Toggle 2FA part + + async getToggle2FA(enable: boolean, pubKeyHash?: PubKeyHash): Promise { + const accountId = await this.getAccountId(); + const timestamp = new Date().getTime(); + const signature = await this.ethMessageSigner().getEthMessageSignature( + getToggle2FAMessage(enable, timestamp, pubKeyHash) + ); + + return { + accountId, + signature, + timestamp, + enable, + pubKeyHash + }; + } + + async toggle2FA(enable: boolean, pubKeyHash?: PubKeyHash): Promise { + await this.setRequiredAccountIdFromServer('Toggle 2FA'); + + return await this.provider.toggle2FA(await this.getToggle2FA(enable, pubKeyHash)); + } + + // ************* + // L1 operations + // + // Priority operations, ones that sent through Ethereum. 
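A hedged illustration of the explicit-fee rule spelled out above: the whole batch fee is quoted for two Transfer operations and attached to the DAI transfer, while the USDC transfer explicitly carries a zero fee. Token symbols, amounts, and recipient addresses are made up for the example.

import { BigNumber } from 'ethers';
import type { Wallet } from 'zksync';

async function payAllFeesInDai(wallet: Wallet, alice: string, bob: string) {
    // One fee quote covering both Transfer operations, charged in DAI only.
    const batchFee = await wallet.provider.getTransactionsBatchFee(
        ['Transfer', 'Transfer'],
        [alice, bob],
        'DAI'
    );

    const handles = await wallet.syncMultiTransfer([
        // The fee-paying transfer.
        { to: alice, token: 'DAI', amount: BigNumber.from('1000000000000000000'), fee: batchFee },
        // The fee must be set to 0 explicitly, otherwise the user would also pay in USDC.
        { to: bob, token: 'USDC', amount: BigNumber.from('5000000'), fee: 0 }
    ]);
    await Promise.all(handles.map((tx) => tx.awaitReceipt()));
}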
+ // + + async approveERC20TokenDeposits( + token: TokenLike, + max_erc20_approve_amount: BigNumber = MAX_ERC20_APPROVE_AMOUNT + ): Promise { + if (isTokenETH(token)) { + throw Error('ETH token does not need approval.'); + } + const tokenAddress = this.provider.tokenSet.resolveTokenAddress(token); + const erc20contract = new Contract(tokenAddress, IERC20_INTERFACE, this.ethSigner()); + + try { + return erc20contract.approve(this.provider.contractAddress.mainContract, max_erc20_approve_amount); + } catch (e) { + this.modifyEthersError(e); + } + } + + async depositToSyncFromEthereum(deposit: { + depositTo: Address; + token: TokenLike; + amount: BigNumberish; + ethTxOptions?: ethers.providers.TransactionRequest; + approveDepositAmountForERC20?: boolean; + }): Promise { + const gasPrice = await this.ethSigner().provider.getGasPrice(); + + const mainZkSyncContract = this.getZkSyncMainContract(); + + let ethTransaction; + + if (isTokenETH(deposit.token)) { + try { + ethTransaction = await mainZkSyncContract.depositETH(deposit.depositTo, { + value: BigNumber.from(deposit.amount), + gasLimit: BigNumber.from(ETH_RECOMMENDED_DEPOSIT_GAS_LIMIT), + gasPrice, + ...deposit.ethTxOptions + }); + } catch (e) { + this.modifyEthersError(e); + } + } else { + const tokenAddress = this.provider.tokenSet.resolveTokenAddress(deposit.token); + // ERC20 token deposit + const erc20contract = new Contract(tokenAddress, IERC20_INTERFACE, this.ethSigner()); + let nonce: number; + if (deposit.approveDepositAmountForERC20) { + try { + const approveTx = await erc20contract.approve( + this.provider.contractAddress.mainContract, + deposit.amount + ); + nonce = approveTx.nonce + 1; + } catch (e) { + this.modifyEthersError(e); + } + } + const args = [ + tokenAddress, + deposit.amount, + deposit.depositTo, + { + nonce, + gasPrice, + ...deposit.ethTxOptions + } as ethers.providers.TransactionRequest + ]; + + // We set gas limit only if user does not set it using ethTxOptions. + const txRequest = args[args.length - 1] as ethers.providers.TransactionRequest; + if (txRequest.gasLimit == null) { + try { + const gasEstimate = await mainZkSyncContract.estimateGas.depositERC20(...args).then( + (estimate) => estimate, + () => BigNumber.from('0') + ); + const isMainnet = (await this.ethSigner().getChainId()) == 1; + let recommendedGasLimit = + isMainnet && ERC20_DEPOSIT_GAS_LIMIT[tokenAddress] + ? BigNumber.from(ERC20_DEPOSIT_GAS_LIMIT[tokenAddress]) + : ERC20_RECOMMENDED_DEPOSIT_GAS_LIMIT; + txRequest.gasLimit = gasEstimate.gte(recommendedGasLimit) ? 
gasEstimate : recommendedGasLimit; + args[args.length - 1] = txRequest; + } catch (e) { + this.modifyEthersError(e); + } + } + + try { + ethTransaction = await mainZkSyncContract.depositERC20(...args); + } catch (e) { + this.modifyEthersError(e); + } + } + + return new ETHOperation(ethTransaction, this.provider); + } + + async onchainAuthSigningKey( + nonce: Nonce = 'committed', + ethTxOptions?: ethers.providers.TransactionRequest + ): Promise { + if (!this.syncSignerConnected()) { + throw new Error('ZKSync signer is required for current pubkey calculation.'); + } + + const currentPubKeyHash = await this.getCurrentPubKeyHash(); + const newPubKeyHash = await this.syncSignerPubKeyHash(); + + if (currentPubKeyHash === newPubKeyHash) { + throw new Error('Current PubKeyHash is the same as new'); + } + + const numNonce = await this.getNonce(nonce); + + const mainZkSyncContract = this.getZkSyncMainContract(); + + try { + return mainZkSyncContract.setAuthPubkeyHash(newPubKeyHash.replace('sync:', '0x'), numNonce, { + gasLimit: BigNumber.from('200000'), + ...ethTxOptions + }); + } catch (e) { + this.modifyEthersError(e); + } + } + + async emergencyWithdraw(withdraw: { + token: TokenLike; + accountId?: number; + ethTxOptions?: ethers.providers.TransactionRequest; + }): Promise { + const gasPrice = await this.ethSigner().provider.getGasPrice(); + + let accountId: number = withdraw.accountId != null ? withdraw.accountId : await this.resolveAccountId(); + + const mainZkSyncContract = this.getZkSyncMainContract(); + + const tokenAddress = this.provider.tokenSet.resolveTokenAddress(withdraw.token); + try { + const ethTransaction = await mainZkSyncContract.requestFullExit(accountId, tokenAddress, { + gasLimit: BigNumber.from('500000'), + gasPrice, + ...withdraw.ethTxOptions + }); + return new ETHOperation(ethTransaction, this.provider); + } catch (e) { + this.modifyEthersError(e); + } + } + + async emergencyWithdrawNFT(withdrawNFT: { + tokenId: number; + accountId?: number; + ethTxOptions?: ethers.providers.TransactionRequest; + }): Promise { + const gasPrice = await this.ethSigner().provider.getGasPrice(); + + let accountId: number = withdrawNFT.accountId != null ? withdrawNFT.accountId : await this.resolveAccountId(); + + const mainZkSyncContract = this.getZkSyncMainContract(); + + try { + const ethTransaction = await mainZkSyncContract.requestFullExitNFT(accountId, withdrawNFT.tokenId, { + gasLimit: BigNumber.from('500000'), + gasPrice, + ...withdrawNFT.ethTxOptions + }); + return new ETHOperation(ethTransaction, this.provider); + } catch (e) { + this.modifyEthersError(e); + } + } + + async signRegisterFactory(factoryAddress: Address): Promise<{ + signature: TxEthSignature; + accountId: number; + accountAddress: Address; + }> { + await this.setRequiredAccountIdFromServer('Sign register factory'); + const signature = await this.ethMessageSigner().ethSignRegisterFactoryMessage( + factoryAddress, + this.accountId, + this.address() + ); + return { + signature, + accountId: this.accountId, + accountAddress: this.address() + }; + } + + // ********** + // L1 getters + // + // Getter methods that query information from Web3. 
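Putting the L1 methods above together, a minimal sketch of the ERC20 deposit flow: check the allowance, approve if needed, deposit, and wait for the priority operation to be committed. The 'DAI' token and the amount are illustrative.

import { utils } from 'ethers';
import type { Wallet } from 'zksync';

async function depositDai(wallet: Wallet) {
    if (!(await wallet.isERC20DepositsApproved('DAI'))) {
        // Unlimited approval by default (MAX_ERC20_APPROVE_AMOUNT); wait for the L1 tx to mine.
        const approveTx = await wallet.approveERC20TokenDeposits('DAI');
        await approveTx.wait();
    }

    const deposit = await wallet.depositToSyncFromEthereum({
        depositTo: wallet.address(),
        token: 'DAI',
        amount: utils.parseEther('100')
    });

    // ETHOperation goes 'Sent' -> 'Mined' -> 'Committed' once the priority op is executed on L2.
    await deposit.awaitReceipt();
}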
+ // + + async isOnchainAuthSigningKeySet(nonce: Nonce = 'committed'): Promise { + const mainZkSyncContract = this.getZkSyncMainContract(); + + const numNonce = await this.getNonce(nonce); + try { + const onchainAuthFact = await mainZkSyncContract.authFacts(this.address(), numNonce); + return onchainAuthFact !== '0x0000000000000000000000000000000000000000000000000000000000000000'; + } catch (e) { + this.modifyEthersError(e); + } + } + + async isERC20DepositsApproved( + token: TokenLike, + erc20ApproveThreshold: BigNumber = ERC20_APPROVE_TRESHOLD + ): Promise { + if (isTokenETH(token)) { + throw Error('ETH token does not need approval.'); + } + const tokenAddress = this.provider.tokenSet.resolveTokenAddress(token); + const erc20contract = new Contract(tokenAddress, IERC20_INTERFACE, this.ethSigner()); + try { + const currentAllowance = await erc20contract.allowance( + this.address(), + this.provider.contractAddress.mainContract + ); + return BigNumber.from(currentAllowance).gte(erc20ApproveThreshold); + } catch (e) { + this.modifyEthersError(e); + } + } + + getZkSyncMainContract() { + return new ethers.Contract( + this.provider.contractAddress.mainContract, + SYNC_MAIN_CONTRACT_INTERFACE, + this.ethSigner() + ); + } + + // **************** + // Internal methods + // + + protected async verifyNetworks() { + if (this.provider.network != undefined && this.ethSigner().provider != undefined) { + const ethNetwork = await this.ethSigner().provider.getNetwork(); + if (l1ChainId(this.provider.network) !== ethNetwork.chainId) { + throw new Error( + `ETH network ${ethNetwork.name} and ZkSync network ${this.provider.network} don't match` + ); + } + } + } + + protected modifyEthersError(error: any): never { + if (this.ethSigner instanceof ethers.providers.JsonRpcSigner) { + // List of errors that can be caused by user's actions, which have to be forwarded as-is. + const correct_errors = [ + EthersErrorCode.NONCE_EXPIRED, + EthersErrorCode.INSUFFICIENT_FUNDS, + EthersErrorCode.REPLACEMENT_UNDERPRICED, + EthersErrorCode.UNPREDICTABLE_GAS_LIMIT + ]; + if (!correct_errors.includes(error.code)) { + // This is an error which we don't expect + error.message = `Ethereum smart wallet JSON RPC server returned the following error while executing an operation: "${error.message}". Please contact your smart wallet support for help.`; + } + } + + throw error; + } + + protected async setRequiredAccountIdFromServer(actionName: string) { + if (this.accountId === undefined) { + const accountIdFromServer = await this.getAccountId(); + if (accountIdFromServer == null) { + throw new Error(`Failed to ${actionName}: Account does not exist in the zkSync network`); + } else { + this.accountId = accountIdFromServer; + } + } + } +} diff --git a/sdk/zksync.js/src/batch-builder.ts b/sdk/zksync.js/src/batch-builder.ts index c40db23011..d330597e37 100644 --- a/sdk/zksync.js/src/batch-builder.ts +++ b/sdk/zksync.js/src/batch-builder.ts @@ -3,7 +3,6 @@ import { Address, TokenLike, Nonce, - ChangePubKey, ChangePubKeyFee, SignedTransaction, TxEthSignature, @@ -12,12 +11,12 @@ import { Order } from './types'; import { MAX_TIMESTAMP } from './utils'; -import { Wallet } from './wallet'; +import { AbstractWallet } from './abstract-wallet'; /** * Used by `BatchBuilder` to store transactions until the `build()` call. 
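For context, a sketch of the batch flow after this refactor: the builder collects `BatchBuilderInternalTx` entries, `build()` delegates signing to the wallet's `processBatchBuilderTransactions`, and the result is submitted with `submitSignedTransactionsBatch`. The chainable `addTransfer`/`addWithdraw` helpers are assumed from the pre-existing `BatchBuilder` API and are not part of this diff; the recipient and amounts are illustrative.

import { utils } from 'ethers';
import { submitSignedTransactionsBatch } from 'zksync';
import type { Wallet } from 'zksync';

async function transferAndWithdrawInOneBatch(wallet: Wallet, recipient: string) {
    const batch = await wallet
        .batchBuilder()
        .addTransfer({ to: recipient, token: 'ETH', amount: utils.parseEther('0.1') })
        .addWithdraw({ ethAddress: recipient, token: 'ETH', amount: utils.parseEther('0.2') })
        .build('ETH'); // fees for the whole batch are paid in ETH

    // A local Wallet returns one umbrella signature; a RemoteWallet may return none at all.
    const ethSignatures = batch.signature ? [batch.signature] : [];
    const handles = await submitSignedTransactionsBatch(wallet.provider, batch.txs, ethSignatures);
    await Promise.all(handles.map((tx) => tx.awaitReceipt()));
}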
*/ -interface InternalTx { +export interface BatchBuilderInternalTx { type: 'Withdraw' | 'Transfer' | 'ChangePubKey' | 'ForcedExit' | 'MintNFT' | 'WithdrawNFT' | 'Swap'; tx: any; feeType: @@ -41,9 +40,13 @@ interface InternalTx { * Provides interface for constructing batches of transactions. */ export class BatchBuilder { - private constructor(private wallet: Wallet, private nonce: Nonce, private txs: InternalTx[] = []) {} + private constructor( + private wallet: AbstractWallet, + private nonce: Nonce, + private txs: BatchBuilderInternalTx[] = [] + ) {} - static fromWallet(wallet: Wallet, nonce?: Nonce): BatchBuilder { + static fromWallet(wallet: AbstractWallet, nonce?: Nonce): BatchBuilder { return new BatchBuilder(wallet, nonce, []); } @@ -55,7 +58,7 @@ export class BatchBuilder { */ async build( feeToken?: TokenLike - ): Promise<{ txs: SignedTransaction[]; signature: TxEthSignature; totalFee: TotalFee }> { + ): Promise<{ txs: SignedTransaction[]; signature?: TxEthSignature; totalFee: TotalFee }> { if (this.txs.length == 0) { throw new Error('Transaction batch cannot be empty'); } @@ -75,9 +78,7 @@ export class BatchBuilder { totalFee.set(token, curr.add(fee)); } - const { txs, message } = await this.processTransactions(); - - let signature = await this.wallet.getEthMessageSignature(message); + const { txs, signature } = await this.processTransactions(); return { txs, @@ -311,76 +312,7 @@ export class BatchBuilder { /** * Sets transactions nonces, assembles the batch and constructs the message to be signed by user. */ - private async processTransactions(): Promise<{ txs: SignedTransaction[]; message: string }> { - const processedTxs: SignedTransaction[] = []; - let messages: string[] = []; - let nonce: number = await this.wallet.getNonce(this.nonce); - const batchNonce = nonce; - for (const tx of this.txs) { - tx.tx.nonce = nonce++; - switch (tx.type) { - case 'Withdraw': - messages.push(this.wallet.getWithdrawEthMessagePart(tx.tx)); - const withdraw = { tx: await this.wallet.getWithdrawFromSyncToEthereum(tx.tx) }; - processedTxs.push(withdraw); - break; - case 'Transfer': - messages.push(await this.wallet.getTransferEthMessagePart(tx.tx)); - const transfer = { tx: await this.wallet.getTransfer(tx.tx) }; - processedTxs.push(transfer); - break; - case 'ChangePubKey': - // ChangePubKey requires its own Ethereum signature, we either expect - // it to be signed already or do it here. - const changePubKey: ChangePubKey = tx.alreadySigned - ? 
tx.tx - : (await this.wallet.signSetSigningKey(tx.tx)).tx; - const currentPubKeyHash = await this.wallet.getCurrentPubKeyHash(); - if (currentPubKeyHash === changePubKey.newPkHash) { - throw new Error('Current signing key is already set'); - } - messages.push( - this.wallet.getChangePubKeyEthMessagePart({ - pubKeyHash: changePubKey.newPkHash, - feeToken: tx.token, - fee: changePubKey.fee - }) - ); - processedTxs.push({ tx: changePubKey }); - break; - case 'ForcedExit': - messages.push(this.wallet.getForcedExitEthMessagePart(tx.tx)); - const forcedExit = { tx: await this.wallet.getForcedExit(tx.tx) }; - processedTxs.push(forcedExit); - break; - case 'MintNFT': - messages.push(this.wallet.getMintNFTEthMessagePart(tx.tx)); - const mintNft = { tx: await this.wallet.getMintNFT(tx.tx) }; - processedTxs.push(mintNft); - break; - case 'Swap': - messages.push(this.wallet.getSwapEthSignMessagePart(tx.tx)); - const swap = { - tx: await this.wallet.getSwap(tx.tx), - ethereumSignature: [ - null, - tx.tx.orders[0].ethSignature || null, - tx.tx.orders[1].ethSignature || null - ] - }; - processedTxs.push(swap); - break; - case 'WithdrawNFT': - messages.push(this.wallet.getWithdrawNFTEthMessagePart(tx.tx)); - const withdrawNft = { tx: await this.wallet.getWithdrawNFT(tx.tx) }; - processedTxs.push(withdrawNft); - break; - } - } - messages.push(`Nonce: ${batchNonce}`); - return { - txs: processedTxs, - message: messages.filter((part) => part.length != 0).join('\n') - }; + private async processTransactions(): Promise<{ txs: SignedTransaction[]; signature?: TxEthSignature }> { + return await this.wallet.processBatchBuilderTransactions(this.nonce, this.txs); } } diff --git a/sdk/zksync.js/src/index.ts b/sdk/zksync.js/src/index.ts index 41f12c6166..595de4ec80 100644 --- a/sdk/zksync.js/src/index.ts +++ b/sdk/zksync.js/src/index.ts @@ -1,4 +1,5 @@ export { Wallet, Transaction, ETHOperation, submitSignedTransaction, submitSignedTransactionsBatch } from './wallet'; +export { RemoteWallet } from './remote-wallet'; export { Provider, ETHProxy, getDefaultProvider } from './provider'; export { RestProvider, getDefaultRestProvider } from './rest-provider'; export { SyncProvider } from './provider-interface'; @@ -10,4 +11,5 @@ export * as wallet from './wallet'; export * as types from './types'; export * as utils from './utils'; export * as crypto from './crypto'; +export * as operations from './operations'; import './withdraw-helpers'; diff --git a/sdk/zksync.js/src/operations.ts b/sdk/zksync.js/src/operations.ts new file mode 100644 index 0000000000..07c6ffa4b6 --- /dev/null +++ b/sdk/zksync.js/src/operations.ts @@ -0,0 +1,154 @@ +import { BigNumber, ContractTransaction } from 'ethers'; +import { SyncProvider } from './provider-interface'; +import { PriorityOperationReceipt, SignedTransaction, TransactionReceipt, TxEthSignature } from './types'; +import { SYNC_MAIN_CONTRACT_INTERFACE } from './utils'; + +export class ZKSyncTxError extends Error { + constructor(message: string, public value: PriorityOperationReceipt | TransactionReceipt) { + super(message); + } +} + +export class ETHOperation { + state: 'Sent' | 'Mined' | 'Committed' | 'Verified' | 'Failed'; + error?: ZKSyncTxError; + priorityOpId?: BigNumber; + + constructor(public ethTx: ContractTransaction, public zkSyncProvider: SyncProvider) { + this.state = 'Sent'; + } + + async awaitEthereumTxCommit() { + if (this.state !== 'Sent') return; + + const txReceipt = await this.ethTx.wait(); + for (const log of txReceipt.logs) { + try { + const priorityQueueLog = 
SYNC_MAIN_CONTRACT_INTERFACE.parseLog(log); + if (priorityQueueLog && priorityQueueLog.args.serialId != null) { + this.priorityOpId = priorityQueueLog.args.serialId; + } + } catch {} + } + if (!this.priorityOpId) { + throw new Error('Failed to parse tx logs'); + } + + this.state = 'Mined'; + return txReceipt; + } + + async awaitReceipt(): Promise { + this.throwErrorIfFailedState(); + + await this.awaitEthereumTxCommit(); + if (this.state !== 'Mined') return; + + let query: number | string; + if (this.zkSyncProvider.providerType === 'RPC') { + query = this.priorityOpId.toNumber(); + } else { + query = this.ethTx.hash; + } + const receipt = await this.zkSyncProvider.notifyPriorityOp(query, 'COMMIT'); + + if (!receipt.executed) { + this.setErrorState(new ZKSyncTxError('Priority operation failed', receipt)); + this.throwErrorIfFailedState(); + } + + this.state = 'Committed'; + return receipt; + } + + async awaitVerifyReceipt(): Promise { + await this.awaitReceipt(); + if (this.state !== 'Committed') return; + + let query: number | string; + if (this.zkSyncProvider.providerType === 'RPC') { + query = this.priorityOpId.toNumber(); + } else { + query = this.ethTx.hash; + } + const receipt = await this.zkSyncProvider.notifyPriorityOp(query, 'VERIFY'); + + this.state = 'Verified'; + + return receipt; + } + + private setErrorState(error: ZKSyncTxError) { + this.state = 'Failed'; + this.error = error; + } + + private throwErrorIfFailedState() { + if (this.state === 'Failed') throw this.error; + } +} + +export class Transaction { + state: 'Sent' | 'Committed' | 'Verified' | 'Failed'; + error?: ZKSyncTxError; + + constructor(public txData, public txHash: string, public sidechainProvider: SyncProvider) { + this.state = 'Sent'; + } + + async awaitReceipt(): Promise { + this.throwErrorIfFailedState(); + + if (this.state !== 'Sent') return; + + const receipt = await this.sidechainProvider.notifyTransaction(this.txHash, 'COMMIT'); + + if (!receipt.success) { + this.setErrorState(new ZKSyncTxError(`zkSync transaction failed: ${receipt.failReason}`, receipt)); + this.throwErrorIfFailedState(); + } + + this.state = 'Committed'; + return receipt; + } + + async awaitVerifyReceipt(): Promise { + await this.awaitReceipt(); + const receipt = await this.sidechainProvider.notifyTransaction(this.txHash, 'VERIFY'); + + this.state = 'Verified'; + return receipt; + } + + private setErrorState(error: ZKSyncTxError) { + this.state = 'Failed'; + this.error = error; + } + + private throwErrorIfFailedState() { + if (this.state === 'Failed') throw this.error; + } +} + +export async function submitSignedTransaction( + signedTx: SignedTransaction, + provider: SyncProvider, + fastProcessing?: boolean +): Promise { + const transactionHash = await provider.submitTx(signedTx.tx, signedTx.ethereumSignature, fastProcessing); + return new Transaction(signedTx, transactionHash, provider); +} + +export async function submitSignedTransactionsBatch( + provider: SyncProvider, + signedTxs: SignedTransaction[], + ethSignatures?: TxEthSignature[] +): Promise { + const transactionHashes = await provider.submitTxsBatch( + signedTxs.map((tx) => { + return { tx: tx.tx, signature: tx.ethereumSignature }; + }), + ethSignatures + ); + return transactionHashes.map((txHash, idx) => new Transaction(signedTxs[idx], txHash, provider)); +} diff --git a/sdk/zksync.js/src/provider.ts b/sdk/zksync.js/src/provider.ts index bab39f8c19..649bdc2884 100644 --- a/sdk/zksync.js/src/provider.ts +++ b/sdk/zksync.js/src/provider.ts @@ -306,7 +306,7 @@ export class 
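An editorial sketch of the two-stage confirmation exposed by the handles defined above: `awaitReceipt` resolves once the transaction is committed on zkSync (and throws `ZKSyncTxError` on failure), while `awaitVerifyReceipt` waits for the block's validity proof. Where the `SignedTransaction` comes from (one of the wallet's signXXX methods) is assumed.

import { submitSignedTransaction, SyncProvider, types } from 'zksync';

async function sendAndTrack(provider: SyncProvider, signed: types.SignedTransaction) {
    const tx = await submitSignedTransaction(signed, provider);

    const commitReceipt = await tx.awaitReceipt(); // 'Sent' -> 'Committed'; throws on failure
    console.log('committed:', commitReceipt.success);

    const verifyReceipt = await tx.awaitVerifyReceipt(); // resolves once the block is verified on L1
    console.log('verified block:', verifyReceipt.block?.blockNumber);
}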
Provider extends SyncProvider { return await this.transport.request('get_nft_id_by_tx_hash', [txHash]); } - async disconnect() { + override async disconnect() { return await this.transport.disconnect(); } } diff --git a/sdk/zksync.js/src/remote-wallet.ts b/sdk/zksync.js/src/remote-wallet.ts new file mode 100644 index 0000000000..03a7c2ffe0 --- /dev/null +++ b/sdk/zksync.js/src/remote-wallet.ts @@ -0,0 +1,492 @@ +import { BigNumber, BigNumberish, ethers } from 'ethers'; +import { EthMessageSigner } from './eth-message-signer'; +import { SyncProvider } from './provider-interface'; +import { BatchBuilderInternalTx } from './batch-builder'; +import { + Address, + ChangePubkeyTypes, + EthSignerType, + NFT, + Nonce, + Order, + PubKeyHash, + SignedTransaction, + TokenLike, + TxEthSignature, + TokenRatio, + WeiRatio +} from './types'; +import { Transaction, submitSignedTransaction, submitSignedTransactionsBatch } from './operations'; +import { AbstractWallet } from './abstract-wallet'; + +export { Transaction, ETHOperation, submitSignedTransaction, submitSignedTransactionsBatch } from './operations'; + +export class RemoteWallet extends AbstractWallet { + private web3Signer: ethers.Signer; + + protected constructor( + private web3Provider: ethers.providers.Web3Provider, + private _ethMessageSigner: EthMessageSigner, + cachedAddress: Address, + accountId?: number + ) { + super(cachedAddress, accountId); + this.web3Signer = web3Provider.getSigner(); + } + + // ************ + // Constructors + // + + static async fromEthSigner( + web3Provider: ethers.providers.Web3Provider, + provider: SyncProvider, + accountId?: number + ): Promise { + // Since this wallet implementation requires the signer to support custom RPC method, + // we can assume that eth signer type is a constant to avoid requesting a signature each time + // user connects. + const ethSignerType: EthSignerType = { + verificationMethod: 'ERC-1271', + isSignedMsgPrefixed: true + }; + + const ethMessageSigner = new EthMessageSigner(web3Provider.getSigner(), ethSignerType); + const wallet = new RemoteWallet( + web3Provider, + ethMessageSigner, + await web3Provider.getSigner().getAddress(), + accountId + ); + wallet.connect(provider); + await wallet.verifyNetworks(); + return wallet; + } + + // **************** + // Abstract getters + // + + override ethSigner(): ethers.Signer { + return this.web3Signer; + } + + override ethMessageSigner(): EthMessageSigner { + return this._ethMessageSigner; + } + + override syncSignerConnected(): boolean { + // Sync signer is the Eth signer, which is always connected. + return true; + } + + override async syncSignerPubKeyHash(): Promise { + return await this.callExtSignerPubKeyHash(); + } + + // ********************* + // Batch builder methods + // + + override async processBatchBuilderTransactions( + startNonce: Nonce, + txs: BatchBuilderInternalTx[] + ): Promise<{ txs: SignedTransaction[]; signature?: TxEthSignature }> { + let nonce: number = await this.getNonce(startNonce); + // Collect transaction bodies and set nonces in it. + const txsToSign = txs.map((tx) => { + tx.tx.nonce = nonce; + nonce += 1; + return { type: tx.type, ...tx.tx }; + }); + const signedTransactions = await this.callExtSignZkSyncBatch(txsToSign); + // Each transaction will have its own Ethereum signature, if it's required. + // There will be no umbrella signature for the whole batch. 
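To show where the per-transaction signatures described above come from in practice, a minimal usage sketch of `RemoteWallet` with an injected EIP-1193 provider that implements the `zkSync_signBatch` extension. The injected provider, the 'ropsten' network, the recipient, and the amount are assumptions of the example.

import { ethers, utils } from 'ethers';
import { RemoteWallet, getDefaultProvider } from 'zksync';

async function transferViaRemoteWallet(injected: ethers.providers.ExternalProvider, to: string) {
    const web3Provider = new ethers.providers.Web3Provider(injected);
    const syncProvider = await getDefaultProvider('ropsten');

    // The remote signer is expected to answer zkSync_signBatch / zkSync_signerPubKeyHash.
    const wallet = await RemoteWallet.fromEthSigner(web3Provider, syncProvider);

    const fee = (await syncProvider.getTransactionFee('Transfer', to, 'ETH')).totalFee;
    const tx = await wallet.syncTransfer({ to, token: 'ETH', amount: utils.parseEther('0.05'), fee });
    await tx.awaitReceipt();
}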
+ return { txs: signedTransactions }; + } + + // ************** + // L2 operations + // + + override async signSyncTransfer(transfer: { + to: Address; + token: TokenLike; + amount: BigNumberish; + fee: BigNumberish; + nonce: number; + validFrom?: number; + validUntil?: number; + }): Promise { + const signed = await this.callExtSignZkSyncBatch([{ type: 'Transfer', ...transfer }]); + return signed[0]; + } + + override async syncTransfer(transfer: { + to: Address; + token: TokenLike; + amount: BigNumberish; + fee?: BigNumberish; + nonce?: Nonce; + validFrom?: number; + validUntil?: number; + }): Promise { + const signed = await this.signSyncTransfer(transfer as any); + return submitSignedTransaction(signed, this.provider); + } + + // ChangePubKey part + + override async signSetSigningKey(changePubKey: { + feeToken: TokenLike; + fee: BigNumberish; + nonce: number; + ethAuthType: ChangePubkeyTypes; + batchHash?: string; + validFrom?: number; + validUntil?: number; + }): Promise { + const signed = await this.callExtSignZkSyncBatch([{ type: 'ChangePubKey', ...changePubKey }]); + return signed[0]; + } + + override async setSigningKey(changePubKey: { + feeToken: TokenLike; + ethAuthType: ChangePubkeyTypes; + fee?: BigNumberish; + nonce?: Nonce; + validFrom?: number; + validUntil?: number; + }): Promise { + const signed = await this.signSetSigningKey(changePubKey as any); + return submitSignedTransaction(signed, this.provider); + } + + // Withdraw part + + override async signWithdrawFromSyncToEthereum(withdraw: { + ethAddress: string; + token: TokenLike; + amount: BigNumberish; + fee: BigNumberish; + nonce: number; + validFrom?: number; + validUntil?: number; + }): Promise { + const signed = await this.callExtSignZkSyncBatch([{ type: 'Withdraw', ...withdraw }]); + return signed[0]; + } + + override async withdrawFromSyncToEthereum(withdraw: { + ethAddress: string; + token: TokenLike; + amount: BigNumberish; + fee?: BigNumberish; + nonce?: Nonce; + fastProcessing?: boolean; + validFrom?: number; + validUntil?: number; + }): Promise { + const fastProcessing = withdraw.fastProcessing; + const signed = await this.signWithdrawFromSyncToEthereum(withdraw as any); + return submitSignedTransaction(signed, this.provider, fastProcessing); + } + + // Forced exit part + + override async signSyncForcedExit(forcedExit: { + target: Address; + token: TokenLike; + fee: BigNumberish; + nonce: number; + validFrom?: number; + validUntil?: number; + }): Promise { + const signed = await this.callExtSignZkSyncBatch([{ type: 'ForcedExit', ...forcedExit }]); + return signed[0]; + } + + override async syncForcedExit(forcedExit: { + target: Address; + token: TokenLike; + fee?: BigNumberish; + nonce?: Nonce; + validFrom?: number; + validUntil?: number; + }): Promise { + const signed = await this.signSyncForcedExit(forcedExit as any); + return submitSignedTransaction(signed, this.provider); + } + + // Swap part + + override async signOrder(order: { + tokenSell: TokenLike; + tokenBuy: TokenLike; + ratio: TokenRatio | WeiRatio; + amount: BigNumberish; + recipient?: Address; + nonce?: Nonce; + validFrom?: number; + validUntil?: number; + }): Promise { + return await this.callExtSignOrder({ type: 'Order', ...order }); + } + + override async signSyncSwap(swap: { + orders: [Order, Order]; + feeToken: number; + amounts: [BigNumberish, BigNumberish]; + nonce: number; + fee: BigNumberish; + }): Promise { + const signed = await this.callExtSignZkSyncBatch([{ type: 'Swap', ...swap }]); + return signed[0]; + } + + override async 
syncSwap(swap: { + orders: [Order, Order]; + feeToken: TokenLike; + amounts?: [BigNumberish, BigNumberish]; + nonce?: number; + fee?: BigNumberish; + }): Promise { + const signed = await this.signSyncSwap(swap as any); + return submitSignedTransaction(signed, this.provider); + } + + // Mint NFT part + + override async signMintNFT(mintNFT: { + recipient: string; + contentHash: string; + feeToken: TokenLike; + fee: BigNumberish; + nonce: number; + }): Promise { + const signed = await this.callExtSignZkSyncBatch([{ type: 'MintNFT', ...mintNFT }]); + return signed[0]; + } + + override async mintNFT(mintNFT: { + recipient: Address; + contentHash: ethers.BytesLike; + feeToken: TokenLike; + fee?: BigNumberish; + nonce?: Nonce; + }): Promise { + const signed = await this.signMintNFT(mintNFT as any); + return submitSignedTransaction(signed, this.provider); + } + + // Withdraw NFT part + + override async signWithdrawNFT(withdrawNFT: { + to: string; + token: number; + feeToken: TokenLike; + fee: BigNumberish; + nonce: number; + validFrom?: number; + validUntil?: number; + }): Promise { + const signed = await this.callExtSignZkSyncBatch([{ type: 'WithdrawNFT', ...withdrawNFT }]); + return signed[0]; + } + + override async withdrawNFT(withdrawNFT: { + to: string; + token: number; + feeToken: TokenLike; + fee?: BigNumberish; + nonce?: Nonce; + fastProcessing?: boolean; + validFrom?: number; + validUntil?: number; + }): Promise { + const fastProcessing = withdrawNFT.fastProcessing; + const signed = await this.signWithdrawNFT(withdrawNFT as any); + return submitSignedTransaction(signed, this.provider, fastProcessing); + } + + // Transfer NFT part + + override async syncTransferNFT(transfer: { + to: Address; + token: NFT; + feeToken: TokenLike; + fee?: BigNumberish; + nonce?: Nonce; + validFrom?: number; + validUntil?: number; + }): Promise { + transfer.nonce = transfer.nonce != null ? await this.getNonce(transfer.nonce) : await this.getNonce(); + + let fee: BigNumberish; + if (transfer.fee == null) { + fee = await this.provider.getTransactionsBatchFee( + ['Transfer', 'Transfer'], + [transfer.to, this.address()], + transfer.feeToken + ); + } else { + fee = transfer.fee; + } + + const txNFT = { + to: transfer.to, + token: transfer.token.id, + amount: 1, + fee: 0 + }; + const txFee = { + to: this.address(), + token: transfer.feeToken, + amount: 0, + fee + }; + + return await this.syncMultiTransfer([txNFT, txFee]); + } + + // Multi-transfer part + + // Note: this method signature requires to specify fee in each transaction. + // For details, see the comment on this method in `AbstractWallet` class. + override async syncMultiTransfer( + _transfers: { + to: Address; + token: TokenLike; + amount: BigNumberish; + fee: BigNumberish; + nonce?: Nonce; + validFrom?: number; + validUntil?: number; + }[] + ): Promise { + const transfers = _transfers.map((transfer) => { + return { + type: 'Transfer', + ...transfer + }; + }); + const signed = await this.callExtSignZkSyncBatch(transfers); + return submitSignedTransactionsBatch(this.provider, signed); + } + + // **************** + // Internal methods + // + + /** + * + * Makes all fields that represent amount to be of `string` type + * and all fields that represent tokens to be token ids i.e. of `number` type. 
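A standalone, hedged illustration of that normalization, where `tokenIdOf` stands in for `provider.tokenSet.resolveTokenId` and the 'DAI' → 1 mapping is made up:

import { BigNumber } from 'ethers';

const tokenIdOf = (token: string | number) => (typeof token === 'number' ? token : 1); // pretend 'DAI' -> 1

const raw = { type: 'Transfer', token: 'DAI', amount: BigNumber.from('1000000000000000000'), fee: 0 };
const prepared = {
    ...raw,
    token: tokenIdOf(raw.token),                   // token symbol -> numeric L2 token id
    amount: BigNumber.from(raw.amount).toString(), // amounts serialized as decimal strings
    fee: BigNumber.from(raw.fee).toString()
};
// prepared: { type: 'Transfer', token: 1, amount: '1000000000000000000', fee: '0' }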
+ * + * @param txs A list of transactions + * + * @protected A list of prepared transactions + */ + protected prepareTxsBeforeSending(txs: any[]): any[] { + const amountFields = ['amount', 'fee']; + const tokenFields = ['token', 'feeToken', 'tokenSell', 'tokenBuy']; + return txs.map((tx) => { + for (const field of amountFields) { + if (field in tx) { + tx[field] = BigNumber.from(tx[field]).toString(); + } + } + for (const field of tokenFields) { + if (field in tx) { + tx[field] = this.provider.tokenSet.resolveTokenId(tx[field]); + } + } + if ('amounts' in tx) { + tx.amounts = [BigNumber.from(tx.amounts[0]).toString(), BigNumber.from(tx.amounts[1]).toString()]; + } + return tx; + }); + } + + /** + * Performs an RPC call to the custom `zkSync_signBatch` method. + * This method is specified here: https://github.com/argentlabs/argent-contracts-l2/discussions/4 + * + * Basically, it's an addition to the WalletConnect server that accepts intentionally incomplete + * transactions (e.g. with no account IDs resolved), and returns transactions with both L1 and L2 + * signatures. + * + * @param txs A list of transactions to be signed. + * + * @returns A list of singed transactions. + */ + protected async callExtSignZkSyncBatch(txs: any[]): Promise { + try { + const preparedTxs = this.prepareTxsBeforeSending(txs); + // Response must be an array of signed transactions. + // Transactions are flattened (ethereum signatures are on the same level as L2 signatures), + // so we need to "unflat" each one. + const response: any[] = await this.web3Provider.send('zkSync_signBatch', [preparedTxs]); + + const transactions = response.map((tx) => { + const ethereumSignature = tx['ethereumSignature']; + // Remove the L1 signature from the transaction data. + delete tx['ethereumSignature']; + return { + tx, + ethereumSignature + }; + }); + + return transactions; + } catch (e) { + console.error(`Received an error performing 'zkSync_signBatch' request: ${e.toString()}`); + throw new Error('Wallet server returned a malformed response to the sign batch request'); + } + } + + /** + * Performs an RPC call to the custom `zkSync_signBatch` method. + * + * @param txs An order data to be signed. + * + * @returns The completed and signed offer. + */ + protected async callExtSignOrder(order: any): Promise { + try { + const preparedOrder = this.prepareTxsBeforeSending([order]); + // For now, we assume that the same method will be used for both signing transactions and orders. + const signedOrder: any = (await this.web3Provider.send('zkSync_signBatch', preparedOrder))[0]; + + // Sanity check + if (!signedOrder['signature']) { + throw new Error('Wallet server returned a malformed response to the sign order request'); + } + + return signedOrder as Order; + } catch (e) { + // TODO: Catching general error is a bad idea, as a lot of things can throw an exception. + console.error(`Received an error performing 'zkSync_signOrder' request: ${e.toString()}`); + throw new Error('Wallet server returned a malformed response to the sign order request'); + } + } + + /** + * Performs an RPC call to the custom `zkSync_signerPubKeyHash` method. 
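A hedged sketch of the response shapes these two custom RPC calls are handled against in the code above; only the fields the handlers actually read are spelled out, everything else is left open, and the example hash is illustrative.

// zkSync_signBatch: one flattened entry per transaction; callExtSignZkSyncBatch splits the
// optional L1 signature off into { tx, ethereumSignature }.
type SignBatchResponseEntry = {
    type: 'Transfer' | 'Withdraw' | 'ChangePubKey' | 'ForcedExit' | 'MintNFT' | 'WithdrawNFT' | 'Swap';
    ethereumSignature?: { type: 'EthereumSignature' | 'EIP1271Signature'; signature: string };
    [field: string]: unknown; // remaining L2 transaction body and its zkSync signature
};

// zkSync_signerPubKeyHash: a single object carrying the 'sync:'-prefixed pubkey hash,
// e.g. { pubKeyHash: 'sync:0123456789abcdef0123456789abcdef01234567' }.
type SignerPubKeyHashResponse = {
    pubKeyHash: string;
};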
+ * + * This method should return a public key hash associated with the wallet + */ + protected async callExtSignerPubKeyHash(): Promise { + try { + const response = await this.web3Provider.send('zkSync_signerPubKeyHash', null); + if (!response['pubKeyHash']) { + throw new Error('Wallet server returned a malformed response to the PubKeyHash request'); + } + return response['pubKeyHash']; + } catch (e) { + // TODO: Catching general error is a bad idea, as a lot of things can throw an exception. + console.error(`Received an error performing 'zkSync_signerPubKeyHash' request: ${e.toString()}`); + throw new Error('Wallet server returned a malformed response to the PubKeyHash request'); + } + } +} diff --git a/sdk/zksync.js/src/transport.ts b/sdk/zksync.js/src/transport.ts index 7283e8a658..641d379b44 100644 --- a/sdk/zksync.js/src/transport.ts +++ b/sdk/zksync.js/src/transport.ts @@ -106,11 +106,16 @@ export class WSTransport extends AbstractJSONRPCTransport { return transport; } - subscriptionsSupported(): boolean { + override subscriptionsSupported(): boolean { return true; } - async subscribe(subMethod: string, subParams, unsubMethod: string, cb: (data: any) => void): Promise { + override async subscribe( + subMethod: string, + subParams, + unsubMethod: string, + cb: (data: any) => void + ): Promise { const req = { jsonrpc: '2.0', method: subMethod, params: subParams }; const sub = await this.ws.sendRequest(req); diff --git a/sdk/zksync.js/src/types.ts b/sdk/zksync.js/src/types.ts index 2cbbcab972..8bc2d0a8bd 100644 --- a/sdk/zksync.js/src/types.ts +++ b/sdk/zksync.js/src/types.ts @@ -19,22 +19,22 @@ export type Nonce = number | 'committed'; export type Network = 'localhost' | 'rinkeby' | 'ropsten' | 'mainnet' | 'rinkeby-beta' | 'ropsten-beta'; const MAINNET_NETWORK_CHAIN_ID = 1; -const RINKEBY_NETWORK_CHAIN_ID = 3; -const ROPSTEN_NETWROK_CHAIN_ID = 4; -const LOCALHOST_NETWROK_CHAIN_ID = 9; +const ROPSTEN_NETWORK_CHAIN_ID = 3; +const RINKEBY_NETWORK_CHAIN_ID = 4; +const LOCALHOST_NETWORK_CHAIN_ID = 9; export function l1ChainId(network?: Network): number { if (network === 'rinkeby' || network === 'rinkeby-beta') { return RINKEBY_NETWORK_CHAIN_ID; } if (network === 'ropsten' || network === 'ropsten-beta') { - return ROPSTEN_NETWROK_CHAIN_ID; + return ROPSTEN_NETWORK_CHAIN_ID; } if (network === 'mainnet') { return MAINNET_NETWORK_CHAIN_ID; } if (network === 'localhost') { - return LOCALHOST_NETWROK_CHAIN_ID; + return LOCALHOST_NETWORK_CHAIN_ID; } throw new Error('Unsupported netwrok'); } diff --git a/sdk/zksync.js/src/wallet.ts b/sdk/zksync.js/src/wallet.ts index 794f2530dd..31cea60c94 100644 --- a/sdk/zksync.js/src/wallet.ts +++ b/sdk/zksync.js/src/wallet.ts @@ -1,11 +1,9 @@ -import { BigNumber, BigNumberish, Contract, ContractTransaction, ethers } from 'ethers'; -import { ErrorCode } from '@ethersproject/logger'; +import { BigNumber, BigNumberish, ethers } from 'ethers'; import { EthMessageSigner } from './eth-message-signer'; import { SyncProvider } from './provider-interface'; import { Create2WalletSigner, Signer, unableToSign } from './signer'; -import { BatchBuilder } from './batch-builder'; +import { BatchBuilderInternalTx } from './batch-builder'; import { - AccountState, Address, ChangePubKey, ChangePubKeyCREATE2, @@ -19,76 +17,39 @@ import { NFT, Nonce, Order, - PriorityOperationReceipt, PubKeyHash, Ratio, SignedTransaction, Swap, TokenLike, - TransactionReceipt, Transfer, TxEthSignature, Withdraw, WithdrawNFT, TokenRatio, - WeiRatio, - Toggle2FARequest, - l1ChainId + WeiRatio } from 
'./types'; -import { - ERC20_APPROVE_TRESHOLD, - ERC20_DEPOSIT_GAS_LIMIT, - ERC20_RECOMMENDED_DEPOSIT_GAS_LIMIT, - ETH_RECOMMENDED_DEPOSIT_GAS_LIMIT, - getChangePubkeyLegacyMessage, - getChangePubkeyMessage, - getEthereumBalance, - getSignedBytesFromMessage, - IERC20_INTERFACE, - isTokenETH, - MAX_ERC20_APPROVE_AMOUNT, - MAX_TIMESTAMP, - signMessagePersonalAPI, - isNFT, - SYNC_MAIN_CONTRACT_INTERFACE, - getToggle2FAMessage -} from './utils'; - -const EthersErrorCode = ErrorCode; - -export class ZKSyncTxError extends Error { - constructor(message: string, public value: PriorityOperationReceipt | TransactionReceipt) { - super(message); - } -} +import { getChangePubkeyLegacyMessage, getChangePubkeyMessage, MAX_TIMESTAMP, isNFT } from './utils'; +import { Transaction, submitSignedTransaction } from './operations'; +import { AbstractWallet } from './abstract-wallet'; -export class Wallet { - public provider: SyncProvider; +export { Transaction, ETHOperation, submitSignedTransaction, submitSignedTransactionsBatch } from './operations'; - private constructor( - public ethSigner: ethers.Signer, - public ethMessageSigner: EthMessageSigner, - public cachedAddress: Address, +export class Wallet extends AbstractWallet { + protected constructor( + public _ethSigner: ethers.Signer, + private _ethMessageSigner: EthMessageSigner, + cachedAddress: Address, public signer?: Signer, - public accountId?: number, + accountId?: number, public ethSignerType?: EthSignerType - ) {} - - connect(provider: SyncProvider) { - this.provider = provider; - return this; + ) { + super(cachedAddress, accountId); } - async verifyNetworks() { - if (this.provider.network != undefined && this.ethSigner.provider != undefined) { - const ethNetwork = await this.ethSigner.provider.getNetwork(); - if (l1ChainId(this.provider.network) !== ethNetwork.chainId) { - throw new Error( - `ETH network ${ethNetwork.name} and ZkSync network ${this.provider.network} don't match` - ); - } - } - } + // ************ + // Constructors + // static async fromEthSigner( ethWallet: ethers.Signer, @@ -165,58 +126,114 @@ export class Wallet { }); } - async getEthMessageSignature(message: ethers.utils.BytesLike): Promise { - if (this.ethSignerType == null) { - throw new Error('ethSignerType is unknown'); - } - - const signedBytes = getSignedBytesFromMessage(message, !this.ethSignerType.isSignedMsgPrefixed); + // **************** + // Abstract getters + // - const signature = await signMessagePersonalAPI(this.ethSigner, signedBytes); + override ethSigner(): ethers.Signer { + return this._ethSigner; + } - return { - type: this.ethSignerType.verificationMethod === 'ECDSA' ? 
'EthereumSignature' : 'EIP1271Signature', - signature - }; + override ethMessageSigner(): EthMessageSigner { + return this._ethMessageSigner; } - batchBuilder(nonce?: Nonce): BatchBuilder { - return BatchBuilder.fromWallet(this, nonce); + override syncSignerConnected(): boolean { + return this.signer !== null; } - async getTransfer(transfer: { - to: Address; - token: TokenLike; - amount: BigNumberish; - fee: BigNumberish; - nonce: number; - validFrom: number; - validUntil: number; - }): Promise { - if (!this.signer) { - throw new Error('ZKSync signer is required for sending zksync transactions.'); - } + override async syncSignerPubKeyHash(): Promise { + return await this.signer.pubKeyHash(); + } - await this.setRequiredAccountIdFromServer('Transfer funds'); + // ********************* + // Batch builder methods + // - const tokenId = this.provider.tokenSet.resolveTokenId(transfer.token); + override async processBatchBuilderTransactions( + startNonce: Nonce, + txs: BatchBuilderInternalTx[] + ): Promise<{ txs: SignedTransaction[]; signature?: TxEthSignature }> { + const processedTxs: SignedTransaction[] = []; + let messages: string[] = []; + let nonce: number = await this.getNonce(startNonce); + const batchNonce = nonce; + for (const tx of txs) { + tx.tx.nonce = nonce++; + switch (tx.type) { + case 'Withdraw': + messages.push(this.getWithdrawEthMessagePart(tx.tx)); + const withdraw = { tx: await this.getWithdrawFromSyncToEthereum(tx.tx) }; + processedTxs.push(withdraw); + break; + case 'Transfer': + messages.push(await this.getTransferEthMessagePart(tx.tx)); + const transfer = { tx: await this.getTransfer(tx.tx) }; + processedTxs.push(transfer); + break; + case 'ChangePubKey': + // ChangePubKey requires its own Ethereum signature, we either expect + // it to be signed already or do it here. + const changePubKey: ChangePubKey = tx.alreadySigned + ? 
tx.tx + : (await this.signSetSigningKey(tx.tx)).tx; + const currentPubKeyHash = await this.getCurrentPubKeyHash(); + if (currentPubKeyHash === changePubKey.newPkHash) { + throw new Error('Current signing key is already set'); + } + messages.push( + this.getChangePubKeyEthMessagePart({ + pubKeyHash: changePubKey.newPkHash, + feeToken: tx.token, + fee: changePubKey.fee + }) + ); + processedTxs.push({ tx: changePubKey }); + break; + case 'ForcedExit': + messages.push(this.getForcedExitEthMessagePart(tx.tx)); + const forcedExit = { tx: await this.getForcedExit(tx.tx) }; + processedTxs.push(forcedExit); + break; + case 'MintNFT': + messages.push(this.getMintNFTEthMessagePart(tx.tx)); + const mintNft = { tx: await this.getMintNFT(tx.tx) }; + processedTxs.push(mintNft); + break; + case 'Swap': + messages.push(this.getSwapEthSignMessagePart(tx.tx)); + const swap = { + tx: await this.getSwap(tx.tx), + ethereumSignature: [ + null, + tx.tx.orders[0].ethSignature || null, + tx.tx.orders[1].ethSignature || null + ] + }; + processedTxs.push(swap); + break; + case 'WithdrawNFT': + messages.push(this.getWithdrawNFTEthMessagePart(tx.tx)); + const withdrawNft = { tx: await this.getWithdrawNFT(tx.tx) }; + processedTxs.push(withdrawNft); + break; + } + } + messages.push(`Nonce: ${batchNonce}`); - const transactionData = { - accountId: this.accountId, - from: this.address(), - to: transfer.to, - tokenId, - amount: transfer.amount, - fee: transfer.fee, - nonce: transfer.nonce, - validFrom: transfer.validFrom, - validUntil: transfer.validUntil + const message = messages.filter((part) => part.length != 0).join('\n'); + const signature = await this.ethMessageSigner().getEthMessageSignature(message); + return { + txs: processedTxs, + signature }; - - return this.signer.signSyncTransfer(transactionData); } - async signSyncTransfer(transfer: { + // ************** + // L2 operations + // + + override async signSyncTransfer(transfer: { to: Address; token: TokenLike; amount: BigNumberish; @@ -236,9 +253,9 @@ export class Wallet { ? null : this.provider.tokenSet.formatToken(transfer.token, transfer.fee); const stringToken = this.provider.tokenSet.resolveTokenSymbol(transfer.token); - const ethereumSignature = unableToSign(this.ethSigner) + const ethereumSignature = unableToSign(this.ethSigner()) ? null - : await this.ethMessageSigner.ethSignTransfer({ + : await this.ethMessageSigner().ethSignTransfer({ stringAmount, stringFee, stringToken, @@ -252,53 +269,196 @@ export class Wallet { }; } - async signRegisterFactory(factoryAddress: Address): Promise<{ - signature: TxEthSignature; - accountId: number; - accountAddress: Address; - }> { - await this.setRequiredAccountIdFromServer('Sign register factory'); - const signature = await this.ethMessageSigner.ethSignRegisterFactoryMessage( - factoryAddress, - this.accountId, - this.address() - ); + override async syncTransfer(transfer: { + to: Address; + token: TokenLike; + amount: BigNumberish; + fee?: BigNumberish; + nonce?: Nonce; + validFrom?: number; + validUntil?: number; + }): Promise { + transfer.nonce = transfer.nonce != null ? 
await this.getNonce(transfer.nonce) : await this.getNonce(); + + if (transfer.fee == null) { + const fullFee = await this.provider.getTransactionFee('Transfer', transfer.to, transfer.token); + transfer.fee = fullFee.totalFee; + } + const signedTransferTransaction = await this.signSyncTransfer(transfer as any); + return submitSignedTransaction(signedTransferTransaction, this.provider); + } + + // ChangePubKey part + + override async signSetSigningKey(changePubKey: { + feeToken: TokenLike; + fee: BigNumberish; + nonce: number; + ethAuthType: ChangePubkeyTypes; + batchHash?: string; + validFrom?: number; + validUntil?: number; + }): Promise { + const newPubKeyHash = await this.signer.pubKeyHash(); + + let ethAuthData; + let ethSignature; + if (changePubKey.ethAuthType === 'Onchain') { + ethAuthData = { + type: 'Onchain' + }; + } else if (changePubKey.ethAuthType === 'ECDSA') { + await this.setRequiredAccountIdFromServer('ChangePubKey authorized by ECDSA.'); + const changePubKeyMessage = getChangePubkeyMessage( + newPubKeyHash, + changePubKey.nonce, + this.accountId, + changePubKey.batchHash + ); + const ethSignature = (await this.ethMessageSigner().getEthMessageSignature(changePubKeyMessage)).signature; + ethAuthData = { + type: 'ECDSA', + ethSignature, + batchHash: changePubKey.batchHash + }; + } else if (changePubKey.ethAuthType === 'CREATE2') { + const ethSigner = this.ethSigner(); + if (ethSigner instanceof Create2WalletSigner) { + const create2data = ethSigner.create2WalletData; + ethAuthData = { + type: 'CREATE2', + creatorAddress: create2data.creatorAddress, + saltArg: create2data.saltArg, + codeHash: create2data.codeHash + }; + } else { + throw new Error('CREATE2 wallet authentication is only available for CREATE2 wallets'); + } + } else if (changePubKey.ethAuthType === 'ECDSALegacyMessage') { + await this.setRequiredAccountIdFromServer('ChangePubKey authorized by ECDSALegacyMessage.'); + const changePubKeyMessage = getChangePubkeyLegacyMessage(newPubKeyHash, changePubKey.nonce, this.accountId); + ethSignature = (await this.ethMessageSigner().getEthMessageSignature(changePubKeyMessage)).signature; + } else { + throw new Error('Unsupported SetSigningKey type'); + } + + const changePubkeyTxUnsigned = Object.assign(changePubKey, { ethAuthData, ethSignature }); + changePubkeyTxUnsigned.validFrom = changePubKey.validFrom || 0; + changePubkeyTxUnsigned.validUntil = changePubKey.validUntil || MAX_TIMESTAMP; + const changePubKeyTx = await this.getChangePubKey(changePubkeyTxUnsigned as any); + return { - signature, - accountId: this.accountId, - accountAddress: this.address() + tx: changePubKeyTx }; } - async getForcedExit(forcedExit: { - target: Address; + override async setSigningKey(changePubKey: { + feeToken: TokenLike; + ethAuthType: ChangePubkeyTypes; + fee?: BigNumberish; + nonce?: Nonce; + validFrom?: number; + validUntil?: number; + }): Promise { + changePubKey.nonce = + changePubKey.nonce != null ? 
await this.getNonce(changePubKey.nonce) : await this.getNonce(); + + if (changePubKey.fee == null) { + changePubKey.fee = 0; + + if (changePubKey.ethAuthType === 'ECDSALegacyMessage') { + const feeType = { + ChangePubKey: { + onchainPubkeyAuth: false + } + }; + const fullFee = await this.provider.getTransactionFee(feeType, this.address(), changePubKey.feeToken); + changePubKey.fee = fullFee.totalFee; + } else { + const feeType = { + ChangePubKey: changePubKey.ethAuthType + }; + const fullFee = await this.provider.getTransactionFee(feeType, this.address(), changePubKey.feeToken); + changePubKey.fee = fullFee.totalFee; + } + } + + const txData = await this.signSetSigningKey(changePubKey as any); + + const currentPubKeyHash = await this.getCurrentPubKeyHash(); + if (currentPubKeyHash === (txData.tx as ChangePubKey).newPkHash) { + throw new Error('Current signing key is already set'); + } + + return submitSignedTransaction(txData, this.provider); + } + + // Withdraw part + + override async signWithdrawFromSyncToEthereum(withdraw: { + ethAddress: string; token: TokenLike; + amount: BigNumberish; fee: BigNumberish; nonce: number; validFrom?: number; validUntil?: number; - }): Promise { - if (!this.signer) { - throw new Error('ZKSync signer is required for sending zksync transactions.'); - } - await this.setRequiredAccountIdFromServer('perform a Forced Exit'); + }): Promise { + withdraw.validFrom = withdraw.validFrom || 0; + withdraw.validUntil = withdraw.validUntil || MAX_TIMESTAMP; + const signedWithdrawTransaction = await this.getWithdrawFromSyncToEthereum(withdraw as any); - const tokenId = this.provider.tokenSet.resolveTokenId(forcedExit.token); + const stringAmount = BigNumber.from(withdraw.amount).isZero() + ? null + : this.provider.tokenSet.formatToken(withdraw.token, withdraw.amount); + const stringFee = BigNumber.from(withdraw.fee).isZero() + ? null + : this.provider.tokenSet.formatToken(withdraw.token, withdraw.fee); + const stringToken = this.provider.tokenSet.resolveTokenSymbol(withdraw.token); + const ethereumSignature = unableToSign(this.ethSigner()) + ? null + : await this.ethMessageSigner().ethSignWithdraw({ + stringAmount, + stringFee, + stringToken, + ethAddress: withdraw.ethAddress, + nonce: withdraw.nonce, + accountId: this.accountId + }); - const transactionData = { - initiatorAccountId: this.accountId, - target: forcedExit.target, - tokenId, - fee: forcedExit.fee, - nonce: forcedExit.nonce, - validFrom: forcedExit.validFrom || 0, - validUntil: forcedExit.validUntil || MAX_TIMESTAMP + return { + tx: signedWithdrawTransaction, + ethereumSignature }; + } - return await this.signer.signSyncForcedExit(transactionData); + override async withdrawFromSyncToEthereum(withdraw: { + ethAddress: string; + token: TokenLike; + amount: BigNumberish; + fee?: BigNumberish; + nonce?: Nonce; + fastProcessing?: boolean; + validFrom?: number; + validUntil?: number; + }): Promise { + withdraw.nonce = withdraw.nonce != null ? await this.getNonce(withdraw.nonce) : await this.getNonce(); + + if (withdraw.fee == null) { + const feeType = withdraw.fastProcessing === true ? 
'FastWithdraw' : 'Withdraw'; + + const fullFee = await this.provider.getTransactionFee(feeType, withdraw.ethAddress, withdraw.token); + withdraw.fee = fullFee.totalFee; + } + + const signedWithdrawTransaction = await this.signWithdrawFromSyncToEthereum(withdraw as any); + + return submitSignedTransaction(signedWithdrawTransaction, this.provider, withdraw.fastProcessing); } - async signSyncForcedExit(forcedExit: { + // Forced exit part + + override async signSyncForcedExit(forcedExit: { target: Address; token: TokenLike; fee: BigNumberish; @@ -312,9 +472,9 @@ export class Wallet { ? null : this.provider.tokenSet.formatToken(forcedExit.token, forcedExit.fee); const stringToken = this.provider.tokenSet.resolveTokenSymbol(forcedExit.token); - const ethereumSignature = unableToSign(this.ethSigner) + const ethereumSignature = unableToSign(this.ethSigner()) ? null - : await this.ethMessageSigner.ethSignForcedExit({ + : await this.ethMessageSigner().ethSignForcedExit({ stringToken, stringFee, target: forcedExit.target, @@ -327,7 +487,7 @@ export class Wallet { }; } - async syncForcedExit(forcedExit: { + override async syncForcedExit(forcedExit: { target: Address; token: TokenLike; fee?: BigNumberish; @@ -345,127 +505,9 @@ export class Wallet { return submitSignedTransaction(signedForcedExitTransaction, this.provider); } - // Note that in syncMultiTransfer, unlike in syncTransfer, - // users need to specify the fee for each transaction. - // The main reason is that multitransfer enables paying fees - // in multiple tokens, (as long as the total sum - // of fees is enough to cover up the fees for all of the transactions). - // That might bring an inattentive user in a trouble like the following: - // - // A user wants to submit transactions in multiple tokens and - // wants to pay the fees with only some of them. If the user forgets - // to set the fees' value to 0 for transactions with tokens - // he won't pay the fee with, then this user will overpay a lot. - // - // That's why we want the users to be explicit about fees in multitransfers. - async syncMultiTransfer( - transfers: { - to: Address; - token: TokenLike; - amount: BigNumberish; - fee: BigNumberish; - nonce?: Nonce; - validFrom?: number; - validUntil?: number; - }[] - ): Promise { - if (!this.signer) { - throw new Error('ZKSync signer is required for sending zksync transactions.'); - } - - if (transfers.length == 0) return []; - - await this.setRequiredAccountIdFromServer('Transfer funds'); - - let batch = []; - let messages: string[] = []; + // Swap part - let nextNonce = transfers[0].nonce != null ? await this.getNonce(transfers[0].nonce) : await this.getNonce(); - const batchNonce = nextNonce; - - for (let i = 0; i < transfers.length; i++) { - const transfer = transfers[i]; - const nonce = nextNonce; - nextNonce += 1; - - const tx: Transfer = await this.getTransfer({ - to: transfer.to, - token: transfer.token, - amount: transfer.amount, - fee: transfer.fee, - nonce, - validFrom: transfer.validFrom || 0, - validUntil: transfer.validUntil || MAX_TIMESTAMP - }); - const message = await this.getTransferEthMessagePart(transfer); - messages.push(message); - batch.push({ tx, signature: null }); - } - - messages.push(`Nonce: ${batchNonce}`); - const message = messages.filter((part) => part.length != 0).join('\n'); - const ethSignatures = unableToSign(this.ethSigner) - ? 
[] - : [await this.ethMessageSigner.getEthMessageSignature(message)]; - - const transactionHashes = await this.provider.submitTxsBatch(batch, ethSignatures); - return transactionHashes.map((txHash, idx) => new Transaction(batch[idx], txHash, this.provider)); - } - - async syncTransferNFT(transfer: { - to: Address; - token: NFT; - feeToken: TokenLike; - fee?: BigNumberish; - nonce?: Nonce; - validFrom?: number; - validUntil?: number; - }): Promise { - transfer.nonce = transfer.nonce != null ? await this.getNonce(transfer.nonce) : await this.getNonce(); - - let fee: BigNumberish; - if (transfer.fee == null) { - fee = await this.provider.getTransactionsBatchFee( - ['Transfer', 'Transfer'], - [transfer.to, this.address()], - transfer.feeToken - ); - } else { - fee = transfer.fee; - } - - const txNFT = { - to: transfer.to, - token: transfer.token.id, - amount: 1, - fee: 0 - }; - const txFee = { - to: this.address(), - token: transfer.feeToken, - amount: 0, - fee - }; - - return await this.syncMultiTransfer([txNFT, txFee]); - } - - async getLimitOrder(order: { - tokenSell: TokenLike; - tokenBuy: TokenLike; - ratio: TokenRatio | WeiRatio; - recipient?: Address; - nonce?: Nonce; - validFrom?: number; - validUntil?: number; - }): Promise { - return this.getOrder({ - ...order, - amount: 0 - }); - } - - async getOrder(order: { + override async signOrder(orderData: { tokenSell: TokenLike; tokenBuy: TokenLike; ratio: TokenRatio | WeiRatio; @@ -475,54 +517,16 @@ export class Wallet { validFrom?: number; validUntil?: number; }): Promise { - if (!this.signer) { - throw new Error('zkSync signer is required for signing an order'); - } - await this.setRequiredAccountIdFromServer('Swap order'); - const nonce = order.nonce != null ? await this.getNonce(order.nonce) : await this.getNonce(); - const recipient = order.recipient || this.address(); - - let ratio: Ratio; - const sell = order.tokenSell; - const buy = order.tokenBuy; - - if (!order.ratio[sell] || !order.ratio[buy]) { - throw new Error(`Wrong tokens in the ratio object: should be ${sell} and ${buy}`); - } - - if (order.ratio.type == 'Wei') { - ratio = [order.ratio[sell], order.ratio[buy]]; - } else if (order.ratio.type == 'Token') { - ratio = [ - this.provider.tokenSet.parseToken(sell, order.ratio[sell].toString()), - this.provider.tokenSet.parseToken(buy, order.ratio[buy].toString()) - ]; - } - - const signedOrder = await this.signer.signSyncOrder({ - accountId: this.accountId, - recipient, - nonce, - amount: order.amount || BigNumber.from(0), - tokenSell: this.provider.tokenSet.resolveTokenId(order.tokenSell), - tokenBuy: this.provider.tokenSet.resolveTokenId(order.tokenBuy), - validFrom: order.validFrom || 0, - validUntil: order.validUntil || MAX_TIMESTAMP, - ratio - }); - - return this.signOrder(signedOrder); - } + const order = await this.getPartialOrder(orderData); - async signOrder(order: Order): Promise { const stringAmount = BigNumber.from(order.amount).isZero() ? null : this.provider.tokenSet.formatToken(order.tokenSell, order.amount); const stringTokenSell = await this.provider.getTokenSymbol(order.tokenSell); const stringTokenBuy = await this.provider.getTokenSymbol(order.tokenBuy); - const ethereumSignature = unableToSign(this.ethSigner) + const ethereumSignature = unableToSign(this.ethSigner()) ? 
null - : await this.ethMessageSigner.ethSignOrder({ + : await this.ethMessageSigner().ethSignOrder({ amount: stringAmount, tokenSell: stringTokenSell, tokenBuy: stringTokenBuy, @@ -534,28 +538,7 @@ export class Wallet { return order; } - async getSwap(swap: { - orders: [Order, Order]; - feeToken: number; - amounts: [BigNumberish, BigNumberish]; - nonce: number; - fee: BigNumberish; - }): Promise { - if (!this.signer) { - throw new Error('zkSync signer is required for swapping funds'); - } - await this.setRequiredAccountIdFromServer('Swap submission'); - const feeToken = this.provider.tokenSet.resolveTokenId(swap.feeToken); - - return this.signer.signSyncSwap({ - ...swap, - submitterId: await this.getAccountId(), - submitterAddress: this.address(), - feeToken - }); - } - - async signSyncSwap(swap: { + override async signSyncSwap(swap: { orders: [Order, Order]; feeToken: number; amounts: [BigNumberish, BigNumberish]; @@ -567,9 +550,9 @@ export class Wallet { ? null : this.provider.tokenSet.formatToken(swap.feeToken, swap.fee); const stringToken = this.provider.tokenSet.resolveTokenSymbol(swap.feeToken); - const ethereumSignature = unableToSign(this.ethSigner) + const ethereumSignature = unableToSign(this.ethSigner()) ? null - : await this.ethMessageSigner.ethSignSwap({ + : await this.ethMessageSigner().ethSignSwap({ fee: stringFee, feeToken: stringToken, nonce: swap.nonce @@ -585,7 +568,7 @@ export class Wallet { }; } - async syncSwap(swap: { + override async syncSwap(swap: { orders: [Order, Order]; feeToken: TokenLike; amounts?: [BigNumberish, BigNumberish]; @@ -612,113 +595,9 @@ export class Wallet { return submitSignedTransaction(signedSwapTransaction, this.provider); } - async syncTransfer(transfer: { - to: Address; - token: TokenLike; - amount: BigNumberish; - fee?: BigNumberish; - nonce?: Nonce; - validFrom?: number; - validUntil?: number; - }): Promise { - transfer.nonce = transfer.nonce != null ? 
await this.getNonce(transfer.nonce) : await this.getNonce(); - - if (transfer.fee == null) { - const fullFee = await this.provider.getTransactionFee('Transfer', transfer.to, transfer.token); - transfer.fee = fullFee.totalFee; - } - const signedTransferTransaction = await this.signSyncTransfer(transfer as any); - return submitSignedTransaction(signedTransferTransaction, this.provider); - } - - async getMintNFT(mintNFT: { - recipient: string; - contentHash: string; - feeToken: TokenLike; - fee: BigNumberish; - nonce: number; - }): Promise { - if (!this.signer) { - throw new Error('ZKSync signer is required for sending zksync transactions.'); - } - await this.setRequiredAccountIdFromServer('MintNFT'); - - const feeTokenId = this.provider.tokenSet.resolveTokenId(mintNFT.feeToken); - const transactionData = { - creatorId: this.accountId, - creatorAddress: this.address(), - recipient: mintNFT.recipient, - contentHash: mintNFT.contentHash, - feeTokenId, - fee: mintNFT.fee, - nonce: mintNFT.nonce - }; - - return await this.signer.signMintNFT(transactionData); - } - - async getWithdrawNFT(withdrawNFT: { - to: string; - token: TokenLike; - feeToken: TokenLike; - fee: BigNumberish; - nonce: number; - validFrom: number; - validUntil: number; - }): Promise { - if (!this.signer) { - throw new Error('ZKSync signer is required for sending zksync transactions.'); - } - await this.setRequiredAccountIdFromServer('WithdrawNFT'); - - const tokenId = this.provider.tokenSet.resolveTokenId(withdrawNFT.token); - const feeTokenId = this.provider.tokenSet.resolveTokenId(withdrawNFT.feeToken); - const transactionData = { - accountId: this.accountId, - from: this.address(), - to: withdrawNFT.to, - tokenId, - feeTokenId, - fee: withdrawNFT.fee, - nonce: withdrawNFT.nonce, - validFrom: withdrawNFT.validFrom, - validUntil: withdrawNFT.validUntil - }; - - return await this.signer.signWithdrawNFT(transactionData); - } - - async getWithdrawFromSyncToEthereum(withdraw: { - ethAddress: string; - token: TokenLike; - amount: BigNumberish; - fee: BigNumberish; - nonce: number; - validFrom: number; - validUntil: number; - }): Promise { - if (!this.signer) { - throw new Error('ZKSync signer is required for sending zksync transactions.'); - } - await this.setRequiredAccountIdFromServer('Withdraw funds'); - - const tokenId = this.provider.tokenSet.resolveTokenId(withdraw.token); - const transactionData = { - accountId: this.accountId, - from: this.address(), - ethAddress: withdraw.ethAddress, - tokenId, - amount: withdraw.amount, - fee: withdraw.fee, - nonce: withdraw.nonce, - validFrom: withdraw.validFrom, - validUntil: withdraw.validUntil - }; - - return await this.signer.signSyncWithdraw(transactionData); - } + // Mint NFT part - async signMintNFT(mintNFT: { + override async signMintNFT(mintNFT: { recipient: string; contentHash: string; feeToken: TokenLike; @@ -731,9 +610,9 @@ export class Wallet { ? null : this.provider.tokenSet.formatToken(mintNFT.feeToken, mintNFT.fee); const stringFeeToken = this.provider.tokenSet.resolveTokenSymbol(mintNFT.feeToken); - const ethereumSignature = unableToSign(this.ethSigner) + const ethereumSignature = unableToSign(this.ethSigner()) ? 
null - : await this.ethMessageSigner.ethSignMintNFT({ + : await this.ethMessageSigner().ethSignMintNFT({ stringFeeToken, stringFee, recipient: mintNFT.recipient, @@ -747,7 +626,29 @@ export class Wallet { }; } - async signWithdrawNFT(withdrawNFT: { + override async mintNFT(mintNFT: { + recipient: Address; + contentHash: ethers.BytesLike; + feeToken: TokenLike; + fee?: BigNumberish; + nonce?: Nonce; + }): Promise { + mintNFT.nonce = mintNFT.nonce != null ? await this.getNonce(mintNFT.nonce) : await this.getNonce(); + mintNFT.contentHash = ethers.utils.hexlify(mintNFT.contentHash); + + if (mintNFT.fee == null) { + const fullFee = await this.provider.getTransactionFee('MintNFT', mintNFT.recipient, mintNFT.feeToken); + mintNFT.fee = fullFee.totalFee; + } + + const signedMintNFTTransaction = await this.signMintNFT(mintNFT as any); + + return submitSignedTransaction(signedMintNFTTransaction, this.provider, false); + } + + // Withdraw NFT part + + override async signWithdrawNFT(withdrawNFT: { to: string; token: number; feeToken: TokenLike; @@ -764,9 +665,9 @@ export class Wallet { ? null : this.provider.tokenSet.formatToken(withdrawNFT.feeToken, withdrawNFT.fee); const stringFeeToken = this.provider.tokenSet.resolveTokenSymbol(withdrawNFT.feeToken); - const ethereumSignature = unableToSign(this.ethSigner) + const ethereumSignature = unableToSign(this.ethSigner()) ? null - : await this.ethMessageSigner.ethSignWithdrawNFT({ + : await this.ethMessageSigner().ethSignWithdrawNFT({ token: withdrawNFT.token, to: withdrawNFT.to, stringFee, @@ -780,64 +681,7 @@ export class Wallet { }; } - async signWithdrawFromSyncToEthereum(withdraw: { - ethAddress: string; - token: TokenLike; - amount: BigNumberish; - fee: BigNumberish; - nonce: number; - validFrom?: number; - validUntil?: number; - }): Promise { - withdraw.validFrom = withdraw.validFrom || 0; - withdraw.validUntil = withdraw.validUntil || MAX_TIMESTAMP; - const signedWithdrawTransaction = await this.getWithdrawFromSyncToEthereum(withdraw as any); - - const stringAmount = BigNumber.from(withdraw.amount).isZero() - ? null - : this.provider.tokenSet.formatToken(withdraw.token, withdraw.amount); - const stringFee = BigNumber.from(withdraw.fee).isZero() - ? null - : this.provider.tokenSet.formatToken(withdraw.token, withdraw.fee); - const stringToken = this.provider.tokenSet.resolveTokenSymbol(withdraw.token); - const ethereumSignature = unableToSign(this.ethSigner) - ? null - : await this.ethMessageSigner.ethSignWithdraw({ - stringAmount, - stringFee, - stringToken, - ethAddress: withdraw.ethAddress, - nonce: withdraw.nonce, - accountId: this.accountId - }); - - return { - tx: signedWithdrawTransaction, - ethereumSignature - }; - } - - async mintNFT(mintNFT: { - recipient: Address; - contentHash: ethers.BytesLike; - feeToken: TokenLike; - fee?: BigNumberish; - nonce?: Nonce; - }): Promise { - mintNFT.nonce = mintNFT.nonce != null ? 
await this.getNonce(mintNFT.nonce) : await this.getNonce(); - mintNFT.contentHash = ethers.utils.hexlify(mintNFT.contentHash); - - if (mintNFT.fee == null) { - const fullFee = await this.provider.getTransactionFee('MintNFT', mintNFT.recipient, mintNFT.feeToken); - mintNFT.fee = fullFee.totalFee; - } - - const signedMintNFTTransaction = await this.signMintNFT(mintNFT as any); - - return submitSignedTransaction(signedMintNFTTransaction, this.provider, false); - } - - async withdrawNFT(withdrawNFT: { + override async withdrawNFT(withdrawNFT: { to: string; token: number; feeToken: TokenLike; @@ -864,49 +708,141 @@ export class Wallet { return submitSignedTransaction(signedWithdrawNFTTransaction, this.provider, withdrawNFT.fastProcessing); } - async withdrawFromSyncToEthereum(withdraw: { - ethAddress: string; - token: TokenLike; - amount: BigNumberish; + // Transfer NFT part + + override async syncTransferNFT(transfer: { + to: Address; + token: NFT; + feeToken: TokenLike; fee?: BigNumberish; nonce?: Nonce; - fastProcessing?: boolean; validFrom?: number; validUntil?: number; - }): Promise { - withdraw.nonce = withdraw.nonce != null ? await this.getNonce(withdraw.nonce) : await this.getNonce(); - - if (withdraw.fee == null) { - const feeType = withdraw.fastProcessing === true ? 'FastWithdraw' : 'Withdraw'; + }): Promise { + transfer.nonce = transfer.nonce != null ? await this.getNonce(transfer.nonce) : await this.getNonce(); - const fullFee = await this.provider.getTransactionFee(feeType, withdraw.ethAddress, withdraw.token); - withdraw.fee = fullFee.totalFee; + let fee: BigNumberish; + if (transfer.fee == null) { + fee = await this.provider.getTransactionsBatchFee( + ['Transfer', 'Transfer'], + [transfer.to, this.address()], + transfer.feeToken + ); + } else { + fee = transfer.fee; } - const signedWithdrawTransaction = await this.signWithdrawFromSyncToEthereum(withdraw as any); + const txNFT = { + to: transfer.to, + token: transfer.token.id, + amount: 1, + fee: 0 + }; + const txFee = { + to: this.address(), + token: transfer.feeToken, + amount: 0, + fee + }; - return submitSignedTransaction(signedWithdrawTransaction, this.provider, withdraw.fastProcessing); + return await this.syncMultiTransfer([txNFT, txFee]); } - async isCorrespondingSigningKeySet(): Promise { + // Multi-transfer part + + // Note: this method signature requires to specify fee in each transaction. + // For details, see the comment on this method in `AbstractWallet` class. + override async syncMultiTransfer( + transfers: { + to: Address; + token: TokenLike; + amount: BigNumberish; + fee: BigNumberish; + nonce?: Nonce; + validFrom?: number; + validUntil?: number; + }[] + ): Promise { if (!this.signer) { - throw new Error('ZKSync signer is required for current pubkey calculation.'); + throw new Error('ZKSync signer is required for sending zksync transactions.'); } - const currentPubKeyHash = await this.getCurrentPubKeyHash(); - const signerPubKeyHash = await this.signer.pubKeyHash(); - return currentPubKeyHash === signerPubKeyHash; + + if (transfers.length == 0) return []; + + await this.setRequiredAccountIdFromServer('Transfer funds'); + + let batch = []; + let messages: string[] = []; + + let nextNonce = transfers[0].nonce != null ? 
await this.getNonce(transfers[0].nonce) : await this.getNonce(); + const batchNonce = nextNonce; + + for (let i = 0; i < transfers.length; i++) { + const transfer = transfers[i]; + const nonce = nextNonce; + nextNonce += 1; + + const tx: Transfer = await this.getTransfer({ + to: transfer.to, + token: transfer.token, + amount: transfer.amount, + fee: transfer.fee, + nonce, + validFrom: transfer.validFrom || 0, + validUntil: transfer.validUntil || MAX_TIMESTAMP + }); + const message = await this.getTransferEthMessagePart(transfer); + messages.push(message); + batch.push({ tx, signature: null }); + } + + messages.push(`Nonce: ${batchNonce}`); + const message = messages.filter((part) => part.length != 0).join('\n'); + const ethSignatures = unableToSign(this.ethSigner()) + ? [] + : [await this.ethMessageSigner().getEthMessageSignature(message)]; + + const transactionHashes = await this.provider.submitTxsBatch(batch, ethSignatures); + return transactionHashes.map((txHash, idx) => new Transaction(batch[idx], txHash, this.provider)); } - async isSigningKeySet(): Promise { + // **************** + // Internal methods + // + + protected async getTransfer(transfer: { + to: Address; + token: TokenLike; + amount: BigNumberish; + fee: BigNumberish; + nonce: number; + validFrom: number; + validUntil: number; + }): Promise { if (!this.signer) { - throw new Error('ZKSync signer is required for current pubkey calculation.'); + throw new Error('ZKSync signer is required for sending zksync transactions.'); } - const currentPubKeyHash = await this.getCurrentPubKeyHash(); - const zeroPubKeyHash = 'sync:0000000000000000000000000000000000000000'; - return zeroPubKeyHash !== currentPubKeyHash; + + await this.setRequiredAccountIdFromServer('Transfer funds'); + + const tokenId = this.provider.tokenSet.resolveTokenId(transfer.token); + + const transactionData = { + accountId: this.accountId, + from: this.address(), + to: transfer.to, + tokenId, + amount: transfer.amount, + fee: transfer.fee, + nonce: transfer.nonce, + validFrom: transfer.validFrom, + validUntil: transfer.validUntil + }; + + return this.signer.signSyncTransfer(transactionData); } - async getChangePubKey(changePubKey: { + protected async getChangePubKey(changePubKey: { feeToken: TokenLike; fee: BigNumberish; nonce: number; @@ -940,127 +876,142 @@ export class Wallet { return changePubKeyTx; } - async getToggle2FA(enable: boolean, pubKeyHash?: PubKeyHash): Promise { - const accountId = await this.getAccountId(); - const timestamp = new Date().getTime(); - const signature = await this.getEthMessageSignature(getToggle2FAMessage(enable, timestamp, pubKeyHash)); + protected async getWithdrawFromSyncToEthereum(withdraw: { + ethAddress: string; + token: TokenLike; + amount: BigNumberish; + fee: BigNumberish; + nonce: number; + validFrom: number; + validUntil: number; + }): Promise { + if (!this.signer) { + throw new Error('ZKSync signer is required for sending zksync transactions.'); + } + await this.setRequiredAccountIdFromServer('Withdraw funds'); - return { - accountId, - signature, - timestamp, - enable, - pubKeyHash + const tokenId = this.provider.tokenSet.resolveTokenId(withdraw.token); + const transactionData = { + accountId: this.accountId, + from: this.address(), + ethAddress: withdraw.ethAddress, + tokenId, + amount: withdraw.amount, + fee: withdraw.fee, + nonce: withdraw.nonce, + validFrom: withdraw.validFrom, + validUntil: withdraw.validUntil }; - } - - async toggle2FA(enable: boolean, pubKeyHash?: PubKeyHash): Promise { - await 
this.setRequiredAccountIdFromServer('Toggle 2FA'); - return await this.provider.toggle2FA(await this.getToggle2FA(enable, pubKeyHash)); + return await this.signer.signSyncWithdraw(transactionData); } - async signSetSigningKey(changePubKey: { - feeToken: TokenLike; + protected async getForcedExit(forcedExit: { + target: Address; + token: TokenLike; fee: BigNumberish; nonce: number; - ethAuthType: ChangePubkeyTypes; - batchHash?: string; validFrom?: number; validUntil?: number; - }): Promise { - const newPubKeyHash = await this.signer.pubKeyHash(); + }): Promise { + if (!this.signer) { + throw new Error('ZKSync signer is required for sending zksync transactions.'); + } + await this.setRequiredAccountIdFromServer('perform a Forced Exit'); - let ethAuthData; - let ethSignature; - if (changePubKey.ethAuthType === 'Onchain') { - ethAuthData = { - type: 'Onchain' - }; - } else if (changePubKey.ethAuthType === 'ECDSA') { - await this.setRequiredAccountIdFromServer('ChangePubKey authorized by ECDSA.'); - const changePubKeyMessage = getChangePubkeyMessage( - newPubKeyHash, - changePubKey.nonce, - this.accountId, - changePubKey.batchHash - ); - const ethSignature = (await this.getEthMessageSignature(changePubKeyMessage)).signature; - ethAuthData = { - type: 'ECDSA', - ethSignature, - batchHash: changePubKey.batchHash - }; - } else if (changePubKey.ethAuthType === 'CREATE2') { - if (this.ethSigner instanceof Create2WalletSigner) { - const create2data = this.ethSigner.create2WalletData; - ethAuthData = { - type: 'CREATE2', - creatorAddress: create2data.creatorAddress, - saltArg: create2data.saltArg, - codeHash: create2data.codeHash - }; - } else { - throw new Error('CREATE2 wallet authentication is only available for CREATE2 wallets'); - } - } else if (changePubKey.ethAuthType === 'ECDSALegacyMessage') { - await this.setRequiredAccountIdFromServer('ChangePubKey authorized by ECDSALegacyMessage.'); - const changePubKeyMessage = getChangePubkeyLegacyMessage(newPubKeyHash, changePubKey.nonce, this.accountId); - ethSignature = (await this.getEthMessageSignature(changePubKeyMessage)).signature; - } else { - throw new Error('Unsupported SetSigningKey type'); + const tokenId = this.provider.tokenSet.resolveTokenId(forcedExit.token); + + const transactionData = { + initiatorAccountId: this.accountId, + target: forcedExit.target, + tokenId, + fee: forcedExit.fee, + nonce: forcedExit.nonce, + validFrom: forcedExit.validFrom || 0, + validUntil: forcedExit.validUntil || MAX_TIMESTAMP + }; + + return await this.signer.signSyncForcedExit(transactionData); + } + + protected async getSwap(swap: { + orders: [Order, Order]; + feeToken: number; + amounts: [BigNumberish, BigNumberish]; + nonce: number; + fee: BigNumberish; + }): Promise { + if (!this.signer) { + throw new Error('zkSync signer is required for swapping funds'); + } + await this.setRequiredAccountIdFromServer('Swap submission'); + const feeToken = this.provider.tokenSet.resolveTokenId(swap.feeToken); + + return this.signer.signSyncSwap({ + ...swap, + submitterId: await this.getAccountId(), + submitterAddress: this.address(), + feeToken + }); + } + + protected async getMintNFT(mintNFT: { + recipient: string; + contentHash: string; + feeToken: TokenLike; + fee: BigNumberish; + nonce: number; + }): Promise { + if (!this.signer) { + throw new Error('ZKSync signer is required for sending zksync transactions.'); } + await this.setRequiredAccountIdFromServer('MintNFT'); - const changePubkeyTxUnsigned = Object.assign(changePubKey, { ethAuthData, ethSignature }); - 
changePubkeyTxUnsigned.validFrom = changePubKey.validFrom || 0; - changePubkeyTxUnsigned.validUntil = changePubKey.validUntil || MAX_TIMESTAMP; - const changePubKeyTx = await this.getChangePubKey(changePubkeyTxUnsigned as any); - - return { - tx: changePubKeyTx + const feeTokenId = this.provider.tokenSet.resolveTokenId(mintNFT.feeToken); + const transactionData = { + creatorId: this.accountId, + creatorAddress: this.address(), + recipient: mintNFT.recipient, + contentHash: mintNFT.contentHash, + feeTokenId, + fee: mintNFT.fee, + nonce: mintNFT.nonce }; + + return await this.signer.signMintNFT(transactionData); } - async setSigningKey(changePubKey: { + protected async getWithdrawNFT(withdrawNFT: { + to: string; + token: TokenLike; feeToken: TokenLike; - ethAuthType: ChangePubkeyTypes; - fee?: BigNumberish; - nonce?: Nonce; - validFrom?: number; - validUntil?: number; - }): Promise { - changePubKey.nonce = - changePubKey.nonce != null ? await this.getNonce(changePubKey.nonce) : await this.getNonce(); - - if (changePubKey.fee == null) { - changePubKey.fee = 0; - - if (changePubKey.ethAuthType === 'ECDSALegacyMessage') { - const feeType = { - ChangePubKey: { - onchainPubkeyAuth: false - } - }; - const fullFee = await this.provider.getTransactionFee(feeType, this.address(), changePubKey.feeToken); - changePubKey.fee = fullFee.totalFee; - } else { - const feeType = { - ChangePubKey: changePubKey.ethAuthType - }; - const fullFee = await this.provider.getTransactionFee(feeType, this.address(), changePubKey.feeToken); - changePubKey.fee = fullFee.totalFee; - } + fee: BigNumberish; + nonce: number; + validFrom: number; + validUntil: number; + }): Promise { + if (!this.signer) { + throw new Error('ZKSync signer is required for sending zksync transactions.'); } + await this.setRequiredAccountIdFromServer('WithdrawNFT'); - const txData = await this.signSetSigningKey(changePubKey as any); - - const currentPubKeyHash = await this.getCurrentPubKeyHash(); - if (currentPubKeyHash === (txData.tx as ChangePubKey).newPkHash) { - throw new Error('Current signing key is already set'); - } + const tokenId = this.provider.tokenSet.resolveTokenId(withdrawNFT.token); + const feeTokenId = this.provider.tokenSet.resolveTokenId(withdrawNFT.feeToken); + const transactionData = { + accountId: this.accountId, + from: this.address(), + to: withdrawNFT.to, + tokenId, + feeTokenId, + fee: withdrawNFT.fee, + nonce: withdrawNFT.nonce, + validFrom: withdrawNFT.validFrom, + validUntil: withdrawNFT.validUntil + }; - return submitSignedTransaction(txData, this.provider); + return await this.signer.signWithdrawNFT(transactionData); } + getWithdrawNFTEthMessagePart(withdrawNFT: { to: string; token: number; @@ -1071,7 +1022,7 @@ export class Wallet { ? null : this.provider.tokenSet.formatToken(withdrawNFT.feeToken, withdrawNFT.fee); const stringFeeToken = this.provider.tokenSet.resolveTokenSymbol(withdrawNFT.feeToken); - return this.ethMessageSigner.getWithdrawNFTEthMessagePart({ + return this.ethMessageSigner().getWithdrawNFTEthMessagePart({ token: withdrawNFT.token, to: withdrawNFT.to, stringFee, @@ -1097,7 +1048,7 @@ export class Wallet { ? null : this.provider.tokenSet.formatToken(transfer.token, transfer.fee); const stringToken = await this.provider.getTokenSymbol(transfer.token); - return this.ethMessageSigner.getTransferEthMessagePart({ + return this.ethMessageSigner().getTransferEthMessagePart({ stringAmount, stringFee, stringToken, @@ -1118,7 +1069,7 @@ export class Wallet { ? 
null : this.provider.tokenSet.formatToken(withdraw.token, withdraw.fee); const stringToken = this.provider.tokenSet.resolveTokenSymbol(withdraw.token); - return this.ethMessageSigner.getWithdrawEthMessagePart({ + return this.ethMessageSigner().getWithdrawEthMessagePart({ stringAmount, stringFee, stringToken, @@ -1135,7 +1086,7 @@ export class Wallet { ? null : this.provider.tokenSet.formatToken(changePubKey.feeToken, changePubKey.fee); const stringToken = this.provider.tokenSet.resolveTokenSymbol(changePubKey.feeToken); - return this.ethMessageSigner.getChangePubKeyEthMessagePart({ + return this.ethMessageSigner().getChangePubKeyEthMessagePart({ pubKeyHash: changePubKey.pubKeyHash, stringToken, stringFee @@ -1152,7 +1103,7 @@ export class Wallet { ? null : this.provider.tokenSet.formatToken(mintNFT.feeToken, mintNFT.fee); const stringFeeToken = this.provider.tokenSet.resolveTokenSymbol(mintNFT.feeToken); - return this.ethMessageSigner.getMintNFTEthMessagePart({ + return this.ethMessageSigner().getMintNFTEthMessagePart({ stringFeeToken, stringFee, recipient: mintNFT.recipient, @@ -1165,7 +1116,7 @@ export class Wallet { ? null : this.provider.tokenSet.formatToken(swap.feeToken, swap.fee); const stringToken = this.provider.tokenSet.resolveTokenSymbol(swap.feeToken); - return this.ethMessageSigner.getSwapEthSignMessagePart({ + return this.ethMessageSigner().getSwapEthSignMessagePart({ fee: stringFee, feeToken: stringToken }); @@ -1176,464 +1127,59 @@ export class Wallet { ? null : this.provider.tokenSet.formatToken(forcedExit.token, forcedExit.fee); const stringToken = this.provider.tokenSet.resolveTokenSymbol(forcedExit.token); - return this.ethMessageSigner.getForcedExitEthMessagePart({ + return this.ethMessageSigner().getForcedExitEthMessagePart({ stringToken, stringFee, target: forcedExit.target }); } - async isOnchainAuthSigningKeySet(nonce: Nonce = 'committed'): Promise { - const mainZkSyncContract = this.getZkSyncMainContract(); - - const numNonce = await this.getNonce(nonce); - try { - const onchainAuthFact = await mainZkSyncContract.authFacts(this.address(), numNonce); - return onchainAuthFact !== '0x0000000000000000000000000000000000000000000000000000000000000000'; - } catch (e) { - this.modifyEthersError(e); - } - } - - async onchainAuthSigningKey( - nonce: Nonce = 'committed', - ethTxOptions?: ethers.providers.TransactionRequest - ): Promise { - if (!this.signer) { - throw new Error('ZKSync signer is required for current pubkey calculation.'); - } - - const currentPubKeyHash = await this.getCurrentPubKeyHash(); - const newPubKeyHash = await this.signer.pubKeyHash(); - - if (currentPubKeyHash === newPubKeyHash) { - throw new Error('Current PubKeyHash is the same as new'); - } - - const numNonce = await this.getNonce(nonce); - - const mainZkSyncContract = this.getZkSyncMainContract(); - - try { - return mainZkSyncContract.setAuthPubkeyHash(newPubKeyHash.replace('sync:', '0x'), numNonce, { - gasLimit: BigNumber.from('200000'), - ...ethTxOptions - }); - } catch (e) { - this.modifyEthersError(e); - } - } - - async getCurrentPubKeyHash(): Promise { - return (await this.provider.getState(this.address())).committed.pubKeyHash; - } - - async getNonce(nonce: Nonce = 'committed'): Promise { - if (nonce === 'committed') { - return (await this.provider.getState(this.address())).committed.nonce; - } else if (typeof nonce === 'number') { - return nonce; - } - } - - async getAccountId(): Promise { - return (await this.provider.getState(this.address())).id; - } - - address(): Address { - return 
this.cachedAddress; - } - - async getAccountState(): Promise { - return this.provider.getState(this.address()); - } - - async getNFT(tokenId: number, type: 'committed' | 'verified' = 'committed'): Promise { - const accountState = await this.getAccountState(); - let token: NFT; - if (type === 'committed') { - token = accountState.committed.nfts[tokenId]; - } else { - token = accountState.verified.nfts[tokenId]; - } - return token; - } - - async getBalance(token: TokenLike, type: 'committed' | 'verified' = 'committed'): Promise { - const accountState = await this.getAccountState(); - const tokenSymbol = this.provider.tokenSet.resolveTokenSymbol(token); - let balance: BigNumberish; - if (type === 'committed') { - balance = accountState.committed.balances[tokenSymbol] || '0'; - } else { - balance = accountState.verified.balances[tokenSymbol] || '0'; - } - return BigNumber.from(balance); - } - - async getEthereumBalance(token: TokenLike): Promise { - try { - return await getEthereumBalance(this.ethSigner.provider, this.provider, this.cachedAddress, token); - } catch (e) { - this.modifyEthersError(e); - } - } - - async isERC20DepositsApproved( - token: TokenLike, - erc20ApproveThreshold: BigNumber = ERC20_APPROVE_TRESHOLD - ): Promise { - if (isTokenETH(token)) { - throw Error('ETH token does not need approval.'); - } - const tokenAddress = this.provider.tokenSet.resolveTokenAddress(token); - const erc20contract = new Contract(tokenAddress, IERC20_INTERFACE, this.ethSigner); - try { - const currentAllowance = await erc20contract.allowance( - this.address(), - this.provider.contractAddress.mainContract - ); - return BigNumber.from(currentAllowance).gte(erc20ApproveThreshold); - } catch (e) { - this.modifyEthersError(e); - } - } - - async approveERC20TokenDeposits( - token: TokenLike, - max_erc20_approve_amount: BigNumber = MAX_ERC20_APPROVE_AMOUNT - ): Promise { - if (isTokenETH(token)) { - throw Error('ETH token does not need approval.'); - } - const tokenAddress = this.provider.tokenSet.resolveTokenAddress(token); - const erc20contract = new Contract(tokenAddress, IERC20_INTERFACE, this.ethSigner); - - try { - return erc20contract.approve(this.provider.contractAddress.mainContract, max_erc20_approve_amount); - } catch (e) { - this.modifyEthersError(e); - } - } - - async depositToSyncFromEthereum(deposit: { - depositTo: Address; - token: TokenLike; + async getPartialOrder(order: { + tokenSell: TokenLike; + tokenBuy: TokenLike; + ratio: TokenRatio | WeiRatio; amount: BigNumberish; - ethTxOptions?: ethers.providers.TransactionRequest; - approveDepositAmountForERC20?: boolean; - }): Promise { - const gasPrice = await this.ethSigner.provider.getGasPrice(); - - const mainZkSyncContract = this.getZkSyncMainContract(); - - let ethTransaction; - - if (isTokenETH(deposit.token)) { - try { - ethTransaction = await mainZkSyncContract.depositETH(deposit.depositTo, { - value: BigNumber.from(deposit.amount), - gasLimit: BigNumber.from(ETH_RECOMMENDED_DEPOSIT_GAS_LIMIT), - gasPrice, - ...deposit.ethTxOptions - }); - } catch (e) { - this.modifyEthersError(e); - } - } else { - const tokenAddress = this.provider.tokenSet.resolveTokenAddress(deposit.token); - // ERC20 token deposit - const erc20contract = new Contract(tokenAddress, IERC20_INTERFACE, this.ethSigner); - let nonce: number; - if (deposit.approveDepositAmountForERC20) { - try { - const approveTx = await erc20contract.approve( - this.provider.contractAddress.mainContract, - deposit.amount - ); - nonce = approveTx.nonce + 1; - } catch (e) { - 
this.modifyEthersError(e); - } - } - const args = [ - tokenAddress, - deposit.amount, - deposit.depositTo, - { - nonce, - gasPrice, - ...deposit.ethTxOptions - } as ethers.providers.TransactionRequest - ]; - - // We set gas limit only if user does not set it using ethTxOptions. - const txRequest = args[args.length - 1] as ethers.providers.TransactionRequest; - if (txRequest.gasLimit == null) { - try { - const gasEstimate = await mainZkSyncContract.estimateGas.depositERC20(...args).then( - (estimate) => estimate, - () => BigNumber.from('0') - ); - const isMainnet = (await this.ethSigner.getChainId()) == 1; - let recommendedGasLimit = - isMainnet && ERC20_DEPOSIT_GAS_LIMIT[tokenAddress] - ? BigNumber.from(ERC20_DEPOSIT_GAS_LIMIT[tokenAddress]) - : ERC20_RECOMMENDED_DEPOSIT_GAS_LIMIT; - txRequest.gasLimit = gasEstimate.gte(recommendedGasLimit) ? gasEstimate : recommendedGasLimit; - args[args.length - 1] = txRequest; - } catch (e) { - this.modifyEthersError(e); - } - } - - try { - ethTransaction = await mainZkSyncContract.depositERC20(...args); - } catch (e) { - this.modifyEthersError(e); - } - } - - return new ETHOperation(ethTransaction, this.provider); - } - - async resolveAccountId(): Promise { - if (this.accountId !== undefined) { - return this.accountId; - } else { - const accountState = await this.getAccountState(); - if (!accountState.id) { - throw new Error("Can't resolve account id from the zkSync node"); - } - return accountState.id; - } - } - - async emergencyWithdraw(withdraw: { - token: TokenLike; - accountId?: number; - ethTxOptions?: ethers.providers.TransactionRequest; - }): Promise { - const gasPrice = await this.ethSigner.provider.getGasPrice(); - - let accountId: number = withdraw.accountId != null ? withdraw.accountId : await this.resolveAccountId(); - - const mainZkSyncContract = this.getZkSyncMainContract(); - - const tokenAddress = this.provider.tokenSet.resolveTokenAddress(withdraw.token); - try { - const ethTransaction = await mainZkSyncContract.requestFullExit(accountId, tokenAddress, { - gasLimit: BigNumber.from('500000'), - gasPrice, - ...withdraw.ethTxOptions - }); - return new ETHOperation(ethTransaction, this.provider); - } catch (e) { - this.modifyEthersError(e); + recipient?: Address; + nonce?: Nonce; + validFrom?: number; + validUntil?: number; + }): Promise { + if (!this.signer) { + throw new Error('zkSync signer is required for signing an order'); } - } - - async emergencyWithdrawNFT(withdrawNFT: { - tokenId: number; - accountId?: number; - ethTxOptions?: ethers.providers.TransactionRequest; - }): Promise { - const gasPrice = await this.ethSigner.provider.getGasPrice(); - - let accountId: number = withdrawNFT.accountId != null ? withdrawNFT.accountId : await this.resolveAccountId(); + await this.setRequiredAccountIdFromServer('Swap order'); + const nonce = order.nonce != null ? 
await this.getNonce(order.nonce) : await this.getNonce(); + const recipient = order.recipient || this.address(); - const mainZkSyncContract = this.getZkSyncMainContract(); + let ratio: Ratio; + const sell = order.tokenSell; + const buy = order.tokenBuy; - try { - const ethTransaction = await mainZkSyncContract.requestFullExitNFT(accountId, withdrawNFT.tokenId, { - gasLimit: BigNumber.from('500000'), - gasPrice, - ...withdrawNFT.ethTxOptions - }); - return new ETHOperation(ethTransaction, this.provider); - } catch (e) { - this.modifyEthersError(e); + if (!order.ratio[sell] || !order.ratio[buy]) { + throw new Error(`Wrong tokens in the ratio object: should be ${sell} and ${buy}`); } - } - - getZkSyncMainContract() { - return new ethers.Contract( - this.provider.contractAddress.mainContract, - SYNC_MAIN_CONTRACT_INTERFACE, - this.ethSigner - ); - } - private modifyEthersError(error: any): never { - if (this.ethSigner instanceof ethers.providers.JsonRpcSigner) { - // List of errors that can be caused by user's actions, which have to be forwarded as-is. - const correct_errors = [ - EthersErrorCode.NONCE_EXPIRED, - EthersErrorCode.INSUFFICIENT_FUNDS, - EthersErrorCode.REPLACEMENT_UNDERPRICED, - EthersErrorCode.UNPREDICTABLE_GAS_LIMIT + if (order.ratio.type == 'Wei') { + ratio = [order.ratio[sell], order.ratio[buy]]; + } else if (order.ratio.type == 'Token') { + ratio = [ + this.provider.tokenSet.parseToken(sell, order.ratio[sell].toString()), + this.provider.tokenSet.parseToken(buy, order.ratio[buy].toString()) ]; - if (!correct_errors.includes(error.code)) { - // This is an error which we don't expect - error.message = `Ethereum smart wallet JSON RPC server returned the following error while executing an operation: "${error.message}". Please contact your smart wallet support for help.`; - } - } - - throw error; - } - - private async setRequiredAccountIdFromServer(actionName: string) { - if (this.accountId === undefined) { - const accountIdFromServer = await this.getAccountId(); - if (accountIdFromServer == null) { - throw new Error(`Failed to ${actionName}: Account does not exist in the zkSync network`); - } else { - this.accountId = accountIdFromServer; - } - } - } -} - -export class ETHOperation { - state: 'Sent' | 'Mined' | 'Committed' | 'Verified' | 'Failed'; - error?: ZKSyncTxError; - priorityOpId?: BigNumber; - - constructor(public ethTx: ContractTransaction, public zkSyncProvider: SyncProvider) { - this.state = 'Sent'; - } - - async awaitEthereumTxCommit() { - if (this.state !== 'Sent') return; - - const txReceipt = await this.ethTx.wait(); - for (const log of txReceipt.logs) { - try { - const priorityQueueLog = SYNC_MAIN_CONTRACT_INTERFACE.parseLog(log); - if (priorityQueueLog && priorityQueueLog.args.serialId != null) { - this.priorityOpId = priorityQueueLog.args.serialId; - } - } catch {} - } - if (!this.priorityOpId) { - throw new Error('Failed to parse tx logs'); - } - - this.state = 'Mined'; - return txReceipt; - } - - async awaitReceipt(): Promise { - this.throwErrorIfFailedState(); - - await this.awaitEthereumTxCommit(); - if (this.state !== 'Mined') return; - - let query: number | string; - if (this.zkSyncProvider.providerType === 'RPC') { - query = this.priorityOpId.toNumber(); - } else { - query = this.ethTx.hash; - } - const receipt = await this.zkSyncProvider.notifyPriorityOp(query, 'COMMIT'); - - if (!receipt.executed) { - this.setErrorState(new ZKSyncTxError('Priority operation failed', receipt)); - this.throwErrorIfFailedState(); - } - - this.state = 'Committed'; - return 
receipt; - } - - async awaitVerifyReceipt(): Promise { - await this.awaitReceipt(); - if (this.state !== 'Committed') return; - - let query: number | string; - if (this.zkSyncProvider.providerType === 'RPC') { - query = this.priorityOpId.toNumber(); - } else { - query = this.ethTx.hash; - } - const receipt = await this.zkSyncProvider.notifyPriorityOp(query, 'VERIFY'); - - this.state = 'Verified'; - - return receipt; - } - - private setErrorState(error: ZKSyncTxError) { - this.state = 'Failed'; - this.error = error; - } - - private throwErrorIfFailedState() { - if (this.state === 'Failed') throw this.error; - } -} - -export class Transaction { - state: 'Sent' | 'Committed' | 'Verified' | 'Failed'; - error?: ZKSyncTxError; - - constructor(public txData, public txHash: string, public sidechainProvider: SyncProvider) { - this.state = 'Sent'; - } - - async awaitReceipt(): Promise { - this.throwErrorIfFailedState(); - - if (this.state !== 'Sent') return; - - const receipt = await this.sidechainProvider.notifyTransaction(this.txHash, 'COMMIT'); - - if (!receipt.success) { - this.setErrorState(new ZKSyncTxError(`zkSync transaction failed: ${receipt.failReason}`, receipt)); - this.throwErrorIfFailedState(); } - this.state = 'Committed'; - return receipt; - } - - async awaitVerifyReceipt(): Promise { - await this.awaitReceipt(); - const receipt = await this.sidechainProvider.notifyTransaction(this.txHash, 'VERIFY'); - - this.state = 'Verified'; - return receipt; - } - - private setErrorState(error: ZKSyncTxError) { - this.state = 'Failed'; - this.error = error; - } + const partialOrder = await this.signer.signSyncOrder({ + accountId: this.accountId, + recipient, + nonce, + amount: order.amount || BigNumber.from(0), + tokenSell: this.provider.tokenSet.resolveTokenId(order.tokenSell), + tokenBuy: this.provider.tokenSet.resolveTokenId(order.tokenBuy), + validFrom: order.validFrom || 0, + validUntil: order.validUntil || MAX_TIMESTAMP, + ratio + }); - private throwErrorIfFailedState() { - if (this.state === 'Failed') throw this.error; + return partialOrder; } } - -export async function submitSignedTransaction( - signedTx: SignedTransaction, - provider: SyncProvider, - fastProcessing?: boolean -): Promise { - const transactionHash = await provider.submitTx(signedTx.tx, signedTx.ethereumSignature, fastProcessing); - return new Transaction(signedTx, transactionHash, provider); -} - -export async function submitSignedTransactionsBatch( - provider: SyncProvider, - signedTxs: SignedTransaction[], - ethSignatures?: TxEthSignature[] -): Promise { - const transactionHashes = await provider.submitTxsBatch( - signedTxs.map((tx) => { - return { tx: tx.tx, signature: tx.ethereumSignature }; - }), - ethSignatures - ); - return transactionHashes.map((txHash, idx) => new Transaction(signedTxs[idx], txHash, provider)); -} diff --git a/sdk/zksync.js/src/withdraw-helpers.ts b/sdk/zksync.js/src/withdraw-helpers.ts index c52960b4e4..eab1467c50 100644 --- a/sdk/zksync.js/src/withdraw-helpers.ts +++ b/sdk/zksync.js/src/withdraw-helpers.ts @@ -53,11 +53,11 @@ Wallet.prototype.withdrawPendingBalance = async function ( token: TokenLike, amount?: BigNumberish ): Promise { - checkEthProvider(this.ethSigner); + checkEthProvider(this.ethSigner()); const zksyncContract = this.getZkSyncMainContract(); - const gasPrice = await this.ethSigner.getGasPrice(); + const gasPrice = await this.ethSigner().getGasPrice(); const tokenAddress = this.provider.tokenSet.resolveTokenAddress(token); const withdrawAmount = amount ? 
amount : await zksyncContract.getPendingBalance(from, tokenAddress); @@ -75,7 +75,7 @@ Wallet.prototype.withdrawPendingBalances = async function ( multicallParams: MulticallParams, amounts?: BigNumberish[] ): Promise { - checkEthProvider(this.ethSigner); + checkEthProvider(this.ethSigner()); if (tokens.length != addresses.length) { throw new Error('The array of addresses and the tokens should be the same length'); @@ -84,7 +84,7 @@ Wallet.prototype.withdrawPendingBalances = async function ( const multicallAddress = multicallParams.address || getMulticallAddressByNetwork(multicallParams.network); const zksyncContract = this.getZkSyncMainContract(); - const gasPrice = await this.ethSigner.getGasPrice(); + const gasPrice = await this.ethSigner().getGasPrice(); const tokensAddresses = tokens.map((token) => this.provider.tokenSet.resolveTokenAddress(token)); @@ -109,7 +109,7 @@ Wallet.prototype.withdrawPendingBalances = async function ( return [zksyncContract.address, callData]; }); - const multicallContract = new Contract(multicallAddress, MULTICALL_INTERFACE, this.ethSigner); + const multicallContract = new Contract(multicallAddress, MULTICALL_INTERFACE, this.ethSigner()); return multicallContract.aggregate(calls, { gasLimit: multicallParams.gasLimit || BigNumber.from('300000'), diff --git a/sdk/zksync.js/tests/wallet.test.ts b/sdk/zksync.js/tests/wallet.test.ts index fcebce028f..538224796d 100644 --- a/sdk/zksync.js/tests/wallet.test.ts +++ b/sdk/zksync.js/tests/wallet.test.ts @@ -4,7 +4,7 @@ import { Wallet } from '../src/wallet'; import { getTokens } from 'reading-tool'; import { Provider } from '../src/provider'; -import { Network } from '../build/types'; +import { Network } from '../src/types'; describe('Wallet with mock provider', function () { async function getWallet(ethPrivateKey: Uint8Array, network: Network): Promise { diff --git a/yarn.lock b/yarn.lock index 7c800c2f87..fe2351bddc 100644 --- a/yarn.lock +++ b/yarn.lock @@ -8618,9 +8618,9 @@ simple-concat@^1.0.0: integrity sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q== simple-get@^2.7.0: - version "2.8.1" - resolved "https://registry.yarnpkg.com/simple-get/-/simple-get-2.8.1.tgz#0e22e91d4575d87620620bc91308d57a77f44b5d" - integrity sha512-lSSHRSw3mQNUGPAYRqo7xy9dhKmxFXIjLjp4KHpf99GEH2VH7C3AM+Qfx6du6jhfUi6Vm7XnbEVEf7Wb6N8jRw== + version "2.8.2" + resolved "https://registry.yarnpkg.com/simple-get/-/simple-get-2.8.2.tgz#5708fb0919d440657326cd5fe7d2599d07705019" + integrity sha512-Ijd/rV5o+mSBBs4F/x9oDPtTx9Zb6X9brmnXvMW4J7IR15ngi9q5xxqWBKU744jTZiaXtxaPL7uHG6vtN8kUkw== dependencies: decompress-response "^3.3.0" once "^1.3.1" @@ -10650,7 +10650,7 @@ yocto-queue@^0.1.0: integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== "zksync@link:sdk/zksync.js": - version "0.11.6" + version "0.12.0-alpha.4" dependencies: axios "^0.21.2" websocket "^1.0.30"
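
For reference, a minimal usage sketch of the Wallet surface touched by this diff. It is illustrative only and not part of the change set: it assumes an already-constructed, provider-connected wallet instance of the refactored Wallet class from sdk/zksync.js, and uses a placeholder recipient address.

import { ethers } from 'ethers';

// `wallet` is assumed to be a ready, provider-connected Wallet instance
// (construction is not shown in this diff).
declare const wallet: import('zksync').Wallet;

async function example() {
    // Rotate the L2 signing key. ethAuthType is one of the variants handled by
    // signSetSigningKey above: 'Onchain' | 'ECDSA' | 'CREATE2' | 'ECDSALegacyMessage'.
    const changePubKey = await wallet.setSigningKey({
        feeToken: 'ETH',
        ethAuthType: 'ECDSA'
    });
    await changePubKey.awaitReceipt();

    // Transfer with the fee omitted: syncTransfer fills it in via
    // provider.getTransactionFee('Transfer', to, token), as shown above.
    const transfer = await wallet.syncTransfer({
        to: '0x…', // placeholder recipient address
        token: 'ETH',
        amount: ethers.utils.parseEther('0.1')
    });
    await transfer.awaitReceipt();
}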