From 38e58ffc8cad192b8e82ce3edd24140a0ca55319 Mon Sep 17 00:00:00 2001 From: Denis Baryshev Date: Thu, 24 Jun 2021 12:41:31 +0300 Subject: [PATCH 1/7] update deployment workflows --- .github/workflows/deploy-apps.yml | 10 +++++----- .github/workflows/deploy-stage.yml | 28 ++++++++++++++++------------ 2 files changed, 21 insertions(+), 17 deletions(-) diff --git a/.github/workflows/deploy-apps.yml b/.github/workflows/deploy-apps.yml index 364419e078..293f2fd941 100644 --- a/.github/workflows/deploy-apps.yml +++ b/.github/workflows/deploy-apps.yml @@ -29,22 +29,22 @@ jobs: { "mainnet": { "KUBECONF": "KUBECONF_MAINNET", - "HFENV": "zksync-prod", + "HFENV": "prod", "RUNNER": "mainnet" }, "rinkeby": { "KUBECONF": "KUBECONF_TESTNET", - "HFENV": "zksync-${{ github.event.deployment.environment }}", + "HFENV": "${{ github.event.deployment.environment }}", "RUNNER": "testnet" }, "ropsten": { "KUBECONF": "KUBECONF_TESTNET", - "HFENV": "zksync-${{ github.event.deployment.environment }}", + "HFENV": "${{ github.event.deployment.environment }}", "RUNNER": "testnet" }, ".*": { "KUBECONF": "KUBECONF_STAGE", - "HFENV": "zksync-${{ github.event.deployment.environment }}", + "HFENV": "${{ github.event.deployment.environment }}", "RUNNER": "stage" } } @@ -71,7 +71,7 @@ jobs: if: ${{ needs.pre.outputs.isTag == 'true' }} container: - image: dysnix/kubectl:v1.16-gcloud + image: dysnix/kubectl:v1.19-gcloud env: IMAGE_TAG: ${{ needs.pre.outputs.shortRev }} diff --git a/.github/workflows/deploy-stage.yml b/.github/workflows/deploy-stage.yml index c42b91c2e0..18910e9026 100644 --- a/.github/workflows/deploy-stage.yml +++ b/.github/workflows/deploy-stage.yml @@ -4,6 +4,7 @@ on: push: branches: - dev + - breaking env: DEPLOY_APPS: -l name=server -l name=prover -l name=explorer @@ -51,20 +52,24 @@ jobs: run: | docker login -u ${{ secrets.DOCKER_USER }} -p ${{ secrets.DOCKER_PASSWORD }} zk docker push rust - zk docker push nginx deploy: + # TODO: fix stage and enable deployments back. 
+ # NOTE: breaking deployment is not supported. + # if: ${{ github.ref == 'refs/heads/dev' }} + if: ${{ false }} + name: Deploy to the Stage enviroment runs-on: [k8s, deployer, stage] needs: [pre, build-images] container: - image: dysnix/kubectl:v1.16-gcloud + image: dysnix/kubectl:v1.19-gcloud env: KUBECONF: ${{ secrets.KUBECONF_STAGE }} IMAGE_TAG: ${{ needs.pre.outputs.shortRev }} - HFENV: zksync-stage + HFENV: stage steps: - @@ -85,16 +90,15 @@ jobs: with: token: "${{ github.token }}" environment: stage -# TODO fix stage and resume deploy to stage -# - -# name: Deploy apps -# working-directory: helm-infra -# run: | + - + name: Deploy apps + working-directory: helm-infra + run: | # copy helm plugins over (from dysnix/kubectl, don't forget)!!! -# cp -r /dysnix/kubectl/.local /dysnix/kubectl/.cache ~ -# -# UPDATE_REPOS=y helmfile -e $HFENV repos -# helmfile -e $HFENV $DEPLOY_APPS apply --args "timeout 180s" + cp -r /dysnix/kubectl/.local /dysnix/kubectl/.cache ~ + + UPDATE_REPOS=y helmfile -e $HFENV repos + helmfile -e $HFENV $DEPLOY_APPS apply --args "timeout 180s" - name: Update deployment status (success) if: success() From 9e122ebabee552a28f78656cf804bacfb323b796 Mon Sep 17 00:00:00 2001 From: Denis Baryshev Date: Thu, 24 Jun 2021 17:36:30 +0300 Subject: [PATCH 2/7] [github-actions] disable deploy @stage --- .github/workflows/deploy-stage.yml | 133 ++++++++++++++--------------- 1 file changed, 66 insertions(+), 67 deletions(-) diff --git a/.github/workflows/deploy-stage.yml b/.github/workflows/deploy-stage.yml index 18910e9026..268c87c09e 100644 --- a/.github/workflows/deploy-stage.yml +++ b/.github/workflows/deploy-stage.yml @@ -53,75 +53,74 @@ jobs: docker login -u ${{ secrets.DOCKER_USER }} -p ${{ secrets.DOCKER_PASSWORD }} zk docker push rust - deploy: - # TODO: fix stage and enable deployments back. - # NOTE: breaking deployment is not supported. 
- # if: ${{ github.ref == 'refs/heads/dev' }} - if: ${{ false }} - name: Deploy to the Stage enviroment - runs-on: [k8s, deployer, stage] - needs: [pre, build-images] + # deploy: + # ## TODO: fix stage and enable deployments back. + # ## NOTE: breaking deployment is not supported. - container: - image: dysnix/kubectl:v1.19-gcloud + # name: Deploy to the Stage enviroment + # runs-on: [k8s, deployer, stage] + # needs: [pre, build-images] - env: - KUBECONF: ${{ secrets.KUBECONF_STAGE }} - IMAGE_TAG: ${{ needs.pre.outputs.shortRev }} - HFENV: stage + # container: + # image: dysnix/kubectl:v1.19-gcloud - steps: - - - name: Create ~/.kube/config - run: mkdir -p ~/.kube && echo "$KUBECONF" | base64 -d > ~/.kube/config - - - name: Clone helm-infra - uses: actions/checkout@v2 - with: - repository: matter-labs/helm-infra - path: helm-infra - ref: master - token: ${{ secrets.GH_TOKEN }} - - - uses: chrnorm/deployment-action@releases/v1 - name: Create GitHub deployment - id: deployment - with: - token: "${{ github.token }}" - environment: stage - - - name: Deploy apps - working-directory: helm-infra - run: | - # copy helm plugins over (from dysnix/kubectl, don't forget)!!! - cp -r /dysnix/kubectl/.local /dysnix/kubectl/.cache ~ + # env: + # KUBECONF: ${{ secrets.KUBECONF_STAGE }} + # IMAGE_TAG: ${{ needs.pre.outputs.shortRev }} + # HFENV: stage + + # steps: + # - + # name: Create ~/.kube/config + # run: mkdir -p ~/.kube && echo "$KUBECONF" | base64 -d > ~/.kube/config + # - + # name: Clone helm-infra + # uses: actions/checkout@v2 + # with: + # repository: matter-labs/helm-infra + # path: helm-infra + # ref: master + # token: ${{ secrets.GH_TOKEN }} + # - + # uses: chrnorm/deployment-action@releases/v1 + # name: Create GitHub deployment + # id: deployment + # with: + # token: "${{ github.token }}" + # environment: stage + # - + # name: Deploy apps + # working-directory: helm-infra + # run: | + # # copy helm plugins over (from dysnix/kubectl, don't forget)!!! 
+ # cp -r /dysnix/kubectl/.local /dysnix/kubectl/.cache ~ - UPDATE_REPOS=y helmfile -e $HFENV repos - helmfile -e $HFENV $DEPLOY_APPS apply --args "timeout 180s" - - - name: Update deployment status (success) - if: success() - uses: chrnorm/deployment-status@releases/v1 - with: - token: ${{ github.token }} - state: success - deployment_id: ${{ steps.deployment.outputs.deployment_id }} - - - name: Update deployment status (failure) - if: failure() - uses: chrnorm/deployment-status@releases/v1 - with: - token: ${{ github.token }} - state: failure - deployment_id: ${{ steps.deployment.outputs.deployment_id }} - - - name: Notify to Mattermost (on failure) - uses: tferreira/matterfy@releases/v1 - if: failure() - with: - type: ${{ job.status }} - job_name: '*Deployment to stage failed*' - icon_emoji: octocat - channel: 'matterlabs-alerts' - url: ${{ secrets.MATTERMOST_WEBHOOK }} + # UPDATE_REPOS=y helmfile -e $HFENV repos + # helmfile -e $HFENV $DEPLOY_APPS apply --args "timeout 180s" + # - + # name: Update deployment status (success) + # if: success() + # uses: chrnorm/deployment-status@releases/v1 + # with: + # token: ${{ github.token }} + # state: success + # deployment_id: ${{ steps.deployment.outputs.deployment_id }} + # - + # name: Update deployment status (failure) + # if: failure() + # uses: chrnorm/deployment-status@releases/v1 + # with: + # token: ${{ github.token }} + # state: failure + # deployment_id: ${{ steps.deployment.outputs.deployment_id }} + # - + # name: Notify to Mattermost (on failure) + # uses: tferreira/matterfy@releases/v1 + # if: failure() + # with: + # type: ${{ job.status }} + # job_name: '*Deployment to stage failed*' + # icon_emoji: octocat + # channel: 'matterlabs-alerts' + # url: ${{ secrets.MATTERMOST_WEBHOOK }} From 9d54f4a77c21697bbb6dd30c6269e9508839bdaf Mon Sep 17 00:00:00 2001 From: Denis Baryshev Date: Thu, 24 Jun 2021 17:53:10 +0300 Subject: [PATCH 3/7] [github-actions] remove conditional job execution --- 
.github/workflows/deploy-apps.yml | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/.github/workflows/deploy-apps.yml b/.github/workflows/deploy-apps.yml index 293f2fd941..2fb33d6f3c 100644 --- a/.github/workflows/deploy-apps.yml +++ b/.github/workflows/deploy-apps.yml @@ -68,7 +68,6 @@ jobs: name: Deploy Apps runs-on: [k8s, deployer, "${{ needs.pre.outputs.runner }}"] needs: pre - if: ${{ needs.pre.outputs.isTag == 'true' }} container: image: dysnix/kubectl:v1.19-gcloud @@ -80,9 +79,11 @@ jobs: steps: - + if: ${{ needs.pre.outputs.isTag == 'true' }} name: Create ~/.kube/config run: mkdir -p ~/.kube && echo "$KUBECONF" | base64 -d > ~/.kube/config - + if: ${{ needs.pre.outputs.isTag == 'true' }} name: Clone helm-infra uses: actions/checkout@v2 with: @@ -91,6 +92,7 @@ jobs: ref: master token: ${{ secrets.GH_TOKEN }} - + if: ${{ needs.pre.outputs.isTag == 'true' }} name: Deploy apps working-directory: helm-infra run: | @@ -100,25 +102,25 @@ jobs: UPDATE_REPOS=y helmfile -e $HFENV repos helmfile -e $HFENV $DEPLOY_APPS apply --args "timeout 180s" - + if: success() && needs.pre.outputs.isTag == 'true' name: Update deployment status (success) - if: success() uses: chrnorm/deployment-status@releases/v1 with: token: ${{ github.token }} state: success deployment_id: ${{ github.event.deployment.id }} - + if: failure() && needs.pre.outputs.isTag == 'true' name: Update deployment status (failure) - if: failure() uses: chrnorm/deployment-status@releases/v1 with: token: ${{ github.token }} state: failure deployment_id: ${{ github.event.deployment.id }} - + if: failure() && needs.pre.outputs.isTag == 'true' name: Notify to Mattermost (on failure) uses: tferreira/matterfy@releases/v1 - if: failure() with: type: ${{ job.status }} job_name: '*Deployment to "${{ github.event.deployment.environment }}" failed*' From d81870bf322f62807e75802a2b5babc258819ac2 Mon Sep 17 00:00:00 2001 From: Denis Baryshev Date: Thu, 24 Jun 2021 18:17:13 +0300 Subject: [PATCH 
4/7] [github-actions] use strict regex for env mapping --- .github/workflows/deploy-apps.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/deploy-apps.yml b/.github/workflows/deploy-apps.yml index 2fb33d6f3c..5770d1921b 100644 --- a/.github/workflows/deploy-apps.yml +++ b/.github/workflows/deploy-apps.yml @@ -27,17 +27,17 @@ jobs: key: "${{ github.event.deployment.environment }}" map: | { - "mainnet": { + "^mainnet$": { "KUBECONF": "KUBECONF_MAINNET", "HFENV": "prod", "RUNNER": "mainnet" }, - "rinkeby": { + "^rinkeby$": { "KUBECONF": "KUBECONF_TESTNET", "HFENV": "${{ github.event.deployment.environment }}", "RUNNER": "testnet" }, - "ropsten": { + "^ropsten$": { "KUBECONF": "KUBECONF_TESTNET", "HFENV": "${{ github.event.deployment.environment }}", "RUNNER": "testnet" From fd5c605f28d8476756dbc35b8a06abb7a0f04fdc Mon Sep 17 00:00:00 2001 From: deniallugo Date: Fri, 25 Jun 2021 15:24:00 +0300 Subject: [PATCH 5/7] Fix nonce mismatch for mint-nft Signed-off-by: deniallugo --- .../prover/src/plonk_step_by_step_prover.rs | 10 + .../lib/circuit/src/witness/tests/mint_nft.rs | 2 +- core/lib/state/src/handler/mint_nft.rs | 474 +++++----- .../state/src/tests/operations/mint_nft.rs | 886 +++++++++--------- 4 files changed, 708 insertions(+), 664 deletions(-) diff --git a/core/bin/prover/src/plonk_step_by_step_prover.rs b/core/bin/prover/src/plonk_step_by_step_prover.rs index 5264da9e46..cfffe64501 100644 --- a/core/bin/prover/src/plonk_step_by_step_prover.rs +++ b/core/bin/prover/src/plonk_step_by_step_prover.rs @@ -2,6 +2,7 @@ use std::sync::Mutex; // Workspace deps use zksync_config::ChainConfig; +use zksync_crypto::bellman::Circuit; use zksync_crypto::proof::{AggregatedProof, PrecomputedSampleProofs, SingleProof}; use zksync_crypto::Engine; use zksync_prover_utils::aggregated_proofs::{gen_aggregate_proof, prepare_proof_data}; @@ -10,6 +11,7 @@ use zksync_prover_utils::{PlonkVerificationKey, SetupForStepByStepProver}; use 
zksync_utils::parse_env; // Local deps use crate::{ProverConfig, ProverImpl}; +use zksync_crypto::franklin_crypto::circuit::test::TestConstraintSystem; use zksync_prover_utils::fs_utils::load_precomputed_proofs; /// We prepare some data before making proof for each block size, so we cache it in case next block @@ -56,6 +58,14 @@ impl PlonkStepByStepProver { block_size: usize, ) -> anyhow::Result { // we do this way here so old precomp is dropped + let mut cs = TestConstraintSystem::::new(); + witness.clone().synthesize(&mut cs).unwrap(); + + if let Some(err) = cs.which_is_unsatisfied() { + println!("unconstrained: {}", cs.find_unconstrained()); + println!("number of constraints {}", cs.num_constraints()); + println!("Unsatisfied {:?}", err); + } let valid_cached_precomp = { self.prepared_computations .lock() diff --git a/core/lib/circuit/src/witness/tests/mint_nft.rs b/core/lib/circuit/src/witness/tests/mint_nft.rs index 9474374b5a..313d44bc87 100644 --- a/core/lib/circuit/src/witness/tests/mint_nft.rs +++ b/core/lib/circuit/src/witness/tests/mint_nft.rs @@ -1,6 +1,7 @@ use num::BigUint; use zksync_crypto::franklin_crypto::bellman::pairing::bn256::Bn256; +use zksync_crypto::params::{MIN_NFT_TOKEN_ID, NFT_STORAGE_ACCOUNT_ID, NFT_TOKEN_ID}; use zksync_state::handler::TxHandler; use zksync_state::state::{CollectedFee, ZkSyncState}; use zksync_types::{AccountId, MintNFT, MintNFTOp, TokenId, H256}; @@ -10,7 +11,6 @@ use crate::witness::tests::test_utils::{ WitnessTestAccount, }; use crate::witness::{utils::WitnessBuilder, MintNFTWitness, SigDataInput}; -use zksync_crypto::params::{MIN_NFT_TOKEN_ID, NFT_STORAGE_ACCOUNT_ID, NFT_TOKEN_ID}; /// Basic check for execution of `MintNFT` operation in circuit. /// Here we create two accounts and perform a mintNFT operation. 
diff --git a/core/lib/state/src/handler/mint_nft.rs b/core/lib/state/src/handler/mint_nft.rs index 4ba5387a3c..cc70f280c5 100644 --- a/core/lib/state/src/handler/mint_nft.rs +++ b/core/lib/state/src/handler/mint_nft.rs @@ -1,236 +1,238 @@ -use num::{BigUint, ToPrimitive, Zero}; -use std::time::Instant; - -use zksync_types::{ - operations::MintNFTOp, - tokens::NFT, - tx::{calculate_token_address, calculate_token_data, calculate_token_hash}, - Account, AccountUpdate, AccountUpdates, Address, MintNFT, Nonce, PubKeyHash, TokenId, ZkSyncOp, -}; - -use zksync_crypto::params::{ - max_processable_token, MIN_NFT_TOKEN_ID, NFT_STORAGE_ACCOUNT_ADDRESS, NFT_STORAGE_ACCOUNT_ID, - NFT_TOKEN_ID, -}; - -use crate::{ - handler::{error::MintNFTOpError, TxHandler}, - state::{CollectedFee, OpSuccess, ZkSyncState}, -}; - -impl TxHandler for ZkSyncState { - type Op = MintNFTOp; - type OpError = MintNFTOpError; - - fn create_op(&self, tx: MintNFT) -> Result { - invariant!( - tx.fee_token <= max_processable_token(), - MintNFTOpError::InvalidTokenId - ); - invariant!( - tx.recipient != Address::zero(), - MintNFTOpError::RecipientAccountIncorrect - ); - let creator = self - .get_account(tx.creator_id) - .ok_or(MintNFTOpError::CreatorAccountNotFound)?; - invariant!( - creator.pub_key_hash != PubKeyHash::default(), - MintNFTOpError::CreatorAccountIsLocked - ); - - if let Some((pub_key_hash, _)) = tx.verify_signature() { - if pub_key_hash != creator.pub_key_hash { - return Err(MintNFTOpError::InvalidSignature); - } - } - - let (recipient, _) = self - .get_account_by_address(&tx.recipient) - .ok_or(MintNFTOpError::RecipientAccountNotFound)?; - - let op = MintNFTOp { - creator_account_id: tx.creator_id, - recipient_account_id: recipient, - tx, - }; - - Ok(op) - } - - fn apply_tx(&mut self, tx: MintNFT) -> Result { - let op = self.create_op(tx)?; - - let (fee, updates) = >::apply_op(self, &op)?; - let result = OpSuccess { - fee, - updates, - executed_op: ZkSyncOp::MintNFTOp(Box::new(op)), - }; - 
- Ok(result) - } - - fn apply_op( - &mut self, - op: &Self::Op, - ) -> Result<(Option, AccountUpdates), Self::OpError> { - let start = Instant::now(); - let mut updates = Vec::new(); - - // The creator must pay fee for generating NFT. - let mut creator_account = self - .get_account(op.creator_account_id) - .ok_or(MintNFTOpError::CreatorAccountNotFound)?; - let old_balance = creator_account.get_balance(op.tx.fee_token); - let nonce = creator_account.nonce; - invariant!( - old_balance >= op.tx.fee, - MintNFTOpError::InsufficientBalance - ); - creator_account.sub_balance(op.tx.fee_token, &op.tx.fee); - let new_balance = creator_account.get_balance(op.tx.fee_token); - *creator_account.nonce += 1; - updates.push(( - op.creator_account_id, - AccountUpdate::UpdateBalance { - balance_update: (op.tx.fee_token, old_balance, new_balance), - old_nonce: nonce, - new_nonce: creator_account.nonce, - }, - )); - self.insert_account(op.creator_account_id, creator_account.clone()); - - // Serial ID is a counter in a special balance for NFT_TOKEN, which shows how many nft were generated by this creator - let old_balance = creator_account.get_balance(NFT_TOKEN_ID); - let old_nonce = creator_account.nonce; - let serial_id = old_balance.to_u32().unwrap_or_default(); - creator_account.add_balance(NFT_TOKEN_ID, &BigUint::from(1u32)); - let new_balance = creator_account.get_balance(NFT_TOKEN_ID); - updates.push(( - op.creator_account_id, - AccountUpdate::UpdateBalance { - balance_update: (NFT_TOKEN_ID, old_balance, new_balance), - old_nonce, - new_nonce: creator_account.nonce, - }, - )); - self.insert_account(op.creator_account_id, creator_account.clone()); - - // The address for the nft token is generated based on `creator_account_id`,` serial_id` and `content_hash` - // Generate token id. 
We have a special NFT account, which stores the next token id for nft in balance of NFT_TOKEN - let (mut nft_account, account_updates) = self.get_or_create_nft_account_token_id(); - updates.extend(account_updates); - - let new_token_id = nft_account.get_balance(NFT_TOKEN_ID); - nft_account.add_balance(NFT_TOKEN_ID, &BigUint::from(1u32)); - let next_token_id = nft_account.get_balance(NFT_TOKEN_ID); - updates.push(( - NFT_STORAGE_ACCOUNT_ID, - AccountUpdate::UpdateBalance { - balance_update: (NFT_TOKEN_ID, new_token_id.clone(), next_token_id), - old_nonce: Nonce(0), - new_nonce: Nonce(0), - }, - )); - self.insert_account(NFT_STORAGE_ACCOUNT_ID, nft_account.clone()); - - // Mint NFT with precalculated token_id, serial_id and address - let token_id = TokenId(new_token_id.to_u32().expect("Should be correct u32")); - let token_hash = calculate_token_hash(op.tx.creator_id, serial_id, op.tx.content_hash); - let token_address = calculate_token_address(&token_hash); - let token = NFT::new( - token_id, - serial_id, - op.tx.creator_id, - creator_account.address, - token_address, - None, - op.tx.content_hash, - ); - updates.push(( - op.creator_account_id, - AccountUpdate::MintNFT { - token: token.clone(), - }, - )); - self.nfts.insert(token_id, token); - self.insert_account(op.creator_account_id, creator_account); - - // Token data is a special balance for NFT_STORAGE_ACCOUNT, - // which represent last 16 bytes of hash of (account_id, serial_id, content_hash) for storing this data in circuit - let token_data = calculate_token_data(&token_hash); - let old_balance = nft_account.get_balance(token_id); - assert_eq!( - old_balance, - BigUint::zero(), - "The balance of nft token must be zero" - ); - nft_account.add_balance(token_id, &token_data); - updates.push(( - NFT_STORAGE_ACCOUNT_ID, - AccountUpdate::UpdateBalance { - balance_update: (token_id, BigUint::zero(), token_data), - old_nonce: nft_account.nonce, - new_nonce: nft_account.nonce, - }, - )); - 
self.insert_account(NFT_STORAGE_ACCOUNT_ID, nft_account); - - // Add this token to recipient account - let mut recipient_account = self - .get_account(op.recipient_account_id) - .ok_or(MintNFTOpError::RecipientAccountNotFound)?; - let old_amount = recipient_account.get_balance(token_id); - invariant!( - old_amount == BigUint::zero(), - MintNFTOpError::TokenIsAlreadyInAccount - ); - let old_nonce = recipient_account.nonce; - recipient_account.add_balance(token_id, &BigUint::from(1u32)); - updates.push(( - op.recipient_account_id, - AccountUpdate::UpdateBalance { - balance_update: (token_id, BigUint::zero(), BigUint::from(1u32)), - old_nonce, - new_nonce: recipient_account.nonce, - }, - )); - self.insert_account(op.recipient_account_id, recipient_account); - - let fee = CollectedFee { - token: op.tx.fee_token, - amount: op.tx.fee.clone(), - }; - - metrics::histogram!("state.mint_nft", start.elapsed()); - Ok((Some(fee), updates)) - } -} -impl ZkSyncState { - /// Get or create special account with special balance for enforcing uniqueness of token_id - fn get_or_create_nft_account_token_id(&mut self) -> (Account, AccountUpdates) { - let mut updates = vec![]; - let account = self.get_account(NFT_STORAGE_ACCOUNT_ID).unwrap_or_else(|| { - vlog::error!("NFT Account is not defined in account tree, add it manually"); - let balance = BigUint::from(MIN_NFT_TOKEN_ID); - let (mut account, upd) = - Account::create_account(NFT_STORAGE_ACCOUNT_ID, *NFT_STORAGE_ACCOUNT_ADDRESS); - updates.extend(upd.into_iter()); - account.add_balance(NFT_TOKEN_ID, &BigUint::from(MIN_NFT_TOKEN_ID)); - - self.insert_account(NFT_STORAGE_ACCOUNT_ID, account.clone()); - - updates.push(( - NFT_STORAGE_ACCOUNT_ID, - AccountUpdate::UpdateBalance { - balance_update: (NFT_TOKEN_ID, BigUint::zero(), balance), - old_nonce: Nonce(0), - new_nonce: Nonce(0), - }, - )); - account - }); - (account, updates) - } -} +use num::{BigUint, ToPrimitive, Zero}; +use std::time::Instant; + +use zksync_types::{ + 
operations::MintNFTOp, + tokens::NFT, + tx::{calculate_token_address, calculate_token_data, calculate_token_hash}, + Account, AccountUpdate, AccountUpdates, Address, MintNFT, Nonce, PubKeyHash, TokenId, ZkSyncOp, +}; + +use zksync_crypto::params::{ + max_processable_token, MIN_NFT_TOKEN_ID, NFT_STORAGE_ACCOUNT_ADDRESS, NFT_STORAGE_ACCOUNT_ID, + NFT_TOKEN_ID, +}; + +use crate::{ + handler::{error::MintNFTOpError, TxHandler}, + state::{CollectedFee, OpSuccess, ZkSyncState}, +}; + +impl TxHandler for ZkSyncState { + type Op = MintNFTOp; + type OpError = MintNFTOpError; + + fn create_op(&self, tx: MintNFT) -> Result { + invariant!( + tx.fee_token <= max_processable_token(), + MintNFTOpError::InvalidTokenId + ); + invariant!( + tx.recipient != Address::zero(), + MintNFTOpError::RecipientAccountIncorrect + ); + let creator = self + .get_account(tx.creator_id) + .ok_or(MintNFTOpError::CreatorAccountNotFound)?; + invariant!( + creator.pub_key_hash != PubKeyHash::default(), + MintNFTOpError::CreatorAccountIsLocked + ); + + if let Some((pub_key_hash, _)) = tx.verify_signature() { + if pub_key_hash != creator.pub_key_hash { + return Err(MintNFTOpError::InvalidSignature); + } + } + + let (recipient, _) = self + .get_account_by_address(&tx.recipient) + .ok_or(MintNFTOpError::RecipientAccountNotFound)?; + + let op = MintNFTOp { + creator_account_id: tx.creator_id, + recipient_account_id: recipient, + tx, + }; + + Ok(op) + } + + fn apply_tx(&mut self, tx: MintNFT) -> Result { + let op = self.create_op(tx)?; + + let (fee, updates) = >::apply_op(self, &op)?; + let result = OpSuccess { + fee, + updates, + executed_op: ZkSyncOp::MintNFTOp(Box::new(op)), + }; + + Ok(result) + } + + fn apply_op( + &mut self, + op: &Self::Op, + ) -> Result<(Option, AccountUpdates), Self::OpError> { + let start = Instant::now(); + let mut updates = Vec::new(); + + // The creator must pay fee for generating NFT. 
+ let mut creator_account = self + .get_account(op.creator_account_id) + .ok_or(MintNFTOpError::CreatorAccountNotFound)?; + let old_balance = creator_account.get_balance(op.tx.fee_token); + let nonce = creator_account.nonce; + invariant!(nonce == op.tx.nonce, MintNFTOpError::NonceMismatch); + + invariant!( + old_balance >= op.tx.fee, + MintNFTOpError::InsufficientBalance + ); + creator_account.sub_balance(op.tx.fee_token, &op.tx.fee); + let new_balance = creator_account.get_balance(op.tx.fee_token); + *creator_account.nonce += 1; + updates.push(( + op.creator_account_id, + AccountUpdate::UpdateBalance { + balance_update: (op.tx.fee_token, old_balance, new_balance), + old_nonce: nonce, + new_nonce: creator_account.nonce, + }, + )); + self.insert_account(op.creator_account_id, creator_account.clone()); + + // Serial ID is a counter in a special balance for NFT_TOKEN, which shows how many nft were generated by this creator + let old_balance = creator_account.get_balance(NFT_TOKEN_ID); + let old_nonce = creator_account.nonce; + let serial_id = old_balance.to_u32().unwrap_or_default(); + creator_account.add_balance(NFT_TOKEN_ID, &BigUint::from(1u32)); + let new_balance = creator_account.get_balance(NFT_TOKEN_ID); + updates.push(( + op.creator_account_id, + AccountUpdate::UpdateBalance { + balance_update: (NFT_TOKEN_ID, old_balance, new_balance), + old_nonce, + new_nonce: creator_account.nonce, + }, + )); + self.insert_account(op.creator_account_id, creator_account.clone()); + + // The address for the nft token is generated based on `creator_account_id`,` serial_id` and `content_hash` + // Generate token id. 
We have a special NFT account, which stores the next token id for nft in balance of NFT_TOKEN + let (mut nft_account, account_updates) = self.get_or_create_nft_account_token_id(); + updates.extend(account_updates); + + let new_token_id = nft_account.get_balance(NFT_TOKEN_ID); + nft_account.add_balance(NFT_TOKEN_ID, &BigUint::from(1u32)); + let next_token_id = nft_account.get_balance(NFT_TOKEN_ID); + updates.push(( + NFT_STORAGE_ACCOUNT_ID, + AccountUpdate::UpdateBalance { + balance_update: (NFT_TOKEN_ID, new_token_id.clone(), next_token_id), + old_nonce: Nonce(0), + new_nonce: Nonce(0), + }, + )); + self.insert_account(NFT_STORAGE_ACCOUNT_ID, nft_account.clone()); + + // Mint NFT with precalculated token_id, serial_id and address + let token_id = TokenId(new_token_id.to_u32().expect("Should be correct u32")); + let token_hash = calculate_token_hash(op.tx.creator_id, serial_id, op.tx.content_hash); + let token_address = calculate_token_address(&token_hash); + let token = NFT::new( + token_id, + serial_id, + op.tx.creator_id, + creator_account.address, + token_address, + None, + op.tx.content_hash, + ); + updates.push(( + op.creator_account_id, + AccountUpdate::MintNFT { + token: token.clone(), + }, + )); + self.nfts.insert(token_id, token); + self.insert_account(op.creator_account_id, creator_account); + + // Token data is a special balance for NFT_STORAGE_ACCOUNT, + // which represent last 16 bytes of hash of (account_id, serial_id, content_hash) for storing this data in circuit + let token_data = calculate_token_data(&token_hash); + let old_balance = nft_account.get_balance(token_id); + assert_eq!( + old_balance, + BigUint::zero(), + "The balance of nft token must be zero" + ); + nft_account.add_balance(token_id, &token_data); + updates.push(( + NFT_STORAGE_ACCOUNT_ID, + AccountUpdate::UpdateBalance { + balance_update: (token_id, BigUint::zero(), token_data), + old_nonce: nft_account.nonce, + new_nonce: nft_account.nonce, + }, + )); + 
self.insert_account(NFT_STORAGE_ACCOUNT_ID, nft_account); + + // Add this token to recipient account + let mut recipient_account = self + .get_account(op.recipient_account_id) + .ok_or(MintNFTOpError::RecipientAccountNotFound)?; + let old_amount = recipient_account.get_balance(token_id); + invariant!( + old_amount == BigUint::zero(), + MintNFTOpError::TokenIsAlreadyInAccount + ); + let old_nonce = recipient_account.nonce; + recipient_account.add_balance(token_id, &BigUint::from(1u32)); + updates.push(( + op.recipient_account_id, + AccountUpdate::UpdateBalance { + balance_update: (token_id, BigUint::zero(), BigUint::from(1u32)), + old_nonce, + new_nonce: recipient_account.nonce, + }, + )); + self.insert_account(op.recipient_account_id, recipient_account); + + let fee = CollectedFee { + token: op.tx.fee_token, + amount: op.tx.fee.clone(), + }; + + metrics::histogram!("state.mint_nft", start.elapsed()); + Ok((Some(fee), updates)) + } +} +impl ZkSyncState { + /// Get or create special account with special balance for enforcing uniqueness of token_id + fn get_or_create_nft_account_token_id(&mut self) -> (Account, AccountUpdates) { + let mut updates = vec![]; + let account = self.get_account(NFT_STORAGE_ACCOUNT_ID).unwrap_or_else(|| { + vlog::error!("NFT Account is not defined in account tree, add it manually"); + let balance = BigUint::from(MIN_NFT_TOKEN_ID); + let (mut account, upd) = + Account::create_account(NFT_STORAGE_ACCOUNT_ID, *NFT_STORAGE_ACCOUNT_ADDRESS); + updates.extend(upd.into_iter()); + account.add_balance(NFT_TOKEN_ID, &BigUint::from(MIN_NFT_TOKEN_ID)); + + self.insert_account(NFT_STORAGE_ACCOUNT_ID, account.clone()); + + updates.push(( + NFT_STORAGE_ACCOUNT_ID, + AccountUpdate::UpdateBalance { + balance_update: (NFT_TOKEN_ID, BigUint::zero(), balance), + old_nonce: Nonce(0), + new_nonce: Nonce(0), + }, + )); + account + }); + (account, updates) + } +} diff --git a/core/lib/state/src/tests/operations/mint_nft.rs 
b/core/lib/state/src/tests/operations/mint_nft.rs index f509820ce4..7b600b767d 100644 --- a/core/lib/state/src/tests/operations/mint_nft.rs +++ b/core/lib/state/src/tests/operations/mint_nft.rs @@ -1,427 +1,459 @@ -use num::{BigUint, Zero}; -use web3::types::H256; - -use zksync_crypto::params::{ - MIN_NFT_TOKEN_ID, NFT_STORAGE_ACCOUNT_ADDRESS, NFT_STORAGE_ACCOUNT_ID, NFT_TOKEN_ID, -}; -use zksync_types::{ - tokens::NFT, - tx::{calculate_token_address, calculate_token_data, calculate_token_hash}, - AccountUpdate, MintNFT, Nonce, SignedZkSyncTx, TokenId, Transfer, ZkSyncTx, H160, -}; - -use crate::tests::{AccountState::*, PlasmaTestBuilder}; - -/// Check MintNFT operation -#[test] -fn mint_success() { - let fee_token_id = TokenId(0); - let fee = BigUint::from(10u32); - - let mut tb = PlasmaTestBuilder::new(); - - let (creator_account_id, mut creator_account, creator_sk) = tb.add_account(Unlocked); - tb.set_balance(creator_account_id, fee_token_id, 20u32); - - let (to_account_id, to_account, _to_sk) = tb.add_account(Locked); - let content_hash = H256::default(); - let mint_nft = MintNFT::new_signed( - creator_account_id, - creator_account.address, - content_hash, - to_account.address, - fee.clone(), - fee_token_id, - creator_account.nonce, - &creator_sk, - ) - .unwrap(); - - let token_hash = calculate_token_hash(creator_account_id, 0, content_hash); - let token_address = calculate_token_address(&token_hash); - - let balance = BigUint::from(MIN_NFT_TOKEN_ID); - let nft = NFT::new( - TokenId(MIN_NFT_TOKEN_ID), - 0, - creator_account_id, - creator_account.address, - token_address, - None, - content_hash, - ); - - let token_data = calculate_token_data(&token_hash); - tb.test_tx_success( - mint_nft.into(), - &[ - // Pay fee for minting nft - ( - creator_account_id, - AccountUpdate::UpdateBalance { - old_nonce: creator_account.nonce, - new_nonce: creator_account.nonce + 1, - balance_update: (fee_token_id, BigUint::from(20u32), BigUint::from(10u32)), - }, - ), - // Increment 
counter of nft tokens for creator - ( - creator_account_id, - AccountUpdate::UpdateBalance { - old_nonce: creator_account.nonce + 1, - new_nonce: creator_account.nonce + 1, - balance_update: (NFT_TOKEN_ID, BigUint::zero(), BigUint::from(1u32)), - }, - ), - // Create special nft storage account - ( - NFT_STORAGE_ACCOUNT_ID, - AccountUpdate::Create { - address: *NFT_STORAGE_ACCOUNT_ADDRESS, - nonce: Nonce(0), - }, - ), - // Add Minimum NFT token id to NFT storage account balance - ( - NFT_STORAGE_ACCOUNT_ID, - AccountUpdate::UpdateBalance { - old_nonce: Nonce(0), - new_nonce: Nonce(0), - balance_update: (NFT_TOKEN_ID, BigUint::zero(), balance), - }, - ), - // Increment NFT counter - ( - NFT_STORAGE_ACCOUNT_ID, - AccountUpdate::UpdateBalance { - old_nonce: Nonce(0), - new_nonce: Nonce(0), - balance_update: ( - NFT_TOKEN_ID, - BigUint::from(MIN_NFT_TOKEN_ID), - BigUint::from(MIN_NFT_TOKEN_ID + 1), - ), - }, - ), - // Mint nft - ( - creator_account_id, - AccountUpdate::MintNFT { token: nft.clone() }, - ), - // Store part of nft token hash as balance to NFT storage account id - ( - NFT_STORAGE_ACCOUNT_ID, - AccountUpdate::UpdateBalance { - old_nonce: to_account.nonce, - new_nonce: to_account.nonce, - balance_update: (nft.id, BigUint::zero(), token_data), - }, - ), - // Deposit nft token to recipient account - ( - to_account_id, - AccountUpdate::UpdateBalance { - old_nonce: to_account.nonce, - new_nonce: to_account.nonce, - balance_update: (nft.id, BigUint::zero(), BigUint::from(1u32)), - }, - ), - ], - ); - - // Create another nft - creator_account.nonce.0 += 1; - let (to_account_id, to_account, _to_sk) = tb.add_account(Locked); - let content_hash = H256::default(); - let mint_nft = MintNFT::new_signed( - creator_account_id, - creator_account.address, - content_hash, - to_account.address, - fee.clone(), - fee_token_id, - creator_account.nonce, - &creator_sk, - ) - .unwrap(); - - let token_hash = calculate_token_hash(creator_account_id, 1, content_hash); - let 
token_address = calculate_token_address(&token_hash); - let nft = NFT::new( - TokenId(MIN_NFT_TOKEN_ID + 1), - 1, - creator_account_id, - creator_account.address, - token_address, - None, - content_hash, - ); - - let token_data = calculate_token_data(&token_hash); - tb.test_tx_success( - mint_nft.into(), - &[ - // Pay fee for minting nft - ( - creator_account_id, - AccountUpdate::UpdateBalance { - old_nonce: creator_account.nonce, - new_nonce: creator_account.nonce + 1, - balance_update: (fee_token_id, fee, BigUint::zero()), - }, - ), - // Increment counter of nft tokens for creator - ( - creator_account_id, - AccountUpdate::UpdateBalance { - old_nonce: creator_account.nonce + 1, - new_nonce: creator_account.nonce + 1, - balance_update: (NFT_TOKEN_ID, BigUint::from(1u32), BigUint::from(2u32)), - }, - ), - // Increment NFT counter - ( - NFT_STORAGE_ACCOUNT_ID, - AccountUpdate::UpdateBalance { - old_nonce: Nonce(0), - new_nonce: Nonce(0), - balance_update: ( - NFT_TOKEN_ID, - BigUint::from(MIN_NFT_TOKEN_ID + 1), - BigUint::from(MIN_NFT_TOKEN_ID + 2), - ), - }, - ), - // Mint nft - ( - creator_account_id, - AccountUpdate::MintNFT { token: nft.clone() }, - ), - // Store part of nft token hash as balance to NFT storage account id - ( - NFT_STORAGE_ACCOUNT_ID, - AccountUpdate::UpdateBalance { - old_nonce: to_account.nonce, - new_nonce: to_account.nonce, - balance_update: (nft.id, BigUint::zero(), token_data), - }, - ), - // Deposit nft token to recipient account - ( - to_account_id, - AccountUpdate::UpdateBalance { - old_nonce: to_account.nonce, - new_nonce: to_account.nonce, - balance_update: (nft.id, BigUint::zero(), BigUint::from(1u32)), - }, - ), - ], - ) -} - -#[test] -fn mint_token_to_new_account() { - let fee_token_id = TokenId(0); - let fee = BigUint::from(10u32); - let zero_amount = BigUint::from(0u32); - - let balance_from = BigUint::from(20u32); - - let balance_to = BigUint::from(0u64); - - let mut tb = PlasmaTestBuilder::new(); - - let (creator_account_id, 
creator_account, sk) = tb.add_account(Unlocked); - tb.set_balance(creator_account_id, fee_token_id, balance_from.clone()); - - let new_address = H160::random(); - - let transfer_1 = Transfer::new_signed( - creator_account_id, - creator_account.address, - new_address, - fee_token_id, - zero_amount, - fee.clone(), - creator_account.nonce, - Default::default(), - &sk, - ) - .unwrap(); - - let signed_zk_sync_tx1 = SignedZkSyncTx { - tx: ZkSyncTx::Transfer(Box::new(transfer_1)), - eth_sign_data: None, - }; - - let new_id = tb.state.get_free_account_id(); - - let content_hash = H256::default(); - let mint_nft = MintNFT::new_signed( - creator_account_id, - creator_account.address, - content_hash, - new_address, - fee.clone(), - fee_token_id, - creator_account.nonce, - &sk, - ) - .unwrap(); - - let token_hash = calculate_token_hash(creator_account_id, 0, content_hash); - let token_address = calculate_token_address(&token_hash); - let balance = BigUint::from(MIN_NFT_TOKEN_ID); - let nft = NFT::new( - TokenId(MIN_NFT_TOKEN_ID), - 0, - creator_account_id, - creator_account.address, - token_address, - None, - content_hash, - ); - - let token_data = calculate_token_data(&token_hash); - - let signed_zk_sync_mint = SignedZkSyncTx { - tx: ZkSyncTx::MintNFT(Box::new(mint_nft)), - eth_sign_data: None, - }; - - tb.test_txs_batch_success( - &[signed_zk_sync_tx1, signed_zk_sync_mint], - &[ - // Create new account - ( - new_id, - AccountUpdate::Create { - address: new_address, - nonce: Nonce(0), - }, - ), - // Pay for for creating account - ( - creator_account_id, - AccountUpdate::UpdateBalance { - old_nonce: creator_account.nonce, - new_nonce: creator_account.nonce + 1, - balance_update: (fee_token_id, balance_from, fee), - }, - ), - // Transfer zero token to new account (TransferToNew operation) - ( - new_id, - AccountUpdate::UpdateBalance { - old_nonce: Nonce(0), - new_nonce: Nonce(0), - balance_update: (fee_token_id, balance_to.clone(), balance_to), - }, - ), - // Pay fee for 
minting nft - ( - creator_account_id, - AccountUpdate::UpdateBalance { - old_nonce: creator_account.nonce + 1, - new_nonce: creator_account.nonce + 2, - balance_update: (fee_token_id, BigUint::from(10u32), BigUint::from(0u32)), - }, - ), - // Increment counter of nft tokens for creator - ( - creator_account_id, - AccountUpdate::UpdateBalance { - old_nonce: creator_account.nonce + 2, - new_nonce: creator_account.nonce + 2, - balance_update: (NFT_TOKEN_ID, BigUint::zero(), BigUint::from(1u32)), - }, - ), - // Create special nft storage account - ( - NFT_STORAGE_ACCOUNT_ID, - AccountUpdate::Create { - address: *NFT_STORAGE_ACCOUNT_ADDRESS, - nonce: Nonce(0), - }, - ), - // Add Minimum NFT token id to NFT storage account balance - ( - NFT_STORAGE_ACCOUNT_ID, - AccountUpdate::UpdateBalance { - old_nonce: Nonce(0), - new_nonce: Nonce(0), - balance_update: (NFT_TOKEN_ID, BigUint::zero(), balance), - }, - ), - // Increment NFT counter - ( - NFT_STORAGE_ACCOUNT_ID, - AccountUpdate::UpdateBalance { - old_nonce: Nonce(0), - new_nonce: Nonce(0), - balance_update: ( - NFT_TOKEN_ID, - BigUint::from(MIN_NFT_TOKEN_ID), - BigUint::from(MIN_NFT_TOKEN_ID + 1), - ), - }, - ), - // Mint nft - ( - creator_account_id, - AccountUpdate::MintNFT { token: nft.clone() }, - ), - // Store part of nft token hash as balance to NFT storage account id - ( - NFT_STORAGE_ACCOUNT_ID, - AccountUpdate::UpdateBalance { - old_nonce: Nonce(0), - new_nonce: Nonce(0), - balance_update: (nft.id, BigUint::zero(), token_data), - }, - ), - // Deposit nft token to recipient account - ( - new_id, - AccountUpdate::UpdateBalance { - old_nonce: Nonce(0), - new_nonce: Nonce(0), - balance_update: (nft.id, BigUint::zero(), BigUint::from(1u32)), - }, - ), - ], - ); -} - -/// Check MINT NFT failure if recipient address does not exist -/// does not correspond to accound_id -#[test] -fn mint_already_created_nft() { - let fee_token_id = TokenId(0); - let fee = BigUint::from(10u32); - - let mut tb = PlasmaTestBuilder::new(); 
- - let (creator_account_id, creator_account, creator_sk) = tb.add_account(Unlocked); - tb.set_balance(creator_account_id, fee_token_id, 20u32); - - let (to_account_id, mut to_account, _to_sk) = tb.add_account(Locked); - - let nft_token_id = TokenId(MIN_NFT_TOKEN_ID); - to_account.set_balance(nft_token_id, BigUint::from(1u32)); - tb.state.insert_account(to_account_id, to_account.clone()); - let content_hash = H256::default(); - let mint_nft = MintNFT::new_signed( - creator_account_id, - creator_account.address, - content_hash, - to_account.address, - fee, - fee_token_id, - creator_account.nonce, - &creator_sk, - ) - .unwrap(); - - tb.test_tx_fail(mint_nft.into(), "NFT token is already in account") -} +use num::{BigUint, Zero}; +use web3::types::H256; + +use zksync_crypto::params::{ + MIN_NFT_TOKEN_ID, NFT_STORAGE_ACCOUNT_ADDRESS, NFT_STORAGE_ACCOUNT_ID, NFT_TOKEN_ID, +}; +use zksync_types::{ + tokens::NFT, + tx::{calculate_token_address, calculate_token_data, calculate_token_hash}, + AccountUpdate, MintNFT, Nonce, SignedZkSyncTx, TokenId, Transfer, ZkSyncTx, H160, +}; + +use crate::tests::{AccountState::*, PlasmaTestBuilder}; + +/// Check MintNFT operation +#[test] +fn mint_success() { + let fee_token_id = TokenId(0); + let fee = BigUint::from(10u32); + + let mut tb = PlasmaTestBuilder::new(); + + let (creator_account_id, mut creator_account, creator_sk) = tb.add_account(Unlocked); + tb.set_balance(creator_account_id, fee_token_id, 20u32); + + let (to_account_id, to_account, _to_sk) = tb.add_account(Locked); + let content_hash = H256::default(); + let mint_nft = MintNFT::new_signed( + creator_account_id, + creator_account.address, + content_hash, + to_account.address, + fee.clone(), + fee_token_id, + creator_account.nonce, + &creator_sk, + ) + .unwrap(); + + let token_hash = calculate_token_hash(creator_account_id, 0, content_hash); + let token_address = calculate_token_address(&token_hash); + + let balance = BigUint::from(MIN_NFT_TOKEN_ID); + let nft = NFT::new( + 
TokenId(MIN_NFT_TOKEN_ID), + 0, + creator_account_id, + creator_account.address, + token_address, + None, + content_hash, + ); + + let token_data = calculate_token_data(&token_hash); + tb.test_tx_success( + mint_nft.into(), + &[ + // Pay fee for minting nft + ( + creator_account_id, + AccountUpdate::UpdateBalance { + old_nonce: creator_account.nonce, + new_nonce: creator_account.nonce + 1, + balance_update: (fee_token_id, BigUint::from(20u32), BigUint::from(10u32)), + }, + ), + // Increment counter of nft tokens for creator + ( + creator_account_id, + AccountUpdate::UpdateBalance { + old_nonce: creator_account.nonce + 1, + new_nonce: creator_account.nonce + 1, + balance_update: (NFT_TOKEN_ID, BigUint::zero(), BigUint::from(1u32)), + }, + ), + // Create special nft storage account + ( + NFT_STORAGE_ACCOUNT_ID, + AccountUpdate::Create { + address: *NFT_STORAGE_ACCOUNT_ADDRESS, + nonce: Nonce(0), + }, + ), + // Add Minimum NFT token id to NFT storage account balance + ( + NFT_STORAGE_ACCOUNT_ID, + AccountUpdate::UpdateBalance { + old_nonce: Nonce(0), + new_nonce: Nonce(0), + balance_update: (NFT_TOKEN_ID, BigUint::zero(), balance), + }, + ), + // Increment NFT counter + ( + NFT_STORAGE_ACCOUNT_ID, + AccountUpdate::UpdateBalance { + old_nonce: Nonce(0), + new_nonce: Nonce(0), + balance_update: ( + NFT_TOKEN_ID, + BigUint::from(MIN_NFT_TOKEN_ID), + BigUint::from(MIN_NFT_TOKEN_ID + 1), + ), + }, + ), + // Mint nft + ( + creator_account_id, + AccountUpdate::MintNFT { token: nft.clone() }, + ), + // Store part of nft token hash as balance to NFT storage account id + ( + NFT_STORAGE_ACCOUNT_ID, + AccountUpdate::UpdateBalance { + old_nonce: to_account.nonce, + new_nonce: to_account.nonce, + balance_update: (nft.id, BigUint::zero(), token_data), + }, + ), + // Deposit nft token to recipient account + ( + to_account_id, + AccountUpdate::UpdateBalance { + old_nonce: to_account.nonce, + new_nonce: to_account.nonce, + balance_update: (nft.id, BigUint::zero(), 
BigUint::from(1u32)), + }, + ), + ], + ); + + // Create another nft + creator_account.nonce.0 += 1; + let (to_account_id, to_account, _to_sk) = tb.add_account(Locked); + let content_hash = H256::default(); + let mint_nft = MintNFT::new_signed( + creator_account_id, + creator_account.address, + content_hash, + to_account.address, + fee.clone(), + fee_token_id, + creator_account.nonce, + &creator_sk, + ) + .unwrap(); + + let token_hash = calculate_token_hash(creator_account_id, 1, content_hash); + let token_address = calculate_token_address(&token_hash); + let nft = NFT::new( + TokenId(MIN_NFT_TOKEN_ID + 1), + 1, + creator_account_id, + creator_account.address, + token_address, + None, + content_hash, + ); + + let token_data = calculate_token_data(&token_hash); + tb.test_tx_success( + mint_nft.into(), + &[ + // Pay fee for minting nft + ( + creator_account_id, + AccountUpdate::UpdateBalance { + old_nonce: creator_account.nonce, + new_nonce: creator_account.nonce + 1, + balance_update: (fee_token_id, fee, BigUint::zero()), + }, + ), + // Increment counter of nft tokens for creator + ( + creator_account_id, + AccountUpdate::UpdateBalance { + old_nonce: creator_account.nonce + 1, + new_nonce: creator_account.nonce + 1, + balance_update: (NFT_TOKEN_ID, BigUint::from(1u32), BigUint::from(2u32)), + }, + ), + // Increment NFT counter + ( + NFT_STORAGE_ACCOUNT_ID, + AccountUpdate::UpdateBalance { + old_nonce: Nonce(0), + new_nonce: Nonce(0), + balance_update: ( + NFT_TOKEN_ID, + BigUint::from(MIN_NFT_TOKEN_ID + 1), + BigUint::from(MIN_NFT_TOKEN_ID + 2), + ), + }, + ), + // Mint nft + ( + creator_account_id, + AccountUpdate::MintNFT { token: nft.clone() }, + ), + // Store part of nft token hash as balance to NFT storage account id + ( + NFT_STORAGE_ACCOUNT_ID, + AccountUpdate::UpdateBalance { + old_nonce: to_account.nonce, + new_nonce: to_account.nonce, + balance_update: (nft.id, BigUint::zero(), token_data), + }, + ), + // Deposit nft token to recipient account + ( + 
to_account_id, + AccountUpdate::UpdateBalance { + old_nonce: to_account.nonce, + new_nonce: to_account.nonce, + balance_update: (nft.id, BigUint::zero(), BigUint::from(1u32)), + }, + ), + ], + ) +} + +#[test] +fn mint_token_to_new_account() { + let fee_token_id = TokenId(0); + let fee = BigUint::from(10u32); + let zero_amount = BigUint::from(0u32); + + let balance_from = BigUint::from(20u32); + + let balance_to = BigUint::from(0u64); + + let mut tb = PlasmaTestBuilder::new(); + + let (creator_account_id, creator_account, sk) = tb.add_account(Unlocked); + tb.set_balance(creator_account_id, fee_token_id, balance_from.clone()); + + let new_address = H160::random(); + + let transfer_1 = Transfer::new_signed( + creator_account_id, + creator_account.address, + new_address, + fee_token_id, + zero_amount, + fee.clone(), + creator_account.nonce, + Default::default(), + &sk, + ) + .unwrap(); + + let signed_zk_sync_tx1 = SignedZkSyncTx { + tx: ZkSyncTx::Transfer(Box::new(transfer_1)), + eth_sign_data: None, + }; + + let new_id = tb.state.get_free_account_id(); + + let content_hash = H256::default(); + let mint_nft = MintNFT::new_signed( + creator_account_id, + creator_account.address, + content_hash, + new_address, + fee.clone(), + fee_token_id, + creator_account.nonce, + &sk, + ) + .unwrap(); + + let token_hash = calculate_token_hash(creator_account_id, 0, content_hash); + let token_address = calculate_token_address(&token_hash); + let balance = BigUint::from(MIN_NFT_TOKEN_ID); + let nft = NFT::new( + TokenId(MIN_NFT_TOKEN_ID), + 0, + creator_account_id, + creator_account.address, + token_address, + None, + content_hash, + ); + + let token_data = calculate_token_data(&token_hash); + + let signed_zk_sync_mint = SignedZkSyncTx { + tx: ZkSyncTx::MintNFT(Box::new(mint_nft)), + eth_sign_data: None, + }; + + tb.test_txs_batch_success( + &[signed_zk_sync_tx1, signed_zk_sync_mint], + &[ + // Create new account + ( + new_id, + AccountUpdate::Create { + address: new_address, + nonce: 
Nonce(0), + }, + ), + // Pay for for creating account + ( + creator_account_id, + AccountUpdate::UpdateBalance { + old_nonce: creator_account.nonce, + new_nonce: creator_account.nonce + 1, + balance_update: (fee_token_id, balance_from, fee), + }, + ), + // Transfer zero token to new account (TransferToNew operation) + ( + new_id, + AccountUpdate::UpdateBalance { + old_nonce: Nonce(0), + new_nonce: Nonce(0), + balance_update: (fee_token_id, balance_to.clone(), balance_to), + }, + ), + // Pay fee for minting nft + ( + creator_account_id, + AccountUpdate::UpdateBalance { + old_nonce: creator_account.nonce + 1, + new_nonce: creator_account.nonce + 2, + balance_update: (fee_token_id, BigUint::from(10u32), BigUint::from(0u32)), + }, + ), + // Increment counter of nft tokens for creator + ( + creator_account_id, + AccountUpdate::UpdateBalance { + old_nonce: creator_account.nonce + 2, + new_nonce: creator_account.nonce + 2, + balance_update: (NFT_TOKEN_ID, BigUint::zero(), BigUint::from(1u32)), + }, + ), + // Create special nft storage account + ( + NFT_STORAGE_ACCOUNT_ID, + AccountUpdate::Create { + address: *NFT_STORAGE_ACCOUNT_ADDRESS, + nonce: Nonce(0), + }, + ), + // Add Minimum NFT token id to NFT storage account balance + ( + NFT_STORAGE_ACCOUNT_ID, + AccountUpdate::UpdateBalance { + old_nonce: Nonce(0), + new_nonce: Nonce(0), + balance_update: (NFT_TOKEN_ID, BigUint::zero(), balance), + }, + ), + // Increment NFT counter + ( + NFT_STORAGE_ACCOUNT_ID, + AccountUpdate::UpdateBalance { + old_nonce: Nonce(0), + new_nonce: Nonce(0), + balance_update: ( + NFT_TOKEN_ID, + BigUint::from(MIN_NFT_TOKEN_ID), + BigUint::from(MIN_NFT_TOKEN_ID + 1), + ), + }, + ), + // Mint nft + ( + creator_account_id, + AccountUpdate::MintNFT { token: nft.clone() }, + ), + // Store part of nft token hash as balance to NFT storage account id + ( + NFT_STORAGE_ACCOUNT_ID, + AccountUpdate::UpdateBalance { + old_nonce: Nonce(0), + new_nonce: Nonce(0), + balance_update: (nft.id, BigUint::zero(), 
token_data), + }, + ), + // Deposit nft token to recipient account + ( + new_id, + AccountUpdate::UpdateBalance { + old_nonce: Nonce(0), + new_nonce: Nonce(0), + balance_update: (nft.id, BigUint::zero(), BigUint::from(1u32)), + }, + ), + ], + ); +} + +/// Check MINT NFT failure if recipient address does not exist +/// does not correspond to accound_id +#[test] +fn mint_already_created_nft() { + let fee_token_id = TokenId(0); + let fee = BigUint::from(10u32); + + let mut tb = PlasmaTestBuilder::new(); + + let (creator_account_id, creator_account, creator_sk) = tb.add_account(Unlocked); + tb.set_balance(creator_account_id, fee_token_id, 20u32); + + let (to_account_id, mut to_account, _to_sk) = tb.add_account(Locked); + + let nft_token_id = TokenId(MIN_NFT_TOKEN_ID); + to_account.set_balance(nft_token_id, BigUint::from(1u32)); + tb.state.insert_account(to_account_id, to_account.clone()); + let content_hash = H256::default(); + let mint_nft = MintNFT::new_signed( + creator_account_id, + creator_account.address, + content_hash, + to_account.address, + fee, + fee_token_id, + creator_account.nonce, + &creator_sk, + ) + .unwrap(); + + tb.test_tx_fail(mint_nft.into(), "NFT token is already in account") +} + +/// Check MINT NFT failure if nonce mismathced +#[test] +fn nonce_mismatched() { + let fee_token_id = TokenId(0); + let fee = BigUint::from(10u32); + + let mut tb = PlasmaTestBuilder::new(); + + let (creator_account_id, creator_account, creator_sk) = tb.add_account(Unlocked); + tb.set_balance(creator_account_id, fee_token_id, 20u32); + + let (to_account_id, mut to_account, _to_sk) = tb.add_account(Locked); + + let nft_token_id = TokenId(MIN_NFT_TOKEN_ID); + to_account.set_balance(nft_token_id, BigUint::from(1u32)); + tb.state.insert_account(to_account_id, to_account.clone()); + let content_hash = H256::default(); + let mint_nft = MintNFT::new_signed( + creator_account_id, + creator_account.address, + content_hash, + to_account.address, + fee, + fee_token_id, + 
creator_account.nonce + 1, + &creator_sk, + ) + .unwrap(); + + tb.test_tx_fail(mint_nft.into(), "Nonce mismatch") +} From 498ebc3dac4c4424839f5bd28c44b7c34f3af41c Mon Sep 17 00:00:00 2001 From: deniallugo Date: Fri, 25 Jun 2021 15:50:02 +0300 Subject: [PATCH 6/7] Fix test Signed-off-by: deniallugo --- core/lib/state/src/tests/operations/mint_nft.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/lib/state/src/tests/operations/mint_nft.rs b/core/lib/state/src/tests/operations/mint_nft.rs index 7b600b767d..9e24a9779f 100644 --- a/core/lib/state/src/tests/operations/mint_nft.rs +++ b/core/lib/state/src/tests/operations/mint_nft.rs @@ -264,7 +264,7 @@ fn mint_token_to_new_account() { new_address, fee.clone(), fee_token_id, - creator_account.nonce, + creator_account.nonce + 1, &sk, ) .unwrap(); From 5af42feda22069e0a3084cf05fd2abc1dcb51c54 Mon Sep 17 00:00:00 2001 From: deniallugo Date: Mon, 28 Jun 2021 17:26:43 +0300 Subject: [PATCH 7/7] Fix after merge Signed-off-by: deniallugo --- .github/workflows/ci.yml | 12 +- Cargo.lock | 23 + Cargo.toml | 1 + bin/api_docs | 9 + changelog/core.md | 8 + changelog/infrastructure.md | 3 +- changelog/js-sdk.md | 10 + contracts/scripts/upgrade-testnet.ts | 3 +- core/bin/data_restore/src/tests/mod.rs | 1 + core/bin/data_restore/src/tree_state.rs | 1 + core/bin/zksync_api/Cargo.toml | 1 + core/bin/zksync_api/benches/criterion/lib.rs | 40 +- .../src/api_server/event_notify/state.rs | 1 + core/bin/zksync_api/src/api_server/helpers.rs | 13 +- core/bin/zksync_api/src/api_server/mod.rs | 3 - .../{v1 => forced_exit_requests}/error.rs | 76 +- .../rest/forced_exit_requests/mod.rs | 10 +- .../rest/forced_exit_requests/v01.rs | 23 +- .../zksync_api/src/api_server/rest/helpers.rs | 4 +- .../bin/zksync_api/src/api_server/rest/mod.rs | 12 - .../src/api_server/rest/v01/api_decl.rs | 13 +- .../src/api_server/rest/v01/api_impl.rs | 6 +- .../src/api_server/rest/v02/account.rs | 641 ++++ .../src/api_server/rest/v02/block.rs | 286 
++ .../src/api_server/rest/v02/config.rs | 58 +- .../src/api_server/rest/v02/error.rs | 188 +- .../zksync_api/src/api_server/rest/v02/fee.rs | 135 + .../zksync_api/src/api_server/rest/v02/mod.rs | 39 +- .../src/api_server/rest/v02/paginate_impl.rs | 279 ++ .../src/api_server/rest/v02/paginate_trait.rs | 32 + .../src/api_server/rest/v02/response.rs | 93 +- .../src/api_server/rest/v02/status.rs | 138 + .../api_server/rest/{v1 => v02}/test_utils.rs | 150 +- .../src/api_server/rest/v02/token.rs | 413 ++ .../src/api_server/rest/v02/transaction.rs | 419 ++ .../src/api_server/rest/v1/accounts/mod.rs | 327 -- .../src/api_server/rest/v1/accounts/tests.rs | 517 --- .../src/api_server/rest/v1/accounts/types.rs | 229 -- .../src/api_server/rest/v1/blocks.rs | 286 -- .../src/api_server/rest/v1/config.rs | 96 - .../zksync_api/src/api_server/rest/v1/mod.rs | 56 - .../src/api_server/rest/v1/operations.rs | 283 -- .../src/api_server/rest/v1/search.rs | 116 - .../src/api_server/rest/v1/tokens.rs | 288 -- .../src/api_server/rest/v1/transactions.rs | 978 ----- .../src/api_server/rpc_server/rpc_impl.rs | 57 +- .../src/api_server/rpc_server/rpc_trait.rs | 19 +- .../src/api_server/rpc_server/types.rs | 43 +- .../zksync_api/src/api_server/tx_sender.rs | 42 +- .../zksync_api/src/bin/dev-ticker-server.rs | 97 +- core/bin/zksync_api/src/core_api_client.rs | 42 +- core/bin/zksync_api/src/fee_ticker/mod.rs | 4 +- core/bin/zksync_api/src/fee_ticker/tests.rs | 60 +- .../src/fee_ticker/ticker_api/coingecko.rs | 70 +- .../fee_ticker/ticker_api/coinmarkercap.rs | 9 +- .../src/fee_ticker/ticker_api/mod.rs | 4 +- .../src/utils/block_details_cache.rs | 2 +- core/bin/zksync_core/Cargo.toml | 1 + core/bin/zksync_core/src/eth_watch/mod.rs | 206 +- core/bin/zksync_core/src/eth_watch/tests.rs | 132 +- core/bin/zksync_core/src/private_api.rs | 63 +- .../bin/zksync_core/src/state_keeper/tests.rs | 1 + .../zksync_eth_sender/src/tests/test_data.rs | 1 + .../src/core_interaction_wrapper.rs | 3 +- 
.../src/tests/prover_server.rs | 1 + core/lib/api_client/Cargo.toml | 8 +- .../api_client/src/rest/{v1 => }/client.rs | 25 - .../lib/api_client/src/rest/{v1 => }/error.rs | 0 .../src/rest/forced_exit_requests/mod.rs | 3 +- core/lib/api_client/src/rest/mod.rs | 4 +- core/lib/api_client/src/rest/v02/account.rs | 59 + core/lib/api_client/src/rest/v02/block.rs | 39 + core/lib/api_client/src/rest/v02/config.rs | 10 + core/lib/api_client/src/rest/v02/fee.rs | 39 + core/lib/api_client/src/rest/v02/mod.rs | 9 + core/lib/api_client/src/rest/v02/status.rs | 10 + core/lib/api_client/src/rest/v02/token.rs | 33 + .../api_client/src/rest/v02/transaction.rs | 59 + core/lib/api_client/src/rest/v1/accounts.rs | 365 -- core/lib/api_client/src/rest/v1/blocks.rs | 77 - core/lib/api_client/src/rest/v1/config.rs | 35 - core/lib/api_client/src/rest/v1/mod.rs | 180 - core/lib/api_client/src/rest/v1/operations.rs | 123 - core/lib/api_client/src/rest/v1/search.rs | 67 - core/lib/api_client/src/rest/v1/tokens.rs | 51 - .../api_client/src/rest/v1/transactions.rs | 210 - core/lib/api_types/Cargo.toml | 25 + core/lib/api_types/src/lib.rs | 16 + core/lib/api_types/src/v02/account.rs | 36 + core/lib/api_types/src/v02/block.rs | 25 + core/lib/api_types/src/v02/fee.rs | 82 + core/lib/api_types/src/v02/mod.rs | 51 + core/lib/api_types/src/v02/pagination.rs | 139 + core/lib/api_types/src/v02/status.rs | 11 + core/lib/api_types/src/v02/token.rs | 35 + core/lib/api_types/src/v02/transaction.rs | 245 ++ core/lib/storage/Cargo.toml | 1 + .../down.sql | 5 + .../up.sql | 42 + .../down.sql | 17 + .../up.sql | 17 + core/lib/storage/sqlx-data.json | 1667 +++++--- core/lib/storage/src/chain/account/mod.rs | 66 +- core/lib/storage/src/chain/account/records.rs | 11 + .../lib/storage/src/chain/block/conversion.rs | 57 +- core/lib/storage/src/chain/block/mod.rs | 314 +- core/lib/storage/src/chain/block/records.rs | 14 +- core/lib/storage/src/chain/mempool/mod.rs | 120 +- 
core/lib/storage/src/chain/mempool/records.rs | 6 + core/lib/storage/src/chain/operations/mod.rs | 14 +- .../storage/src/chain/operations/records.rs | 8 +- .../src/chain/operations_ext/conversion.rs | 143 + .../storage/src/chain/operations_ext/mod.rs | 914 +++-- .../src/chain/operations_ext/records.rs | 31 +- core/lib/storage/src/event/mod.rs | 2 +- core/lib/storage/src/tests/chain/accounts.rs | 34 +- core/lib/storage/src/tests/chain/block.rs | 251 +- core/lib/storage/src/tests/chain/mempool.rs | 28 + .../lib/storage/src/tests/chain/operations.rs | 50 + .../src/tests/chain/operations_ext/mod.rs | 1039 ++--- .../src/tests/chain/operations_ext/setup.rs | 59 +- core/lib/storage/src/tokens/mod.rs | 136 +- core/lib/types/src/fee.rs | 24 +- core/lib/types/src/lib.rs | 2 +- core/lib/types/src/priority_ops/mod.rs | 18 + core/lib/types/src/priority_ops/tests.rs | 2 + core/lib/types/src/tests/utils.rs | 1 + .../types/src/tx/primitives/eth_signature.rs | 12 + core/lib/types/src/tx/primitives/tx_hash.rs | 18 +- core/lib/utils/src/serde_wrappers.rs | 2 +- core/tests/loadtest/src/api/data_pool.rs | 17 - core/tests/loadtest/src/api/mod.rs | 2 - .../loadtest/src/api/rest_api_v1_tests.rs | 280 -- core/tests/ts-tests/tests/api.test.ts | 149 +- core/tests/ts-tests/tests/change-pub-key.ts | 4 + core/tests/ts-tests/tests/main.test.ts | 45 +- core/tests/ts-tests/tests/misc.ts | 45 +- core/tests/ts-tests/tests/tester.ts | 11 +- core/tests/ts-tests/tests/transfer.ts | 38 +- .../ts-tests/tests/withdrawal-helpers.test.ts | 11 +- .../ts-tests/tests/withdrawal-helpers.ts | 6 +- docker-compose-runner.yml | 2 + docker-compose.yml | 2 + docker/dev-ticker/Dockerfile | 1 + docker/zk-environment/Dockerfile | 6 +- infrastructure/api-docs/.gitignore | 4 + .../api-docs/blueprint/groups/accounts.apib | 58 + .../api-docs/blueprint/groups/batches.apib | 34 + .../api-docs/blueprint/groups/blocks.apib | 55 + .../api-docs/blueprint/groups/config.apib | 13 + .../api-docs/blueprint/groups/fee.apib | 36 + 
.../api-docs/blueprint/groups/status.apib | 13 + .../api-docs/blueprint/groups/tokens.apib | 51 + .../blueprint/groups/transactions.apib | 49 + .../api-docs/blueprint/template.apib | 48 + .../api-docs/blueprint/types/accounts.apib | 12 + .../api-docs/blueprint/types/batches.apib | 15 + .../api-docs/blueprint/types/blocks.apib | 9 + .../api-docs/blueprint/types/config.apib | 6 + .../api-docs/blueprint/types/fee.apib | 35 + .../api-docs/blueprint/types/pagination.apib | 11 + .../api-docs/blueprint/types/receipt.apib | 20 + .../api-docs/blueprint/types/status.apib | 11 + .../api-docs/blueprint/types/tokens.apib | 17 + .../blueprint/types/transactions.apib | 174 + infrastructure/api-docs/dredd.yml | 32 + infrastructure/api-docs/package.json | 18 + infrastructure/api-docs/src/compile.ts | 159 + infrastructure/api-docs/src/index.ts | 40 + infrastructure/api-docs/src/utils.ts | 14 + infrastructure/api-docs/tsconfig.json | 15 + infrastructure/zk/src/api-docs.ts | 10 + infrastructure/zk/src/index.ts | 2 + infrastructure/zk/src/run/run.ts | 1 + infrastructure/zk/src/test/integration.ts | 19 + package.json | 4 +- sdk/zksync.js/src/batch-builder.ts | 12 +- sdk/zksync.js/src/index.ts | 2 + sdk/zksync.js/src/provider-interface.ts | 53 + sdk/zksync.js/src/provider.ts | 52 +- sdk/zksync.js/src/rest-provider.ts | 613 +++ sdk/zksync.js/src/signer.ts | 4 +- sdk/zksync.js/src/types.ts | 236 +- sdk/zksync.js/src/utils.ts | 6 +- sdk/zksync.js/src/wallet.ts | 44 +- yarn.lock | 3394 ++++++++--------- 186 files changed, 11962 insertions(+), 8431 deletions(-) create mode 100755 bin/api_docs rename core/bin/zksync_api/src/api_server/rest/{v1 => forced_exit_requests}/error.rs (56%) create mode 100644 core/bin/zksync_api/src/api_server/rest/v02/account.rs create mode 100644 core/bin/zksync_api/src/api_server/rest/v02/block.rs create mode 100644 core/bin/zksync_api/src/api_server/rest/v02/fee.rs create mode 100644 core/bin/zksync_api/src/api_server/rest/v02/paginate_impl.rs create mode 100644 
core/bin/zksync_api/src/api_server/rest/v02/paginate_trait.rs create mode 100644 core/bin/zksync_api/src/api_server/rest/v02/status.rs rename core/bin/zksync_api/src/api_server/rest/{v1 => v02}/test_utils.rs (80%) create mode 100644 core/bin/zksync_api/src/api_server/rest/v02/token.rs create mode 100644 core/bin/zksync_api/src/api_server/rest/v02/transaction.rs delete mode 100644 core/bin/zksync_api/src/api_server/rest/v1/accounts/mod.rs delete mode 100644 core/bin/zksync_api/src/api_server/rest/v1/blocks.rs delete mode 100644 core/bin/zksync_api/src/api_server/rest/v1/config.rs delete mode 100644 core/bin/zksync_api/src/api_server/rest/v1/mod.rs delete mode 100644 core/bin/zksync_api/src/api_server/rest/v1/operations.rs delete mode 100644 core/bin/zksync_api/src/api_server/rest/v1/search.rs delete mode 100644 core/bin/zksync_api/src/api_server/rest/v1/tokens.rs rename core/lib/api_client/src/rest/{v1 => }/client.rs (81%) rename core/lib/api_client/src/rest/{v1 => }/error.rs (100%) create mode 100644 core/lib/api_client/src/rest/v02/account.rs create mode 100644 core/lib/api_client/src/rest/v02/block.rs create mode 100644 core/lib/api_client/src/rest/v02/config.rs create mode 100644 core/lib/api_client/src/rest/v02/fee.rs create mode 100644 core/lib/api_client/src/rest/v02/mod.rs create mode 100644 core/lib/api_client/src/rest/v02/status.rs create mode 100644 core/lib/api_client/src/rest/v02/token.rs create mode 100644 core/lib/api_client/src/rest/v02/transaction.rs delete mode 100644 core/lib/api_client/src/rest/v1/blocks.rs delete mode 100644 core/lib/api_client/src/rest/v1/config.rs delete mode 100644 core/lib/api_client/src/rest/v1/mod.rs delete mode 100644 core/lib/api_client/src/rest/v1/operations.rs delete mode 100644 core/lib/api_client/src/rest/v1/search.rs delete mode 100644 core/lib/api_client/src/rest/v1/tokens.rs create mode 100644 core/lib/api_types/Cargo.toml create mode 100644 core/lib/api_types/src/lib.rs create mode 100644 
core/lib/api_types/src/v02/account.rs create mode 100644 core/lib/api_types/src/v02/block.rs create mode 100644 core/lib/api_types/src/v02/fee.rs create mode 100644 core/lib/api_types/src/v02/mod.rs create mode 100644 core/lib/api_types/src/v02/pagination.rs create mode 100644 core/lib/api_types/src/v02/status.rs create mode 100644 core/lib/api_types/src/v02/token.rs create mode 100644 core/lib/api_types/src/v02/transaction.rs create mode 100644 core/lib/storage/migrations/2021-04-12-144535_priority_ops_and_batches_hash/down.sql create mode 100644 core/lib/storage/migrations/2021-04-12-144535_priority_ops_and_batches_hash/up.sql create mode 100644 core/lib/storage/migrations/2021-06-04-114545_hash_indices_for_txs_tables/down.sql create mode 100644 core/lib/storage/migrations/2021-06-04-114545_hash_indices_for_txs_tables/up.sql create mode 100644 core/lib/storage/src/chain/operations_ext/conversion.rs delete mode 100644 core/tests/loadtest/src/api/rest_api_v1_tests.rs create mode 100755 infrastructure/api-docs/.gitignore create mode 100755 infrastructure/api-docs/blueprint/groups/accounts.apib create mode 100755 infrastructure/api-docs/blueprint/groups/batches.apib create mode 100755 infrastructure/api-docs/blueprint/groups/blocks.apib create mode 100755 infrastructure/api-docs/blueprint/groups/config.apib create mode 100755 infrastructure/api-docs/blueprint/groups/fee.apib create mode 100755 infrastructure/api-docs/blueprint/groups/status.apib create mode 100755 infrastructure/api-docs/blueprint/groups/tokens.apib create mode 100755 infrastructure/api-docs/blueprint/groups/transactions.apib create mode 100755 infrastructure/api-docs/blueprint/template.apib create mode 100755 infrastructure/api-docs/blueprint/types/accounts.apib create mode 100755 infrastructure/api-docs/blueprint/types/batches.apib create mode 100755 infrastructure/api-docs/blueprint/types/blocks.apib create mode 100755 infrastructure/api-docs/blueprint/types/config.apib create mode 100755 
infrastructure/api-docs/blueprint/types/fee.apib create mode 100644 infrastructure/api-docs/blueprint/types/pagination.apib create mode 100755 infrastructure/api-docs/blueprint/types/receipt.apib create mode 100755 infrastructure/api-docs/blueprint/types/status.apib create mode 100755 infrastructure/api-docs/blueprint/types/tokens.apib create mode 100755 infrastructure/api-docs/blueprint/types/transactions.apib create mode 100644 infrastructure/api-docs/dredd.yml create mode 100644 infrastructure/api-docs/package.json create mode 100644 infrastructure/api-docs/src/compile.ts create mode 100644 infrastructure/api-docs/src/index.ts create mode 100644 infrastructure/api-docs/src/utils.ts create mode 100644 infrastructure/api-docs/tsconfig.json create mode 100644 infrastructure/zk/src/api-docs.ts create mode 100644 sdk/zksync.js/src/provider-interface.ts create mode 100644 sdk/zksync.js/src/rest-provider.ts diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index fb408892b2..6df3dc2c34 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -65,8 +65,8 @@ jobs: ci_run zk db basic-setup ci_run zk run yarn - - name: liquidity-token - run: docker-compose -f docker-compose-runner.yml restart dev-liquidity-token-watcher + - name: restart dev-liquidity-token-watcher and dev-ticker + run: docker-compose -f docker-compose-runner.yml restart dev-liquidity-token-watcher dev-ticker - name: contracts-unit-tests run: ci_run zk test contracts @@ -102,8 +102,8 @@ jobs: ci_run zk dummy-prover enable --no-redeploy ci_run zk init - - name: liquidity-token - run: docker-compose -f docker-compose-runner.yml restart dev-liquidity-token-watcher + - name: restart dev-liquidity-token-watcher and dev-ticker + run: docker-compose -f docker-compose-runner.yml restart dev-liquidity-token-watcher dev-ticker - name: run-services run: | @@ -116,7 +116,9 @@ jobs: run: ci_run zk test i server - name: integration-api - run: ci_run zk test i api + run: | + ci_run zk test i 
api + ci_run zk test i api-docs - name: integration-zcli run: ci_run zk test i zcli diff --git a/Cargo.lock b/Cargo.lock index a70e18a4a3..83948cbbb1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6178,6 +6178,7 @@ dependencies = [ "vlog", "web3", "zksync_api_client", + "zksync_api_types", "zksync_balancer", "zksync_config", "zksync_contracts", @@ -6204,6 +6205,26 @@ dependencies = [ "serde", "serde_json", "thiserror", + "zksync_api_types", + "zksync_config", + "zksync_crypto", + "zksync_types", + "zksync_utils", +] + +[[package]] +name = "zksync_api_types" +version = "1.0.0" +dependencies = [ + "bigdecimal", + "chrono", + "either", + "hex", + "num", + "serde", + "serde_json", + "thiserror", + "zksync_config", "zksync_crypto", "zksync_types", "zksync_utils", @@ -6294,6 +6315,7 @@ dependencies = [ "tokio 0.2.22", "vlog", "web3", + "zksync_api_types", "zksync_balancer", "zksync_config", "zksync_contracts", @@ -6642,6 +6664,7 @@ dependencies = [ "thiserror", "tokio 0.2.22", "vlog", + "zksync_api_types", "zksync_basic_types", "zksync_config", "zksync_crypto", diff --git a/Cargo.toml b/Cargo.toml index 309373b66b..f716d9ed30 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -35,6 +35,7 @@ members = [ "core/lib/contracts", "core/lib/api_client", "core/lib/notifier", + "core/lib/api_types", "core/lib/balancer", # Test infrastructure diff --git a/bin/api_docs b/bin/api_docs new file mode 100755 index 0000000000..d5702463fb --- /dev/null +++ b/bin/api_docs @@ -0,0 +1,9 @@ +#!/bin/bash + +if [ -z "$1" ]; then + cd $ZKSYNC_HOME + yarn && yarn api-docs build +else + # can't start this with yarn since it has quirks with `--` as an argument + node -- $ZKSYNC_HOME/infrastructure/api-docs/build/index.js "$@" +fi diff --git a/changelog/core.md b/changelog/core.md index bfbd750fc5..31c4f03d01 100644 --- a/changelog/core.md +++ b/changelog/core.md @@ -6,6 +6,8 @@ All notable changes to the core components will be documented in this file. ### Removed +- (`api_server`): REST API v1.0. 
+ ### Changed - (`loadtest`): `zksync_fee` has been moved to `[main_wallet]` section from the `[network]` section. @@ -16,6 +18,7 @@ All notable changes to the core components will be documented in this file. - (`api_server`): Make `submit_txs_batch` send only one signature request. - Fast withdrawals now can trigger aggregated block execution. - Replaced `anyhow` errors with typed errors in `lib/state`, `lib/crypto` and `lib/types`. +- (`fee-ticker`): Batch fee now includes `zkp_fee` and `gas_fee`. ### Added @@ -30,6 +33,11 @@ All notable changes to the core components will be documented in this file. - (`api_server`): Support for accounts that don't have to pay fees (e.g. network service accounts) was added. - Added `BlockMetadata` structure and corresponding table to track block data that is not related to protocol. - (`block_revert`): CLI that calls `revertBlocks` smart contract function and updates the database respectively. +- (`api_server`): Added REST API v0.2. +- (`api_client`): Client for REST API v0.2. +- (`api_types`): Crate for storing types that are used in API. +- Added hashes for batches and additional hashes for priority operations. +- Added `ForcedExit` fee type to REST API v0.2 and JSON RPC API. ### Fixed diff --git a/changelog/infrastructure.md b/changelog/infrastructure.md index 097a50e20b..b43d83346d 100644 --- a/changelog/infrastructure.md +++ b/changelog/infrastructure.md @@ -15,8 +15,9 @@ components, the logs will have the following format: ### Added +- (`api-docs`): tool for generating and testing API documentation. Docs are generated from a bunch of .apib files where + API endpoints and their inputs/outputs are defined. - (`token_list_manager`): CLI for updating to new version of a previously saved list of trusted tokens. - - (`loadnext`): Crate, a new implementation of the loadtest for zkSync. 
### Fixed diff --git a/changelog/js-sdk.md b/changelog/js-sdk.md index 35084d03a8..ca73ec11f7 100644 --- a/changelog/js-sdk.md +++ b/changelog/js-sdk.md @@ -9,9 +9,19 @@ All notable changes to `zksync.js` will be documented in this file. - Methods for working with NFTs. You can read more [here](https://zksync.io/dev/nfts.html). - Methods for working with atomic swaps/limit orders. You can read more [here](https://zksync.io/dev/swaps.html). +- `RestProvider` class, that is used for queriing REST API v0.2. +- `SyncProvider` interface: common interface for API v0.2 `RestProvider` and JSON RPC `Provider`. +- Types for REST API v0.2. + ### Changed - `zksync-crypto` to support atomic swaps/limit orders functionality. +- Changed type of `provider` field in `Wallet` class from `Provider` to `SyncProvider`. +- `ForcedExit` fee type is used for `ForcedExit` transactions instead of `Withdraw` fee type. + +### Deprecated + +### Fixed ## Version 0.10.9 (13.04.2021) diff --git a/contracts/scripts/upgrade-testnet.ts b/contracts/scripts/upgrade-testnet.ts index 811318ef08..d4417d531a 100644 --- a/contracts/scripts/upgrade-testnet.ts +++ b/contracts/scripts/upgrade-testnet.ts @@ -26,7 +26,8 @@ async function main() { }); parser.addArgument('--initArgs', { required: false, - help: 'Upgrade function parameters comma-separated, RLP serialized in hex (Governance,Verifier,ZkSync): 0xaa..aa,0xbb..bb,0xcc..c or zero by default.', + help: + 'Upgrade function parameters comma-separated, RLP serialized in hex (Governance,Verifier,ZkSync): 0xaa..aa,0xbb..bb,0xcc..c or zero by default.', defaultValue: '0x,0x,0x' }); parser.addArgument('--cancelPreviousUpgrade', { diff --git a/core/bin/data_restore/src/tests/mod.rs b/core/bin/data_restore/src/tests/mod.rs index c626730d5a..ee484e64f2 100644 --- a/core/bin/data_restore/src/tests/mod.rs +++ b/core/bin/data_restore/src/tests/mod.rs @@ -85,6 +85,7 @@ fn create_deposit(from: Address, to: Address, amount: u32) -> ExecutedOperations deadline_block: 0, 
eth_hash: H256::zero(), eth_block: 0, + eth_block_index: None, }; let executed_deposit_op = ExecutedPriorityOp { priority_op: priority_operation, diff --git a/core/bin/data_restore/src/tree_state.rs b/core/bin/data_restore/src/tree_state.rs index 30ecb7be2d..15973a606e 100644 --- a/core/bin/data_restore/src/tree_state.rs +++ b/core/bin/data_restore/src/tree_state.rs @@ -461,6 +461,7 @@ impl TreeState { deadline_block: 0, eth_hash: H256::zero(), eth_block: 0, + eth_block_index: None, }, block_index, created_at: chrono::Utc::now(), diff --git a/core/bin/zksync_api/Cargo.toml b/core/bin/zksync_api/Cargo.toml index 32bc2cdd5a..20666de83f 100644 --- a/core/bin/zksync_api/Cargo.toml +++ b/core/bin/zksync_api/Cargo.toml @@ -25,6 +25,7 @@ zksync_contracts = { path = "../../lib/contracts", version = "1.0" } zksync_eth_client = { path = "../../lib/eth_client", version = "1.0" } zksync_eth_signer = { path = "../../lib/eth_signer", version = "1.0" } zksync_api_client = { path = "../../lib/api_client", version = "0.1" } +zksync_api_types = { path = "../../lib/api_types", version = "1.0" } zksync_prometheus_exporter = { path = "../../lib/prometheus_exporter", version = "1.0" } zksync_balancer = { path = "../../lib/balancer", version = "1.0" } zksync_gateway_watcher = { path = "../../lib/gateway_watcher", version = "1.0" } diff --git a/core/bin/zksync_api/benches/criterion/lib.rs b/core/bin/zksync_api/benches/criterion/lib.rs index 04f5b7bbcd..dfd76b9d85 100644 --- a/core/bin/zksync_api/benches/criterion/lib.rs +++ b/core/bin/zksync_api/benches/criterion/lib.rs @@ -2,41 +2,43 @@ use criterion::{criterion_group, criterion_main, Criterion}; use ethabi::Address; use reqwest::{blocking::Client, StatusCode}; -use zksync_api_client::rest::v1::{IncomingTxBatchForFee, IncomingTxForFee}; -use zksync_types::{TokenLike, TxFeeTypes}; +use zksync_api_types::v02::fee::{ + ApiTxFeeTypes, BatchFeeRequest, TxFeeRequest, TxInBatchFeeRequest, +}; +use zksync_types::{TokenId, TokenLike}; -fn 
generate_transactions(number: usize) -> IncomingTxBatchForFee { - let mut tx_types = vec![]; - let mut addresses = vec![]; +fn generate_transactions(number: usize) -> BatchFeeRequest { + let mut transactions = Vec::new(); for _ in 0..number { - tx_types.push(TxFeeTypes::Withdraw); - addresses.push(Address::random()); + transactions.push(TxInBatchFeeRequest { + tx_type: ApiTxFeeTypes::Withdraw, + address: Address::random(), + }); } - IncomingTxBatchForFee { - tx_types, - addresses, - token_like: TokenLike::Symbol("wBTC".to_string()), + BatchFeeRequest { + transactions, + token_like: TokenLike::Id(TokenId(2)), // id of wBTC on localhost } } -fn get_txs_batch_fee(client: Client, url: String, transaction: IncomingTxBatchForFee) { +fn get_txs_batch_fee(client: Client, url: String, batch_fee_request: BatchFeeRequest) { let res = client - .post(format!("{}/api/v1/transactions/fee/batch", url).as_str()) - .json(&transaction) + .post(format!("{}/api/v0.2/fee/batch", url).as_str()) + .json(&batch_fee_request) .send() .unwrap(); assert_eq!(res.status(), StatusCode::OK) } fn get_txs_fee(client: Client, url: String) { - let transaction = IncomingTxForFee { - tx_type: TxFeeTypes::Withdraw, + let transaction = TxFeeRequest { + tx_type: ApiTxFeeTypes::Withdraw, address: Address::random(), - token_like: TokenLike::Symbol("wBTC".to_string()), + token_like: TokenLike::Id(TokenId(2)), // id of wBTC on localhost }; let res = client - .post(format!("{}/api/v1/transactions/fee", url).as_str()) + .post(format!("{}/api/v0.2/fee", url).as_str()) .json(&transaction) .send() .unwrap(); @@ -47,7 +49,7 @@ fn bench_fee(c: &mut Criterion) { let url = std::env::var("API_REST_URL").unwrap(); let client = reqwest::blocking::Client::new(); let transaction = generate_transactions(100); - c.bench_function("get_txs_batch_fee_new_version", |b| { + c.bench_function("get_txs_batch_fee", |b| { b.iter(|| get_txs_batch_fee(client.clone(), url.clone(), transaction.clone())) }); c.bench_function("get_txs_fee", 
|b| { diff --git a/core/bin/zksync_api/src/api_server/event_notify/state.rs b/core/bin/zksync_api/src/api_server/event_notify/state.rs index 9d5303b098..a1b9dac3a2 100644 --- a/core/bin/zksync_api/src/api_server/event_notify/state.rs +++ b/core/bin/zksync_api/src/api_server/event_notify/state.rs @@ -204,6 +204,7 @@ impl NotifierState { .account_schema() .last_committed_state_for_account(id) .await? + .1 } ActionType::VERIFY => { storage diff --git a/core/bin/zksync_api/src/api_server/helpers.rs b/core/bin/zksync_api/src/api_server/helpers.rs index af8958f58e..757c7b21d8 100644 --- a/core/bin/zksync_api/src/api_server/helpers.rs +++ b/core/bin/zksync_api/src/api_server/helpers.rs @@ -5,7 +5,7 @@ // External uses // Workspace uses -use zksync_types::{tx::TxHash, H256}; +use zksync_types::H256; use zksync_utils::remove_prefix; // Local uses @@ -20,14 +20,3 @@ pub fn try_parse_hash(query: &str) -> Result { Ok(H256::from_slice(&slice)) } - -pub fn try_parse_tx_hash(query: &str) -> Result { - const HASH_SIZE: usize = 32; // 32 bytes - - let mut slice = [0_u8; HASH_SIZE]; - - let tx_hex = remove_prefix(query); - hex::decode_to_slice(&tx_hex, &mut slice)?; - - Ok(TxHash::from_slice(&slice).unwrap()) -} diff --git a/core/bin/zksync_api/src/api_server/mod.rs b/core/bin/zksync_api/src/api_server/mod.rs index 0bfefdd530..b42c4a95ad 100644 --- a/core/bin/zksync_api/src/api_server/mod.rs +++ b/core/bin/zksync_api/src/api_server/mod.rs @@ -4,9 +4,6 @@ //! `mod rpc_server` - JSON rpc via HTTP (for request reply functions) //! 
`mod rpc_subscriptions` - JSON rpc via WebSocket (for request reply functions and subscriptions) -// Public uses -pub use rest::v1; - // External uses use futures::channel::mpsc; // Workspace uses diff --git a/core/bin/zksync_api/src/api_server/rest/v1/error.rs b/core/bin/zksync_api/src/api_server/rest/forced_exit_requests/error.rs similarity index 56% rename from core/bin/zksync_api/src/api_server/rest/v1/error.rs rename to core/bin/zksync_api/src/api_server/rest/forced_exit_requests/error.rs index 3379a3da9d..300cbedd1e 100644 --- a/core/bin/zksync_api/src/api_server/rest/v1/error.rs +++ b/core/bin/zksync_api/src/api_server/rest/forced_exit_requests/error.rs @@ -6,20 +6,20 @@ use actix_web::{dev::Body, http::HeaderValue, HttpResponse, ResponseError}; use reqwest::{header::CONTENT_TYPE, StatusCode}; // Workspace uses -pub use zksync_api_client::rest::v1::ErrorBody; - +use zksync_api_client::rest::error::ErrorBody; // Local uses +use crate::api_server::tx_sender::SubmitError; /// An HTTP error structure. #[derive(Debug)] -pub struct Error { +pub struct ApiError { /// HTTP error code. pub http_code: StatusCode, /// HTTP error content serialized into JSON. pub body: ErrorBody, } -impl Error { +impl ApiError { /// Creates a new Error with the BAD_REQUEST (400) status code. pub fn bad_request(title: impl Display) -> Self { Self::with_code(StatusCode::BAD_REQUEST, title) @@ -30,11 +30,6 @@ impl Error { Self::with_code(StatusCode::INTERNAL_SERVER_ERROR, title) } - /// Creates a new Error with the NOT_IMPLEMENTED (501) status code. - pub fn not_implemented(title: impl Display) -> Self { - Self::with_code(StatusCode::NOT_IMPLEMENTED, title) - } - /// Creates a new Error with the NOT_FOUND (404) status code. pub fn not_found(title: impl Display) -> Self { Self::with_code(StatusCode::NOT_FOUND, title) @@ -50,18 +45,6 @@ impl Error { } } - /// Sets error title. 
- pub fn title(mut self, title: impl Display) -> Self { - self.body.title = title.to_string(); - self - } - - /// Sets error details. - pub fn detail(mut self, detail: impl Display) -> Self { - self.body.detail = detail.to_string(); - self - } - /// Sets error specific code. pub fn code(mut self, code: u64) -> Self { self.body.code = Some(code); @@ -69,13 +52,13 @@ impl Error { } } -impl Display for Error { +impl Display for ApiError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{} ({})", self.body, self.http_code) } } -impl ResponseError for Error { +impl ResponseError for ApiError { fn status_code(&self) -> reqwest::StatusCode { self.http_code } @@ -94,3 +77,50 @@ impl ResponseError for Error { } } } + +#[derive(Debug, Clone, Copy)] +pub enum SumbitErrorCode { + AccountCloseDisabled = 101, + InvalidParams = 102, + UnsupportedFastProcessing = 103, + IncorrectTx = 104, + TxAdd = 105, + InappropriateFeeToken = 106, + + Internal = 110, + CommunicationCoreServer = 111, + Other = 112, +} + +impl SumbitErrorCode { + fn from_err(err: &SubmitError) -> Self { + match err { + SubmitError::AccountCloseDisabled => Self::AccountCloseDisabled, + SubmitError::InvalidParams(_) => Self::InvalidParams, + SubmitError::UnsupportedFastProcessing => Self::UnsupportedFastProcessing, + SubmitError::IncorrectTx(_) => Self::IncorrectTx, + SubmitError::TxAdd(_) => Self::TxAdd, + SubmitError::InappropriateFeeToken => Self::InappropriateFeeToken, + SubmitError::CommunicationCoreServer(_) => Self::CommunicationCoreServer, + SubmitError::Internal(_) => Self::Internal, + SubmitError::Other(_) => Self::Other, + } + } + + fn as_code(self) -> u64 { + self as u64 + } +} + +impl From for ApiError { + fn from(inner: SubmitError) -> Self { + let internal_code = SumbitErrorCode::from_err(&inner).as_code(); + + if let SubmitError::Internal(err) = &inner { + ApiError::internal(err) + } else { + ApiError::bad_request(inner) + } + .code(internal_code) + } +} diff --git 
a/core/bin/zksync_api/src/api_server/rest/forced_exit_requests/mod.rs b/core/bin/zksync_api/src/api_server/rest/forced_exit_requests/mod.rs index 73a3475309..6bb32ef44e 100644 --- a/core/bin/zksync_api/src/api_server/rest/forced_exit_requests/mod.rs +++ b/core/bin/zksync_api/src/api_server/rest/forced_exit_requests/mod.rs @@ -2,17 +2,19 @@ use actix_web::{web, Scope}; // Workspace uses -pub use zksync_api_client::rest::v1::{ - Client, ClientError, FastProcessingQuery, IncomingTx, IncomingTxBatch, Pagination, - PaginationQuery, Receipt, TxData, MAX_LIMIT, -}; +pub use zksync_api_client::rest::client::{Client, ClientError}; use zksync_config::ZkSyncConfig; use zksync_storage::ConnectionPool; // Local uses use crate::api_server::forced_exit_checker::ForcedExitChecker; +use error::ApiError; + +mod error; mod v01; +pub type JsonResult = std::result::Result, ApiError>; + pub(crate) fn api_scope(connection_pool: ConnectionPool, config: &ZkSyncConfig) -> Scope { let fe_age_checker = ForcedExitChecker::new(&config); web::scope("/api/forced_exit_requests").service(v01::api_scope( diff --git a/core/bin/zksync_api/src/api_server/rest/forced_exit_requests/v01.rs b/core/bin/zksync_api/src/api_server/rest/forced_exit_requests/v01.rs index ef999d31d5..5f5dda76ba 100644 --- a/core/bin/zksync_api/src/api_server/rest/forced_exit_requests/v01.rs +++ b/core/bin/zksync_api/src/api_server/rest/forced_exit_requests/v01.rs @@ -17,9 +17,6 @@ use std::{convert::TryInto, ops::Add}; pub use zksync_api_client::rest::forced_exit_requests::{ ForcedExitRegisterRequest, ForcedExitRequestStatus, }; -pub use zksync_api_client::rest::v1::{ - FastProcessingQuery, IncomingTx, IncomingTxBatch, Receipt, TxData, -}; use zksync_api_client::rest::forced_exit_requests::ConfigInfo; use zksync_config::ZkSyncConfig; @@ -33,8 +30,7 @@ use zksync_types::{ }; // Local uses -use crate::api_server::rest::v1::{Error as ApiError, JsonResult}; - +use super::{error::ApiError, JsonResult}; use 
crate::api_server::forced_exit_checker::ForcedExitAccountAgeChecker; /// Shared data between `/api/forced_exit_requests/v0.1/` endpoints. @@ -255,14 +251,16 @@ mod tests { use num::BigUint; - use zksync_api_client::rest::v1::Client; + use zksync_api_client::rest::client::Client; use zksync_config::ForcedExitRequestsConfig; use zksync_storage::ConnectionPool; use zksync_types::{Address, TokenId}; use super::*; - use crate::api_server::forced_exit_checker::DummyForcedExitChecker; - use crate::api_server::v1::test_utils::TestServerConfig; + use crate::api_server::{ + forced_exit_checker::DummyForcedExitChecker, + rest::v02::{test_utils::TestServerConfig, SharedData}, + }; struct TestServer { api_server: actix_web::test::TestServer, @@ -274,14 +272,17 @@ mod tests { async fn from_config(cfg: TestServerConfig) -> anyhow::Result<(Client, Self)> { let pool = cfg.pool.clone(); - let (api_client, api_server) = - cfg.start_server_with_scope(String::from("api/forced_exit_requests"), move |cfg| { + let (api_client, api_server) = cfg.start_server_with_scope( + String::from("api/forced_exit_requests"), + move |cfg| { api_scope( cfg.pool.clone(), &cfg.config, Box::new(DummyForcedExitChecker {}), ) - }); + }, + Option::::None, + ); Ok((api_client, Self { api_server, pool })) } diff --git a/core/bin/zksync_api/src/api_server/rest/helpers.rs b/core/bin/zksync_api/src/api_server/rest/helpers.rs index 7e1a62a122..c553c3daf7 100644 --- a/core/bin/zksync_api/src/api_server/rest/helpers.rs +++ b/core/bin/zksync_api/src/api_server/rest/helpers.rs @@ -1,6 +1,5 @@ //! Utilities for the REST API. 
-use crate::core_api_client::EthBlockId; use actix_web::{HttpResponse, Result as ActixResult}; use std::collections::HashMap; use zksync_storage::chain::{ @@ -29,7 +28,6 @@ pub fn block_verified(block: &StorageBlockDetails) -> bool { pub fn deposit_op_to_tx_by_hash( tokens: &HashMap, op: &PriorityOp, - eth_block: EthBlockId, ) -> Option { match &op.data { ZkSyncPriorityOp::Deposit(deposit) => { @@ -54,7 +52,7 @@ pub fn deposit_op_to_tx_by_hash( "token": token_symbol }, "type": "Deposit", - "eth_block_number": eth_block, + "eth_block_number": op.eth_block, }); Some(TxByHashResponse { diff --git a/core/bin/zksync_api/src/api_server/rest/mod.rs b/core/bin/zksync_api/src/api_server/rest/mod.rs index fb86a46a26..0c4888c704 100644 --- a/core/bin/zksync_api/src/api_server/rest/mod.rs +++ b/core/bin/zksync_api/src/api_server/rest/mod.rs @@ -17,7 +17,6 @@ mod forced_exit_requests; mod helpers; mod v01; pub mod v02; -pub mod v1; async fn start_server( api_v01: ApiV01, @@ -28,16 +27,6 @@ async fn start_server( HttpServer::new(move || { let api_v01 = api_v01.clone(); - let api_v1_scope = { - let tx_sender = TxSender::new( - api_v01.connection_pool.clone(), - sign_verifier.clone(), - fee_ticker.clone(), - &api_v01.config, - ); - v1::api_scope(tx_sender, &api_v01.config) - }; - let forced_exit_requests_api_scope = forced_exit_requests::api_scope(api_v01.connection_pool.clone(), &api_v01.config); @@ -54,7 +43,6 @@ async fn start_server( .wrap(Cors::new().send_wildcard().max_age(3600).finish()) .wrap(vlog::actix_middleware()) .service(api_v01.into_scope()) - .service(api_v1_scope) .service(forced_exit_requests_api_scope) .service(api_v02_scope) // Endpoint needed for js isReachable diff --git a/core/bin/zksync_api/src/api_server/rest/v01/api_decl.rs b/core/bin/zksync_api/src/api_server/rest/v01/api_decl.rs index 6471c48c88..2b0df4f9a5 100644 --- a/core/bin/zksync_api/src/api_server/rest/v01/api_decl.rs +++ b/core/bin/zksync_api/src/api_server/rest/v01/api_decl.rs @@ -5,10 +5,11 @@ 
use crate::{ helpers::*, v01::{caches::Caches, network_status::SharedNetworkStatus}, }, - core_api_client::{CoreApiClient, EthBlockId}, + core_api_client::CoreApiClient, }; use actix_web::{web, HttpResponse, Result as ActixResult}; use futures::channel::mpsc; +use zksync_api_types::PriorityOpLookupQuery; use zksync_config::ZkSyncConfig; use zksync_storage::{ chain::{ @@ -201,7 +202,7 @@ impl ApiV01 { if let Ok(block_details) = transaction .chain() .block_schema() - .load_block_range(block_id, 1) + .load_block_range_desc(block_id, 1) .await { // Unverified blocks can still change, so we can't cache them. @@ -228,7 +229,7 @@ impl ApiV01 { let mut blocks = storage .chain() .block_schema() - .load_block_range(block_id, 1) + .load_block_range_desc(block_id, 1) .await .map_err(|err| { vlog::warn!("Internal Server Error: '{}'; input: {}", err, *block_id); @@ -274,7 +275,9 @@ impl ApiV01 { pub(crate) async fn get_unconfirmed_op_by_hash( &self, eth_tx_hash: H256, - ) -> Result, anyhow::Error> { - self.api_client.get_unconfirmed_op(eth_tx_hash).await + ) -> Result, anyhow::Error> { + self.api_client + .get_unconfirmed_op(PriorityOpLookupQuery::ByEthHash(eth_tx_hash)) + .await } } diff --git a/core/bin/zksync_api/src/api_server/rest/v01/api_impl.rs b/core/bin/zksync_api/src/api_server/rest/v01/api_impl.rs index 78437cd90c..30370af0ef 100644 --- a/core/bin/zksync_api/src/api_server/rest/v01/api_impl.rs +++ b/core/bin/zksync_api/src/api_server/rest/v01/api_impl.rs @@ -379,7 +379,7 @@ impl ApiV01 { // If eth watcher has a priority op with given hash, transform it // to TxByHashResponse and assign it to res. - if let Some((eth_block, priority_op)) = unconfirmed_op { + if let Some(priority_op) = unconfirmed_op { let tokens = self_ .access_storage() .await? 
@@ -391,7 +391,7 @@ impl ApiV01 { HttpResponse::InternalServerError().finish() })?; - res = deposit_op_to_tx_by_hash(&tokens, &priority_op, eth_block); + res = deposit_op_to_tx_by_hash(&tokens, &priority_op); } metrics::histogram!("api.v01.tx_by_hash", start.elapsed()); @@ -441,7 +441,7 @@ impl ApiV01 { let resp = storage .chain() .block_schema() - .load_block_range(BlockNumber(max_block), limit) + .load_block_range_desc(BlockNumber(max_block), limit) .await .map_err(|err| { vlog::warn!( diff --git a/core/bin/zksync_api/src/api_server/rest/v02/account.rs b/core/bin/zksync_api/src/api_server/rest/v02/account.rs new file mode 100644 index 0000000000..5cb9e0b3e6 --- /dev/null +++ b/core/bin/zksync_api/src/api_server/rest/v02/account.rs @@ -0,0 +1,641 @@ +//! Account part of API implementation. + +// Built-in uses +use std::collections::BTreeMap; +use std::str::FromStr; + +// External uses +use actix_web::{web, Scope}; + +// Workspace uses +use zksync_api_types::v02::{ + account::{Account, AccountAddressOrId, AccountState}, + pagination::{ + parse_query, AccountTxsRequest, ApiEither, Paginated, PaginationQuery, PendingOpsRequest, + }, + transaction::{Transaction, TxHashSerializeWrapper}, +}; +use zksync_storage::{ConnectionPool, StorageProcessor}; +use zksync_types::{tx::TxHash, AccountId, Address, BlockNumber, SerialId}; + +// Local uses +use super::{ + error::{Error, InvalidDataError}, + paginate_trait::Paginate, + response::ApiResult, +}; +use crate::{ + api_try, core_api_client::CoreApiClient, fee_ticker::PriceError, + utils::token_db_cache::TokenDBCache, +}; + +/// Shared data between `api/v02/accounts` endpoints. 
+#[derive(Clone)] +struct ApiAccountData { + pool: ConnectionPool, + tokens: TokenDBCache, + core_api_client: CoreApiClient, +} + +impl ApiAccountData { + fn new(pool: ConnectionPool, tokens: TokenDBCache, core_api_client: CoreApiClient) -> Self { + Self { + pool, + tokens, + core_api_client, + } + } + + async fn get_id_by_address_or_id( + &self, + account_address_or_id: AccountAddressOrId, + ) -> Result, Error> { + match account_address_or_id { + AccountAddressOrId::Id(account_id) => Ok(Some(account_id)), + AccountAddressOrId::Address(address) => { + let mut storage = self.pool.access_storage().await.map_err(Error::storage)?; + let account_id = storage + .chain() + .account_schema() + .account_id_by_address(address) + .await + .map_err(Error::storage)?; + Ok(account_id) + } + } + } + + async fn get_address_by_address_or_id( + &self, + account_address_or_id: AccountAddressOrId, + ) -> Result { + match account_address_or_id { + AccountAddressOrId::Id(account_id) => { + let mut storage = self.pool.access_storage().await.map_err(Error::storage)?; + let address = storage + .chain() + .account_schema() + .account_address_by_id(account_id) + .await + .map_err(Error::storage)?; + address.ok_or_else(|| Error::from(InvalidDataError::AccountNotFound)) + } + AccountAddressOrId::Address(address) => Ok(address), + } + } + + fn parse_account_id_or_address( + &self, + account_address_or_id: &str, + ) -> Result { + if let Ok(account_id) = u32::from_str(account_address_or_id) { + Ok(AccountAddressOrId::Id(AccountId(account_id))) + } else { + let address_str = if let Some(address_str) = account_address_or_id.strip_prefix("0x") { + address_str + } else { + account_address_or_id + }; + + if let Ok(address) = Address::from_str(address_str) { + Ok(AccountAddressOrId::Address(address)) + } else { + Err(Error::from(InvalidDataError::InvalidAccountIdOrAddress)) + } + } + } + + async fn api_account( + &self, + account: zksync_types::Account, + account_id: AccountId, + last_update_in_block: 
BlockNumber, + storage: &mut StorageProcessor<'_>, + ) -> Result { + let mut balances = BTreeMap::new(); + for (token_id, balance) in account.get_nonzero_balances() { + let token_symbol = self + .tokens + .token_symbol(storage, token_id) + .await + .map_err(Error::storage)? + .ok_or_else(|| Error::from(PriceError::token_not_found(token_id)))?; + + balances.insert(token_symbol, balance); + } + let account_type = storage + .chain() + .account_schema() + .account_type_by_id(account_id) + .await + .map_err(Error::storage)? + .map(|t| t.into()); + Ok(Account { + account_id, + address: account.address, + nonce: account.nonce, + pub_key_hash: account.pub_key_hash, + last_update_in_block, + balances, + account_type, + }) + } + + async fn account_committed_info( + &self, + account_id: AccountId, + ) -> Result, Error> { + let mut storage = self.pool.access_storage().await.map_err(Error::storage)?; + let mut transaction = storage.start_transaction().await.map_err(Error::storage)?; + let account = transaction + .chain() + .account_schema() + .last_committed_state_for_account(account_id) + .await + .map_err(Error::storage)? 
+ .1; + let result = if let Some(account) = account { + let last_block = transaction + .chain() + .account_schema() + .last_committed_block_with_update_for_acc(account_id) + .await + .map_err(Error::storage)?; + Ok(Some( + self.api_account(account, account_id, last_block, &mut transaction) + .await?, + )) + } else { + Ok(None) + }; + transaction.commit().await.map_err(Error::storage)?; + result + } + + async fn account_finalized_info( + &self, + account_id: AccountId, + ) -> Result, Error> { + let mut storage = self.pool.access_storage().await.map_err(Error::storage)?; + let mut transaction = storage.start_transaction().await.map_err(Error::storage)?; + let (last_block, account) = transaction + .chain() + .account_schema() + .account_and_last_block(account_id) + .await + .map_err(Error::storage)?; + let result = if let Some(account) = account { + Ok(Some( + self.api_account( + account, + account_id, + BlockNumber(last_block as u32), + &mut transaction, + ) + .await?, + )) + } else { + Ok(None) + }; + transaction.commit().await.map_err(Error::storage)?; + result + } + + async fn account_full_info(&self, account_id: AccountId) -> Result { + let mut storage = self.pool.access_storage().await.map_err(Error::storage)?; + let mut transaction = storage.start_transaction().await.map_err(Error::storage)?; + let (finalized_state, committed_state) = transaction + .chain() + .account_schema() + .last_committed_state_for_account(account_id) + .await + .map_err(Error::storage)?; + let finalized = if let Some(account) = finalized_state.1 { + Some( + self.api_account( + account, + account_id, + BlockNumber(finalized_state.0 as u32), + &mut transaction, + ) + .await?, + ) + } else { + None + }; + let committed = if let Some(account) = committed_state { + let last_block = transaction + .chain() + .account_schema() + .last_committed_block_with_update_for_acc(account_id) + .await + .map_err(Error::storage)?; + Some( + self.api_account(account, account_id, last_block, &mut transaction) 
+ .await?, + ) + } else { + None + }; + transaction.commit().await.map_err(Error::storage)?; + Ok(AccountState { + committed, + finalized, + }) + } + + async fn account_txs( + &self, + query: PaginationQuery>, + address: Address, + ) -> Result, Error> { + let mut storage = self.pool.access_storage().await.map_err(Error::storage)?; + let new_query = PaginationQuery { + from: AccountTxsRequest { + address, + tx_hash: query.from, + }, + limit: query.limit, + direction: query.direction, + }; + storage.paginate_checked(&new_query).await + } + + /// Pending deposits can be matched only with addresses, + /// while pending full exits can be matched only with account ids. + /// If the account isn't created yet it doesn't have an id + /// but we can still find pending deposits for its address that is why account_id is Option. + async fn account_pending_txs( + &self, + query: PaginationQuery>, + address: Address, + account_id: Option, + ) -> Result, Error> { + let new_query = PaginationQuery { + from: PendingOpsRequest { + address, + account_id, + serial_id: query.from, + }, + limit: query.limit, + direction: query.direction, + }; + let mut client = self.core_api_client.clone(); + client.paginate_checked(&new_query).await + } +} + +async fn account_committed_info( + data: web::Data, + web::Path(account_id_or_address): web::Path, +) -> ApiResult> { + let address_or_id = api_try!(data.parse_account_id_or_address(&account_id_or_address)); + let account_id = api_try!(data.get_id_by_address_or_id(address_or_id).await); + if let Some(account_id) = account_id { + data.account_committed_info(account_id).await.into() + } else { + ApiResult::Ok(None) + } +} + +async fn account_finalized_info( + data: web::Data, + web::Path(account_id_or_address): web::Path, +) -> ApiResult> { + let address_or_id = api_try!(data.parse_account_id_or_address(&account_id_or_address)); + let account_id = api_try!(data.get_id_by_address_or_id(address_or_id).await); + if let Some(account_id) = account_id { + 
data.account_finalized_info(account_id).await.into() + } else { + ApiResult::Ok(None) + } +} + +async fn account_full_info( + data: web::Data, + web::Path(account_id_or_address): web::Path, +) -> ApiResult { + let address_or_id = api_try!(data.parse_account_id_or_address(&account_id_or_address)); + let account_id = api_try!(data.get_id_by_address_or_id(address_or_id).await); + if let Some(account_id) = account_id { + data.account_full_info(account_id).await.into() + } else { + ApiResult::Ok(AccountState { + committed: None, + finalized: None, + }) + } +} + +async fn account_txs( + data: web::Data, + web::Path(account_id_or_address): web::Path, + web::Query(query): web::Query>, +) -> ApiResult> { + let query = api_try!(parse_query(query).map_err(Error::from)); + let address_or_id = api_try!(data.parse_account_id_or_address(&account_id_or_address)); + let address = api_try!(data.get_address_by_address_or_id(address_or_id).await); + data.account_txs(query, address).await.into() +} + +async fn account_pending_txs( + data: web::Data, + web::Path(account_id_or_address): web::Path, + web::Query(query): web::Query>, +) -> ApiResult> { + let query = api_try!(parse_query(query).map_err(Error::from)); + let address_or_id = api_try!(data.parse_account_id_or_address(&account_id_or_address)); + let address = api_try!( + data.get_address_by_address_or_id(address_or_id.clone()) + .await + ); + let account_id = api_try!(data.get_id_by_address_or_id(address_or_id).await); + data.account_pending_txs(query, address, account_id) + .await + .into() +} + +pub fn api_scope( + pool: ConnectionPool, + tokens: TokenDBCache, + core_api_client: CoreApiClient, +) -> Scope { + let data = ApiAccountData::new(pool, tokens, core_api_client); + + web::scope("accounts") + .data(data) + .route( + "{account_id_or_address}/committed", + web::get().to(account_committed_info), + ) + .route( + "{account_id_or_address}/finalized", + web::get().to(account_finalized_info), + ) + 
.route("{account_id_or_address}", web::get().to(account_full_info)) + .route( + "{account_id_or_address}/transactions", + web::get().to(account_txs), + ) + .route( + "{account_id_or_address}/transactions/pending", + web::get().to(account_pending_txs), + ) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::api_server::rest::v02::{ + test_utils::{deserialize_response_result, TestServerConfig}, + SharedData, + }; + use actix_web::{web::Json, App}; + use chrono::Utc; + use serde::Deserialize; + use serde_json::json; + use std::sync::Arc; + use tokio::sync::Mutex; + use zksync_api_client::rest::client::Client; + use zksync_api_types::v02::{ + pagination::{PaginationDirection, PaginationQuery, PendingOpsRequest}, + transaction::{L1Transaction, TransactionData}, + ApiVersion, + }; + use zksync_storage::StorageProcessor; + use zksync_types::{AccountId, Address, H256}; + + type PendingOpsHandle = Arc>; + + fn create_pending_ops_handle() -> PendingOpsHandle { + Arc::new(Mutex::new(json!({ + "list": [], + "pagination": { + "from": 1, + "limit": 1, + "direction": "newer", + "count": 0 + } + }))) + } + + #[derive(Debug, Deserialize)] + struct PendingOpsFlattenRequest { + pub address: Address, + pub account_id: Option, + pub serial_id: String, + pub limit: u32, + pub direction: PaginationDirection, + } + + fn get_unconfirmed_ops_loopback( + ops_handle: PendingOpsHandle, + ) -> (CoreApiClient, actix_web::test::TestServer) { + async fn get_ops( + data: web::Data, + web::Query(_query): web::Query, + ) -> Json { + Json(data.lock().await.clone()) + } + + let server = actix_web::test::start(move || { + App::new().service( + web::scope("unconfirmed_ops") + .data(ops_handle.clone()) + .route("", web::get().to(get_ops)), + ) + }); + + let url = server.url("").trim_end_matches('/').to_owned(); + (CoreApiClient::new(url), server) + } + + struct TestServer { + core_server: actix_web::test::TestServer, + api_server: actix_web::test::TestServer, + pool: ConnectionPool, + 
pending_ops: PendingOpsHandle, + } + + impl TestServer { + async fn new() -> anyhow::Result<(Client, Self)> { + let cfg = TestServerConfig::default(); + cfg.fill_database().await?; + + let pending_ops = create_pending_ops_handle(); + let (core_client, core_server) = get_unconfirmed_ops_loopback(pending_ops.clone()); + + let pool = cfg.pool.clone(); + + let shared_data = SharedData { + net: cfg.config.chain.eth.network, + api_version: ApiVersion::V02, + }; + let (api_client, api_server) = cfg.start_server( + move |cfg: &TestServerConfig| { + api_scope(cfg.pool.clone(), TokenDBCache::new(), core_client.clone()) + }, + Some(shared_data), + ); + + Ok(( + api_client, + Self { + core_server, + api_server, + pool, + pending_ops, + }, + )) + } + + async fn account_id_and_tx_hash( + storage: &mut StorageProcessor<'_>, + block: BlockNumber, + ) -> anyhow::Result<(AccountId, TxHash)> { + let transactions = storage + .chain() + .block_schema() + .get_block_transactions(block) + .await?; + + let tx = &transactions[0]; + let op = tx.op.as_object().unwrap(); + + let id = serde_json::from_value(op["accountId"].clone()).unwrap(); + Ok((id, TxHash::from_str(&tx.tx_hash).unwrap())) + } + + async fn stop(self) { + self.api_server.stop().await; + self.core_server.stop().await; + } + } + + #[actix_rt::test] + #[cfg_attr( + not(feature = "api_test"), + ignore = "Use `zk test rust-api` command to perform this test" + )] + async fn unconfirmed_deposits_loopback() -> anyhow::Result<()> { + let (client, server) = get_unconfirmed_ops_loopback(create_pending_ops_handle()); + client + .get_unconfirmed_ops(&PaginationQuery { + from: PendingOpsRequest { + address: Address::default(), + account_id: Some(AccountId::default()), + serial_id: ApiEither::from(0), + }, + limit: 0, + direction: PaginationDirection::Newer, + }) + .await?; + + server.stop().await; + Ok(()) + } + + #[actix_rt::test] + #[cfg_attr( + not(feature = "api_test"), + ignore = "Use `zk test rust-api` command to perform this test" + 
)] + async fn accounts_scope() -> anyhow::Result<()> { + let (client, server) = TestServer::new().await?; + + // Get account information. + let (account_id, tx_hash) = TestServer::account_id_and_tx_hash( + &mut server.pool.access_storage().await?, + BlockNumber(1), + ) + .await?; + + let response = client + .account_info(&account_id.to_string(), "committed") + .await?; + let account_committed_info_by_id: Account = deserialize_response_result(response)?; + + let address = account_committed_info_by_id.address; + let response = client + .account_info(&format!("{:?}", address), "committed") + .await?; + let account_committed_info_by_address: Account = deserialize_response_result(response)?; + assert_eq!( + account_committed_info_by_id, + account_committed_info_by_address + ); + + let response = client + .account_info(&format!("{:?}", address), "finalized") + .await?; + let account_finalized_info: Option = deserialize_response_result(response)?; + + let response = client.account_full_info(&format!("{:?}", address)).await?; + let account_full_info: AccountState = deserialize_response_result(response)?; + assert_eq!( + account_full_info.committed, + Some(account_committed_info_by_id) + ); + assert_eq!(account_full_info.finalized, account_finalized_info); + + let query = PaginationQuery { + from: ApiEither::from(tx_hash), + limit: 1, + direction: PaginationDirection::Newer, + }; + let response = client.account_txs(&query, &account_id.to_string()).await?; + let txs: Paginated = deserialize_response_result(response)?; + assert_eq!(txs.list[0].tx_hash, tx_hash); + // Provide unconfirmed pending ops. 
+ *server.pending_ops.lock().await = json!({ + "list": [ + { + "txHash": TxHash::from_slice(&[0u8; 32]), + "blockNumber": Option::::None, + "op": { + "type": "Deposit", + "from": Address::default(), + "tokenId": 0, + "amount": "100500", + "to": address, + "accountId": Option::::None, + "ethHash": H256::from_slice(&[0u8; 32]), + "id": 10, + "txHash": TxHash::from_slice(&[0u8; 32]) + }, + "status": "queued", + "failReason": Option::::None, + "createdAt": Utc::now() + } + ], + "pagination": { + "from": 1, + "limit": 1, + "count": 1, + "direction": "newer" + } + }); + + let query = PaginationQuery { + from: ApiEither::from(1), + limit: 1, + direction: PaginationDirection::Newer, + }; + let response = client + .account_pending_txs(&query, &account_id.to_string()) + .await?; + let txs: Paginated = deserialize_response_result(response)?; + match &txs.list[0].op { + TransactionData::L1(tx) => match tx { + L1Transaction::Deposit(deposit) => { + assert_eq!(deposit.id, 10); + } + _ => panic!("should return deposit"), + }, + _ => panic!("account_pending_txs returned L2 tx"), + } + + server.stop().await; + Ok(()) + } +} diff --git a/core/bin/zksync_api/src/api_server/rest/v02/block.rs b/core/bin/zksync_api/src/api_server/rest/v02/block.rs new file mode 100644 index 0000000000..10a844abbf --- /dev/null +++ b/core/bin/zksync_api/src/api_server/rest/v02/block.rs @@ -0,0 +1,286 @@ +//! Block part of API implementation. 
+ +// Built-in uses +use std::str::FromStr; + +// External uses +use actix_web::{web, Scope}; + +// Workspace uses +use zksync_api_types::v02::{ + block::{BlockInfo, BlockStatus}, + pagination::{parse_query, ApiEither, BlockAndTxHash, Paginated, PaginationQuery}, + transaction::{Transaction, TxHashSerializeWrapper}, +}; +use zksync_crypto::{convert::FeConvert, Fr}; +use zksync_storage::{chain::block::records::StorageBlockDetails, ConnectionPool, QueryResult}; +use zksync_types::{tx::TxHash, BlockNumber, H256}; + +// Local uses +use super::{ + error::{Error, InvalidDataError}, + paginate_trait::Paginate, + response::ApiResult, +}; +use crate::{api_try, utils::block_details_cache::BlockDetailsCache}; + +pub fn block_info_from_details(details: StorageBlockDetails) -> BlockInfo { + let status = if details.is_verified() { + BlockStatus::Finalized + } else { + BlockStatus::Committed + }; + BlockInfo { + block_number: BlockNumber(details.block_number as u32), + new_state_root: Fr::from_bytes(&details.new_state_root).unwrap_or_else(|err| { + panic!( + "Database provided an incorrect new_state_root field: {:?}, an error occurred {}", + details.new_state_root, err + ) + }), + block_size: details.block_size as u64, + commit_tx_hash: details.commit_tx_hash.map(|bytes| H256::from_slice(&bytes)), + verify_tx_hash: details.verify_tx_hash.map(|bytes| H256::from_slice(&bytes)), + committed_at: details.committed_at, + finalized_at: details.verified_at, + status, + } +} + +/// Shared data between `api/v0.2/blocks` endpoints. +#[derive(Debug, Clone)] +struct ApiBlockData { + pool: ConnectionPool, + verified_blocks_cache: BlockDetailsCache, +} + +impl ApiBlockData { + fn new(pool: ConnectionPool, verified_blocks_cache: BlockDetailsCache) -> Self { + Self { + pool, + verified_blocks_cache, + } + } + + /// Returns information about block with the specified number. + /// + /// This method caches some of the verified blocks. 
+ async fn block_info(&self, block_number: BlockNumber) -> Result, Error> { + let details = self + .verified_blocks_cache + .get(&self.pool, block_number) + .await + .map_err(Error::storage)?; + if let Some(details) = details { + Ok(Some(block_info_from_details(details))) + } else { + Ok(None) + } + } + + async fn get_block_number_by_position( + &self, + block_position: &str, + ) -> Result { + if let Ok(number) = u32::from_str(block_position) { + Ok(BlockNumber(number)) + } else { + match block_position { + "lastCommitted" => self + .get_last_committed_block_number() + .await + .map_err(Error::storage), + "lastFinalized" => self + .get_last_finalized_block_number() + .await + .map_err(Error::storage), + _ => Err(Error::from(InvalidDataError::InvalidBlockPosition)), + } + } + } + + async fn block_page( + &self, + query: PaginationQuery>, + ) -> Result, Error> { + let mut storage = self.pool.access_storage().await.map_err(Error::storage)?; + storage.paginate_checked(&query).await + } + + async fn transaction_page( + &self, + block_number: BlockNumber, + query: PaginationQuery>, + ) -> Result, Error> { + let mut storage = self.pool.access_storage().await.map_err(Error::storage)?; + + let new_query = PaginationQuery { + from: BlockAndTxHash { + block_number, + tx_hash: query.from, + }, + limit: query.limit, + direction: query.direction, + }; + + storage.paginate_checked(&new_query).await + } + + async fn get_last_committed_block_number(&self) -> QueryResult { + let mut storage = self.pool.access_storage().await?; + storage + .chain() + .block_schema() + .get_last_committed_confirmed_block() + .await + } + + async fn get_last_finalized_block_number(&self) -> QueryResult { + let mut storage = self.pool.access_storage().await?; + storage + .chain() + .block_schema() + .get_last_verified_confirmed_block() + .await + } +} + +// Server implementation + +async fn block_pagination( + data: web::Data, + web::Query(query): web::Query>, +) -> ApiResult> { + let query = 
api_try!(parse_query(query).map_err(Error::from)); + data.block_page(query).await.into() +} + +// TODO: take `block_position` as enum. +// Currently actix path extractor doesn't work with enums: https://github.com/actix/actix-web/issues/318 (ZKS-628) +async fn block_by_position( + data: web::Data, + web::Path(block_position): web::Path, +) -> ApiResult> { + let block_number = api_try!(data.get_block_number_by_position(&block_position).await); + data.block_info(block_number).await.into() +} + +async fn block_transactions( + data: web::Data, + web::Path(block_position): web::Path, + web::Query(query): web::Query>, +) -> ApiResult> { + let block_number = api_try!(data.get_block_number_by_position(&block_position).await); + let query = api_try!(parse_query(query).map_err(Error::from)); + data.transaction_page(block_number, query).await.into() +} + +pub fn api_scope(pool: ConnectionPool, cache: BlockDetailsCache) -> Scope { + let data = ApiBlockData::new(pool, cache); + + web::scope("blocks") + .data(data) + .route("", web::get().to(block_pagination)) + .route("{block_position}", web::get().to(block_by_position)) + .route( + "{block_position}/transactions", + web::get().to(block_transactions), + ) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::api_server::rest::v02::{ + test_utils::{deserialize_response_result, TestServerConfig}, + SharedData, + }; + use zksync_api_types::v02::{ + pagination::PaginationDirection, transaction::TransactionData, ApiVersion, + }; + + #[actix_rt::test] + #[cfg_attr( + not(feature = "api_test"), + ignore = "Use `zk test rust-api` command to perform this test" + )] + async fn blocks_scope() -> anyhow::Result<()> { + let cfg = TestServerConfig::default(); + cfg.fill_database().await?; + + let shared_data = SharedData { + net: cfg.config.chain.eth.network, + api_version: ApiVersion::V02, + }; + let (client, server) = cfg.start_server( + |cfg: &TestServerConfig| api_scope(cfg.pool.clone(), BlockDetailsCache::new(10)), + 
Some(shared_data), + ); + + let query = PaginationQuery { + from: ApiEither::from(BlockNumber(1)), + limit: 3, + direction: PaginationDirection::Newer, + }; + let expected_blocks: Paginated = { + let mut storage = cfg.pool.access_storage().await?; + storage + .paginate_checked(&query) + .await + .map_err(|err| anyhow::anyhow!(err.message))? + }; + + let response = client.block_by_position("2").await?; + let block: BlockInfo = deserialize_response_result(response)?; + assert_eq!(block, expected_blocks.list[1]); + + let response = client.block_pagination(&query).await?; + let paginated: Paginated = deserialize_response_result(response)?; + assert_eq!(paginated, expected_blocks); + + let block_number = BlockNumber(3); + let expected_txs = { + let mut storage = cfg.pool.access_storage().await?; + storage + .chain() + .block_schema() + .get_block_transactions(block_number) + .await? + }; + assert!(expected_txs.len() >= 3); + let tx_hash_str = expected_txs.first().unwrap().tx_hash.as_str(); + let tx_hash = TxHash::from_str(tx_hash_str).unwrap(); + + let query = PaginationQuery { + from: ApiEither::from(tx_hash), + limit: 2, + direction: PaginationDirection::Older, + }; + + let response = client + .block_transactions(&query, &*block_number.to_string()) + .await?; + let paginated: Paginated = deserialize_response_result(response)?; + assert_eq!(paginated.pagination.count as usize, expected_txs.len()); + assert_eq!(paginated.pagination.limit, query.limit); + assert_eq!(paginated.list.len(), query.limit as usize); + assert_eq!(paginated.pagination.direction, PaginationDirection::Older); + assert_eq!(paginated.pagination.from, tx_hash); + + for (tx, expected_tx) in paginated.list.into_iter().zip(expected_txs) { + assert_eq!( + tx.tx_hash.to_string().replace("sync-tx:", "0x"), + expected_tx.tx_hash + ); + assert_eq!(tx.created_at, Some(expected_tx.created_at)); + assert_eq!(*tx.block_number.unwrap(), expected_tx.block_number as u32); + assert_eq!(tx.fail_reason, 
expected_tx.fail_reason); + if matches!(tx.op, TransactionData::L2(_)) { + assert_eq!(serde_json::to_value(tx.op).unwrap(), expected_tx.op); + } + } + + server.stop().await; + Ok(()) + } +} diff --git a/core/bin/zksync_api/src/api_server/rest/v02/config.rs b/core/bin/zksync_api/src/api_server/rest/v02/config.rs index c80298f7b8..28f9d08ea2 100644 --- a/core/bin/zksync_api/src/api_server/rest/v02/config.rs +++ b/core/bin/zksync_api/src/api_server/rest/v02/config.rs @@ -4,33 +4,30 @@ // External uses use actix_web::{web, Scope}; -use serde::Serialize; +use serde::{Deserialize, Serialize}; + // Workspace uses +use zksync_api_types::v02::ZksyncVersion; use zksync_config::ZkSyncConfig; use zksync_types::{network::Network, Address}; // Local uses use super::response::ApiResult; -#[derive(Serialize, Debug, Clone, Copy)] -#[serde(rename_all = "snake_case")] -enum ZksyncVersion { - ContractV4, -} - -/// Shared data between `api/v02/config` endpoints. -#[derive(Serialize, Debug, Clone, Copy)] -struct ApiConfigData { +/// Shared data between `api/v0.2/config` endpoints. 
+#[derive(Deserialize, Serialize, Debug, Clone, Copy, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub struct ApiConfigData { network: Network, contract: Address, gov_contract: Address, deposit_confirmations: u64, zksync_version: ZksyncVersion, - // TODO: server_version + // TODO: server_version (ZKS-627) } impl ApiConfigData { - fn new(config: &ZkSyncConfig) -> Self { + pub fn new(config: &ZkSyncConfig) -> Self { Self { network: config.chain.eth.network, contract: config.contracts.contract_addr, @@ -42,8 +39,9 @@ impl ApiConfigData { } // Server implementation + async fn config_endpoint(data: web::Data) -> ApiResult { - (*data.into_inner()).into() + ApiResult::Ok(*data.into_inner()) } pub fn api_scope(config: &ZkSyncConfig) -> Scope { @@ -53,3 +51,37 @@ pub fn api_scope(config: &ZkSyncConfig) -> Scope { .data(data) .route("", web::get().to(config_endpoint)) } + +#[cfg(test)] +mod tests { + use super::*; + use crate::api_server::rest::v02::{ + test_utils::{deserialize_response_result, TestServerConfig}, + SharedData, + }; + use zksync_api_types::v02::ApiVersion; + + #[actix_rt::test] + #[cfg_attr( + not(feature = "api_test"), + ignore = "Use `zk test rust-api` command to perform this test" + )] + async fn config_scope() -> anyhow::Result<()> { + let cfg = TestServerConfig::default(); + + let shared_data = SharedData { + net: cfg.config.chain.eth.network, + api_version: ApiVersion::V02, + }; + let (client, server) = cfg.start_server( + |cfg: &TestServerConfig| api_scope(&cfg.config), + Some(shared_data), + ); + let response = client.config().await?; + let api_config: ApiConfigData = deserialize_response_result(response)?; + assert_eq!(api_config, ApiConfigData::new(&cfg.config)); + + server.stop().await; + Ok(()) + } +} diff --git a/core/bin/zksync_api/src/api_server/rest/v02/error.rs b/core/bin/zksync_api/src/api_server/rest/v02/error.rs index 49e68ef86c..8d336dc980 100644 --- a/core/bin/zksync_api/src/api_server/rest/v02/error.rs +++ 
b/core/bin/zksync_api/src/api_server/rest/v02/error.rs @@ -1,19 +1,51 @@ -use serde::Serialize; +// Built-in uses +use std::fmt::{Display, Formatter}; + +// External uses +use serde::{Deserialize, Serialize}; use serde_repr::Serialize_repr; -use std::fmt::Formatter; +use thiserror::Error; + +// Workspace uses +use zksync_api_types::v02::pagination::{UnknownFromParameter, MAX_LIMIT}; -#[derive(Serialize_repr)] -#[repr(u8)] +// Local uses +use crate::{api_server::tx_sender::SubmitError, fee_ticker::PriceError}; + +#[derive(Serialize_repr, Debug, Deserialize)] +#[repr(u16)] pub enum ErrorCode { - Unreacheable = 0, + UnreacheableError = 0, + CoreApiError = 100, + TokenZeroPriceError = 200, + InvalidCurrency = 201, + InvalidBlockPosition = 202, + InvalidAccountIdOrAddress = 203, + AccountNotFound = 204, + TransactionNotFound = 205, + PaginationLimitTooBig = 206, + QueryDeserializationError = 207, + StorageError = 300, + TokenNotFound = 500, + ExternalApiError = 501, + InternalError = 600, + AccountCloseDisabled = 601, + InvalidParams = 602, + UnsupportedFastProcessing = 603, + IncorrectTx = 604, + TxAddError = 605, + InappropriateFeeToken = 606, + CommunicationCoreServer = 607, + Other = 60_000, } /// Error object in a response -#[derive(Serialize)] +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] pub struct Error { - error_type: String, - code: ErrorCode, - message: String, + pub error_type: String, + pub code: ErrorCode, + pub message: String, } /// Trait that can be used to map custom errors to the object. @@ -40,24 +72,142 @@ where } } -pub struct UnreachableError; +impl Error { + pub fn storage(err: impl Display) -> Error { + Error::from(StorageError::new(err)) + } + + pub fn core_api(err: impl Display) -> Error { + Error::from(CoreApiError::new(err)) + } +} + +#[derive(Error, Debug)] +pub enum InvalidDataError { + #[error("Cannot show price in zero price token")] + TokenZeroPriceError, + #[error("Cannot parse block position. 
There are only block_number, last_committed, last_finalized options")] + InvalidBlockPosition, + #[error("Cannot parse account id or address")] + InvalidAccountIdOrAddress, + #[error("Account is not found")] + AccountNotFound, + #[error("Cannot parse currency. There are only token_id, usd options")] + InvalidCurrency, + #[error("Transaction is not found")] + TransactionNotFound, + #[error("Limit for pagination should be less than or equal to {}", MAX_LIMIT)] + PaginationLimitTooBig, +} + +impl ApiError for InvalidDataError { + fn error_type(&self) -> String { + String::from("invalidDataError") + } + + fn code(&self) -> ErrorCode { + match self { + Self::TokenZeroPriceError => ErrorCode::TokenZeroPriceError, + Self::InvalidBlockPosition => ErrorCode::InvalidBlockPosition, + Self::InvalidAccountIdOrAddress => ErrorCode::InvalidAccountIdOrAddress, + Self::AccountNotFound => ErrorCode::AccountNotFound, + Self::InvalidCurrency => ErrorCode::InvalidCurrency, + Self::TransactionNotFound => ErrorCode::TransactionNotFound, + Self::PaginationLimitTooBig => ErrorCode::PaginationLimitTooBig, + } + } +} + +#[derive(Debug)] +pub struct StorageError(String); + +impl StorageError { + pub fn new(title: impl Display) -> Self { + Self(title.to_string()) + } +} -impl std::fmt::Display for UnreachableError { +impl Display for StorageError { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - write!( - f, - "Unreachable error; you should never see this message, \ - please contact us at https://github.com/matter-labs/zksync with a report" - ) + f.write_str(&self.0) + } +} + +impl ApiError for StorageError { + fn error_type(&self) -> String { + String::from("storageError") + } + + fn code(&self) -> ErrorCode { + ErrorCode::StorageError + } +} + +#[derive(Debug)] +pub struct CoreApiError(String); + +impl CoreApiError { + pub fn new(title: impl Display) -> Self { + Self(title.to_string()) + } +} + +impl Display for CoreApiError { + fn fmt(&self, f: &mut Formatter<'_>) -> 
std::fmt::Result { + f.write_str(&self.0) + } +} + +impl ApiError for CoreApiError { + fn error_type(&self) -> String { + String::from("coreApiError") + } + + fn code(&self) -> ErrorCode { + ErrorCode::CoreApiError + } +} + +impl ApiError for SubmitError { + fn error_type(&self) -> String { + String::from("submitError") + } + + fn code(&self) -> ErrorCode { + match self { + Self::AccountCloseDisabled => ErrorCode::AccountCloseDisabled, + Self::InvalidParams(_) => ErrorCode::InvalidParams, + Self::UnsupportedFastProcessing => ErrorCode::UnsupportedFastProcessing, + Self::IncorrectTx(_) => ErrorCode::IncorrectTx, + Self::TxAdd(_) => ErrorCode::TxAddError, + Self::InappropriateFeeToken => ErrorCode::InappropriateFeeToken, + Self::CommunicationCoreServer(_) => ErrorCode::CommunicationCoreServer, + Self::Internal(_) => ErrorCode::InternalError, + Self::Other(_) => ErrorCode::Other, + } + } +} + +impl ApiError for PriceError { + fn error_type(&self) -> String { + String::from("tokenError") + } + + fn code(&self) -> ErrorCode { + match self { + Self::TokenNotFound(_) => ErrorCode::TokenNotFound, + Self::ApiError(_) => ErrorCode::ExternalApiError, + Self::DBError(_) => ErrorCode::StorageError, + } } } -impl ApiError for UnreachableError { +impl ApiError for UnknownFromParameter { fn error_type(&self) -> String { - String::from("api_error") + String::from("invalidDataError") } fn code(&self) -> ErrorCode { - ErrorCode::Unreacheable + ErrorCode::QueryDeserializationError } } diff --git a/core/bin/zksync_api/src/api_server/rest/v02/fee.rs b/core/bin/zksync_api/src/api_server/rest/v02/fee.rs new file mode 100644 index 0000000000..ead3a6dfba --- /dev/null +++ b/core/bin/zksync_api/src/api_server/rest/v02/fee.rs @@ -0,0 +1,135 @@ +//! Fee part of API implementation. 
+ +// Built-in uses + +// External uses +use actix_web::{ + web::{self, Json}, + Scope, +}; + +// Workspace uses +use zksync_api_types::v02::fee::{ApiFee, BatchFeeRequest, TxFeeRequest}; + +// Local uses +use super::{error::Error, response::ApiResult}; +use crate::api_server::tx_sender::TxSender; + +/// Shared data between `api/v0.2/fee` endpoints. +#[derive(Clone)] +struct ApiFeeData { + tx_sender: TxSender, +} + +impl ApiFeeData { + fn new(tx_sender: TxSender) -> Self { + Self { tx_sender } + } +} + +async fn get_tx_fee( + data: web::Data, + Json(body): Json, +) -> ApiResult { + data.tx_sender + .get_txs_fee_in_wei(body.tx_type.into(), body.address, body.token_like) + .await + .map_err(Error::from) + .map(ApiFee::from) + .into() +} + +async fn get_batch_fee( + data: web::Data, + Json(body): Json, +) -> ApiResult { + let txs = body + .transactions + .into_iter() + .map(|tx| (tx.tx_type.into(), tx.address)) + .collect(); + data.tx_sender + .get_txs_batch_fee_in_wei(txs, body.token_like) + .await + .map_err(Error::from) + .map(ApiFee::from) + .into() +} + +pub fn api_scope(tx_sender: TxSender) -> Scope { + let data = ApiFeeData::new(tx_sender); + + web::scope("fee") + .data(data) + .route("", web::post().to(get_tx_fee)) + .route("/batch", web::post().to(get_batch_fee)) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::api_server::rest::v02::{ + test_utils::{ + deserialize_response_result, dummy_fee_ticker, dummy_sign_verifier, TestServerConfig, + }, + SharedData, + }; + use num::BigUint; + use zksync_api_types::v02::{ + fee::{ApiTxFeeTypes, TxInBatchFeeRequest}, + ApiVersion, + }; + use zksync_types::{tokens::TokenLike, Address, TokenId}; + + #[actix_rt::test] + #[cfg_attr( + not(feature = "api_test"), + ignore = "Use `zk test rust-api` command to perform this test" + )] + async fn fee_scope() -> anyhow::Result<()> { + let cfg = TestServerConfig::default(); + + let shared_data = SharedData { + net: cfg.config.chain.eth.network, + api_version: 
ApiVersion::V02, + }; + let (client, server) = cfg.start_server( + move |cfg: &TestServerConfig| { + api_scope(TxSender::new( + cfg.pool.clone(), + dummy_sign_verifier(), + dummy_fee_ticker(&[]), + &cfg.config, + )) + }, + Some(shared_data), + ); + + let tx_type = ApiTxFeeTypes::Withdraw; + let address = Address::default(); + let token_like = TokenLike::Id(TokenId(1)); + + let response = client + .get_txs_fee(tx_type, address, token_like.clone()) + .await?; + let api_fee: ApiFee = deserialize_response_result(response)?; + assert_eq!(api_fee.gas_fee, BigUint::from(1u32)); + assert_eq!(api_fee.zkp_fee, BigUint::from(1u32)); + assert_eq!(api_fee.total_fee, BigUint::from(2u32)); + + let tx = TxInBatchFeeRequest { + tx_type: ApiTxFeeTypes::Withdraw, + address: Address::default(), + }; + let txs = vec![tx.clone(), tx.clone(), tx]; + + let response = client.get_batch_fee(txs, token_like).await?; + let api_batch_fee: ApiFee = deserialize_response_result(response)?; + assert_eq!(api_batch_fee.gas_fee, BigUint::from(3u32)); + assert_eq!(api_batch_fee.zkp_fee, BigUint::from(3u32)); + assert_eq!(api_batch_fee.total_fee, BigUint::from(6u32)); + + server.stop().await; + Ok(()) + } +} diff --git a/core/bin/zksync_api/src/api_server/rest/v02/mod.rs b/core/bin/zksync_api/src/api_server/rest/v02/mod.rs index e501ec6f55..e1883646c4 100644 --- a/core/bin/zksync_api/src/api_server/rest/v02/mod.rs +++ b/core/bin/zksync_api/src/api_server/rest/v02/mod.rs @@ -3,35 +3,58 @@ use actix_web::{ web::{self}, Scope, }; -use serde::Serialize; // Workspace uses +use zksync_api_types::v02::ApiVersion; use zksync_config::ZkSyncConfig; use zksync_types::network::Network; // Local uses use crate::api_server::tx_sender::TxSender; +mod account; +mod block; mod config; mod error; +mod fee; +mod paginate_impl; +mod paginate_trait; mod response; +mod status; +#[cfg(test)] +pub mod test_utils; +mod token; +mod transaction; -#[derive(Serialize, Clone, Copy)] -#[serde(rename_all = "snake_case")] -pub enum 
ApiVersion { - V02, -} - +#[derive(Debug, Clone, Copy)] pub struct SharedData { pub net: Network, pub api_version: ApiVersion, } -pub(crate) fn api_scope(_tx_sender: TxSender, zk_config: &ZkSyncConfig) -> Scope { +pub(crate) fn api_scope(tx_sender: TxSender, zk_config: &ZkSyncConfig) -> Scope { web::scope("/api/v0.2") .data(SharedData { net: zk_config.chain.eth.network, api_version: ApiVersion::V02, }) + .service(account::api_scope( + tx_sender.pool.clone(), + tx_sender.tokens.clone(), + tx_sender.core_api_client.clone(), + )) + .service(block::api_scope( + tx_sender.pool.clone(), + tx_sender.blocks.clone(), + )) .service(config::api_scope(&zk_config)) + .service(fee::api_scope(tx_sender.clone())) + .service(status::api_scope(tx_sender.pool.clone())) + .service(token::api_scope( + &zk_config, + tx_sender.pool.clone(), + tx_sender.tokens.clone(), + tx_sender.ticker_requests.clone(), + )) + .service(transaction::api_scope(tx_sender)) } diff --git a/core/bin/zksync_api/src/api_server/rest/v02/paginate_impl.rs b/core/bin/zksync_api/src/api_server/rest/v02/paginate_impl.rs new file mode 100644 index 0000000000..3813fb4a0f --- /dev/null +++ b/core/bin/zksync_api/src/api_server/rest/v02/paginate_impl.rs @@ -0,0 +1,279 @@ +// Built-in uses + +// External uses + +// Workspace uses +use zksync_api_types::{ + v02::{ + block::BlockInfo, + pagination::{ + AccountTxsRequest, ApiEither, BlockAndTxHash, Paginated, PaginationQuery, + PendingOpsRequest, + }, + transaction::{Transaction, TxHashSerializeWrapper}, + }, + Either, +}; +use zksync_storage::StorageProcessor; +use zksync_types::{BlockNumber, SerialId, Token, TokenId}; + +// Local uses +use super::{ + block::block_info_from_details, + error::{Error, InvalidDataError}, + paginate_trait::Paginate, +}; +use crate::core_api_client::CoreApiClient; + +#[async_trait::async_trait] +impl Paginate> for StorageProcessor<'_> { + type OutputObj = Token; + type OutputId = TokenId; + + async fn paginate( + &mut self, + query: 
&PaginationQuery>, + ) -> Result, Error> { + let mut transaction = self.start_transaction().await.map_err(Error::storage)?; + + let token_id = match query.from.inner { + Either::Left(token_id) => token_id, + Either::Right(_) => transaction + .tokens_schema() + .get_last_token_id() + .await + .map_err(Error::storage)?, + }; + + let query = PaginationQuery { + from: token_id, + limit: query.limit, + direction: query.direction, + }; + + let tokens = transaction + .tokens_schema() + .load_token_page(&query) + .await + .map_err(Error::storage)?; + let count = transaction + .tokens_schema() + .get_count() + .await + .map_err(Error::storage)?; + + transaction.commit().await.map_err(Error::storage)?; + + Ok(Paginated::new( + tokens.values().cloned().collect(), + query.from, + query.limit, + query.direction, + count, + )) + } +} + +#[async_trait::async_trait] +impl Paginate> for StorageProcessor<'_> { + type OutputObj = BlockInfo; + type OutputId = BlockNumber; + + async fn paginate( + &mut self, + query: &PaginationQuery>, + ) -> Result, Error> { + let mut transaction = self.start_transaction().await.map_err(Error::storage)?; + + let last_block = transaction + .chain() + .block_schema() + .get_last_committed_confirmed_block() + .await + .map_err(Error::storage)?; + + let block_number = match query.from.inner { + Either::Left(block_number) => block_number, + Either::Right(_) => last_block, + }; + + let query = PaginationQuery { + from: block_number, + limit: query.limit, + direction: query.direction, + }; + + let blocks = transaction + .chain() + .block_schema() + .load_block_page(&query) + .await + .map_err(Error::storage)?; + let blocks: Vec = blocks.into_iter().map(block_info_from_details).collect(); + + transaction.commit().await.map_err(Error::storage)?; + + Ok(Paginated::new( + blocks, + query.from, + query.limit, + query.direction, + *last_block, + )) + } +} + +#[async_trait::async_trait] +impl Paginate for StorageProcessor<'_> { + type OutputObj = Transaction; + 
type OutputId = TxHashSerializeWrapper; + + async fn paginate( + &mut self, + query: &PaginationQuery, + ) -> Result, Error> { + let mut transaction = self.start_transaction().await.map_err(Error::storage)?; + + let tx_hash = match query.from.tx_hash.inner { + Either::Left(tx_hash) => tx_hash, + Either::Right(_) => { + if let Some(tx_hash) = transaction + .chain() + .operations_ext_schema() + .get_block_last_tx_hash(query.from.block_number) + .await + .map_err(Error::storage)? + { + tx_hash + } else { + return Ok(Paginated::new( + Vec::new(), + Default::default(), + query.limit, + query.direction, + 0, + )); + } + } + }; + + let query = PaginationQuery { + from: BlockAndTxHash { + block_number: query.from.block_number, + tx_hash: ApiEither::from(tx_hash), + }, + limit: query.limit, + direction: query.direction, + }; + + let txs = transaction + .chain() + .block_schema() + .get_block_transactions_page(&query) + .await + .map_err(Error::storage)? + .ok_or_else(|| Error::from(InvalidDataError::TransactionNotFound))?; + let count = transaction + .chain() + .block_schema() + .get_block_transactions_count(query.from.block_number) + .await + .map_err(Error::storage)?; + + transaction.commit().await.map_err(Error::storage)?; + + Ok(Paginated::new( + txs, + TxHashSerializeWrapper(tx_hash), + query.limit, + query.direction, + count, + )) + } +} + +#[async_trait::async_trait] +impl Paginate for StorageProcessor<'_> { + type OutputObj = Transaction; + type OutputId = TxHashSerializeWrapper; + + async fn paginate( + &mut self, + query: &PaginationQuery, + ) -> Result, Error> { + let mut transaction = self.start_transaction().await.map_err(Error::storage)?; + + let tx_hash = match query.from.tx_hash.inner { + Either::Left(tx_hash) => tx_hash, + Either::Right(_) => { + if let Some(tx_hash) = transaction + .chain() + .operations_ext_schema() + .get_account_last_tx_hash(query.from.address) + .await + .map_err(Error::storage)? 
+ { + tx_hash + } else { + return Ok(Paginated::new( + Vec::new(), + Default::default(), + query.limit, + query.direction, + 0, + )); + } + } + }; + + let query = PaginationQuery { + from: AccountTxsRequest { + address: query.from.address, + tx_hash: ApiEither::from(tx_hash), + }, + limit: query.limit, + direction: query.direction, + }; + + let txs = transaction + .chain() + .operations_ext_schema() + .get_account_transactions(&query) + .await + .map_err(Error::storage)? + .ok_or_else(|| Error::from(InvalidDataError::TransactionNotFound))?; + let count = transaction + .chain() + .operations_ext_schema() + .get_account_transactions_count(query.from.address) + .await + .map_err(Error::storage)?; + + transaction.commit().await.map_err(Error::storage)?; + + Ok(Paginated::new( + txs, + TxHashSerializeWrapper(tx_hash), + query.limit, + query.direction, + count, + )) + } +} + +#[async_trait::async_trait] +impl Paginate for CoreApiClient { + type OutputObj = Transaction; + type OutputId = SerialId; + + async fn paginate( + &mut self, + query: &PaginationQuery, + ) -> Result, Error> { + let result = self + .get_unconfirmed_ops(&query) + .await + .map_err(Error::core_api)?; + Ok(result) + } +} diff --git a/core/bin/zksync_api/src/api_server/rest/v02/paginate_trait.rs b/core/bin/zksync_api/src/api_server/rest/v02/paginate_trait.rs new file mode 100644 index 0000000000..dee8649364 --- /dev/null +++ b/core/bin/zksync_api/src/api_server/rest/v02/paginate_trait.rs @@ -0,0 +1,32 @@ +// Built-in uses + +// External uses +use serde::Serialize; + +// Workspace uses +use zksync_api_types::v02::pagination::{Paginated, PaginationQuery, MAX_LIMIT}; + +// Local uses +use super::error::{Error, InvalidDataError}; + +#[async_trait::async_trait] +pub trait Paginate { + type OutputObj: Serialize; + type OutputId: Serialize; + + async fn paginate( + &mut self, + query: &PaginationQuery, + ) -> Result, Error>; + + async fn paginate_checked( + &mut self, + query: &PaginationQuery, + ) -> Result, 
Error> { + if query.limit > MAX_LIMIT { + Err(Error::from(InvalidDataError::PaginationLimitTooBig)) + } else { + self.paginate(query).await + } + } +} diff --git a/core/bin/zksync_api/src/api_server/rest/v02/response.rs b/core/bin/zksync_api/src/api_server/rest/v02/response.rs index 20d66cd61c..76fd75a36e 100644 --- a/core/bin/zksync_api/src/api_server/rest/v02/response.rs +++ b/core/bin/zksync_api/src/api_server/rest/v02/response.rs @@ -1,54 +1,32 @@ -use actix_web::web::Data; -use actix_web::{Error, HttpRequest, HttpResponse, Responder}; -use chrono::{DateTime, Utc}; -use futures::future::{ready, Ready}; -use qstring::QString; -use serde::Serialize; -use serde_json::Value; +// Built-in uses use std::collections::HashMap; +use std::convert::From; -use zksync_types::network::Network; - -use crate::api_server::rest::v02::error::UnreachableError; -use crate::api_server::rest::v02::{error, ApiVersion, SharedData}; - -#[derive(Serialize)] -#[serde(rename_all = "snake_case")] -enum ResultStatus { - Success, - Error, -} +// External uses +use actix_web::{web::Data, Error as ActixError, HttpRequest, HttpResponse, Responder}; +use chrono::Utc; +use futures::future::{ready, Ready}; +use qstring::QString; +use serde::{Deserialize, Serialize}; -#[derive(Serialize)] -struct Request { - network: Network, - api_version: ApiVersion, - resource: String, - #[serde(skip_serializing_if = "HashMap::is_empty")] - args: HashMap, - timestamp: DateTime, -} +// Workspace uses +use zksync_api_types::v02::{Request, Response, ResultStatus}; -#[derive(Serialize)] -struct Response { - request: Request, - status: ResultStatus, - #[serde(skip_serializing_if = "Option::is_none")] - error: Option, - #[serde(skip_serializing_if = "Option::is_none")] - result: Option, -} +// Local uses +use super::{error::Error, SharedData}; -// TODO: remove #[allow(dead_code)] after adding endpoint that can return an error. 
(ZKS-572) -#[allow(dead_code)] -pub enum ApiResult { +// This struct is needed to wrap all api responses is `Response` struct by implementing `Responder` trait for it. +// We can't use simple `Result`, because `actix-web` has already `Responder` implementation for it. +// Because of this we can't use '?' operator in implementations of endpoints. +#[derive(Debug, Deserialize, Serialize)] +pub enum ApiResult { Ok(R), - Error(E), + Error(Error), } -impl Responder for ApiResult { - type Error = Error; - type Future = Ready>; +impl Responder for ApiResult { + type Error = ActixError; + type Future = Ready>; fn respond_to(self, req: &HttpRequest) -> Self::Future { let data = req @@ -82,7 +60,7 @@ impl Responder for ApiResult { request, status: ResultStatus::Error, result: None, - error: Some(err.into()), + error: Some(serde_json::to_value(err).unwrap()), }, }; @@ -94,8 +72,29 @@ impl Responder for ApiResult { } } -impl From for ApiResult { - fn from(res: R) -> Self { - Self::Ok(res) +impl From for ApiResult { + fn from(err: Error) -> Self { + Self::Error(err) + } +} + +impl From> for ApiResult { + fn from(result: Result) -> Self { + match result { + Ok(ok) => Self::Ok(ok), + Err(err) => Self::Error(err), + } } } + +#[macro_export] +macro_rules! api_try { + ($e:expr) => { + match $e { + Ok(res) => res, + Err(err) => { + return ApiResult::from(err); + } + }; + }; +} diff --git a/core/bin/zksync_api/src/api_server/rest/v02/status.rs b/core/bin/zksync_api/src/api_server/rest/v02/status.rs new file mode 100644 index 0000000000..0556b25eef --- /dev/null +++ b/core/bin/zksync_api/src/api_server/rest/v02/status.rs @@ -0,0 +1,138 @@ +//! Status part of API implementation. 
+ +// Built-in uses + +// External uses +use actix_web::{web, Scope}; + +// Workspace uses +use zksync_api_types::v02::status::NetworkStatus; +use zksync_storage::ConnectionPool; + +// Local uses +use super::{error::Error, response::ApiResult}; +use crate::api_try; + +/// Shared data between `api/v0.2/networkStatus` endpoints. +#[derive(Debug, Clone)] +pub struct ApiStatusData { + pool: ConnectionPool, +} + +impl ApiStatusData { + pub fn new(pool: ConnectionPool) -> Self { + Self { pool } + } +} + +// Server implementation + +async fn get_status(data: web::Data) -> ApiResult { + let mut storage = api_try!(data.pool.access_storage().await.map_err(Error::storage)); + let mut transaction = api_try!(storage.start_transaction().await.map_err(Error::storage)); + + let last_committed = api_try!(transaction + .chain() + .block_schema() + .get_last_committed_confirmed_block() + .await + .map_err(Error::storage)); + let finalized = api_try!(transaction + .chain() + .block_schema() + .get_last_verified_confirmed_block() + .await + .map_err(Error::storage)); + let total_transactions = api_try!(transaction + .chain() + .stats_schema() + .count_total_transactions() + .await + .map_err(Error::storage)); + let mempool_size = api_try!(transaction + .chain() + .mempool_schema() + .get_mempool_size() + .await + .map_err(Error::storage)); + api_try!(transaction.commit().await.map_err(Error::storage)); + + Ok(NetworkStatus { + last_committed, + finalized, + total_transactions, + mempool_size, + }) + .into() +} + +pub fn api_scope(pool: ConnectionPool) -> Scope { + let data = ApiStatusData::new(pool); + + web::scope("networkStatus") + .data(data) + .route("", web::get().to(get_status)) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::api_server::rest::v02::{ + test_utils::{deserialize_response_result, TestServerConfig}, + SharedData, + }; + use zksync_api_types::v02::ApiVersion; + + #[actix_rt::test] + #[cfg_attr( + not(feature = "api_test"), + ignore = "Use `zk test 
rust-api` command to perform this test" + )] + async fn status_scope() -> anyhow::Result<()> { + let cfg = TestServerConfig::default(); + cfg.fill_database().await?; + + let shared_data = SharedData { + net: cfg.config.chain.eth.network, + api_version: ApiVersion::V02, + }; + let (client, server) = cfg.start_server( + |cfg: &TestServerConfig| api_scope(cfg.pool.clone()), + Some(shared_data), + ); + + let expected_status = { + let mut storage = cfg.pool.access_storage().await?; + let last_committed = storage + .chain() + .block_schema() + .get_last_committed_block() + .await?; + let finalized = storage + .chain() + .block_schema() + .get_last_verified_confirmed_block() + .await?; + let total_transactions = storage + .chain() + .stats_schema() + .count_total_transactions() + .await?; + let mempool_size = storage.chain().mempool_schema().get_mempool_size().await?; + NetworkStatus { + last_committed, + finalized, + total_transactions, + mempool_size, + } + }; + + let response = client.status().await?; + let status: NetworkStatus = deserialize_response_result(response)?; + + assert_eq!(expected_status, status); + + server.stop().await; + Ok(()) + } +} diff --git a/core/bin/zksync_api/src/api_server/rest/v1/test_utils.rs b/core/bin/zksync_api/src/api_server/rest/v02/test_utils.rs similarity index 80% rename from core/bin/zksync_api/src/api_server/rest/v1/test_utils.rs rename to core/bin/zksync_api/src/api_server/rest/v02/test_utils.rs index 0002c27c7c..a3c92d5d43 100644 --- a/core/bin/zksync_api/src/api_server/rest/v1/test_utils.rs +++ b/core/bin/zksync_api/src/api_server/rest/v02/test_utils.rs @@ -1,14 +1,22 @@ //! API testing helpers. 
// Built-in uses +use std::collections::HashMap; +use std::str::FromStr; // External uses use actix_web::{web, App, Scope}; +use bigdecimal::BigDecimal; use chrono::Utc; +use futures::{channel::mpsc, StreamExt}; +use num::{rational::Ratio, BigUint}; use once_cell::sync::Lazy; +use serde::de::DeserializeOwned; use tokio::sync::Mutex; // Workspace uses +use zksync_api_client::rest::client::Client; +use zksync_api_types::v02::Response; use zksync_config::ZkSyncConfig; use zksync_crypto::rand::{SeedableRng, XorShiftRng}; use zksync_storage::{ @@ -29,13 +37,17 @@ use zksync_types::{ operations::{ChangePubKeyOp, TransferToNewOp}, prover::ProverJobType, tx::ChangePubKeyType, - AccountId, AccountMap, Address, BlockNumber, Deposit, DepositOp, ExecutedOperations, - ExecutedPriorityOp, ExecutedTx, FullExit, FullExitOp, MintNFTOp, Nonce, PriorityOp, Token, - TokenId, Transfer, TransferOp, ZkSyncOp, ZkSyncTx, H256, + AccountId, AccountMap, Address, BatchFee, BlockNumber, Deposit, DepositOp, ExecutedOperations, + ExecutedPriorityOp, ExecutedTx, Fee, FullExit, FullExitOp, MintNFTOp, Nonce, OutputFeeType, + PriorityOp, Token, TokenId, TokenLike, Transfer, TransferOp, ZkSyncOp, ZkSyncTx, H256, }; // Local uses use super::Client; +use crate::{ + fee_ticker::{ResponseBatchFee, ResponseFee, TickerRequest}, + signature_checker::{VerifiedTx, VerifySignatureRequest}, +}; use std::str::FromStr; use zksync_storage::test_data::generate_nft; @@ -72,17 +84,26 @@ pub struct TestTransactions { } impl TestServerConfig { - pub fn start_server_with_scope( + pub fn start_server_with_scope( &self, scope: String, scope_factory: F, + shared_data: Option, ) -> (Client, actix_web::test::TestServer) where F: Fn(&TestServerConfig) -> Scope + Clone + Send + 'static, + D: Clone + Send + 'static, { let this = self.clone(); let server = actix_web::test::start(move || { - App::new().service(web::scope(scope.as_ref()).service(scope_factory(&this))) + let app = App::new(); + let shared_data = 
shared_data.clone(); + let app = if let Some(shared_data) = shared_data { + app.data(shared_data) + } else { + app + }; + app.service(web::scope(scope.as_ref()).service(scope_factory(&this))) }); let url = server.url("").trim_end_matches('/').to_owned(); @@ -91,11 +112,16 @@ impl TestServerConfig { (client, server) } - pub fn start_server(&self, scope_factory: F) -> (Client, actix_web::test::TestServer) + pub fn start_server( + &self, + scope_factory: F, + shared_data: Option, + ) -> (Client, actix_web::test::TestServer) where F: Fn(&TestServerConfig) -> Scope + Clone + Send + 'static, + D: Clone + Send + 'static, { - self.start_server_with_scope(String::from("/api/v1"), scope_factory) + self.start_server_with_scope(String::from("/api/v0.2"), scope_factory, shared_data) } /// Creates several transactions and the corresponding executed operations. @@ -586,6 +612,8 @@ impl TestServerConfig { .to_vec(), eth_block: 10, created_at: chrono::Utc::now(), + eth_block_index: Some(1), + tx_hash: Default::default(), }, // Committed priority operation. 
NewExecutedPriorityOperation { @@ -605,6 +633,8 @@ impl TestServerConfig { .to_vec(), eth_block: 14, created_at: chrono::Utc::now(), + eth_block_index: Some(1), + tx_hash: Default::default(), }, ]; @@ -663,6 +693,7 @@ pub fn dummy_deposit_op( deadline_block: 0, eth_hash: H256::default(), eth_block: 10, + eth_block_index: Some(1), }, op: deposit_op, block_index, @@ -698,9 +729,114 @@ pub fn dummy_full_exit_op( deadline_block: 0, eth_hash: H256::default(), eth_block: 10, + eth_block_index: Some(1), }, op: deposit_op, block_index, created_at: Utc::now(), } } + +pub fn deserialize_response_result(response: Response) -> anyhow::Result { + match response.result { + Some(result) => { + let result = serde_json::from_value(result)?; + Ok(result) + } + None => { + if response.error.is_some() { + anyhow::bail!("Response returned error: {:?}", response); + } else { + let result = serde_json::from_str("null")?; + Ok(result) + } + } + } +} + +pub fn dummy_sign_verifier() -> mpsc::Sender { + let (sender, mut receiver) = mpsc::channel::(10); + + actix_rt::spawn(async move { + while let Some(item) = receiver.next().await { + let verified = VerifiedTx::unverified(item.data.get_tx_variant()); + item.response + .send(Ok(verified)) + .expect("Unable to send response"); + } + }); + + sender +} + +pub fn dummy_fee_ticker(prices: &[(TokenLike, BigDecimal)]) -> mpsc::Sender { + let (sender, mut receiver) = mpsc::channel(10); + + let prices: HashMap<_, _> = prices.iter().cloned().collect(); + actix_rt::spawn(async move { + while let Some(item) = receiver.next().await { + match item { + TickerRequest::GetTxFee { response, .. 
} => { + let normal_fee = Fee::new( + OutputFeeType::Withdraw, + BigUint::from(1_u64).into(), + BigUint::from(1_u64).into(), + 1_u64.into(), + 1_u64.into(), + ); + + let subsidy_fee = normal_fee.clone(); + + let res = Ok(ResponseFee { + normal_fee, + subsidy_fee, + subsidy_size_usd: Ratio::from_integer(0u32.into()), + }); + + response.send(res).expect("Unable to send response"); + } + TickerRequest::GetTokenPrice { + token, response, .. + } => { + let msg = if let Some(price) = prices.get(&token) { + Ok(price.clone()) + } else { + Ok(BigDecimal::from(0u64)) + }; + + response.send(msg).expect("Unable to send response"); + } + TickerRequest::IsTokenAllowed { token, response } => { + // For test purposes, PHNX token is not allowed. + let is_phnx = match token { + TokenLike::Id(id) => *id == 1, + TokenLike::Symbol(sym) => sym == "PHNX", + TokenLike::Address(_) => unreachable!(), + }; + response.send(Ok(!is_phnx)).unwrap_or_default(); + } + TickerRequest::GetBatchTxFee { + response, + transactions, + .. + } => { + let normal_fee = BatchFee::new( + BigUint::from(transactions.len()).into(), + BigUint::from(transactions.len()).into(), + ); + let subsidy_fee = normal_fee.clone(); + + let res = Ok(ResponseBatchFee { + normal_fee, + subsidy_fee, + subsidy_size_usd: Ratio::from_integer(0u32.into()), + }); + + response.send(res).expect("Unable to send response"); + } + } + } + }); + + sender +} diff --git a/core/bin/zksync_api/src/api_server/rest/v02/token.rs b/core/bin/zksync_api/src/api_server/rest/v02/token.rs new file mode 100644 index 0000000000..89844173c7 --- /dev/null +++ b/core/bin/zksync_api/src/api_server/rest/v02/token.rs @@ -0,0 +1,413 @@ +//! Tokens part of API implementation. 
+ +// Built-in uses +use std::str::FromStr; + +// External uses +use actix_web::{ + web::{self}, + Scope, +}; +use bigdecimal::{BigDecimal, Zero}; +use futures::{ + channel::{mpsc, oneshot}, + prelude::*, +}; +use num::{rational::Ratio, BigUint, FromPrimitive}; + +// Workspace uses +use zksync_api_types::v02::{ + pagination::{parse_query, ApiEither, Paginated, PaginationQuery}, + token::{ApiToken, TokenPrice}, +}; +use zksync_config::ZkSyncConfig; +use zksync_storage::{ConnectionPool, StorageProcessor}; +use zksync_types::{Token, TokenId, TokenLike}; + +// Local uses +use super::{ + error::{Error, InvalidDataError}, + paginate_trait::Paginate, + response::ApiResult, +}; +use crate::{ + api_try, + fee_ticker::{PriceError, TickerRequest, TokenPriceRequestType}, + utils::token_db_cache::TokenDBCache, +}; + +/// Shared data between `api/v0.2/tokens` endpoints. +#[derive(Clone)] +struct ApiTokenData { + min_market_volume: Ratio, + fee_ticker: mpsc::Sender, + tokens: TokenDBCache, + pool: ConnectionPool, +} + +impl ApiTokenData { + fn new( + config: &ZkSyncConfig, + pool: ConnectionPool, + tokens: TokenDBCache, + fee_ticker: mpsc::Sender, + ) -> Self { + Self { + min_market_volume: Ratio::from( + BigUint::from_f64(config.ticker.liquidity_volume) + .expect("TickerConfig::liquidity_volume must be positive"), + ), + pool, + tokens, + fee_ticker, + } + } + + async fn is_token_enabled_for_fees( + &self, + storage: &mut StorageProcessor<'_>, + token_id: TokenId, + ) -> Result { + let result = storage + .tokens_schema() + .filter_tokens_by_market_volume(vec![token_id], &self.min_market_volume) + .await + .map_err(Error::storage)?; + Ok(!result.is_empty()) + } + + async fn token_page( + &self, + query: PaginationQuery>, + ) -> Result, Error> { + let mut storage = self.pool.access_storage().await.map_err(Error::storage)?; + let paginated_tokens: Result, Error> = + storage.paginate_checked(&query).await; + match paginated_tokens { + Ok(paginated_tokens) => { + let tokens_to_check: 
Vec = + paginated_tokens.list.iter().map(|token| token.id).collect(); + let tokens_enabled_for_fees = storage + .tokens_schema() + .filter_tokens_by_market_volume(tokens_to_check, &self.min_market_volume) + .await + .map_err(Error::storage)?; + let list = paginated_tokens + .list + .into_iter() + .map(|token| { + let eligibility = tokens_enabled_for_fees.contains(&token.id); + ApiToken::from_token_and_eligibility(token, eligibility) + }) + .collect(); + Ok(Paginated::new( + list, + paginated_tokens.pagination.from, + paginated_tokens.pagination.limit, + paginated_tokens.pagination.direction, + paginated_tokens.pagination.count, + )) + } + Err(err) => Err(err), + } + } + + async fn token(&self, token_like: TokenLike) -> Result { + let mut storage = self.pool.access_storage().await.map_err(Error::storage)?; + + let token = self + .tokens + .get_token(&mut storage, token_like) + .await + .map_err(Error::storage)?; + if let Some(token) = token { + Ok(token) + } else { + Err(Error::from(PriceError::token_not_found( + "Token not found in storage", + ))) + } + } + + async fn api_token(&self, token_like: TokenLike) -> Result { + let token = self.token(token_like).await?; + let mut storage = self.pool.access_storage().await.map_err(Error::storage)?; + let enabled_for_fees = self + .is_token_enabled_for_fees(&mut storage, token.id) + .await?; + Ok(ApiToken::from_token_and_eligibility( + token, + enabled_for_fees, + )) + } + + async fn token_price_usd(&self, token: TokenLike) -> Result { + let (price_sender, price_receiver) = oneshot::channel(); + self.fee_ticker + .clone() + .send(TickerRequest::GetTokenPrice { + token, + response: price_sender, + req_type: TokenPriceRequestType::USDForOneToken, + }) + .await + .map_err(Error::storage)?; + + let price_result = price_receiver.await.map_err(Error::storage)?; + price_result.map_err(Error::from) + } + + // TODO: take `currency` as enum. 
(ZKS-628) + async fn token_price_in( + &self, + first_token: TokenLike, + currency: &str, + ) -> Result { + if let Ok(second_token_id) = u32::from_str(currency) { + let second_token = TokenLike::from(TokenId(second_token_id)); + let first_usd_price = self.token_price_usd(first_token).await; + let second_usd_price = self.token_price_usd(second_token).await; + match (first_usd_price, second_usd_price) { + (Ok(first_usd_price), Ok(second_usd_price)) => { + if second_usd_price.is_zero() { + Err(Error::from(InvalidDataError::TokenZeroPriceError)) + } else { + Ok(first_usd_price / second_usd_price) + } + } + (Err(err), _) => Err(err), + (_, Err(err)) => Err(err), + } + } else { + match currency { + "usd" => self.token_price_usd(first_token).await, + _ => Err(Error::from(InvalidDataError::InvalidCurrency)), + } + } + } +} + +// Server implementation + +async fn token_pagination( + data: web::Data, + web::Query(query): web::Query>, +) -> ApiResult> { + let query = api_try!(parse_query(query).map_err(Error::from)); + data.token_page(query).await.into() +} + +async fn token_by_id_or_address( + data: web::Data, + web::Path(token_like_string): web::Path, +) -> ApiResult { + let token_like = TokenLike::parse(&token_like_string); + let token_like = match token_like { + TokenLike::Symbol(_) => { + return Error::from(PriceError::token_not_found( + "Could not parse token as id or address", + )) + .into(); + } + _ => token_like, + }; + + data.api_token(token_like).await.into() +} + +// TODO: take `currency` as enum. 
+// Currently actix path extractor doesn't work with enums: https://github.com/actix/actix-web/issues/318 (ZKS-628) +async fn token_price( + data: web::Data, + web::Path((token_like_string, currency)): web::Path<(String, String)>, +) -> ApiResult { + let first_token = TokenLike::parse(&token_like_string); + let first_token = match first_token { + TokenLike::Symbol(_) => { + return Error::from(PriceError::token_not_found( + "Could not parse token as id or address", + )) + .into(); + } + _ => first_token, + }; + + let price = api_try!(data.token_price_in(first_token.clone(), ¤cy).await); + let token = api_try!(data.token(first_token).await); + + Ok(TokenPrice { + token_id: token.id, + token_symbol: token.symbol, + price_in: currency, + decimals: token.decimals, + price, + }) + .into() +} + +pub fn api_scope( + config: &ZkSyncConfig, + pool: ConnectionPool, + tokens_db: TokenDBCache, + fee_ticker: mpsc::Sender, +) -> Scope { + let data = ApiTokenData::new(config, pool, tokens_db, fee_ticker); + + web::scope("tokens") + .data(data) + .route("", web::get().to(token_pagination)) + .route( + "{token_id_or_address}", + web::get().to(token_by_id_or_address), + ) + .route( + "{token_id_or_address}/priceIn/{currency}", + web::get().to(token_price), + ) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::api_server::rest::v02::{ + test_utils::{deserialize_response_result, dummy_fee_ticker, TestServerConfig}, + SharedData, + }; + use zksync_api_types::v02::{pagination::PaginationDirection, ApiVersion}; + use zksync_types::Address; + + async fn is_token_enabled_for_fees( + storage: &mut StorageProcessor<'_>, + token_id: TokenId, + config: &ZkSyncConfig, + ) -> anyhow::Result { + let market_volume = TokenDBCache::get_token_market_volume(storage, token_id).await?; + let min_market_volume = Ratio::from( + BigUint::from_f64(config.ticker.liquidity_volume) + .expect("TickerConfig::liquidity_volume must be positive"), + ); + Ok(market_volume + .map(|volume| 
volume.market_volume.ge(&min_market_volume)) + .unwrap_or(false)) + } + + #[actix_rt::test] + #[cfg_attr( + not(feature = "api_test"), + ignore = "Use `zk test rust-api` command to perform this test" + )] + async fn tokens_scope() -> anyhow::Result<()> { + let cfg = TestServerConfig::default(); + cfg.fill_database().await?; + + let prices = [ + (TokenLike::Id(TokenId(1)), 10_u64.into()), + (TokenLike::Id(TokenId(15)), 10_500_u64.into()), + (Address::default().into(), 1_u64.into()), + ]; + let fee_ticker = dummy_fee_ticker(&prices); + + let shared_data = SharedData { + net: cfg.config.chain.eth.network, + api_version: ApiVersion::V02, + }; + let (client, server) = cfg.start_server( + move |cfg| { + api_scope( + &cfg.config, + cfg.pool.clone(), + TokenDBCache::new(), + fee_ticker.clone(), + ) + }, + Some(shared_data), + ); + + let token_like = TokenLike::Id(TokenId(1)); + let response = client.token_by_id(&token_like).await?; + let api_token: ApiToken = deserialize_response_result(response)?; + + let expected_token = { + let mut storage = cfg.pool.access_storage().await?; + storage + .tokens_schema() + .get_token(token_like) + .await? + .unwrap() + }; + let expected_enabled_for_fees = { + let mut storage = cfg.pool.access_storage().await?; + is_token_enabled_for_fees(&mut storage, TokenId(1), &cfg.config).await? 
+ }; + let expected_api_token = + ApiToken::from_token_and_eligibility(expected_token, expected_enabled_for_fees); + assert_eq!(api_token, expected_api_token); + + let query = PaginationQuery { + from: ApiEither::from(TokenId(15)), + limit: 2, + direction: PaginationDirection::Older, + }; + let response = client.token_pagination(&query).await?; + let pagination: Paginated = deserialize_response_result(response)?; + + let expected_pagination = { + let mut storage = cfg.pool.access_storage().await?; + let paginated_tokens: Paginated = storage + .paginate_checked(&query) + .await + .map_err(|err| anyhow::anyhow!(err.message))?; + let mut list = Vec::new(); + for token in paginated_tokens.list { + let enabled_for_fees = + is_token_enabled_for_fees(&mut storage, token.id, &cfg.config).await?; + list.push(ApiToken::from_token_and_eligibility( + token, + enabled_for_fees, + )); + } + Paginated::new( + list, + paginated_tokens.pagination.from, + paginated_tokens.pagination.limit, + paginated_tokens.pagination.direction, + paginated_tokens.pagination.count, + ) + }; + assert_eq!(pagination, expected_pagination); + + let token_like = TokenLike::Id(TokenId(1)); + let token = { + let mut storage = cfg.pool.access_storage().await?; + storage + .tokens_schema() + .get_token(token_like.clone()) + .await? 
+ .unwrap() + }; + let mut expected_token_price = TokenPrice { + token_id: token.id, + token_symbol: token.symbol, + price_in: String::from("15"), + decimals: token.decimals, + price: BigDecimal::from_u32(10).unwrap() / BigDecimal::from_u32(10500).unwrap(), + }; + + let response = client.token_price(&token_like, "15").await?; + let price_in_token: TokenPrice = deserialize_response_result(response)?; + assert_eq!(price_in_token, expected_token_price); + + expected_token_price.price_in = String::from("usd"); + expected_token_price.price = BigDecimal::from_u32(10).unwrap(); + + let response = client.token_price(&token_like, "usd").await?; + let price_in_usd: TokenPrice = deserialize_response_result(response)?; + assert_eq!(price_in_usd, expected_token_price); + + let response = client.token_price(&token_like, "333").await?; + assert!(response.error.is_some()); + + server.stop().await; + Ok(()) + } +} diff --git a/core/bin/zksync_api/src/api_server/rest/v02/transaction.rs b/core/bin/zksync_api/src/api_server/rest/v02/transaction.rs new file mode 100644 index 0000000000..5edbe5585d --- /dev/null +++ b/core/bin/zksync_api/src/api_server/rest/v02/transaction.rs @@ -0,0 +1,419 @@ +//! Transactions part of API implementation. + +// Built-in uses + +// External uses +use actix_web::{ + web::{self, Json}, + Scope, +}; + +// Workspace uses +use zksync_api_types::{ + v02::transaction::{ + ApiTxBatch, IncomingTx, IncomingTxBatch, L1Receipt, L1Transaction, Receipt, + SubmitBatchResponse, Transaction, TransactionData, TxData, TxHashSerializeWrapper, + TxInBlockStatus, + }, + PriorityOpLookupQuery, +}; +use zksync_types::{tx::TxHash, EthBlockId}; + +// Local uses +use super::{error::Error, response::ApiResult}; +use crate::api_server::{rpc_server::types::TxWithSignature, tx_sender::TxSender}; +use zksync_types::tx::TxEthSignatureVariant; + +/// Shared data between `api/v0.2/transactions` endpoints. 
+#[derive(Clone)] +struct ApiTransactionData { + tx_sender: TxSender, +} + +impl ApiTransactionData { + fn new(tx_sender: TxSender) -> Self { + Self { tx_sender } + } + + async fn tx_status(&self, tx_hash: TxHash) -> Result, Error> { + let mut storage = self + .tx_sender + .pool + .access_storage() + .await + .map_err(Error::storage)?; + if let Some(receipt) = storage + .chain() + .operations_ext_schema() + .tx_receipt_api_v02(tx_hash.as_ref()) + .await + .map_err(Error::storage)? + { + Ok(Some(receipt)) + } else if let Some(op) = self + .tx_sender + .core_api_client + .get_unconfirmed_op(PriorityOpLookupQuery::ByAnyHash(tx_hash)) + .await + .map_err(Error::core_api)? + { + Ok(Some(Receipt::L1(L1Receipt { + status: TxInBlockStatus::Queued, + eth_block: EthBlockId(op.eth_block), + rollup_block: None, + id: op.serial_id, + }))) + } else { + Ok(None) + } + } + + async fn tx_data(&self, tx_hash: TxHash) -> Result, Error> { + let mut storage = self + .tx_sender + .pool + .access_storage() + .await + .map_err(Error::storage)?; + if let Some(data) = storage + .chain() + .operations_ext_schema() + .tx_data_api_v02(tx_hash.as_ref()) + .await + .map_err(Error::storage)? + { + Ok(Some(data)) + } else if let Some(op) = self + .tx_sender + .core_api_client + .get_unconfirmed_op(PriorityOpLookupQuery::ByAnyHash(tx_hash)) + .await + .map_err(Error::core_api)? 
+ { + let tx_hash = op.tx_hash(); + let tx = Transaction { + tx_hash, + block_number: None, + op: TransactionData::L1(L1Transaction::from_pending_op( + op.data, + op.eth_hash, + op.serial_id, + tx_hash, + )), + status: TxInBlockStatus::Queued, + fail_reason: None, + created_at: None, + }; + + Ok(Some(TxData { + tx, + eth_signature: None, + })) + } else { + Ok(None) + } + } + + async fn get_batch(&self, batch_hash: TxHash) -> Result, Error> { + let mut storage = self + .tx_sender + .pool + .access_storage() + .await + .map_err(Error::storage)?; + storage + .chain() + .operations_ext_schema() + .get_batch_info(batch_hash) + .await + .map_err(Error::storage) + } +} + +// Server implementation + +async fn tx_status( + data: web::Data, + web::Path(tx_hash): web::Path, +) -> ApiResult> { + data.tx_status(tx_hash).await.into() +} + +async fn tx_data( + data: web::Data, + web::Path(tx_hash): web::Path, +) -> ApiResult> { + data.tx_data(tx_hash).await.into() +} + +async fn submit_tx( + data: web::Data, + Json(body): Json, +) -> ApiResult { + let tx_hash = data + .tx_sender + .submit_tx(body.tx, body.signature) + .await + .map_err(Error::from); + + tx_hash.map(TxHashSerializeWrapper).into() +} + +async fn submit_batch( + data: web::Data, + Json(body): Json, +) -> ApiResult { + let txs = body + .txs + .into_iter() + .map(|tx| TxWithSignature { + tx, + signature: TxEthSignatureVariant::Single(None), + // #TODO verify + }) + .collect(); + + let signatures = body.signature; + let response = data + .tx_sender + .submit_txs_batch(txs, Some(signatures)) + .await + .map_err(Error::from); + + response.into() +} + +async fn get_batch( + data: web::Data, + web::Path(batch_hash): web::Path, +) -> ApiResult> { + data.get_batch(batch_hash).await.into() +} + +pub fn api_scope(tx_sender: TxSender) -> Scope { + let data = ApiTransactionData::new(tx_sender); + + web::scope("transactions") + .data(data) + .route("", web::post().to(submit_tx)) + .route("{tx_hash}", web::get().to(tx_status)) + 
.route("{tx_hash}/data", web::get().to(tx_data)) + .route("/batches", web::post().to(submit_batch)) + .route("/batches/{batch_hash}", web::get().to(get_batch)) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::{ + api_server::rest::v02::{ + test_utils::{ + deserialize_response_result, dummy_fee_ticker, dummy_sign_verifier, + TestServerConfig, TestTransactions, + }, + SharedData, + }, + core_api_client::CoreApiClient, + }; + use actix_web::App; + use std::str::FromStr; + use zksync_api_types::v02::{ + transaction::{L2Receipt, TxHashSerializeWrapper}, + ApiVersion, + }; + use zksync_types::{ + tokens::Token, + tx::{EthBatchSignData, EthBatchSignatures, PackedEthSignature, TxEthSignature}, + BlockNumber, SignedZkSyncTx, TokenId, + }; + + fn submit_txs_loopback() -> (CoreApiClient, actix_web::test::TestServer) { + async fn send_tx(_tx: Json) -> Json> { + Json(Ok(())) + } + + async fn send_txs_batch( + _txs: Json<(Vec, Vec)>, + ) -> Json> { + Json(Ok(())) + } + + async fn get_unconfirmed_op(_query: Json) -> Json> { + Json(None) + } + + let server = actix_web::test::start(move || { + App::new() + .route("new_tx", web::post().to(send_tx)) + .route("new_txs_batch", web::post().to(send_txs_batch)) + .route("unconfirmed_op", web::post().to(get_unconfirmed_op)) + }); + + let url = server.url("").trim_end_matches('/').to_owned(); + + (CoreApiClient::new(url), server) + } + + #[actix_rt::test] + #[cfg_attr( + not(feature = "api_test"), + ignore = "Use `zk test rust-api` command to perform this test" + )] + async fn transactions_scope() -> anyhow::Result<()> { + let (core_client, core_server) = submit_txs_loopback(); + + let cfg = TestServerConfig::default(); + cfg.fill_database().await?; + + let shared_data = SharedData { + net: cfg.config.chain.eth.network, + api_version: ApiVersion::V02, + }; + let (client, server) = cfg.start_server( + move |cfg: &TestServerConfig| { + api_scope(TxSender::with_client( + core_client.clone(), + cfg.pool.clone(), + 
dummy_sign_verifier(), + dummy_fee_ticker(&[]), + &cfg.config, + )) + }, + Some(shared_data), + ); + + let tx = TestServerConfig::gen_zk_txs(100_u64).txs[0].0.clone(); + let response = client.submit_tx(tx.clone(), None).await?; + let tx_hash: TxHash = deserialize_response_result(response)?; + assert_eq!(tx.hash(), tx_hash); + + let TestTransactions { acc, txs } = TestServerConfig::gen_zk_txs(1_00); + let eth = Token::new(TokenId(0), Default::default(), "ETH", 18); + let (good_batch, expected_tx_hashes): (Vec<_>, Vec<_>) = txs + .into_iter() + .map(|(tx, _op)| { + let tx_hash = tx.hash(); + (tx, tx_hash) + }) + .unzip(); + let expected_batch_hash = TxHash::batch_hash(&expected_tx_hashes); + let expected_response = SubmitBatchResponse { + transaction_hashes: expected_tx_hashes + .into_iter() + .map(TxHashSerializeWrapper) + .collect(), + batch_hash: expected_batch_hash, + }; + + let txs = good_batch + .iter() + .zip(std::iter::repeat(eth)) + .map(|(tx, token)| (tx.clone(), token, tx.account())) + .collect::>(); + let batch_signature = { + let eth_private_key = acc + .try_get_eth_private_key() + .expect("Should have ETH private key"); + let batch_message = EthBatchSignData::get_batch_sign_message(txs); + let eth_sig = PackedEthSignature::sign(eth_private_key, &batch_message).unwrap(); + let single_signature = TxEthSignature::EthereumSignature(eth_sig); + + EthBatchSignatures::Single(single_signature) + }; + + let response = client + .submit_batch(good_batch.clone(), batch_signature) + .await?; + let submit_batch_response: SubmitBatchResponse = deserialize_response_result(response)?; + assert_eq!(submit_batch_response, expected_response); + + { + let mut storage = cfg.pool.access_storage().await?; + let txs: Vec<_> = good_batch + .into_iter() + .map(|tx| SignedZkSyncTx { + tx, + eth_sign_data: None, + }) + .collect(); + storage + .chain() + .mempool_schema() + .insert_batch(&txs, Vec::new()) + .await?; + }; + + let response = 
client.get_batch(submit_batch_response.batch_hash).await?; + let batch: ApiTxBatch = deserialize_response_result(response)?; + assert_eq!(batch.batch_hash, submit_batch_response.batch_hash); + assert_eq!( + batch.transaction_hashes, + submit_batch_response.transaction_hashes + ); + assert_eq!(batch.batch_status.last_state, TxInBlockStatus::Queued); + + let tx_hash = { + let mut storage = cfg.pool.access_storage().await?; + + let transactions = storage + .chain() + .block_schema() + .get_block_transactions(BlockNumber(1)) + .await?; + + TxHash::from_str(&transactions[0].tx_hash).unwrap() + }; + let response = client.tx_status(tx_hash).await?; + let tx_status: Receipt = deserialize_response_result(response)?; + let expected_tx_status = Receipt::L2(L2Receipt { + tx_hash, + rollup_block: Some(BlockNumber(1)), + status: TxInBlockStatus::Finalized, + fail_reason: None, + }); + assert_eq!(tx_status, expected_tx_status); + + let response = client.tx_data(tx_hash).await?; + let tx_data: Option = deserialize_response_result(response)?; + assert_eq!(tx_data.unwrap().tx.tx_hash, tx_hash); + + let pending_tx_hash = { + let mut storage = cfg.pool.access_storage().await?; + + let tx = TestServerConfig::gen_zk_txs(1_u64).txs[0].0.clone(); + let tx_hash = tx.hash(); + storage + .chain() + .mempool_schema() + .insert_tx(&SignedZkSyncTx { + tx, + eth_sign_data: None, + }) + .await?; + + tx_hash + }; + let response = client.tx_status(pending_tx_hash).await?; + let tx_status: Receipt = deserialize_response_result(response)?; + let expected_tx_status = Receipt::L2(L2Receipt { + tx_hash: pending_tx_hash, + rollup_block: None, + status: TxInBlockStatus::Queued, + fail_reason: None, + }); + assert_eq!(tx_status, expected_tx_status); + + let response = client.tx_data(pending_tx_hash).await?; + let tx_data: Option = deserialize_response_result(response)?; + assert_eq!(tx_data.unwrap().tx.tx_hash, pending_tx_hash); + + let tx = TestServerConfig::gen_zk_txs(1_u64).txs[0].0.clone(); + let 
response = client.tx_data(tx.hash()).await?; + let tx_data: Option = deserialize_response_result(response)?; + assert!(tx_data.is_none()); + + server.stop().await; + core_server.stop().await; + Ok(()) + } +} diff --git a/core/bin/zksync_api/src/api_server/rest/v1/accounts/mod.rs b/core/bin/zksync_api/src/api_server/rest/v1/accounts/mod.rs deleted file mode 100644 index 36f20b9069..0000000000 --- a/core/bin/zksync_api/src/api_server/rest/v1/accounts/mod.rs +++ /dev/null @@ -1,327 +0,0 @@ -//! Accounts part of API implementation. - -// Built-in uses - -// External uses -use actix_web::{ - web::{self, Json}, - Scope, -}; - -// Workspace uses -use zksync_storage::{ConnectionPool, QueryResult, StorageProcessor}; -use zksync_types::{AccountId, Address, BlockNumber, TokenId}; - -// Local uses -use crate::{core_api_client::CoreApiClient, utils::token_db_cache::TokenDBCache}; - -use super::{ApiError, JsonResult}; -use zksync_config::ZkSyncConfig; - -use self::types::{ - convert::{ - depositing_balances_from_pending_ops, op_receipt_from_response, - pending_account_op_receipt_from_priority_op, search_direction_as_storage, - tx_receipt_from_response, validate_receipts_query, - }, - AccountReceiptsQuery, SearchDirection, -}; -// Public uses -pub use self::types::{ - convert::account_state_from_storage, AccountInfo, AccountOpReceipt, AccountQuery, - AccountReceipts, AccountState, AccountTxReceipt, DepositingBalances, DepositingFunds, - PendingAccountOpReceipt, TxLocation, -}; - -#[cfg(test)] -mod tests; -mod types; - -fn unable_to_find_token(token_id: TokenId) -> anyhow::Error { - anyhow::anyhow!("Unable to find token with ID {}", *token_id) -} - -// Additional parser because actix-web doesn't understand enums in path extractor. 
-fn parse_account_query(query: String) -> Result { - query.parse().map_err(|err| { - ApiError::bad_request("Must be specified either an account ID or an account address.") - .detail(format!("An error occurred: {}", err)) - }) -} - -/// Shared data between `api/v1/accounts` endpoints. -#[derive(Clone)] -struct ApiAccountsData { - pool: ConnectionPool, - tokens: TokenDBCache, - core_api_client: CoreApiClient, - confirmations_for_eth_event: BlockNumber, -} - -impl ApiAccountsData { - fn new( - pool: ConnectionPool, - tokens: TokenDBCache, - core_api_client: CoreApiClient, - confirmations_for_eth_event: BlockNumber, - ) -> Self { - Self { - pool, - tokens, - core_api_client, - confirmations_for_eth_event, - } - } - - async fn access_storage(&self) -> QueryResult> { - self.pool.access_storage().await.map_err(From::from) - } - - async fn find_account_address(&self, query: String) -> Result { - let query = parse_account_query(query)?; - self.account_address(query) - .await - .map_err(ApiError::internal)? - .ok_or_else(|| { - ApiError::bad_request("Unable to find account.") - .detail(format!("Given account {:?} is absent", query)) - }) - } - - async fn account_id( - storage: &mut StorageProcessor<'_>, - query: AccountQuery, - ) -> QueryResult> { - match query { - AccountQuery::Id(id) => Ok(Some(id)), - AccountQuery::Address(address) => { - storage - .chain() - .account_schema() - .account_id_by_address(address) - .await - } - } - } - - async fn account_address(&self, query: AccountQuery) -> QueryResult> { - match query { - AccountQuery::Id(id) => { - let mut storage = self.access_storage().await?; - storage - .chain() - .account_schema() - .account_address_by_id(id) - .await - } - AccountQuery::Address(address) => Ok(Some(address)), - } - } - - async fn account_info(&self, query: AccountQuery) -> QueryResult> { - let mut storage = self.access_storage().await?; - let account_id = if let Some(id) = Self::account_id(&mut storage, query).await? 
{ - id - } else { - return Ok(None); - }; - - let account_state = storage - .chain() - .account_schema() - .account_state_by_id(account_id) - .await?; - - let (account_id, account) = if let Some(state) = account_state.committed { - state - } else { - // This account has not been committed. - return Ok(None); - }; - - let committed = account_state_from_storage(&mut storage, &self.tokens, &account).await?; - let verified = match account_state.verified { - Some((_id, account)) => { - account_state_from_storage(&mut storage, &self.tokens, &account).await? - } - None => AccountState::default(), - }; - - let depositing = { - let ongoing_ops = self - .core_api_client - .get_unconfirmed_deposits(account.address) - .await?; - - depositing_balances_from_pending_ops( - &mut storage, - &self.tokens, - ongoing_ops, - self.confirmations_for_eth_event, - ) - .await? - }; - - let info = AccountInfo { - address: account.address, - id: account_id, - committed, - verified, - depositing, - }; - - Ok(Some(info)) - } - - async fn tx_receipts( - &self, - address: Address, - location: TxLocation, - direction: SearchDirection, - limit: u32, - ) -> QueryResult> { - let mut storage = self.access_storage().await?; - - let items = storage - .chain() - .operations_ext_schema() - .get_account_transactions_receipts( - address, - *location.block as u64, - location.index, - search_direction_as_storage(direction), - limit as u64, - ) - .await?; - - Ok(items.into_iter().map(tx_receipt_from_response).collect()) - } - - async fn op_receipts( - &self, - address: Address, - location: TxLocation, - direction: SearchDirection, - limit: u32, - ) -> QueryResult> { - let mut storage = self.access_storage().await?; - - let items = storage - .chain() - .operations_ext_schema() - .get_account_operations_receipts( - address, - *location.block as u64, - location.index.unwrap_or_default(), - search_direction_as_storage(direction), - limit as u64, - ) - .await?; - - 
Ok(items.into_iter().map(op_receipt_from_response).collect()) - } - - async fn pending_op_receipts( - &self, - address: Address, - ) -> QueryResult> { - let ongoing_ops = self.core_api_client.get_unconfirmed_ops(address).await?; - - let receipts = ongoing_ops - .into_iter() - .map(pending_account_op_receipt_from_priority_op) - .collect(); - - Ok(receipts) - } -} - -// Server implementation - -async fn account_info( - data: web::Data, - web::Path(query): web::Path, -) -> JsonResult> { - let query = parse_account_query(query)?; - - data.account_info(query) - .await - .map(Json) - .map_err(ApiError::internal) -} - -async fn account_tx_receipts( - data: web::Data, - web::Path(account_query): web::Path, - web::Query(location_query): web::Query, -) -> JsonResult> { - let (location, direction, limit) = validate_receipts_query(location_query)?; - let address = data.find_account_address(account_query).await?; - - let receipts = data - .tx_receipts(address, location, direction, *limit) - .await - .map_err(ApiError::internal)?; - - Ok(Json(receipts)) -} - -async fn account_op_receipts( - data: web::Data, - web::Path(account_query): web::Path, - web::Query(location_query): web::Query, -) -> JsonResult> { - let (location, direction, limit) = validate_receipts_query(location_query)?; - let address = data.find_account_address(account_query).await?; - - let receipts = data - .op_receipts(address, location, direction, *limit) - .await - .map_err(ApiError::internal)?; - - Ok(Json(receipts)) -} - -async fn account_pending_receipts( - data: web::Data, - web::Path(account_query): web::Path, -) -> JsonResult> { - let address = data.find_account_address(account_query).await?; - - let receipts = data - .pending_op_receipts(address) - .await - .map_err(ApiError::internal)?; - - Ok(Json(receipts)) -} - -pub fn api_scope( - pool: ConnectionPool, - config: &ZkSyncConfig, - tokens: TokenDBCache, - core_api_client: CoreApiClient, -) -> Scope { - let data = ApiAccountsData::new( - pool, - 
tokens, - core_api_client, - BlockNumber(config.eth_watch.confirmations_for_eth_event as u32), - ); - - web::scope("accounts") - .data(data) - .route("{id}", web::get().to(account_info)) - .route( - "{id}/transactions/receipts", - web::get().to(account_tx_receipts), - ) - .route( - "{id}/operations/receipts", - web::get().to(account_op_receipts), - ) - .route( - "{id}/operations/pending", - web::get().to(account_pending_receipts), - ) -} diff --git a/core/bin/zksync_api/src/api_server/rest/v1/accounts/tests.rs b/core/bin/zksync_api/src/api_server/rest/v1/accounts/tests.rs index f020633636..e69de29bb2 100644 --- a/core/bin/zksync_api/src/api_server/rest/v1/accounts/tests.rs +++ b/core/bin/zksync_api/src/api_server/rest/v1/accounts/tests.rs @@ -1,517 +0,0 @@ -// Built-in uses -use std::sync::Arc; - -// External uses -use actix_web::{ - web::{self, Json}, - App, -}; -use serde_json::json; -use tokio::sync::Mutex; - -// Workspace uses -use zksync_storage::{ - chain::operations_ext::records::{AccountOpReceiptResponse, AccountTxReceiptResponse}, - ConnectionPool, StorageProcessor, -}; -use zksync_types::{tx::TxHash, AccountId, Address, BlockNumber, ExecutedOperations, H256}; - -// Local uses -use crate::{ - api_server::v1::{ - test_utils::{dummy_deposit_op, TestServerConfig}, - transactions::Receipt, - Client, - }, - core_api_client::CoreApiClient, - utils::token_db_cache::TokenDBCache, -}; - -use super::{ - api_scope, - types::{ - convert::{op_receipt_from_response, tx_receipt_from_response}, - AccountOpReceipt, AccountReceipts, AccountTxReceipt, - }, -}; - -type PendingOpsHandle = Arc>; - -fn create_pending_ops_handle() -> PendingOpsHandle { - Arc::new(Mutex::new(json!([]))) -} - -fn get_unconfirmed_ops_loopback( - ops_handle: PendingOpsHandle, - deposits_handle: PendingOpsHandle, -) -> (CoreApiClient, actix_web::test::TestServer) { - async fn get_ops( - data: web::Data, - _path: web::Path, - ) -> Json { - Json(data.lock().await.clone()) - } - - let server = 
actix_web::test::start(move || { - let ops_handle = ops_handle.clone(); - let deposits_handle = deposits_handle.clone(); - App::new() - .service( - web::scope("unconfirmed_ops") - .data(ops_handle) - .route("{address}", web::get().to(get_ops)), - ) - .service( - web::scope("unconfirmed_deposits") - .data(deposits_handle) - .route("{address}", web::get().to(get_ops)), - ) - }); - - let url = server.url("").trim_end_matches('/').to_owned(); - (CoreApiClient::new(url), server) -} - -struct TestServer { - core_server: actix_web::test::TestServer, - api_server: actix_web::test::TestServer, - pool: ConnectionPool, - pending_ops: PendingOpsHandle, - pending_deposits: PendingOpsHandle, -} - -impl TestServer { - async fn new() -> anyhow::Result<(Client, Self)> { - let cfg = TestServerConfig::default(); - cfg.fill_database().await?; - - let pending_ops = create_pending_ops_handle(); - let pending_deposits = create_pending_ops_handle(); - let (core_client, core_server) = - get_unconfirmed_ops_loopback(pending_ops.clone(), pending_deposits.clone()); - - let pool = cfg.pool.clone(); - - let (api_client, api_server) = cfg.start_server(move |cfg| { - api_scope( - cfg.pool.clone(), - &cfg.config, - TokenDBCache::new(), - core_client.clone(), - ) - }); - - Ok(( - api_client, - Self { - core_server, - api_server, - pool, - pending_ops, - pending_deposits, - }, - )) - } - - async fn account_id( - storage: &mut StorageProcessor<'_>, - block: BlockNumber, - ) -> anyhow::Result { - let transactions = storage - .chain() - .block_schema() - .get_block_transactions(block) - .await?; - - let tx = &transactions[1]; - let op = tx.op.as_object().unwrap(); - - let id = if op.contains_key("accountId") { - serde_json::from_value(op["accountId"].clone()).unwrap() - } else { - serde_json::from_value(op["creatorId"].clone()).unwrap() - }; - Ok(id) - } - - async fn stop(self) { - self.api_server.stop().await; - self.core_server.stop().await; - } -} - -#[actix_rt::test] -#[cfg_attr( - not(feature = 
"api_test"), - ignore = "Use `zk test rust-api` command to perform this test" -)] -async fn unconfirmed_deposits_loopback() -> anyhow::Result<()> { - let (client, server) = - get_unconfirmed_ops_loopback(create_pending_ops_handle(), create_pending_ops_handle()); - - client.get_unconfirmed_deposits(Address::default()).await?; - client.get_unconfirmed_ops(Address::default()).await?; - - server.stop().await; - Ok(()) -} - -#[actix_rt::test] -#[cfg_attr( - not(feature = "api_test"), - ignore = "Use `zk test rust-api` command to perform this test" -)] -async fn accounts_scope() -> anyhow::Result<()> { - let (client, server) = TestServer::new().await?; - - // Get account information. - let account_id = - TestServer::account_id(&mut server.pool.access_storage().await?, BlockNumber(1)).await?; - - let account_info = client.account_info(account_id).await?.unwrap(); - let address = account_info.address; - assert_eq!(client.account_info(address).await?, Some(account_info)); - - // Provide unconfirmed pending deposits. - *server.pending_deposits.lock().await = json!([ - { - "serial_id": 1, - "data": { - "type": "Deposit", - "account_id": account_id, - "amount": "100500", - "from": Address::default(), - "to": address, - "token": 0, - }, - "deadline_block": 10, - "eth_hash": vec![0u8; 32], - "eth_block": 5, - }, - ]); - - // Check account information about unconfirmed deposits. - let account_info = client.account_info(account_id).await?.unwrap(); - - let depositing_balances = &account_info.depositing.balances["ETH"]; - assert_eq!(*depositing_balances.expected_accept_block, 5); - assert_eq!(depositing_balances.amount.0, 100_500_u64.into()); - - // Get account transaction receipts. 
- let receipts = client - .account_tx_receipts( - address, - AccountReceipts::newer_than(BlockNumber(0), None), - 10, - ) - .await?; - - assert_eq!(receipts[0].index, None); - assert_eq!( - receipts[0].receipt, - Receipt::Rejected { - reason: Some("Unknown token".to_string()) - } - ); - assert_eq!(receipts[2].index, Some(3)); - assert_eq!( - receipts[2].receipt, - Receipt::Verified { - block: BlockNumber(1) - } - ); - - // Get a reversed list of receipts with requests from the end. - let receipts: Vec<_> = receipts.into_iter().rev().collect(); - assert_eq!( - client - .account_tx_receipts(address, AccountReceipts::Latest, 10) - .await?, - receipts - ); - assert_eq!( - client - .account_tx_receipts( - address, - AccountReceipts::older_than(BlockNumber(10), Some(0)), - 10 - ) - .await?, - receipts - ); - - // Save priority operation in block. - let deposit_op = dummy_deposit_op(address, account_id, 10234, 1); - server - .pool - .access_storage() - .await? - .chain() - .block_schema() - .save_block_transactions( - BlockNumber(1), - vec![ExecutedOperations::PriorityOp(Box::new(deposit_op))], - ) - .await?; - - // Get account operation receipts. 
- let receipts = client - .account_op_receipts( - address, - AccountReceipts::newer_than(BlockNumber(1), Some(0)), - 10, - ) - .await?; - - assert_eq!( - receipts[0], - AccountOpReceipt { - hash: H256::default(), - index: 1, - receipt: Receipt::Verified { - block: BlockNumber(1) - } - } - ); - assert_eq!( - client - .account_op_receipts( - address, - AccountReceipts::newer_than(BlockNumber(1), Some(0)), - 10 - ) - .await?, - receipts - ); - assert_eq!( - client - .account_op_receipts( - address, - AccountReceipts::older_than(BlockNumber(2), Some(0)), - 10 - ) - .await?, - receipts - ); - assert_eq!( - client - .account_op_receipts( - account_id, - AccountReceipts::newer_than(BlockNumber(1), Some(0)), - 10 - ) - .await?, - receipts - ); - assert_eq!( - client - .account_op_receipts( - account_id, - AccountReceipts::older_than(BlockNumber(2), Some(0)), - 10 - ) - .await?, - receipts - ); - - // Get account pending receipts. - *server.pending_ops.lock().await = json!([ - { - "serial_id": 1, - "data": { - "type": "Deposit", - "account_id": account_id, - "amount": "100500", - "from": Address::default(), - "to": address, - "token": 0, - }, - "deadline_block": 10, - "eth_hash": vec![0u8; 32], - "eth_block": 5, - }, - { - "serial_id": 2, - "data": { - "type": "FullExit", - "account_id": account_id, - "eth_address": Address::default(), - "token": 0 - }, - "deadline_block": 0, - "eth_hash": vec![1u8; 32], - "eth_block": 5 - } - ]); - let pending_receipts = client.account_pending_ops(account_id).await?; - - assert_eq!(pending_receipts[0].eth_block, 5); - assert_eq!(pending_receipts[0].hash, [0u8; 32].into()); - assert_eq!(pending_receipts[1].eth_block, 5); - assert_eq!(pending_receipts[1].hash, [1u8; 32].into()); - - server.stop().await; - Ok(()) -} - -#[test] -fn account_tx_response_to_receipt() { - fn empty_hash() -> Vec { - TxHash::default().as_ref().to_vec() - } - - let cases = vec![ - ( - AccountTxReceiptResponse { - block_index: Some(1), - block_number: 1, - success: 
true, - fail_reason: None, - commit_tx_hash: None, - verify_tx_hash: None, - tx_hash: empty_hash(), - }, - AccountTxReceipt { - index: Some(1), - hash: TxHash::default(), - receipt: Receipt::Executed, - }, - ), - ( - AccountTxReceiptResponse { - block_index: None, - block_number: 1, - success: true, - fail_reason: None, - commit_tx_hash: None, - verify_tx_hash: None, - tx_hash: empty_hash(), - }, - AccountTxReceipt { - index: None, - hash: TxHash::default(), - receipt: Receipt::Executed, - }, - ), - ( - AccountTxReceiptResponse { - block_index: Some(1), - block_number: 1, - success: false, - fail_reason: Some("Oops".to_string()), - commit_tx_hash: None, - verify_tx_hash: None, - tx_hash: empty_hash(), - }, - AccountTxReceipt { - index: Some(1), - hash: TxHash::default(), - receipt: Receipt::Rejected { - reason: Some("Oops".to_string()), - }, - }, - ), - ( - AccountTxReceiptResponse { - block_index: Some(1), - block_number: 1, - success: true, - fail_reason: None, - commit_tx_hash: Some(empty_hash()), - verify_tx_hash: None, - tx_hash: empty_hash(), - }, - AccountTxReceipt { - index: Some(1), - hash: TxHash::default(), - receipt: Receipt::Committed { - block: BlockNumber(1), - }, - }, - ), - ( - AccountTxReceiptResponse { - block_index: Some(1), - block_number: 1, - success: true, - fail_reason: None, - commit_tx_hash: Some(empty_hash()), - verify_tx_hash: Some(empty_hash()), - tx_hash: empty_hash(), - }, - AccountTxReceipt { - index: Some(1), - hash: TxHash::default(), - receipt: Receipt::Verified { - block: BlockNumber(1), - }, - }, - ), - ]; - - for (resp, expected_receipt) in cases { - let actual_receipt = tx_receipt_from_response(resp); - assert_eq!(actual_receipt, expected_receipt); - } -} - -#[test] -fn account_op_response_to_receipt() { - fn empty_hash() -> Vec { - H256::default().as_bytes().to_vec() - } - - let cases = vec![ - ( - AccountOpReceiptResponse { - block_index: 1, - block_number: 1, - commit_tx_hash: None, - verify_tx_hash: None, - eth_hash: 
empty_hash(), - }, - AccountOpReceipt { - index: 1, - hash: H256::default(), - receipt: Receipt::Executed, - }, - ), - ( - AccountOpReceiptResponse { - block_index: 1, - block_number: 1, - commit_tx_hash: Some(empty_hash()), - verify_tx_hash: None, - eth_hash: empty_hash(), - }, - AccountOpReceipt { - index: 1, - hash: H256::default(), - receipt: Receipt::Committed { - block: BlockNumber(1), - }, - }, - ), - ( - AccountOpReceiptResponse { - block_index: 1, - block_number: 1, - commit_tx_hash: Some(empty_hash()), - verify_tx_hash: Some(empty_hash()), - eth_hash: empty_hash(), - }, - AccountOpReceipt { - index: 1, - hash: H256::default(), - receipt: Receipt::Verified { - block: BlockNumber(1), - }, - }, - ), - ]; - - for (resp, expected_receipt) in cases { - let actual_receipt = op_receipt_from_response(resp); - assert_eq!(actual_receipt, expected_receipt); - } -} diff --git a/core/bin/zksync_api/src/api_server/rest/v1/accounts/types.rs b/core/bin/zksync_api/src/api_server/rest/v1/accounts/types.rs index aa31f665bd..e69de29bb2 100644 --- a/core/bin/zksync_api/src/api_server/rest/v1/accounts/types.rs +++ b/core/bin/zksync_api/src/api_server/rest/v1/accounts/types.rs @@ -1,229 +0,0 @@ -//! 
Data transfer objects used in the accounts API implementation - -// Built-in uses -use std::collections::BTreeMap; - -// Workspace uses -pub use zksync_api_client::rest::v1::accounts::{ - AccountInfo, AccountOpReceipt, AccountQuery, AccountReceipts, AccountReceiptsQuery, - AccountState, AccountTxReceipt, DepositingBalances, DepositingFunds, PendingAccountOpReceipt, - SearchDirection, TxLocation, -}; -use zksync_storage::{ - chain::operations_ext::{ - records::{AccountOpReceiptResponse, AccountTxReceiptResponse}, - SearchDirection as StorageSearchDirection, - }, - QueryResult, StorageProcessor, -}; -use zksync_types::{tx::TxHash, Account, BlockNumber, PriorityOp, ZkSyncPriorityOp, H256}; - -// Local uses -use crate::{api_server::v1::MAX_LIMIT, utils::token_db_cache::TokenDBCache}; - -use super::{ - super::{transactions::Receipt, ApiError}, - unable_to_find_token, -}; - -pub(super) mod convert { - use std::collections::HashMap; - use zksync_crypto::params::{MIN_NFT_TOKEN_ID, NFT_TOKEN_ID_VAL}; - - use super::*; - - pub async fn account_state_from_storage( - storage: &mut StorageProcessor<'_>, - tokens: &TokenDBCache, - account: &Account, - ) -> QueryResult { - let mut balances = BTreeMap::new(); - let mut nfts = HashMap::new(); - for (token_id, balance) in account.get_nonzero_balances() { - match token_id.0 { - NFT_TOKEN_ID_VAL => { - // Don't include special token to balances or nfts - } - MIN_NFT_TOKEN_ID..=NFT_TOKEN_ID_VAL => { - // https://github.com/rust-lang/rust/issues/37854 - // Exclusive range is an experimental feature, but we have already checked the last value in the previous step - nfts.insert( - token_id, - tokens - .get_nft_by_id(storage, token_id) - .await? - .ok_or_else(|| unable_to_find_token(token_id))? - .into(), - ); - } - _ => { - let token_symbol = tokens - .token_symbol(storage, token_id) - .await? 
- .ok_or_else(|| unable_to_find_token(token_id))?; - balances.insert(token_symbol, balance); - } - } - } - let minted_nfts = account - .minted_nfts - .iter() - .map(|(id, nft)| (*id, nft.clone().into())) - .collect(); - - Ok(AccountState { - balances, - nfts, - minted_nfts, - nonce: account.nonce, - pub_key_hash: account.pub_key_hash, - }) - } - - pub fn search_direction_as_storage(direction: SearchDirection) -> StorageSearchDirection { - match direction { - SearchDirection::Older => StorageSearchDirection::Older, - SearchDirection::Newer => StorageSearchDirection::Newer, - } - } - - pub async fn depositing_balances_from_pending_ops( - storage: &mut StorageProcessor<'_>, - tokens: &TokenDBCache, - ongoing_ops: Vec, - confirmations_for_eth_event: BlockNumber, - ) -> QueryResult { - let mut balances = BTreeMap::new(); - - for op in ongoing_ops { - let received_on_block = op.eth_block; - let (amount, token_id) = match op.data { - ZkSyncPriorityOp::Deposit(deposit) => (deposit.amount, deposit.token), - ZkSyncPriorityOp::FullExit(other) => { - panic!("Incorrect input for DepositingBalances: {:?}", other); - } - }; - - let token_symbol = tokens - .token_symbol(storage, token_id) - .await? - .ok_or_else(|| unable_to_find_token(token_id))?; - - let expected_accept_block = confirmations_for_eth_event + (received_on_block as u32); - - let balance = balances - .entry(token_symbol) - .or_insert_with(DepositingFunds::default); - - balance.amount.0 += amount; - - // `balance.expected_accept_block` should be the greatest block number among - // all the deposits for a certain token. 
- if expected_accept_block > balance.expected_accept_block { - balance.expected_accept_block = expected_accept_block; - } - } - - Ok(DepositingBalances { balances }) - } - - pub fn validate_receipts_query( - query: AccountReceiptsQuery, - ) -> Result<(TxLocation, SearchDirection, BlockNumber), ApiError> { - if *query.limit == 0 && *query.limit > MAX_LIMIT { - return Err(ApiError::bad_request("Incorrect limit") - .detail(format!("Limit should be between {} and {}", 1, MAX_LIMIT))); - } - - let (location, direction) = match (query.block, query.index, query.direction) { - // Just try to fetch latest transactions. - (None, None, None) => ( - TxLocation { - block: BlockNumber(u32::MAX), - index: None, - }, - SearchDirection::Older, - ), - (Some(block), index, Some(direction)) => (TxLocation { block, index }, direction), - - _ => { - return Err(ApiError::bad_request("Incorrect transaction location") - .detail("All parameters must be passed: block, index, direction.")) - } - }; - - Ok((location, direction, query.limit)) - } - - pub fn tx_receipt_from_response(inner: AccountTxReceiptResponse) -> AccountTxReceipt { - let block = BlockNumber(inner.block_number as u32); - let index = inner.block_index.map(|x| x as u32); - let hash = TxHash::from_slice(&inner.tx_hash).unwrap_or_else(|| { - panic!( - "Database provided an incorrect tx_hash field: {}", - hex::encode(&inner.tx_hash) - ) - }); - - if !inner.success { - return AccountTxReceipt { - index, - hash, - receipt: Receipt::Rejected { - reason: inner.fail_reason, - }, - }; - } - - let receipt = match ( - inner.commit_tx_hash.is_some(), - inner.verify_tx_hash.is_some(), - ) { - (false, false) => Receipt::Executed, - (true, false) => Receipt::Committed { block }, - (true, true) => Receipt::Verified { block }, - (false, true) => panic!( - "Database provided an incorrect account tx reciept: {:?}", - inner - ), - }; - - AccountTxReceipt { - index, - receipt, - hash, - } - } - - pub fn op_receipt_from_response(inner: 
AccountOpReceiptResponse) -> AccountOpReceipt { - let block = BlockNumber(inner.block_number as u32); - let index = inner.block_index as u32; - let hash = H256::from_slice(&inner.eth_hash); - - let receipt = match ( - inner.commit_tx_hash.is_some(), - inner.verify_tx_hash.is_some(), - ) { - (false, false) => Receipt::Executed, - (true, false) => Receipt::Committed { block }, - (true, true) => Receipt::Verified { block }, - (false, true) => panic!( - "Database provided an incorrect account tx receipt: {:?}", - inner - ), - }; - - AccountOpReceipt { - index, - receipt, - hash, - } - } - - pub fn pending_account_op_receipt_from_priority_op(op: PriorityOp) -> PendingAccountOpReceipt { - PendingAccountOpReceipt { - eth_block: op.eth_block, - hash: op.eth_hash, - } - } -} diff --git a/core/bin/zksync_api/src/api_server/rest/v1/blocks.rs b/core/bin/zksync_api/src/api_server/rest/v1/blocks.rs deleted file mode 100644 index 19840cb063..0000000000 --- a/core/bin/zksync_api/src/api_server/rest/v1/blocks.rs +++ /dev/null @@ -1,286 +0,0 @@ -//! Blocks part of API implementation. - -// Built-in uses - -// External uses -use actix_web::{ - web::{self, Json}, - Scope, -}; - -// Workspace uses -pub use zksync_api_client::rest::v1::{BlockInfo, TransactionInfo}; -use zksync_crypto::{convert::FeConvert, Fr}; -use zksync_storage::{chain::block::records, ConnectionPool, QueryResult}; -use zksync_types::{tx::TxHash, BlockNumber}; - -// Local uses -use super::{Error as ApiError, JsonResult, Pagination, PaginationQuery}; -use crate::{ - api_server::helpers::try_parse_tx_hash, utils::block_details_cache::BlockDetailsCache, -}; - -/// Shared data between `api/v1/blocks` endpoints. -#[derive(Debug, Clone)] -struct ApiBlocksData { - pool: ConnectionPool, - /// Verified blocks cache. 
- verified_blocks: BlockDetailsCache, -} - -impl ApiBlocksData { - fn new(pool: ConnectionPool, verified_blocks: BlockDetailsCache) -> Self { - Self { - pool, - verified_blocks, - } - } - - /// Returns information about block with the specified number. - /// - /// This method caches some of the verified blocks. - async fn block_info( - &self, - block_number: BlockNumber, - ) -> QueryResult> { - self.verified_blocks.get(&self.pool, block_number).await - } - - /// Returns the block range up to the given block number. - /// - /// Note that this method doesn't use cache and always requests blocks from the database - async fn blocks_range( - &self, - max_block: Option, - limit: u32, - ) -> QueryResult> { - let max_block = max_block.unwrap_or(BlockNumber(u32::MAX)); - - let mut storage = self.pool.access_storage().await?; - storage - .chain() - .block_schema() - .load_block_range(max_block, limit) - .await - } - - /// Return transactions stored in the block with the specified number. - async fn block_transactions( - &self, - block_number: BlockNumber, - ) -> QueryResult> { - let mut storage = self.pool.access_storage().await?; - storage - .chain() - .block_schema() - .get_block_transactions(block_number) - .await - } -} - -pub(super) mod convert { - use zksync_api_client::rest::v1::PaginationQueryError; - - use super::*; - - pub fn block_info_from_details(inner: records::StorageBlockDetails) -> BlockInfo { - BlockInfo { - block_number: BlockNumber(inner.block_number as u32), - new_state_root: Fr::from_bytes(&inner.new_state_root).unwrap_or_else(|err| { - panic!( - "Database provided an incorrect new_state_root field: {:?}, an error occurred {}", - inner.new_state_root, err - ) - }), - block_size: inner.block_size as u64, - commit_tx_hash: inner.commit_tx_hash.map(|bytes| { - TxHash::from_slice(&bytes).unwrap_or_else(|| { - panic!( - "Database provided an incorrect commit_tx_hash field: {:?}", - hex::encode(bytes) - ) - }) - }), - verify_tx_hash: 
inner.verify_tx_hash.map(|bytes| { - TxHash::from_slice(&bytes).unwrap_or_else(|| { - panic!( - "Database provided an incorrect verify_tx_hash field: {:?}", - hex::encode(bytes) - ) - }) - }), - committed_at: inner.committed_at, - verified_at: inner.verified_at, - } - } - - pub fn transaction_info_from_transaction_item( - inner: records::BlockTransactionItem, - ) -> TransactionInfo { - TransactionInfo { - tx_hash: try_parse_tx_hash(&inner.tx_hash).unwrap_or_else(|err| { - panic!( - "Database provided an incorrect transaction hash: {:?}, an error occurred: {}", - inner.tx_hash, err - ) - }), - block_number: BlockNumber(inner.block_number as u32), - op: inner.op, - success: inner.success, - fail_reason: inner.fail_reason, - created_at: inner.created_at, - } - } - - impl From for ApiError { - fn from(err: PaginationQueryError) -> Self { - ApiError::bad_request("Incorrect pagination query").detail(err.detail) - } - } -} - -// Server implementation - -async fn block_by_id( - data: web::Data, - web::Path(block_number): web::Path, -) -> JsonResult> { - Ok(Json( - data.block_info(block_number) - .await - .map_err(ApiError::internal)? - .map(convert::block_info_from_details), - )) -} - -async fn block_transactions( - data: web::Data, - web::Path(block_number): web::Path, -) -> JsonResult> { - let transactions = data - .block_transactions(block_number) - .await - .map_err(ApiError::internal)?; - - Ok(Json( - transactions - .into_iter() - .map(convert::transaction_info_from_transaction_item) - .collect(), - )) -} - -async fn blocks_range( - data: web::Data, - web::Query(pagination): web::Query, -) -> JsonResult> { - let (pagination, limit) = pagination.into_inner()?; - let max = pagination.into_max(limit)?; - - let range = data - .blocks_range(max, limit) - .await - .map_err(ApiError::internal)?; - // Handle edge case when "after + limit" greater than the total blocks count. - // TODO Handle this case directly in the `storage` crate. 
(ZKS-124) - let range = if let Pagination::After(after) = pagination { - range - .into_iter() - .filter(|block| block.block_number > *after as i64) - .map(convert::block_info_from_details) - .collect() - } else { - range - .into_iter() - .map(convert::block_info_from_details) - .collect() - }; - - Ok(Json(range)) -} - -pub fn api_scope(pool: ConnectionPool, cache: BlockDetailsCache) -> Scope { - let data = ApiBlocksData::new(pool, cache); - - web::scope("blocks") - .data(data) - .route("", web::get().to(blocks_range)) - .route("{id}", web::get().to(block_by_id)) - .route("{id}/transactions", web::get().to(block_transactions)) -} - -#[cfg(test)] -mod tests { - use super::{super::test_utils::TestServerConfig, *}; - - #[actix_rt::test] - #[cfg_attr( - not(feature = "api_test"), - ignore = "Use `zk test rust-api` command to perform this test" - )] - async fn test_blocks_scope() -> anyhow::Result<()> { - let cfg = TestServerConfig::default(); - cfg.fill_database().await?; - - let (client, server) = - cfg.start_server(|cfg| api_scope(cfg.pool.clone(), BlockDetailsCache::new(10))); - - // Block requests part - let blocks: Vec = { - let mut storage = cfg.pool.access_storage().await?; - - let blocks = storage - .chain() - .block_schema() - .load_block_range(BlockNumber(10), 10) - .await?; - - blocks - .into_iter() - .map(convert::block_info_from_details) - .collect() - }; - - assert_eq!( - client.block_by_id(BlockNumber(1)).await?.unwrap(), - blocks[7] - ); - assert_eq!(client.blocks_range(Pagination::Last, 10).await?, blocks); - assert_eq!( - client - .blocks_range(Pagination::Before(BlockNumber(2)), 5) - .await?, - &blocks[7..8] - ); - assert_eq!( - client - .blocks_range(Pagination::After(BlockNumber(7)), 5) - .await?, - &blocks[0..1] - ); - - // Transaction requests part. 
- let expected_txs: Vec = { - let mut storage = cfg.pool.access_storage().await?; - - let transactions = storage - .chain() - .block_schema() - .get_block_transactions(BlockNumber(1)) - .await?; - - transactions - .into_iter() - .map(convert::transaction_info_from_transaction_item) - .collect() - }; - assert_eq!( - client.block_transactions(BlockNumber(1)).await?, - expected_txs - ); - assert_eq!(client.block_transactions(BlockNumber(6)).await?, vec![]); - - server.stop().await; - Ok(()) - } -} diff --git a/core/bin/zksync_api/src/api_server/rest/v1/config.rs b/core/bin/zksync_api/src/api_server/rest/v1/config.rs deleted file mode 100644 index 57cc773d8b..0000000000 --- a/core/bin/zksync_api/src/api_server/rest/v1/config.rs +++ /dev/null @@ -1,96 +0,0 @@ -//! Config part of API implementation. - -// Built-in uses - -// External uses -use actix_web::{web, Scope}; - -// Workspace uses -use zksync_api_client::rest::v1::Contracts; -use zksync_config::ZkSyncConfig; -use zksync_types::{network::Network, Address}; - -// Local uses -use super::Json; - -/// Shared data between `api/v1/config` endpoints. 
-#[derive(Debug, Clone)] -struct ApiConfigData { - contract_address: Address, - deposit_confirmations: u64, - network: Network, -} - -impl ApiConfigData { - fn new(config: &ZkSyncConfig) -> Self { - Self { - contract_address: config.contracts.contract_addr, - deposit_confirmations: config.eth_watch.confirmations_for_eth_event, - network: config.chain.eth.network, - } - } -} - -// Server implementation - -async fn contracts(data: web::Data) -> Json { - Json(Contracts { - contract: data.contract_address, - }) -} - -async fn deposit_confirmations(data: web::Data) -> Json { - Json(data.deposit_confirmations) -} - -async fn network(data: web::Data) -> Json { - Json(data.network) -} - -pub fn api_scope(config: &ZkSyncConfig) -> Scope { - let data = ApiConfigData::new(config); - - web::scope("config") - .data(data) - .route("contracts", web::get().to(contracts)) - .route("network", web::get().to(network)) - .route( - "deposit_confirmations", - web::get().to(deposit_confirmations), - ) -} - -#[cfg(test)] -mod tests { - use super::{super::test_utils::TestServerConfig, *}; - - #[actix_rt::test] - #[cfg_attr( - not(feature = "api_test"), - ignore = "Use `zk test rust-api` command to perform this test" - )] - async fn test_config_scope() -> anyhow::Result<()> { - let cfg = TestServerConfig::default(); - let (client, server) = cfg.start_server(|cfg| api_scope(&cfg.config)); - - assert_eq!( - client.deposit_confirmations().await?, - cfg.config.eth_watch.confirmations_for_eth_event - ); - - assert_eq!( - client.network().await?, - cfg.config.chain.eth.network.to_string() - ); - assert_eq!( - client.contracts().await?, - Contracts { - contract: cfg.config.contracts.contract_addr - }, - ); - - server.stop().await; - - Ok(()) - } -} diff --git a/core/bin/zksync_api/src/api_server/rest/v1/mod.rs b/core/bin/zksync_api/src/api_server/rest/v1/mod.rs deleted file mode 100644 index 1eaeb56b1f..0000000000 --- a/core/bin/zksync_api/src/api_server/rest/v1/mod.rs +++ /dev/null @@ -1,56 +0,0 
@@ -//! First stable API implementation. - -// External uses -use actix_web::{ - web::{self, Json}, - Scope, -}; - -pub use Error as ApiError; -// Workspace uses -pub use zksync_api_client::rest::v1::{ - Client, ClientError, Pagination, PaginationQuery, MAX_LIMIT, -}; -use zksync_config::ZkSyncConfig; - -// Local uses -use crate::api_server::tx_sender::TxSender; - -// Public uses -pub use self::error::{Error, ErrorBody}; - -pub(crate) mod accounts; -mod blocks; -mod config; -pub mod error; -mod operations; -mod search; -#[cfg(test)] -pub mod test_utils; -mod tokens; -mod transactions; - -pub type JsonResult = std::result::Result, Error>; - -pub(crate) fn api_scope(tx_sender: TxSender, zk_config: &ZkSyncConfig) -> Scope { - web::scope("/api/v1") - .service(accounts::api_scope( - tx_sender.pool.clone(), - zk_config, - tx_sender.tokens.clone(), - tx_sender.core_api_client.clone(), - )) - .service(config::api_scope(&zk_config)) - .service(blocks::api_scope( - tx_sender.pool.clone(), - tx_sender.blocks.clone(), - )) - .service(transactions::api_scope(tx_sender.clone())) - .service(operations::api_scope(tx_sender.pool.clone())) - .service(search::api_scope(tx_sender.pool.clone())) - .service(tokens::api_scope( - tx_sender.pool.clone(), - tx_sender.tokens, - tx_sender.ticker_requests, - )) -} diff --git a/core/bin/zksync_api/src/api_server/rest/v1/operations.rs b/core/bin/zksync_api/src/api_server/rest/v1/operations.rs deleted file mode 100644 index f5fbd7a9e7..0000000000 --- a/core/bin/zksync_api/src/api_server/rest/v1/operations.rs +++ /dev/null @@ -1,283 +0,0 @@ -//! Operations part of API implementation. 
- -// Built-in uses - -// External uses -use actix_web::{ - web::{self, Json}, - Scope, -}; - -// Workspace uses -use zksync_api_client::rest::v1::{ - PriorityOpData, PriorityOpQuery, PriorityOpQueryError, PriorityOpReceipt, -}; -use zksync_storage::{ - chain::operations::records::StoredExecutedPriorityOperation, ConnectionPool, QueryResult, - StorageProcessor, -}; -use zksync_types::{BlockNumber, H256}; - -// Local uses -use super::{transactions::Receipt, Error as ApiError, JsonResult}; - -/// Shared data between `api/v1/operations` endpoints. -#[derive(Debug, Clone)] -struct ApiOperationsData { - pool: ConnectionPool, -} - -impl ApiOperationsData { - pub fn new(pool: ConnectionPool) -> Self { - Self { pool } - } - - pub async fn priority_op_data( - &self, - query: PriorityOpQuery, - ) -> QueryResult> { - let mut storage = self.pool.access_storage().await?; - - let executed_op = executed_priority_op_for_query(query, &mut storage).await?; - Ok(executed_op.map(convert::priority_op_data_from_stored)) - } - - pub async fn priority_op( - &self, - query: PriorityOpQuery, - ) -> QueryResult> { - let mut storage = self.pool.access_storage().await?; - - let executed_op = executed_priority_op_for_query(query, &mut storage).await?; - let executed_op = if let Some(executed_op) = executed_op { - executed_op - } else { - return Ok(None); - }; - - let blocks = storage - .chain() - .block_schema() - .load_block_range(BlockNumber(executed_op.block_number as u32), 1) - .await?; - - let block_info = blocks - .into_iter() - .next() - .expect("Database provided an incorrect priority op receipt"); - - let block = BlockNumber(block_info.block_number as u32); - let index = executed_op.block_index as u32; - - let receipt = if block_info.verify_tx_hash.is_some() { - PriorityOpReceipt { - status: Receipt::Verified { block }, - index: Some(index), - } - } else if block_info.commit_tx_hash.is_some() { - PriorityOpReceipt { - status: Receipt::Committed { block }, - index: Some(index), - } - } 
else { - PriorityOpReceipt { - status: Receipt::Executed, - index: None, - } - }; - - Ok(Some(receipt)) - } -} - -async fn executed_priority_op_for_query( - query: PriorityOpQuery, - storage: &mut StorageProcessor<'_>, -) -> QueryResult> { - let mut schema = storage.chain().operations_schema(); - - match query { - PriorityOpQuery::Id(serial_id) => { - schema - .get_executed_priority_operation(serial_id as u32) - .await - } - PriorityOpQuery::Hash(eth_hash) => { - schema - .get_executed_priority_operation_by_hash(eth_hash.as_bytes()) - .await - } - } -} - -mod convert { - use super::*; - - pub fn priority_op_data_from_stored(v: StoredExecutedPriorityOperation) -> PriorityOpData { - PriorityOpData { - data: serde_json::from_value(v.operation.clone()).unwrap_or_else(|err| - panic!( - "Database provided an incorrect priority operation data: {:?}, an error occurred: {}", - v.operation, err - ) - ), - eth_hash: H256::from_slice(&v.eth_hash), - serial_id: v.priority_op_serialid as u64, - } - } - - impl From for ApiError { - fn from(err: PriorityOpQueryError) -> Self { - ApiError::bad_request("Cannot parse PrioorityOpQuery").detail(err.detail) - } - } -} - -// Server implementation - -async fn priority_op( - data: web::Data, - web::Path(path): web::Path, -) -> JsonResult> { - let query = PriorityOpQuery::from_path(path)?; - - let receipt = data.priority_op(query).await.map_err(ApiError::internal)?; - Ok(Json(receipt)) -} - -async fn priority_op_data( - data: web::Data, - web::Path(path): web::Path, -) -> JsonResult> { - let query = PriorityOpQuery::from_path(path)?; - - let data = data - .priority_op_data(query) - .await - .map_err(ApiError::internal)?; - Ok(Json(data)) -} - -pub fn api_scope(pool: ConnectionPool) -> Scope { - let data = ApiOperationsData::new(pool); - - web::scope("operations") - .data(data) - .route("{id}", web::get().to(priority_op)) - .route("{id}/data", web::get().to(priority_op_data)) -} - -#[cfg(test)] -mod tests { - use 
zksync_storage::test_data::dummy_ethereum_tx_hash; - use zksync_types::{AccountId, Address}; - - use crate::api_server::v1::test_utils::{dummy_deposit_op, dummy_full_exit_op}; - - use super::{ - super::test_utils::{TestServerConfig, COMMITTED_OP_SERIAL_ID, VERIFIED_OP_SERIAL_ID}, - *, - }; - - #[actix_rt::test] - #[cfg_attr( - not(feature = "api_test"), - ignore = "Use `zk test rust-api` command to perform this test" - )] - async fn operations_scope() -> anyhow::Result<()> { - let cfg = TestServerConfig::default(); - cfg.fill_database().await?; - - let (client, server) = cfg.start_server(|cfg| api_scope(cfg.pool.clone())); - - // Check verified priority operation. - - let verified_op_hash = dummy_ethereum_tx_hash(VERIFIED_OP_SERIAL_ID as i64); - - let expected_receipt = PriorityOpReceipt { - index: Some(2), - status: Receipt::Verified { - block: BlockNumber(2), - }, - }; - assert_eq!( - client.priority_op(VERIFIED_OP_SERIAL_ID).await?.as_ref(), - Some(&expected_receipt) - ); - assert_eq!( - client.priority_op(verified_op_hash).await?.as_ref(), - Some(&expected_receipt) - ); - - let expected_data = PriorityOpData { - data: dummy_deposit_op(Address::default(), AccountId(1), 15, 2).op, - serial_id: VERIFIED_OP_SERIAL_ID, - eth_hash: verified_op_hash, - }; - - assert_eq!( - client - .priority_op_data(VERIFIED_OP_SERIAL_ID) - .await? - .as_ref() - .unwrap() - .serial_id, - expected_data.serial_id - ); - assert_eq!( - client - .priority_op_data(verified_op_hash) - .await? - .unwrap() - .eth_hash, - expected_data.eth_hash - ); - - // Check committed priority operation. 
- let committed_eth_hash = dummy_ethereum_tx_hash(COMMITTED_OP_SERIAL_ID as i64); - - let expected_receipt = PriorityOpReceipt { - index: Some(1), - status: Receipt::Committed { - block: BlockNumber(4), - }, - }; - assert_eq!( - client.priority_op(COMMITTED_OP_SERIAL_ID).await?.as_ref(), - Some(&expected_receipt) - ); - assert_eq!( - client.priority_op(committed_eth_hash).await?.as_ref(), - Some(&expected_receipt) - ); - - let expected_data = PriorityOpData { - data: dummy_full_exit_op(AccountId(1), Address::default(), 16, 3).op, - serial_id: COMMITTED_OP_SERIAL_ID, - eth_hash: committed_eth_hash, - }; - assert_eq!( - client - .priority_op_data(COMMITTED_OP_SERIAL_ID) - .await? - .unwrap() - .eth_hash, - expected_data.eth_hash - ); - assert_eq!( - client - .priority_op_data(committed_eth_hash) - .await? - .unwrap() - .serial_id, - expected_data.serial_id - ); - - // Try to get non-existing priority operation. - assert!(client.priority_op(1000).await?.is_none()); - assert!(client.priority_op(H256::default()).await?.is_none()); - - server.stop().await; - Ok(()) - } -} diff --git a/core/bin/zksync_api/src/api_server/rest/v1/search.rs b/core/bin/zksync_api/src/api_server/rest/v1/search.rs deleted file mode 100644 index 8931e5136e..0000000000 --- a/core/bin/zksync_api/src/api_server/rest/v1/search.rs +++ /dev/null @@ -1,116 +0,0 @@ -//! Search part of API implementation. - -// Built-in uses - -// External uses -use actix_web::{ - web::{self, Json}, - Scope, -}; - -// Workspace uses -use zksync_api_client::rest::v1::BlockSearchQuery; -use zksync_storage::{ConnectionPool, QueryResult}; - -// Local uses -use super::{ - blocks::{convert::block_info_from_details, BlockInfo}, - Error as ApiError, JsonResult, -}; - -/// Shared data between `api/v1/search` endpoints. 
-#[derive(Clone)] -struct ApiSearchData { - pool: ConnectionPool, -} - -impl ApiSearchData { - fn new(pool: ConnectionPool) -> Self { - Self { pool } - } - - async fn search_block(&self, query: String) -> QueryResult> { - let mut storage = self.pool.access_storage().await?; - - let block = storage - .chain() - .block_schema() - .find_block_by_height_or_hash(query) - .await; - - Ok(block.map(block_info_from_details)) - } -} - -// Server implementation - -async fn block_search( - data: web::Data, - web::Query(query): web::Query, -) -> JsonResult> { - let block_info = data - .search_block(query.query) - .await - .map_err(ApiError::internal)?; - - Ok(Json(block_info)) -} - -pub fn api_scope(pool: ConnectionPool) -> Scope { - let data = ApiSearchData::new(pool); - - web::scope("search") - .data(data) - .route("", web::get().to(block_search)) -} - -#[cfg(test)] -mod tests { - use super::{super::test_utils::TestServerConfig, *}; - use zksync_types::BlockNumber; - - #[actix_rt::test] - #[cfg_attr( - not(feature = "api_test"), - ignore = "Use `zk test rust-api` command to perform this test" - )] - async fn search_scope() -> anyhow::Result<()> { - let cfg = TestServerConfig::default(); - cfg.fill_database().await?; - - let (client, server) = cfg.start_server(move |cfg| api_scope(cfg.pool.clone())); - - // Search for the existing block by number. - let block_info = client - .search_block(BlockNumber(1)) - .await? - .expect("block should be exist"); - // Search for the existing block by root hash. - assert_eq!( - client - .search_block(block_info.new_state_root) - .await? - .unwrap(), - block_info - ); - // Search for the existing block by committed tx hash. - assert_eq!( - client - .search_block(block_info.commit_tx_hash.unwrap()) - .await? - .unwrap(), - block_info - ); - // Search for the existing block by verified tx hash. - assert_eq!( - client - .search_block(block_info.verify_tx_hash.unwrap()) - .await? 
- .unwrap(), - block_info - ); - - server.stop().await; - Ok(()) - } -} diff --git a/core/bin/zksync_api/src/api_server/rest/v1/tokens.rs b/core/bin/zksync_api/src/api_server/rest/v1/tokens.rs deleted file mode 100644 index 63894dce5c..0000000000 --- a/core/bin/zksync_api/src/api_server/rest/v1/tokens.rs +++ /dev/null @@ -1,288 +0,0 @@ -//! Tokens part of API implementation. - -// Built-in uses - -// External uses -use actix_web::{ - web::{self, Json}, - Scope, -}; -use bigdecimal::BigDecimal; -use futures::{ - channel::{mpsc, oneshot}, - prelude::*, -}; - -// Workspace uses -use zksync_api_client::rest::v1::{TokenPriceKind, TokenPriceQuery}; -use zksync_storage::{ConnectionPool, QueryResult}; -use zksync_types::{Token, TokenLike}; - -use crate::{ - fee_ticker::{PriceError, TickerRequest, TokenPriceRequestType}, - utils::token_db_cache::TokenDBCache, -}; - -// Local uses -use super::{ApiError, JsonResult}; - -/// Shared data between `api/v1/tokens` endpoints. -#[derive(Clone)] -struct ApiTokensData { - fee_ticker: mpsc::Sender, - tokens: TokenDBCache, - pool: ConnectionPool, -} - -impl ApiTokensData { - fn new( - pool: ConnectionPool, - tokens: TokenDBCache, - fee_ticker: mpsc::Sender, - ) -> Self { - Self { - fee_ticker, - tokens, - pool, - } - } - - async fn tokens(&self) -> QueryResult> { - let mut storage = self.pool.access_storage().await?; - - let tokens = storage.tokens_schema().load_tokens().await?; - - // Provide tokens in a predictable order. 
- let mut tokens: Vec<_> = tokens.into_iter().map(|(_k, v)| v).collect(); - tokens.sort_unstable_by_key(|token| token.id); - - Ok(tokens) - } - - async fn token(&self, token_like: TokenLike) -> QueryResult> { - let mut storage = self.pool.access_storage().await?; - - self.tokens.get_token(&mut storage, token_like).await - } - - async fn token_price_usd(&self, token: TokenLike) -> QueryResult> { - let (price_sender, price_receiver) = oneshot::channel(); - self.fee_ticker - .clone() - .send(TickerRequest::GetTokenPrice { - token, - response: price_sender, - req_type: TokenPriceRequestType::USDForOneToken, - }) - .await?; - - match price_receiver.await? { - Ok(price) => Ok(Some(price)), - Err(PriceError::TokenNotFound(_)) => Ok(None), - Err(PriceError::DBError(err)) => Err(anyhow::format_err!(err)), - Err(PriceError::ApiError(err)) => Err(anyhow::format_err!(err)), - } - } -} - -// Server implementation - -async fn tokens(data: web::Data) -> JsonResult> { - let tokens = data.tokens().await.map_err(ApiError::internal)?; - - Ok(Json(tokens)) -} - -async fn token_by_id( - data: web::Data, - web::Path(token_like): web::Path, -) -> JsonResult> { - let token_like = TokenLike::parse(&token_like); - - let token = data.token(token_like).await.map_err(ApiError::internal)?; - Ok(Json(token)) -} - -async fn token_price( - data: web::Data, - web::Path(token_like): web::Path, - web::Query(token_query): web::Query, -) -> JsonResult> { - let token_like = TokenLike::parse(&token_like); - - let price = match token_query.kind { - TokenPriceKind::Currency => data - .token_price_usd(token_like) - .await - .map_err(ApiError::internal)?, - - TokenPriceKind::Token => { - return Err(ApiError::not_implemented( - "price in tokens not yet implemented", - )) - } - }; - - Ok(Json(price)) -} - -pub fn api_scope( - pool: ConnectionPool, - tokens_db: TokenDBCache, - fee_ticker: mpsc::Sender, -) -> Scope { - let data = ApiTokensData::new(pool, tokens_db, fee_ticker); - - web::scope("tokens") - 
.data(data) - .route("", web::get().to(tokens)) - .route("{id}", web::get().to(token_by_id)) - .route("{id}/price", web::get().to(token_price)) -} - -#[cfg(test)] -mod tests { - use std::collections::HashMap; - - use zksync_types::{Address, TokenId}; - - use super::{super::test_utils::TestServerConfig, *}; - - use zksync_api_client::rest::v1::ClientError; - - fn dummy_fee_ticker(prices: &[(TokenLike, BigDecimal)]) -> mpsc::Sender { - let (sender, mut receiver) = mpsc::channel(10); - - let prices: HashMap<_, _> = prices.iter().cloned().collect(); - actix_rt::spawn(async move { - while let Some(item) = receiver.next().await { - match item { - TickerRequest::GetTokenPrice { - token, - response, - req_type, - } => { - assert_eq!( - req_type, - TokenPriceRequestType::USDForOneToken, - "Unsupported price request type" - ); - - let msg = if let Some(price) = prices.get(&token) { - Ok(price.clone()) - } else { - Err(PriceError::token_not_found(format!( - "Token not found: {:?}", - token - ))) - }; - - response.send(msg).expect("Unable to send response"); - } - _ => unreachable!("Unsupported request"), - } - } - }); - - sender - } - - #[actix_rt::test] - #[cfg_attr( - not(feature = "api_test"), - ignore = "Use `zk test rust-api` command to perform this test" - )] - async fn test_tokens_scope() -> anyhow::Result<()> { - let cfg = TestServerConfig::default(); - cfg.fill_database().await?; - - let prices = [ - (TokenLike::Id(TokenId(1)), 10_u64.into()), - (TokenLike::Id(TokenId(15)), 10_500_u64.into()), - ("ETH".into(), 0_u64.into()), - (Address::default().into(), 1_u64.into()), - ]; - let fee_ticker = dummy_fee_ticker(&prices); - - let (client, server) = cfg.start_server(move |cfg| { - api_scope(cfg.pool.clone(), TokenDBCache::new(), fee_ticker.clone()) - }); - - // Fee requests - for (token, expected_price) in &prices { - let actual_price = client.token_price(token, TokenPriceKind::Currency).await?; - - assert_eq!( - actual_price.as_ref(), - Some(expected_price), - "Price 
does not match" - ); - } - assert_eq!( - client - .token_price(&TokenLike::Id(TokenId(2)), TokenPriceKind::Currency) - .await?, - None - ); - let error = client - .token_price(&TokenLike::Id(TokenId(2)), TokenPriceKind::Token) - .await - .unwrap_err(); - assert!( - matches!(error, ClientError::BadRequest { .. }), - "Incorrect error type: got {:?} instead of BadRequest", - error - ); - // Tokens requests - let expected_tokens = { - let mut storage = cfg.pool.access_storage().await?; - - let mut tokens: Vec<_> = storage - .tokens_schema() - .load_tokens() - .await? - .values() - .cloned() - .collect(); - tokens.sort_unstable_by(|lhs, rhs| lhs.id.cmp(&rhs.id)); - tokens - }; - - assert_eq!(client.tokens().await?, expected_tokens); - - let expected_token = &expected_tokens[0]; - assert_eq!( - &client - .token_by_id(&TokenLike::Id(TokenId(0))) - .await? - .unwrap(), - expected_token - ); - assert_eq!( - &client - .token_by_id(&TokenLike::parse( - "0x0000000000000000000000000000000000000000" - )) - .await? - .unwrap(), - expected_token - ); - assert_eq!( - &client - .token_by_id(&TokenLike::parse( - "0000000000000000000000000000000000000000" - )) - .await? - .unwrap(), - expected_token - ); - assert_eq!( - &client.token_by_id(&TokenLike::parse("ETH")).await?.unwrap(), - expected_token - ); - assert_eq!(client.token_by_id(&TokenLike::parse("XM")).await?, None); - - server.stop().await; - Ok(()) - } -} diff --git a/core/bin/zksync_api/src/api_server/rest/v1/transactions.rs b/core/bin/zksync_api/src/api_server/rest/v1/transactions.rs index ae0bec3936..e69de29bb2 100644 --- a/core/bin/zksync_api/src/api_server/rest/v1/transactions.rs +++ b/core/bin/zksync_api/src/api_server/rest/v1/transactions.rs @@ -1,978 +0,0 @@ -//! Transactions part of API implementation. 
- -// Built-in uses - -// External uses -use actix_web::{ - web::{self, Json}, - Scope, -}; - -// Workspace uses -pub use zksync_api_client::rest::v1::{ - FastProcessingQuery, IncomingTx, IncomingTxBatch, IncomingTxBatchForFee, IncomingTxForFee, - Receipt, TxData, -}; -use zksync_storage::{ - chain::operations_ext::records::TxReceiptResponse, QueryResult, StorageProcessor, -}; -use zksync_types::{ - tx::{TxEthSignatureVariant, TxHash}, - BatchFee, BlockNumber, Fee, SignedZkSyncTx, -}; -// Local uses -use super::{Error as ApiError, JsonResult, Pagination, PaginationQuery}; -use crate::api_server::rpc_server::types::TxWithSignature; -use crate::api_server::tx_sender::{SubmitError, TxSender}; - -#[derive(Debug, Clone, Copy)] -pub enum SumbitErrorCode { - AccountCloseDisabled = 101, - InvalidParams = 102, - UnsupportedFastProcessing = 103, - IncorrectTx = 104, - TxAdd = 105, - InappropriateFeeToken = 106, - - Internal = 110, - CommunicationCoreServer = 111, - Other = 112, -} - -impl SumbitErrorCode { - fn from_err(err: &SubmitError) -> Self { - match err { - SubmitError::AccountCloseDisabled => Self::AccountCloseDisabled, - SubmitError::InvalidParams(_) => Self::InvalidParams, - SubmitError::UnsupportedFastProcessing => Self::UnsupportedFastProcessing, - SubmitError::IncorrectTx(_) => Self::IncorrectTx, - SubmitError::TxAdd(_) => Self::TxAdd, - SubmitError::InappropriateFeeToken => Self::InappropriateFeeToken, - SubmitError::CommunicationCoreServer(_) => Self::CommunicationCoreServer, - SubmitError::Internal(_) => Self::Internal, - SubmitError::Other(_) => Self::Other, - } - } - - fn as_code(self) -> u64 { - self as u64 - } -} - -impl From for ApiError { - fn from(inner: SubmitError) -> Self { - let internal_code = SumbitErrorCode::from_err(&inner).as_code(); - - if let SubmitError::Internal(err) = &inner { - ApiError::internal(err) - } else { - ApiError::bad_request(inner) - } - .code(internal_code) - } -} - -/// Shared data between `api/v1/transactions` endpoints. 
-#[derive(Clone)] -struct ApiTransactionsData { - tx_sender: TxSender, -} - -impl ApiTransactionsData { - fn new(tx_sender: TxSender) -> Self { - Self { tx_sender } - } - - async fn tx_receipt( - storage: &mut StorageProcessor<'_>, - tx_hash: TxHash, - ) -> QueryResult> { - storage - .chain() - .operations_ext_schema() - .tx_receipt(tx_hash.as_ref()) - .await - } - - async fn tx_status(&self, tx_hash: TxHash) -> QueryResult> { - let mut storage = self.tx_sender.pool.access_storage().await?; - - let tx_receipt = { - if let Some(tx_receipt) = Self::tx_receipt(&mut storage, tx_hash).await? { - tx_receipt - } else { - let tx_in_mempool = storage - .chain() - .mempool_schema() - .contains_tx(tx_hash) - .await?; - - let tx_receipt = if tx_in_mempool { - Some(Receipt::Pending) - } else { - None - }; - return Ok(tx_receipt); - } - }; - - let block_number = BlockNumber(tx_receipt.block_number as u32); - // Check the cases where we don't need to get block details. - if !tx_receipt.success { - return Ok(Some(Receipt::Rejected { - reason: tx_receipt.fail_reason, - })); - } - - if tx_receipt.verified { - return Ok(Some(Receipt::Verified { - block: block_number, - })); - } - - // To distinguish committed and executed transaction we have to examine - // the transaction's block. - // - // TODO `load_block_range` possibly is too heavy operation and we should write - // specific request in the storage schema. (Task number ????) - let block = storage - .chain() - .block_schema() - .load_block_range(block_number, 1) - .await? 
- .into_iter() - .next(); - - let is_committed = block - .filter(|block| block.commit_tx_hash.is_some()) - .is_some(); - - let tx_receipt = if is_committed { - Receipt::Committed { - block: block_number, - } - } else { - Receipt::Executed - }; - - Ok(Some(tx_receipt)) - } - - async fn tx_data(&self, tx_hash: TxHash) -> QueryResult> { - let mut storage = self.tx_sender.pool.access_storage().await?; - - let operation = storage - .chain() - .operations_schema() - .get_executed_operation(tx_hash.as_ref()) - .await?; - - if let Some(op) = operation { - let signed_tx = SignedZkSyncTx { - tx: serde_json::from_value(op.tx)?, - eth_sign_data: op.eth_sign_data.map(serde_json::from_value).transpose()?, - }; - - Ok(Some(signed_tx)) - } else { - // Check memory pool for pending transactions. - storage.chain().mempool_schema().get_tx(tx_hash).await - } - } -} - -// Server implementation - -async fn tx_status( - data: web::Data, - web::Path(tx_hash): web::Path, -) -> JsonResult> { - let tx_status = data.tx_status(tx_hash).await.map_err(ApiError::internal)?; - - Ok(Json(tx_status)) -} - -async fn tx_data( - data: web::Data, - web::Path(tx_hash): web::Path, -) -> JsonResult> { - let tx_data = data.tx_data(tx_hash).await.map_err(ApiError::internal)?; - - Ok(Json(tx_data.map(TxData::from))) -} - -async fn tx_receipt_by_id( - data: web::Data, - web::Path((tx_hash, receipt_id)): web::Path<(TxHash, u32)>, -) -> JsonResult> { - // At the moment we store only last receipt, so this endpoint is just only a stub. - if receipt_id > 0 { - return Ok(Json(None)); - } - - let tx_status = data.tx_status(tx_hash).await.map_err(ApiError::internal)?; - - Ok(Json(tx_status)) -} - -async fn tx_receipts( - data: web::Data, - web::Path(tx_hash): web::Path, - web::Query(pagination): web::Query, -) -> JsonResult> { - let (pagination, _limit) = pagination.into_inner()?; - // At the moment we store only last receipt, so this endpoint is just only a stub. 
- let is_some = match pagination { - Pagination::Before(before) if *before < 1 => false, - Pagination::After(_after) => false, - _ => true, - }; - - if is_some { - let tx_status = data.tx_status(tx_hash).await.map_err(ApiError::internal)?; - - Ok(Json(tx_status.into_iter().collect())) - } else { - Ok(Json(vec![])) - } -} - -async fn submit_tx( - data: web::Data, - Json(body): Json, - web::Query(query): web::Query, -) -> JsonResult { - let tx_hash = data - .tx_sender - .submit_tx(body.tx, body.signature, query.fast_processing) - .await - .map_err(ApiError::from)?; - - Ok(Json(tx_hash)) -} - -async fn submit_tx_batch( - data: web::Data, - Json(body): Json, -) -> JsonResult> { - let txs = body - .txs - .into_iter() - .map(|tx| TxWithSignature { - tx, - signature: TxEthSignatureVariant::Single(None), - }) - .collect(); - - let signatures = body.signature; - let tx_hashes = data - .tx_sender - .submit_txs_batch(txs, Some(signatures)) - .await - .map_err(ApiError::from)?; - - Ok(Json(tx_hashes)) -} - -async fn get_txs_fee_in_wei( - data: web::Data, - Json(body): Json, -) -> JsonResult { - let fee = data - .tx_sender - .get_txs_fee_in_wei(body.tx_type, body.address, body.token_like) - .await?; - Ok(Json(fee)) -} - -async fn get_txs_batch_fee_in_wei( - data: web::Data, - Json(body): Json, -) -> JsonResult { - let txs = body - .tx_types - .into_iter() - .zip(body.addresses.into_iter()) - .collect(); - let fee = data - .tx_sender - .get_txs_batch_fee_in_wei(txs, body.token_like) - .await - .map_err(ApiError::from)?; - - Ok(Json(fee)) -} - -pub fn api_scope(tx_sender: TxSender) -> Scope { - let data = ApiTransactionsData::new(tx_sender); - - web::scope("transactions") - .data(data) - .route("{tx_hash}", web::get().to(tx_status)) - .route("{tx_hash}/data", web::get().to(tx_data)) - .route( - "{tx_hash}/receipts/{receipt_id}", - web::get().to(tx_receipt_by_id), - ) - .route("{tx_hash}/receipts", web::get().to(tx_receipts)) - .route("submit", web::post().to(submit_tx)) - 
.route("submit/batch", web::post().to(submit_tx_batch)) - .route("fee/batch", web::post().to(get_txs_batch_fee_in_wei)) - .route("fee", web::post().to(get_txs_fee_in_wei)) -} - -#[cfg(test)] -mod tests { - use actix_web::App; - use bigdecimal::BigDecimal; - use futures::{channel::mpsc, StreamExt}; - use num::rational::Ratio; - use num::BigUint; - - use zksync_api_client::rest::v1::Client; - use zksync_storage::ConnectionPool; - use zksync_test_account::ZkSyncAccount; - use zksync_types::{ - tokens::{Token, TokenLike}, - tx::{EthBatchSignData, EthBatchSignatures, PackedEthSignature, TxEthSignature}, - AccountId, BlockNumber, Fee, Nonce, - OutputFeeType::Withdraw, - TokenId, ZkSyncTx, - }; - - use crate::{ - api_server::helpers::try_parse_tx_hash, - core_api_client::CoreApiClient, - fee_ticker::{ResponseBatchFee, ResponseFee, TickerRequest}, - signature_checker::{VerifiedTx, VerifySignatureRequest}, - }; - - use super::super::test_utils::{TestServerConfig, TestTransactions}; - use super::*; - - fn submit_txs_loopback() -> (CoreApiClient, actix_web::test::TestServer) { - async fn send_tx(_tx: Json) -> Json> { - Json(Ok(())) - } - - async fn send_txs_batch( - _txs: Json<(Vec, Vec)>, - ) -> Json> { - Json(Ok(())) - } - - let server = actix_web::test::start(move || { - App::new() - .route("new_tx", web::post().to(send_tx)) - .route("new_txs_batch", web::post().to(send_txs_batch)) - }); - - let url = server.url("").trim_end_matches('/').to_owned(); - - (CoreApiClient::new(url), server) - } - - fn dummy_fee_ticker() -> mpsc::Sender { - let (sender, mut receiver) = mpsc::channel(10); - - actix_rt::spawn(async move { - while let Some(item) = receiver.next().await { - match item { - TickerRequest::GetTxFee { response, .. 
} => { - let normal_fee = Fee::new( - Withdraw, - BigUint::from(1_u64).into(), - BigUint::from(1_u64).into(), - 1_u64.into(), - 1_u64.into(), - ); - - let subsidy_fee = normal_fee.clone(); - - let res = Ok(ResponseFee { - normal_fee, - subsidy_fee, - subsidy_size_usd: Ratio::from_integer(0u32.into()), - }); - - response.send(res).expect("Unable to send response"); - } - TickerRequest::GetTokenPrice { response, .. } => { - let price = Ok(BigDecimal::from(1_u64)); - - response.send(price).expect("Unable to send response"); - } - TickerRequest::IsTokenAllowed { token, response } => { - // For test purposes, PHNX token is not allowed. - let is_phnx = match token { - TokenLike::Id(id) => *id == 1, - TokenLike::Symbol(sym) => sym == "PHNX", - TokenLike::Address(_) => unreachable!(), - }; - response.send(Ok(!is_phnx)).unwrap_or_default(); - } - TickerRequest::GetBatchTxFee { - response, - transactions, - .. - } => { - let normal_fee = BatchFee { - total_fee: BigUint::from(transactions.len()), - }; - let subsidy_fee = normal_fee.clone(); - - let res = Ok(ResponseBatchFee { - normal_fee, - subsidy_fee, - subsidy_size_usd: Ratio::from_integer(0u32.into()), - }); - - response.send(res).expect("Unable to send response"); - } - } - } - }); - - sender - } - - fn dummy_sign_verifier() -> mpsc::Sender { - let (sender, mut receiver) = mpsc::channel::(10); - - actix_rt::spawn(async move { - while let Some(item) = receiver.next().await { - let verified = VerifiedTx::unverified(item.data.get_tx_variant()); - item.response - .send(Ok(verified)) - .expect("Unable to send response"); - } - }); - - sender - } - - struct TestServer { - core_server: actix_web::test::TestServer, - api_server: actix_web::test::TestServer, - #[allow(dead_code)] - pool: ConnectionPool, - } - - impl TestServer { - async fn new() -> anyhow::Result<(Client, Self)> { - let (core_client, core_server) = submit_txs_loopback(); - - let mut cfg = TestServerConfig::default(); - cfg.config - .api - .common - 
.fee_free_accounts - .push(AccountId(0xfee)); - let pool = cfg.pool.clone(); - cfg.fill_database().await?; - - let sign_verifier = dummy_sign_verifier(); - let fee_ticker = dummy_fee_ticker(); - - let (api_client, api_server) = cfg.start_server(move |cfg| { - api_scope(TxSender::with_client( - core_client.clone(), - cfg.pool.clone(), - sign_verifier.clone(), - fee_ticker.clone(), - &cfg.config, - )) - }); - - Ok(( - api_client, - Self { - core_server, - api_server, - pool, - }, - )) - } - - async fn stop(self) { - self.api_server.stop().await; - self.core_server.stop().await; - } - } - - #[actix_rt::test] - #[cfg_attr( - not(feature = "api_test"), - ignore = "Use `zk test rust-api` command to perform this test" - )] - async fn test_rust_api() -> anyhow::Result<()> { - // TODO: ZKS-561 - test_transactions_scope().await?; - test_bad_fee_token().await?; - test_fast_processing_flag().await?; - test_fee_free_accounts().await?; - Ok(()) - } - - #[actix_rt::test] - #[cfg_attr( - not(feature = "api_test"), - ignore = "Use `zk test rust-api` command to perform this test" - )] - async fn test_submit_txs_loopback() -> anyhow::Result<()> { - let (core_client, core_server) = submit_txs_loopback(); - - let signed_tx = SignedZkSyncTx { - tx: TestServerConfig::gen_zk_txs(0).txs[0].0.clone(), - eth_sign_data: None, - }; - - core_client.send_tx(signed_tx.clone()).await??; - core_client - .send_txs_batch(vec![signed_tx], vec![]) - .await??; - - core_server.stop().await; - Ok(()) - } - - async fn test_transactions_scope() -> anyhow::Result<()> { - let (client, server) = TestServer::new().await?; - - let committed_tx_hash = { - let mut storage = server.pool.access_storage().await?; - - let transactions = storage - .chain() - .block_schema() - .get_block_transactions(BlockNumber(1)) - .await?; - - try_parse_tx_hash(&transactions[0].tx_hash).unwrap() - }; - - // Tx receipt by ID. 
- let unknown_tx_hash = TxHash::default(); - assert!(client - .tx_receipt_by_id(committed_tx_hash, 0) - .await? - .is_some()); - assert!(client - .tx_receipt_by_id(committed_tx_hash, 1) - .await? - .is_none()); - assert!(client.tx_receipt_by_id(unknown_tx_hash, 0).await?.is_none()); - - // Tx receipts. - let queries = vec![ - ( - (committed_tx_hash, Pagination::Before(BlockNumber(1)), 1), - vec![Receipt::Verified { - block: BlockNumber(1), - }], - ), - ( - (committed_tx_hash, Pagination::Last, 1), - vec![Receipt::Verified { - block: BlockNumber(1), - }], - ), - ( - (committed_tx_hash, Pagination::Before(BlockNumber(2)), 1), - vec![Receipt::Verified { - block: BlockNumber(1), - }], - ), - ( - (committed_tx_hash, Pagination::After(BlockNumber(0)), 1), - vec![], - ), - ((unknown_tx_hash, Pagination::Last, 1), vec![]), - ]; - - for (query, expected_response) in queries { - let actual_response = client.tx_receipts(query.0, query.1, query.2).await?; - - assert_eq!( - actual_response, - expected_response, - "tx: {} from: {:?} limit: {:?}", - query.0.to_string(), - query.1, - query.2 - ); - } - - // Tx status and data for committed transaction. - assert_eq!( - client.tx_status(committed_tx_hash).await?, - Some(Receipt::Verified { - block: BlockNumber(1) - }) - ); - assert_eq!( - SignedZkSyncTx::from(client.tx_data(committed_tx_hash).await?.unwrap()).hash(), - committed_tx_hash - ); - - // Tx status and data for pending transaction. - let tx_hash = { - let mut storage = server.pool.access_storage().await?; - - let tx = TestServerConfig::gen_zk_txs(1_u64).txs[0].0.clone(); - let tx_hash = tx.hash(); - storage - .chain() - .mempool_schema() - .insert_tx(&SignedZkSyncTx { - tx, - eth_sign_data: None, - }) - .await?; - - tx_hash - }; - assert_eq!(client.tx_status(tx_hash).await?, Some(Receipt::Pending)); - assert_eq!( - SignedZkSyncTx::from(client.tx_data(tx_hash).await?.unwrap()).hash(), - tx_hash - ); - - // Tx status for unknown transaction. 
- let tx_hash = TestServerConfig::gen_zk_txs(1_u64).txs[1].0.hash(); - assert_eq!(client.tx_status(tx_hash).await?, None); - assert!(client.tx_data(tx_hash).await?.is_none()); - - // Submit correct transaction. - let tx = TestServerConfig::gen_zk_txs(1_00).txs[0].0.clone(); - let expected_tx_hash = tx.hash(); - assert_eq!( - client - .submit_tx(tx, TxEthSignatureVariant::Single(None), None) - .await?, - expected_tx_hash - ); - - // Submit transaction without fee. - let tx = TestServerConfig::gen_zk_txs(0).txs[0].0.clone(); - assert!(client - .submit_tx(tx, TxEthSignatureVariant::Single(None), None) - .await - .unwrap_err() - .to_string() - .contains("Transaction fee is too low")); - - // Submit correct transactions batch. - let TestTransactions { acc, txs } = TestServerConfig::gen_zk_txs(1_00); - let eth = Token::new(TokenId(0), Default::default(), "ETH", 18); - let (good_batch, tx_hashes): (Vec<_>, Vec<_>) = txs - .into_iter() - .map(|(tx, _op)| { - let tx_hash = tx.hash(); - (tx, tx_hash) - }) - .unzip(); - - let txs = good_batch - .iter() - .zip(std::iter::repeat(eth)) - .map(|(tx, token)| (tx.clone(), token, tx.account())) - .collect::>(); - let batch_signature = { - let eth_private_key = acc - .try_get_eth_private_key() - .expect("Should have ETH private key"); - let batch_message = EthBatchSignData::get_batch_sign_message(txs); - let eth_sig = PackedEthSignature::sign(eth_private_key, &batch_message).unwrap(); - let single_signature = TxEthSignature::EthereumSignature(eth_sig); - - EthBatchSignatures::Single(single_signature) - }; - - assert_eq!( - client.submit_tx_batch(good_batch, batch_signature).await?, - tx_hashes - ); - - server.stop().await; - Ok(()) - } - - /// This test checks the following criteria: - /// - /// - Attempt to pay fees in an inappropriate token fails for single txs. - /// - Attempt to pay fees in an inappropriate token fails for single batch. 
- /// - Batch with an inappropriate token still can be processed if the fee is covered with a common token. - async fn test_bad_fee_token() -> anyhow::Result<()> { - let (client, server) = TestServer::new().await?; - - let from = ZkSyncAccount::rand(); - from.set_account_id(Some(AccountId(0xdead))); - let to = ZkSyncAccount::rand(); - - // Submit transaction with a fee token that is not allowed. - - let (tx, eth_sig) = from.sign_transfer( - TokenId(1), - "PHNX", - 100u64.into(), - 100u64.into(), - &to.address, - Some(Nonce(0)), - false, - Default::default(), - ); - let transfer_bad_token = ZkSyncTx::Transfer(Box::new(tx)); - assert!(client - .submit_tx( - transfer_bad_token.clone(), - TxEthSignatureVariant::Single(eth_sig.map(TxEthSignature::EthereumSignature)), - None - ) - .await - .unwrap_err() - .to_string() - .contains("Chosen token is not suitable for paying fees")); - - // Prepare batch and make the same mistake. - let bad_token = Token::new(TokenId(1), Default::default(), "PHNX", 18); - let bad_batch = vec![transfer_bad_token.clone(), transfer_bad_token]; - let txs = bad_batch - .iter() - .zip(std::iter::repeat(bad_token)) - .map(|(tx, token)| (tx.clone(), token, tx.account())) - .collect::>(); - let batch_signature = { - let batch_message = EthBatchSignData::get_batch_sign_message(txs); - let eth_private_key = from - .try_get_eth_private_key() - .expect("should have eth private key"); - let eth_sig = PackedEthSignature::sign(eth_private_key, &batch_message).unwrap(); - let single_signature = TxEthSignature::EthereumSignature(eth_sig); - - EthBatchSignatures::Single(single_signature) - }; - - assert!(client - .submit_tx_batch(bad_batch, batch_signature) - .await - .unwrap_err() - .to_string() - .contains("Chosen token is not suitable for paying fees")); - - // Finally, prepare the batch in which fee is covered by the supported token. 
- let (tx, _) = from.sign_transfer( - TokenId(1), - "PHNX", - 100u64.into(), - 0u64.into(), // Note that fee is zero, which is OK. - &to.address, - Some(Nonce(0)), - false, - Default::default(), - ); - let phnx_transfer = ZkSyncTx::Transfer(Box::new(tx)); - let phnx_transfer_hash = phnx_transfer.hash(); - let (tx, _) = from.sign_transfer( - TokenId(0), - "ETH", - 0u64.into(), - 200u64.into(), // Here we pay fees for both transfers in ETH. - &to.address, - Some(Nonce(0)), - false, - Default::default(), - ); - let fee_tx = ZkSyncTx::Transfer(Box::new(tx)); - let fee_tx_hash = fee_tx.hash(); - - let eth = Token::new(TokenId(0), Default::default(), "ETH", 18); - let good_batch = vec![phnx_transfer, fee_tx]; - let good_batch_hashes = vec![phnx_transfer_hash, fee_tx_hash]; - let txs = good_batch - .iter() - .zip(std::iter::repeat(eth)) - .map(|(tx, token)| (tx.clone(), token, tx.account())) - .collect::>(); - let batch_signature = { - let batch_message = EthBatchSignData::get_batch_sign_message(txs); - let eth_private_key = from - .try_get_eth_private_key() - .expect("should have eth private key"); - let eth_sig = PackedEthSignature::sign(eth_private_key, &batch_message).unwrap(); - let single_signature = TxEthSignature::EthereumSignature(eth_sig); - - EthBatchSignatures::Single(single_signature) - }; - - assert_eq!( - client.submit_tx_batch(good_batch, batch_signature).await?, - good_batch_hashes - ); - - server.stop().await; - Ok(()) - } - - /// This test checks the following: - /// - /// Fee free account can pay zero fee in single tx. - /// Not a fee free account can't pay zero fee in single tx. 
- async fn test_fee_free_accounts() -> anyhow::Result<()> { - let (client, server) = TestServer::new().await?; - - let from1 = ZkSyncAccount::rand(); - from1.set_account_id(Some(AccountId(0xfee))); - let to1 = ZkSyncAccount::rand(); - - // Submit transaction with a zero fee by the fee free account - let (tx1, eth_sig1) = from1.sign_transfer( - TokenId(0), - "ETH", - 0u64.into(), - 0u64.into(), - &to1.address, - Some(Nonce(0)), - false, - Default::default(), - ); - let transfer1 = ZkSyncTx::Transfer(Box::new(tx1)); - client - .submit_tx( - transfer1.clone(), - TxEthSignatureVariant::Single(eth_sig1.map(TxEthSignature::EthereumSignature)), - None, - ) - .await - .expect("fee free account transaction fails"); - - let from2 = ZkSyncAccount::rand(); - from2.set_account_id(Some(AccountId(0xbee))); - let to2 = ZkSyncAccount::rand(); - - // Submit transaction with a zero fee not by the fee free account - let (tx2, eth_sig2) = from2.sign_transfer( - TokenId(0), - "ETH", - 0u64.into(), - 0u64.into(), - &to2.address, - Some(Nonce(0)), - false, - Default::default(), - ); - let transfer2 = ZkSyncTx::Transfer(Box::new(tx2)); - client - .submit_tx( - transfer2.clone(), - TxEthSignatureVariant::Single(eth_sig2.map(TxEthSignature::EthereumSignature)), - None, - ) - .await - .unwrap_err() - .to_string() - .contains("Transaction fee is too low"); - - server.stop().await; - Ok(()) - } - - /// This test checks the following criteria: - /// - /// - Attempt to submit non-withdraw transaction with the enabled fast-processing. - /// - Attempt to submit non-withdraw transaction with the disabled fast-processing. - /// - Attempt to submit withdraw transaction with the enabled fast-processing. 
- async fn test_fast_processing_flag() -> anyhow::Result<()> { - let (client, server) = TestServer::new().await?; - - let from = ZkSyncAccount::rand(); - from.set_account_id(Some(AccountId(0xdead))); - let to = ZkSyncAccount::rand(); - - // Submit non-withdraw transaction with the enabled fast-processing. - let (tx, eth_sig) = from.sign_transfer( - TokenId(0), - "ETH", - 10_u64.into(), - 10_u64.into(), - &to.address, - None, - false, - Default::default(), - ); - client - .submit_tx( - ZkSyncTx::Transfer(Box::new(tx.clone())), - TxEthSignatureVariant::Single( - eth_sig.clone().map(TxEthSignature::EthereumSignature), - ), - Some(true), - ) - .await - .unwrap_err(); - // Submit with the disabled fast-processing. - client - .submit_tx( - ZkSyncTx::Transfer(Box::new(tx.clone())), - TxEthSignatureVariant::Single( - eth_sig.clone().map(TxEthSignature::EthereumSignature), - ), - Some(false), - ) - .await?; - // Submit without fast-processing flag. - client - .submit_tx( - ZkSyncTx::Transfer(Box::new(tx)), - TxEthSignatureVariant::Single( - eth_sig.clone().map(TxEthSignature::EthereumSignature), - ), - None, - ) - .await?; - - // Submit withdraw transaction with the enabled fast-processing. - let (tx, eth_sig) = from.sign_withdraw( - TokenId(0), - "ETH", - 100u64.into(), - 10u64.into(), - &to.address, - None, - false, - Default::default(), - ); - client - .submit_tx( - ZkSyncTx::Withdraw(Box::new(tx.clone())), - TxEthSignatureVariant::Single( - eth_sig.clone().map(TxEthSignature::EthereumSignature), - ), - Some(true), - ) - .await?; - // Submit with the disabled fast-processing. - client - .submit_tx( - ZkSyncTx::Withdraw(Box::new(tx.clone())), - TxEthSignatureVariant::Single( - eth_sig.clone().map(TxEthSignature::EthereumSignature), - ), - Some(false), - ) - .await?; - // Submit without fast-processing flag. 
- client - .submit_tx( - ZkSyncTx::Withdraw(Box::new(tx)), - TxEthSignatureVariant::Single( - eth_sig.clone().map(TxEthSignature::EthereumSignature), - ), - None, - ) - .await?; - - server.stop().await; - Ok(()) - } -} diff --git a/core/bin/zksync_api/src/api_server/rpc_server/rpc_impl.rs b/core/bin/zksync_api/src/api_server/rpc_server/rpc_impl.rs index 65d7bf6aed..a7191bc430 100644 --- a/core/bin/zksync_api/src/api_server/rpc_server/rpc_impl.rs +++ b/core/bin/zksync_api/src/api_server/rpc_server/rpc_impl.rs @@ -4,10 +4,10 @@ use std::time::Instant; use bigdecimal::BigDecimal; use jsonrpc_core::{Error, Result}; // Workspace uses -use zksync_api_client::rest::v1::accounts::ApiNFT; +use zksync_api_types::v02::fee::ApiTxFeeTypes; use zksync_types::{ - tx::{EthBatchSignatures, TxEthSignatureVariant, TxHash}, - Address, BatchFee, Fee, Token, TokenId, TokenLike, TxFeeTypes, ZkSyncTx, + tx::{EthBatchSignatures, TxEthSignature, TxEthSignatureVariant, TxHash}, + Address, BatchFee, Fee, Token, TokenId, TokenLike, TotalFee, TxFeeTypes, ZkSyncTx, }; // Local uses @@ -15,6 +15,7 @@ use crate::{api_server::tx_sender::SubmitError, fee_ticker::TokenPriceRequestTyp use super::{types::*, RpcApp}; use crate::api_server::rpc_server::error::RpcErrorCodes; +use zksync_types::tokens::ApiNFT; impl RpcApp { pub async fn _impl_account_info(self, address: Address) -> Result { @@ -23,12 +24,24 @@ impl RpcApp { let account_state = self.get_account_state(address).await?; let depositing_ops = self.get_ongoing_deposits_impl(address).await?; + let mut storage = self.access_storage().await?; let depositing = DepositingAccountBalances::from_pending_ops( - &mut self.access_storage().await?, + &mut storage, &self.tx_sender.tokens, depositing_ops, ) .await?; + let account_type = if let Some(account_id) = account_state.account_id { + storage + .chain() + .account_schema() + .account_type_by_id(account_id) + .await + .map_err(|_| Error::internal_error())? 
+ .map(|t| t.into()) + } else { + None + }; metrics::histogram!("api.rpc.account_info", start.elapsed()); Ok(AccountInfoResp { @@ -37,6 +50,7 @@ impl RpcApp { committed: account_state.committed, verified: account_state.verified, depositing, + account_type, }) } @@ -93,6 +107,7 @@ impl RpcApp { }) } + #[allow(deprecated)] pub async fn _impl_tx_submit( self, tx: Box, @@ -102,7 +117,7 @@ impl RpcApp { let start = Instant::now(); let result = self .tx_sender - .submit_tx(*tx, *signature, fast_processing) + .submit_tx_with_separate_fp(*tx, *signature, fast_processing) .await .map_err(Error::from); metrics::histogram!("api.rpc.tx_submit", start.elapsed()); @@ -115,11 +130,18 @@ impl RpcApp { eth_signatures: Option, ) -> Result> { let start = Instant::now(); - let result = self + let result: Result> = self .tx_sender .submit_txs_batch(txs, eth_signatures) .await - .map_err(Error::from); + .map_err(Error::from) + .map(|response| { + response + .transaction_hashes + .into_iter() + .map(|tx_hash| tx_hash.0) + .collect() + }); metrics::histogram!("api.rpc.submit_txs_batch", start.elapsed()); result } @@ -195,7 +217,7 @@ impl RpcApp { pub async fn _impl_get_tx_fee( self, - tx_type: TxFeeTypes, + tx_type: ApiTxFeeTypes, address: Address, token: TokenLike, ) -> Result { @@ -205,7 +227,8 @@ impl RpcApp { if !token_allowed { return Err(SubmitError::InappropriateFeeToken.into()); } - let result = Self::ticker_request(ticker.clone(), tx_type, address, token.clone()).await?; + let result = + Self::ticker_request(ticker.clone(), tx_type.into(), address, token.clone()).await?; let token = self.tx_sender.token_info_from_id(token).await?; let allowed_subsidy = self @@ -224,10 +247,10 @@ impl RpcApp { pub async fn _impl_get_txs_batch_fee_in_wei( self, - tx_types: Vec, + tx_types: Vec, addresses: Vec
, token: TokenLike, - ) -> Result { + ) -> Result { let start = Instant::now(); if tx_types.len() != addresses.len() { return Err(Error { @@ -243,8 +266,12 @@ impl RpcApp { return Err(SubmitError::InappropriateFeeToken.into()); } - let transactions: Vec<(TxFeeTypes, Address)> = - (tx_types.iter().cloned().zip(addresses.iter().cloned())).collect(); + let transactions: Vec<(TxFeeTypes, Address)> = (tx_types + .iter() + .cloned() + .map(|fee_type| fee_type.into()) + .zip(addresses.iter().cloned())) + .collect(); let result = Self::ticker_batch_fee_request(ticker, transactions, token.clone()).await?; let token = self.tx_sender.token_info_from_id(token).await?; @@ -259,7 +286,9 @@ impl RpcApp { }; metrics::histogram!("api.rpc.get_txs_batch_fee_in_wei", start.elapsed()); - Ok(fee) + Ok(TotalFee { + total_fee: fee.total_fee, + }) } pub async fn _impl_get_token_price(self, token: TokenLike) -> Result { diff --git a/core/bin/zksync_api/src/api_server/rpc_server/rpc_trait.rs b/core/bin/zksync_api/src/api_server/rpc_server/rpc_trait.rs index 9e81148658..42877fec4f 100644 --- a/core/bin/zksync_api/src/api_server/rpc_server/rpc_trait.rs +++ b/core/bin/zksync_api/src/api_server/rpc_server/rpc_trait.rs @@ -6,15 +6,16 @@ use jsonrpc_core::Error; use jsonrpc_derive::rpc; // Workspace uses -use zksync_api_client::rest::v1::accounts::ApiNFT; +use zksync_api_types::v02::fee::ApiTxFeeTypes; use zksync_crypto::params::ZKSYNC_VERSION; use zksync_types::{ tx::{EthBatchSignatures, TxEthSignatureVariant, TxHash}, - Address, BatchFee, Fee, Token, TokenId, TokenLike, TxFeeTypes, ZkSyncTx, + Address, BatchFee, Fee, Token, TokenId, TokenLike, TotalFee, TxFeeTypes, ZkSyncTx, }; // Local uses use super::{types::*, RpcApp}; +use zksync_types::tokens::ApiNFT; pub type FutureResp = Box + Send>; @@ -55,19 +56,19 @@ pub trait Rpc { #[rpc(name = "get_tx_fee", returns = "Fee")] fn get_tx_fee( &self, - tx_type: TxFeeTypes, + tx_type: ApiTxFeeTypes, _address: Address, token_like: TokenLike, ) -> 
FutureResp; // _addresses argument is left for the backward compatibility. - #[rpc(name = "get_txs_batch_fee_in_wei", returns = "BatchFee")] + #[rpc(name = "get_txs_batch_fee_in_wei", returns = "TotalFee")] fn get_txs_batch_fee_in_wei( &self, - tx_types: Vec, + tx_types: Vec, _addresses: Vec
, token_like: TokenLike, - ) -> FutureResp; + ) -> FutureResp; #[rpc(name = "get_token_price", returns = "BigDecimal")] fn get_token_price(&self, token_like: TokenLike) -> FutureResp; @@ -161,7 +162,7 @@ impl Rpc for RpcApp { fn get_tx_fee( &self, - tx_type: TxFeeTypes, + tx_type: ApiTxFeeTypes, address: Address, token_like: TokenLike, ) -> FutureResp { @@ -178,10 +179,10 @@ impl Rpc for RpcApp { fn get_txs_batch_fee_in_wei( &self, - tx_types: Vec, + tx_types: Vec, addresses: Vec
, token_like: TokenLike, - ) -> FutureResp { + ) -> FutureResp { let handle = self.runtime_handle.clone(); let self_ = self.clone(); let resp = async move { diff --git a/core/bin/zksync_api/src/api_server/rpc_server/types.rs b/core/bin/zksync_api/src/api_server/rpc_server/types.rs index 66ce33a169..0d28c257b4 100644 --- a/core/bin/zksync_api/src/api_server/rpc_server/types.rs +++ b/core/bin/zksync_api/src/api_server/rpc_server/types.rs @@ -6,20 +6,19 @@ use num::{BigUint, ToPrimitive}; use serde::{Deserialize, Serialize}; // Workspace uses +use zksync_api_types::v02::account::EthAccountType; use zksync_storage::StorageProcessor; use zksync_types::{ tx::TxEthSignatureVariant, Account, AccountId, Address, Nonce, PriorityOp, PubKeyHash, TokenId, - ZkSyncPriorityOp, ZkSyncTx, + ZkSyncPriorityOp, ZkSyncTx, NFT, }; use zksync_utils::{BigUintSerdeAsRadix10Str, BigUintSerdeWrapper}; -// This wrong dependency, but the whole data about account info stored in this place -use zksync_api_client::rest::v1::accounts::NFT; +// // This wrong dependency, but the whole data about account info stored in this place +// use zksync_api_client::rest::v1::accounts::NFT; // Local uses -use crate::{ - api_server::v1::accounts::account_state_from_storage, utils::token_db_cache::TokenDBCache, -}; +use crate::utils::token_db_cache::TokenDBCache; #[derive(Debug, Clone, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] @@ -45,21 +44,22 @@ impl ResponseAccountState { tokens: &TokenDBCache, account: Account, ) -> Result { - let inner = account_state_from_storage(storage, tokens, &account) - .await - .map_err(|_| Error::internal_error())?; - - // Old code used `HashMap` as well and didn't rely on the particular order, - // so here we use `HashMap` as well for the consistency. 
- let balances: HashMap<_, _> = inner.balances.into_iter().collect(); - - Ok(Self { - balances, - nfts: inner.nfts, - minted_nfts: inner.minted_nfts, - nonce: inner.nonce, - pub_key_hash: inner.pub_key_hash, - }) + unimplemented!() + // let inner = account_state_from_storage(storage, tokens, &account) + // .await + // .map_err(|_| Error::internal_error())?; + // + // // Old code used `HashMap` as well and didn't rely on the particular order, + // // so here we use `HashMap` as well for the consistency. + // let balances: HashMap<_, _> = inner.balances.into_iter().collect(); + // + // Ok(Self { + // balances, + // nfts: inner.nfts, + // minted_nfts: inner.minted_nfts, + // nonce: inner.nonce, + // pub_key_hash: inner.pub_key_hash, + // }) } } @@ -132,6 +132,7 @@ pub struct AccountInfoResp { pub depositing: DepositingAccountBalances, pub committed: ResponseAccountState, pub verified: ResponseAccountState, + pub account_type: Option, } #[derive(Debug, Serialize, Deserialize, Clone)] diff --git a/core/bin/zksync_api/src/api_server/tx_sender.rs b/core/bin/zksync_api/src/api_server/tx_sender.rs index a7fd32782b..49863ff9d8 100644 --- a/core/bin/zksync_api/src/api_server/tx_sender.rs +++ b/core/bin/zksync_api/src/api_server/tx_sender.rs @@ -21,6 +21,7 @@ use num::{bigint::ToBigInt, rational::Ratio, BigUint, CheckedSub, Zero}; use thiserror::Error; // Workspace uses +use zksync_api_types::v02::transaction::{SubmitBatchResponse, TxHashSerializeWrapper}; use zksync_config::ZkSyncConfig; use zksync_storage::{chain::account::records::EthAccountType, ConnectionPool}; use zksync_types::{ @@ -317,21 +318,15 @@ impl TxSender { Ok(()) } - pub async fn submit_tx( + // This method is left for RPC API + #[deprecated(note = "Use the submit_tx function instead")] + pub async fn submit_tx_with_separate_fp( &self, mut tx: ZkSyncTx, signature: TxEthSignatureVariant, fast_processing: Option, ) -> Result { - if tx.is_close() { - return Err(SubmitError::AccountCloseDisabled); - } - - if let 
ZkSyncTx::ForcedExit(forced_exit) = &tx { - self.check_forced_exit(forced_exit).await?; - } - - let fast_processing = fast_processing.unwrap_or_default(); // `None` => false + let fast_processing = fast_processing.unwrap_or(false); if fast_processing && !tx.is_withdraw() { return Err(SubmitError::UnsupportedFastProcessing); } @@ -344,12 +339,25 @@ impl TxSender { )); } - // `fast` field is not used in serializing (as it's an internal server option, - // not the actual transaction part), so we have to set it manually depending on - // the RPC method input. withdraw.fast = fast_processing; } + self.submit_tx(tx, signature).await + } + + pub async fn submit_tx( + &self, + tx: ZkSyncTx, + signature: TxEthSignatureVariant, + ) -> Result { + if tx.is_close() { + return Err(SubmitError::AccountCloseDisabled); + } + + if let ZkSyncTx::ForcedExit(forced_exit) = &tx { + self.check_forced_exit(forced_exit).await?; + } + // Resolve the token. let token = self.token_info_from_id(tx.token_id()).await?; let allowed_subsidy = self.subsidy_accumulator.get_allowed_subsidy(&token.address); @@ -475,7 +483,7 @@ impl TxSender { &self, txs: Vec, eth_signatures: Option, - ) -> Result, SubmitError> { + ) -> Result { // Bring the received signatures into a vector for simplified work. let eth_signatures = EthBatchSignatures::api_arg_to_vec(eth_signatures); @@ -717,7 +725,11 @@ impl TxSender { .map_err(SubmitError::communication_core_server)? 
.map_err(SubmitError::TxAdd)?; - Ok(tx_hashes) + let batch_hash = TxHash::batch_hash(&tx_hashes); + Ok(SubmitBatchResponse { + transaction_hashes: tx_hashes.into_iter().map(TxHashSerializeWrapper).collect(), + batch_hash, + }) } pub async fn get_txs_fee_in_wei( diff --git a/core/bin/zksync_api/src/bin/dev-ticker-server.rs b/core/bin/zksync_api/src/bin/dev-ticker-server.rs index 7cef170ef8..5b22496fb7 100644 --- a/core/bin/zksync_api/src/bin/dev-ticker-server.rs +++ b/core/bin/zksync_api/src/bin/dev-ticker-server.rs @@ -9,9 +9,11 @@ use bigdecimal::BigDecimal; use chrono::{SecondsFormat, Utc}; use serde::{Deserialize, Serialize}; use serde_json::json; +use std::{collections::HashMap, fs::read_to_string, path::Path}; use std::{convert::TryFrom, time::Duration}; use structopt::StructOpt; use zksync_crypto::rand::{thread_rng, Rng}; +use zksync_types::Address; #[derive(Debug, Serialize, Deserialize)] struct CoinMarketCapTokenQuery { @@ -20,7 +22,7 @@ struct CoinMarketCapTokenQuery { macro_rules! make_sloppy { ($f: ident) => {{ - |query| async { + |query, data| async { if thread_rng().gen_range(0, 100) < 5 { vlog::debug!("`{}` has been errored", stringify!($f)); return Ok(HttpResponse::InternalServerError().finish()); @@ -42,7 +44,7 @@ macro_rules! make_sloppy { ); tokio::time::delay_for(duration).await; - let resp = $f(query).await; + let resp = $f(query, data).await; resp } }}; @@ -50,6 +52,7 @@ macro_rules! 
make_sloppy { async fn handle_coinmarketcap_token_price_query( query: web::Query, + _data: web::Data>, ) -> Result { let symbol = query.symbol.clone(); let base_price = match symbol.as_str() { @@ -82,36 +85,62 @@ async fn handle_coinmarketcap_token_price_query( Ok(HttpResponse::Ok().json(resp)) } -async fn handle_coingecko_token_list(_req: HttpRequest) -> Result { - let resp = json!([ - {"id": "ethereum", "symbol": "eth", "name": "Ethereum"}, - {"id": "dai", "symbol":"dai", "name": "Dai"}, - {"id": "glm", "symbol":"glm", "name": "Golem"}, - {"id": "tglm", "symbol":"tglm", "name": "Golem"}, - {"id": "usdc", "symbol":"usdc", "name": "usdc"}, - {"id": "usdt", "symbol":"usdt", "name": "usdt"}, - {"id": "tusd", "symbol":"tusd", "name": "tusd"}, - {"id": "link", "symbol":"link", "name": "link"}, - {"id": "ht", "symbol":"ht", "name": "ht"}, - {"id": "omg", "symbol":"omg", "name": "omg"}, - {"id": "trb", "symbol":"trb", "name": "trb"}, - {"id": "zrx", "symbol":"zrx", "name": "zrx"}, - {"id": "rep", "symbol":"rep", "name": "rep"}, - {"id": "storj", "symbol":"storj", "name": "storj"}, - {"id": "nexo", "symbol":"nexo", "name": "nexo"}, - {"id": "mco", "symbol":"mco", "name": "mco"}, - {"id": "knc", "symbol":"knc", "name": "knc"}, - {"id": "lamb", "symbol":"lamb", "name": "lamb"}, - {"id": "xem", "symbol":"xem", "name": "xem"}, - {"id": "phnx", "symbol":"phnx", "name": "Golem"}, - {"id": "basic-attention-token", "symbol": "bat", "name": "Basic Attention Token"}, - {"id": "wrapped-bitcoin", "symbol": "wbtc", "name": "Wrapped Bitcoin"}, - ]); +#[derive(Debug, Deserialize)] +struct Token { + pub address: Address, + pub decimals: u8, + pub symbol: String, +} - Ok(HttpResponse::Ok().json(resp)) +#[derive(Serialize, Deserialize, Clone, Debug)] +struct TokenData { + id: String, + symbol: String, + name: String, + platforms: HashMap, +} + +fn load_tokens(path: impl AsRef) -> Vec { + if let Ok(text) = read_to_string(path) { + let tokens: Vec = serde_json::from_str(&text).unwrap(); + 
let tokens_data: Vec = tokens + .into_iter() + .map(|token| { + let symbol = token.symbol.to_lowercase(); + let mut platforms = HashMap::new(); + platforms.insert(String::from("ethereum"), token.address); + let id = match symbol.as_str() { + "eth" => String::from("ethereum"), + "wbtc" => String::from("wrapped-bitcoin"), + "bat" => String::from("basic-attention-token"), + _ => symbol.clone(), + }; + + TokenData { + id, + symbol: symbol.clone(), + name: symbol, + platforms, + } + }) + .collect(); + tokens_data + } else { + Vec::new() + } +} + +async fn handle_coingecko_token_list( + _req: HttpRequest, + data: web::Data>, +) -> Result { + Ok(HttpResponse::Ok().json((*data.into_inner()).clone())) } -async fn handle_coingecko_token_price_query(req: HttpRequest) -> Result { +async fn handle_coingecko_token_price_query( + req: HttpRequest, + _data: web::Data>, +) -> Result { let coin_id = req.match_info().get("coin_id"); let base_price = match coin_id { Some("ethereum") => BigDecimal::from(200), @@ -133,8 +162,17 @@ async fn handle_coingecko_token_price_query(req: HttpRequest) -> Result actix_web::Scope { + let localhost_tokens = load_tokens(&"etc/tokens/localhost.json"); + let rinkeby_tokens = load_tokens(&"etc/tokens/rinkeby.json"); + let ropsten_tokens = load_tokens(&"etc/tokens/ropsten.json"); + let data: Vec = localhost_tokens + .into_iter() + .chain(rinkeby_tokens.into_iter()) + .chain(ropsten_tokens.into_iter()) + .collect(); if sloppy_mode { web::scope("/") + .data(data) .route( "/cryptocurrency/quotes/latest", web::get().to(make_sloppy!(handle_coinmarketcap_token_price_query)), @@ -149,6 +187,7 @@ fn main_scope(sloppy_mode: bool) -> actix_web::Scope { ) } else { web::scope("/") + .data(data) .route( "/cryptocurrency/quotes/latest", web::get().to(handle_coinmarketcap_token_price_query), diff --git a/core/bin/zksync_api/src/core_api_client.rs b/core/bin/zksync_api/src/core_api_client.rs index 079cf927da..84e3fbea4a 100644 --- 
a/core/bin/zksync_api/src/core_api_client.rs +++ b/core/bin/zksync_api/src/core_api_client.rs @@ -1,5 +1,12 @@ +use zksync_api_types::{ + v02::{ + pagination::{Paginated, PaginationQuery, PendingOpsRequest}, + transaction::Transaction, + }, + PriorityOpLookupQuery, +}; pub use zksync_types::EthBlockId; -use zksync_types::{tx::TxEthSignature, Address, PriorityOp, SignedZkSyncTx, H256}; +use zksync_types::{tx::TxEthSignature, Address, PriorityOp, SignedZkSyncTx}; use crate::tx_error::TxAddError; @@ -49,23 +56,34 @@ impl CoreApiClient { self.get(&endpoint).await } - /// Queries information about unconfirmed priority operations for a certain address from a Core. - pub async fn get_unconfirmed_ops(&self, address: Address) -> anyhow::Result> { - let endpoint = format!("{}/unconfirmed_ops/0x{}", self.addr, hex::encode(address)); + /// Queries information about unconfirmed priority operations for a certain account from a Core. + pub async fn get_unconfirmed_ops( + &self, + query: &PaginationQuery, + ) -> anyhow::Result> { + let endpoint = format!( + "{}/unconfirmed_ops?address=0x{}&account_id={}&serial_id={}&limit={}&direction={}", + self.addr, + hex::encode(query.from.address), + serde_json::to_string(&query.from.account_id).unwrap(), + serde_json::to_string(&query.from.serial_id) + .unwrap() + .replace("\"", ""), + query.limit, + serde_json::to_string(&query.direction) + .unwrap() + .replace("\"", "") + ); self.get(&endpoint).await } /// Queries information about unconfirmed priority operation from a Core. 
pub async fn get_unconfirmed_op( &self, - eth_tx_hash: H256, - ) -> anyhow::Result> { - let endpoint = format!( - "{}/unconfirmed_op/0x{}", - self.addr, - hex::encode(eth_tx_hash) - ); - self.get(&endpoint).await + query: PriorityOpLookupQuery, + ) -> anyhow::Result> { + let endpoint = format!("{}/unconfirmed_op", self.addr,); + self.post(&endpoint, query).await } async fn get(&self, url: &str) -> anyhow::Result { diff --git a/core/bin/zksync_api/src/fee_ticker/mod.rs b/core/bin/zksync_api/src/fee_ticker/mod.rs index 258e8d7eea..0345c45bda 100644 --- a/core/bin/zksync_api/src/fee_ticker/mod.rs +++ b/core/bin/zksync_api/src/fee_ticker/mod.rs @@ -633,8 +633,8 @@ impl FeeTicker Result { + async fn get_price(&self, _token: &Token) -> Result { Err(PriceError::token_not_found("Wrong token")) } } -fn run_server() -> (String, AbortHandle) { +fn run_server(token_address: Address) -> (String, AbortHandle) { let mut url = None; let mut server = None; for i in 9000..9999 { @@ -210,10 +210,15 @@ fn run_server() -> (String, AbortHandle) { HttpResponse::MethodNotAllowed() })), ) - .service(web::resource("/api/v3/coins/list").to(|| { + .service(web::resource("/api/v3/coins/list").to(move || { + let mut platforms = HashMap::new(); + platforms.insert( + String::from("ethereum"), + serde_json::Value::String(serde_json::to_string(&token_address).unwrap()), + ); HttpResponse::Ok().json(CoinGeckoTokenList(vec![CoinGeckoTokenInfo { - id: "DAI".to_string(), - symbol: "DAI".to_string(), + id: "dai".to_string(), + platforms, }])) })) }) @@ -387,7 +392,13 @@ fn test_zero_price_token_fee() { #[ignore] // It's ignore because we can't initialize coingecko in current way with block async fn test_error_coingecko_api() { - let (address, handler) = run_server(); + let token = Token { + id: TokenId(1), + address: Address::random(), + symbol: String::from("DAI"), + decimals: 18, + }; + let (address, handler) = run_server(token.address); let client = reqwest::ClientBuilder::new() 
.timeout(CONNECTION_TIMEOUT) .connect_timeout(CONNECTION_TIMEOUT) @@ -402,20 +413,25 @@ async fn test_error_coingecko_api() { FakeTokenWatcher, ); let connection_pool = ConnectionPool::new(Some(1)); - connection_pool - .access_storage() - .await - .unwrap() - .tokens_schema() - .update_historical_ticker_price( - TokenId(1), - TokenPrice { - usd_price: big_decimal_to_ratio(&BigDecimal::from(10)).unwrap(), - last_updated: chrono::offset::Utc::now(), - }, - ) - .await - .unwrap(); + { + let mut storage = connection_pool.access_storage().await.unwrap(); + storage + .tokens_schema() + .store_token(token.clone()) + .await + .unwrap(); + storage + .tokens_schema() + .update_historical_ticker_price( + token.id, + TokenPrice { + usd_price: big_decimal_to_ratio(&BigDecimal::from(10)).unwrap(), + last_updated: chrono::offset::Utc::now(), + }, + ) + .await + .unwrap(); + } let ticker_api = TickerApi::new(connection_pool, coingecko); let config = get_test_ticker_config(); @@ -430,13 +446,13 @@ async fn test_error_coingecko_api() { ticker .get_fee_from_ticker_in_wei( TxFeeTypes::FastWithdraw, - TokenId(1).into(), + token.id.into(), Address::default(), ) .await .unwrap(); ticker - .get_token_price(TokenId(1).into(), TokenPriceRequestType::USDForOneWei) + .get_token_price(token.id.into(), TokenPriceRequestType::USDForOneWei) .await .unwrap(); } diff --git a/core/bin/zksync_api/src/fee_ticker/ticker_api/coingecko.rs b/core/bin/zksync_api/src/fee_ticker/ticker_api/coingecko.rs index e6b625daab..a708cc6ba6 100644 --- a/core/bin/zksync_api/src/fee_ticker/ticker_api/coingecko.rs +++ b/core/bin/zksync_api/src/fee_ticker/ticker_api/coingecko.rs @@ -7,21 +7,22 @@ use num::BigUint; use reqwest::Url; use serde::{Deserialize, Serialize}; use std::collections::HashMap; +use std::str::FromStr; use std::time::Instant; -use zksync_types::TokenPrice; -use zksync_utils::UnsignedRatioSerializeAsDecimal; +use zksync_types::{Address, Token, TokenPrice}; +use zksync_utils::{remove_prefix, 
UnsignedRatioSerializeAsDecimal}; #[derive(Debug, Clone)] pub struct CoinGeckoAPI { base_url: Url, client: reqwest::Client, - token_ids: HashMap, + token_ids: HashMap, } impl CoinGeckoAPI { pub fn new(client: reqwest::Client, base_url: Url) -> anyhow::Result { let token_list_url = base_url - .join("api/v3/coins/list") + .join("api/v3/coins/list?include_platform=true") .expect("failed to join URL path"); let token_list = reqwest::blocking::get(token_list_url) @@ -30,8 +31,19 @@ impl CoinGeckoAPI { let mut token_ids = HashMap::new(); for token in token_list.0 { - token_ids.insert(token.symbol, token.id); + if let Some(address_value) = token.platforms.get("ethereum") { + if let Some(address_str) = address_value.as_str() { + let address_str = remove_prefix(address_str); + if let Ok(address) = Address::from_str(address_str) { + token_ids.insert(address, token.id); + } + } + } } + + // Add ETH manually because coingecko API doesn't return address for it. + token_ids.insert(Address::default(), String::from("ethereum")); + Ok(Self { base_url, client, @@ -42,34 +54,29 @@ impl CoinGeckoAPI { #[async_trait] impl TokenPriceAPI for CoinGeckoAPI { - async fn get_price(&self, token_symbol: &str) -> Result { + async fn get_price(&self, token: &Token) -> Result { let start = Instant::now(); - let token_lowercase_symbol = token_symbol.to_lowercase(); - let token_id = self - .token_ids - .get(&token_lowercase_symbol) - .or_else(|| self.token_ids.get(token_symbol)) - .unwrap_or(&token_lowercase_symbol); - // TODO ZKS-595. 
Uncomment this code - // .ok_or_else(|| { - // PriceError::token_not_found(format!( - // "Token '{}' is not listed on CoinGecko", - // token_symbol - // )) - // })?; + let token_id = self.token_ids.get(&token.address).ok_or_else(|| { + PriceError::token_not_found(format!( + "Token '{}, {:?}' is not listed on CoinGecko", + token.symbol, token.address + )) + })?; let market_chart_url = self .base_url .join(format!("api/v3/coins/{}/market_chart", token_id).as_str()) .expect("failed to join URL path"); - // If we use 2 day interval we will get hourly prices and not minute by minute which makes - // response faster and smaller let market_chart = self .client .get(market_chart_url) .timeout(REQUEST_TIMEOUT) - .query(&[("vs_currency", "usd"), ("days", "2")]) + .query(&[ + ("vs_currency", "usd"), + ("days", "1"), + ("interval", "hourly"), + ]) .send() .await .map_err(|err| PriceError::api_error(format!("CoinGecko API request failed: {}", err)))? @@ -83,9 +90,12 @@ impl TokenPriceAPI for CoinGeckoAPI { .ok_or_else(|| PriceError::api_error("CoinGecko returned empty price data"))? .0; + // Take prices over the last 6 hours let usd_prices = market_chart .prices .into_iter() + .rev() + .take(6) .map(|token_price| token_price.1); // We use max price for ETH token because we spend ETH with each commit and collect token @@ -93,10 +103,16 @@ impl TokenPriceAPI for CoinGeckoAPI { // Theoretically we should use min and max price for ETH in our ticker formula when we // calculate fee for tx with ETH token. Practically if we use only max price foe ETH it is fine because // we don't need to sell this token lnd price only affects ZKP cost of such tx which is negligible. 
- let usd_price = if token_symbol == "ETH" { + // For other tokens we use average price + let usd_price = if token.id.0 == 0 { usd_prices.max() } else { - usd_prices.min() + let len = usd_prices.len(); + if len == 0 { + None + } else { + Some(usd_prices.sum::>() / BigUint::from(len)) + } }; let usd_price = usd_price .ok_or_else(|| PriceError::api_error("CoinGecko returned empty price data"))?; @@ -117,7 +133,7 @@ impl TokenPriceAPI for CoinGeckoAPI { #[derive(Debug, Clone, Serialize, Deserialize)] pub struct CoinGeckoTokenInfo { pub(crate) id: String, - pub(crate) symbol: String, + pub(crate) platforms: HashMap, } #[derive(Debug, Clone, Serialize, Deserialize)] @@ -137,6 +153,7 @@ pub struct CoinGeckoMarketChart { #[cfg(test)] mod tests { use super::*; + use zksync_types::TokenId; use zksync_utils::parse_env; #[tokio::test] @@ -144,7 +161,8 @@ mod tests { let ticker_url = parse_env("FEE_TICKER_COINGECKO_BASE_URL"); let client = reqwest::Client::new(); let api = CoinGeckoAPI::new(client, ticker_url).unwrap(); - api.get_price("ETH") + let token = Token::new(TokenId(0), Default::default(), "ETH", 18); + api.get_price(&token) .await .expect("Failed to get data from ticker"); } diff --git a/core/bin/zksync_api/src/fee_ticker/ticker_api/coinmarkercap.rs b/core/bin/zksync_api/src/fee_ticker/ticker_api/coinmarkercap.rs index a263a8bbbe..2eede509ef 100644 --- a/core/bin/zksync_api/src/fee_ticker/ticker_api/coinmarkercap.rs +++ b/core/bin/zksync_api/src/fee_ticker/ticker_api/coinmarkercap.rs @@ -9,7 +9,7 @@ use serde::{Deserialize, Serialize}; // Workspace deps use super::{TokenPriceAPI, REQUEST_TIMEOUT}; use crate::fee_ticker::PriceError; -use zksync_types::{TokenLike, TokenPrice}; +use zksync_types::{Token, TokenLike, TokenPrice}; use zksync_utils::UnsignedRatioSerializeAsDecimal; #[derive(Debug)] @@ -26,7 +26,8 @@ impl CoinMarketCapAPI { #[async_trait] impl TokenPriceAPI for CoinMarketCapAPI { - async fn get_price(&self, token_symbol: &str) -> Result { + async fn 
get_price(&self, token: &Token) -> Result { + let token_symbol = token.symbol.as_str(); let request_url = self .base_url .join(&format!( @@ -84,6 +85,7 @@ pub(super) struct CoinmarketCapResponse { mod test { use super::*; use std::str::FromStr; + use zksync_types::TokenId; use zksync_utils::parse_env; #[test] @@ -97,8 +99,9 @@ mod test { let ticker_url = parse_env("FEE_TICKER_COINMARKETCAP_BASE_URL"); let client = reqwest::Client::new(); let api = CoinMarketCapAPI::new(client, ticker_url); + let token = Token::new(TokenId(0), Default::default(), "ETH", 18); runtime - .block_on(api.get_price("ETH")) + .block_on(api.get_price(&token)) .expect("Failed to get data from ticker"); } diff --git a/core/bin/zksync_api/src/fee_ticker/ticker_api/mod.rs b/core/bin/zksync_api/src/fee_ticker/ticker_api/mod.rs index 97666c71b5..be6a0d3104 100644 --- a/core/bin/zksync_api/src/fee_ticker/ticker_api/mod.rs +++ b/core/bin/zksync_api/src/fee_ticker/ticker_api/mod.rs @@ -25,7 +25,7 @@ pub const CONNECTION_TIMEOUT: Duration = Duration::from_millis(700); #[async_trait] pub trait TokenPriceAPI { - async fn get_price(&self, token_symbol: &str) -> Result; + async fn get_price(&self, token: &Token) -> Result; } /// Api responsible for querying for TokenPrices @@ -232,7 +232,7 @@ impl FeeTickerAPI for TickerApi { return Ok(cached_value); } - let api_price = self.token_price_api.get_price(&token.symbol).await; + let api_price = self.token_price_api.get_price(&token).await; match api_price { Ok(api_price) => { diff --git a/core/bin/zksync_api/src/utils/block_details_cache.rs b/core/bin/zksync_api/src/utils/block_details_cache.rs index 64155d7b50..ff0bb534df 100644 --- a/core/bin/zksync_api/src/utils/block_details_cache.rs +++ b/core/bin/zksync_api/src/utils/block_details_cache.rs @@ -30,7 +30,7 @@ impl BlockDetailsCache { let blocks = storage .chain() .block_schema() - .load_block_range(block_number, 1) + .load_block_range_desc(block_number, 1) .await?; if let Some(block) = 
blocks.into_iter().next() { diff --git a/core/bin/zksync_core/Cargo.toml b/core/bin/zksync_core/Cargo.toml index a7078e4107..15a3176817 100644 --- a/core/bin/zksync_core/Cargo.toml +++ b/core/bin/zksync_core/Cargo.toml @@ -13,6 +13,7 @@ publish = false # We don't want to publish our binaries. [dependencies] zksync_state = { path = "../../lib/state", version = "1.0" } zksync_types = { path = "../../lib/types", version = "1.0" } +zksync_api_types = { path = "../../lib/api_types", version = "1.0" } zksync_notifier = { path = "../../lib/notifier", version = "1.0" } zksync_storage = { path = "../../lib/storage", version = "1.0" } diff --git a/core/bin/zksync_core/src/eth_watch/mod.rs b/core/bin/zksync_core/src/eth_watch/mod.rs index 28be6a3c54..b22e16a971 100644 --- a/core/bin/zksync_core/src/eth_watch/mod.rs +++ b/core/bin/zksync_core/src/eth_watch/mod.rs @@ -14,12 +14,23 @@ use futures::{ SinkExt, StreamExt, }; +use itertools::Itertools; use tokio::{task::JoinHandle, time}; use web3::types::{Address, BlockNumber}; // Workspace deps +use zksync_api_types::{ + v02::{ + pagination::{Paginated, PaginationDirection, PaginationQuery, PendingOpsRequest}, + transaction::{L1Transaction, Transaction, TransactionData, TxInBlockStatus}, + }, + Either, +}; use zksync_crypto::params::PRIORITY_EXPIRATION; -use zksync_types::{NewTokenEvent, PriorityOp, RegisterNFTFactoryEvent, ZkSyncPriorityOp}; +use zksync_types::{ + tx::TxHash, NewTokenEvent, Nonce, PriorityOp, PubKeyHash, RegisterNFTFactoryEvent, + ZkSyncPriorityOp, H256, +}; // Local deps use self::{client::EthClient, eth_state::ETHState, received_ops::sift_outdated_ops}; @@ -27,6 +38,8 @@ use self::{client::EthClient, eth_state::ETHState, received_ops::sift_outdated_o pub use client::{get_web3_block_number, EthHttpClient}; use zksync_config::ZkSyncConfig; +use crate::eth_watch::received_ops::ReceivedPriorityOp; +use std::collections::HashMap; use zksync_eth_client::ethereum_gateway::EthereumGateway; mod client; @@ -68,11 +81,19 
@@ pub enum EthWatchRequest { resp: oneshot::Sender>, }, GetUnconfirmedOps { - address: Address, - resp: oneshot::Sender>, + query: PaginationQuery, + resp: oneshot::Sender>, + }, + GetUnconfirmedOpByEthHash { + eth_hash: H256, + resp: oneshot::Sender>, }, - GetUnconfirmedOpByHash { - eth_hash: Vec, + GetUnconfirmedOpByTxHash { + tx_hash: TxHash, + resp: oneshot::Sender>, + }, + GetUnconfirmedOpByAnyHash { + hash: TxHash, resp: oneshot::Sender>, }, GetNewTokens { @@ -83,6 +104,12 @@ pub enum EthWatchRequest { last_eth_block: Option, resp: oneshot::Sender>, }, + IsPubkeyChangeAuthorized { + address: Address, + nonce: Nonce, + pubkey_hash: PubKeyHash, + resp: oneshot::Sender, + }, } pub struct EthWatch { @@ -129,6 +156,7 @@ impl EthWatch { async fn process_new_blocks(&mut self, last_ethereum_block: u64) -> anyhow::Result<()> { debug_assert!(self.eth_state.last_ethereum_block() < last_ethereum_block); + debug_assert!(self.eth_state.last_ethereum_block() < last_ethereum_block); // We have to process every block between the current and previous known values. // This is crucial since `eth_watch` may enter the backoff mode in which it will skip many blocks. @@ -144,9 +172,24 @@ impl EthWatch { // Extend the existing priority operations with the new ones. let mut priority_queue = sift_outdated_ops(self.eth_state.priority_queue()); + for (serial_id, op) in updated_state.priority_queue() { priority_queue.insert(*serial_id, op.clone()); } + + // Check for gaps in priority queue. If some event is missing we skip this `ETHState` update. + let mut priority_op_ids: Vec<_> = priority_queue.keys().cloned().collect(); + priority_op_ids.sort_unstable(); + for i in 0..priority_op_ids.len().saturating_sub(1) { + let gap = priority_op_ids[i + 1] - priority_op_ids[i]; + anyhow::ensure!( + gap == 1, + "Gap in priority op queue: gap={}, priority_op_before_gap={}", + gap, + priority_op_ids[i] + ); + } + // Extend the existing token events with the new ones. 
let mut new_tokens = self.eth_state.new_tokens().to_vec(); for token in updated_state.new_tokens() { @@ -198,7 +241,7 @@ impl EthWatch { new_block_with_accepted_events.saturating_sub(unprocessed_blocks_amount); let unconfirmed_queue = self.get_unconfirmed_ops(current_ethereum_block).await?; - let priority_queue = self + let priority_queue: HashMap = self .client .get_priority_op_events( BlockNumber::Number(previous_block_with_accepted_events.into()), @@ -223,6 +266,15 @@ impl EthWatch { ) .await?; + let mut new_priority_op_ids: Vec<_> = priority_queue.keys().cloned().collect(); + new_priority_op_ids.sort_unstable(); + vlog::debug!( + "Updating eth state: block_range=[{},{}], new_priority_ops={:?}", + previous_block_with_accepted_events, + new_block_with_accepted_events, + new_priority_op_ids + ); + let new_state = ETHState::new( current_ethereum_block, unconfirmed_queue, @@ -283,11 +335,41 @@ impl EthWatch { result } - fn find_ongoing_op_by_hash(&self, eth_hash: &[u8]) -> Option { + async fn is_new_pubkey_hash_authorized( + &self, + address: Address, + nonce: Nonce, + pub_key_hash: &PubKeyHash, + ) -> anyhow::Result { + let auth_fact_reset_time = self.client.get_auth_fact_reset_time(address, nonce).await?; + if auth_fact_reset_time != 0 { + return Ok(false); + } + let auth_fact = self.client.get_auth_fact(address, nonce).await?; + Ok(auth_fact.as_slice() == tiny_keccak::keccak256(&pub_key_hash.data[..])) + } + + fn find_ongoing_op_by_eth_hash(&self, eth_hash: H256) -> Option { + self.eth_state + .unconfirmed_queue() + .iter() + .find(|op| op.eth_hash == eth_hash) + .cloned() + } + + fn find_ongoing_op_by_tx_hash(&self, tx_hash: TxHash) -> Option { self.eth_state .unconfirmed_queue() .iter() - .find(|op| op.eth_hash.as_bytes() == eth_hash) + .find(|op| op.tx_hash() == tx_hash) + .cloned() + } + + fn find_ongoing_op_by_any_hash(&self, hash: TxHash) -> Option { + self.eth_state + .unconfirmed_queue() + .iter() + .find(|op| op.tx_hash() == hash || op.eth_hash.as_ref() 
== hash.as_ref()) .cloned() } @@ -306,19 +388,83 @@ impl EthWatch { .collect() } - fn get_ongoing_ops_for(&self, address: Address) -> Vec { - self.eth_state + fn get_ongoing_ops_for( + &self, + query: PaginationQuery, + ) -> Paginated { + let all_ops = self + .eth_state .unconfirmed_queue() .iter() .filter(|op| match &op.data { ZkSyncPriorityOp::Deposit(deposit) => { - // Address may be set to sender. - deposit.from == address + // Address may be set to recipient. + deposit.to == query.from.address + } + ZkSyncPriorityOp::FullExit(full_exit) => query + .from + .account_id + .map(|account_id| account_id == full_exit.account_id) + .unwrap_or(false), + }); + let count = all_ops.clone().count(); + let from_serial_id = match query.from.serial_id.inner { + Either::Left(id) => id, + Either::Right(_) => { + if let Some(op) = all_ops.clone().max_by_key(|op| op.serial_id) { + op.serial_id + } else { + return Paginated::new( + Vec::new(), + Default::default(), + query.limit, + query.direction, + 0, + ); + } + } + }; + let ops: Vec = match query.direction { + PaginationDirection::Newer => all_ops + .sorted_by_key(|op| op.serial_id) + .filter(|op| op.serial_id >= from_serial_id) + .take(query.limit as usize) + .cloned() + .collect(), + PaginationDirection::Older => all_ops + .sorted_by(|a, b| b.serial_id.cmp(&a.serial_id)) + .filter(|op| op.serial_id <= from_serial_id) + .take(query.limit as usize) + .cloned() + .collect(), + }; + let txs: Vec = ops + .into_iter() + .map(|op| { + let tx_hash = op.tx_hash(); + let tx = L1Transaction::from_pending_op( + op.data.clone(), + op.eth_hash, + op.serial_id, + tx_hash, + ); + Transaction { + tx_hash, + block_number: None, + op: TransactionData::L1(tx), + status: TxInBlockStatus::Queued, + fail_reason: None, + created_at: None, } - ZkSyncPriorityOp::FullExit(full_exit) => full_exit.eth_address == address, }) - .cloned() - .collect() + .collect(); + Paginated::new( + txs, + from_serial_id, + query.limit, + query.direction, + count as u32, 
+ ) } async fn poll_eth_node(&mut self) -> anyhow::Result<()> { @@ -429,12 +575,20 @@ impl EthWatch { let deposits_for_address = self.get_ongoing_deposits_for(address); resp.send(deposits_for_address).ok(); } - EthWatchRequest::GetUnconfirmedOps { address, resp } => { - let deposits_for_address = self.get_ongoing_ops_for(address); - resp.send(deposits_for_address).ok(); + EthWatchRequest::GetUnconfirmedOps { query, resp } => { + let unconfirmed_ops = self.get_ongoing_ops_for(query); + resp.send(unconfirmed_ops).ok(); } - EthWatchRequest::GetUnconfirmedOpByHash { eth_hash, resp } => { - let unconfirmed_op = self.find_ongoing_op_by_hash(ð_hash); + EthWatchRequest::GetUnconfirmedOpByEthHash { eth_hash, resp } => { + let unconfirmed_op = self.find_ongoing_op_by_eth_hash(eth_hash); + resp.send(unconfirmed_op).unwrap_or_default(); + } + EthWatchRequest::GetUnconfirmedOpByTxHash { tx_hash, resp } => { + let unconfirmed_op = self.find_ongoing_op_by_tx_hash(tx_hash); + resp.send(unconfirmed_op).unwrap_or_default(); + } + EthWatchRequest::GetUnconfirmedOpByAnyHash { hash, resp } => { + let unconfirmed_op = self.find_ongoing_op_by_any_hash(hash); resp.send(unconfirmed_op).unwrap_or_default(); } EthWatchRequest::GetNewTokens { @@ -450,6 +604,18 @@ impl EthWatch { resp.send(self.get_register_factory_event(last_eth_block)) .ok(); } + EthWatchRequest::IsPubkeyChangeAuthorized { + address, + nonce, + pubkey_hash, + resp, + } => { + let authorized = self + .is_new_pubkey_hash_authorized(address, nonce, &pubkey_hash) + .await + .unwrap_or(false); + resp.send(authorized).unwrap_or_default(); + } } } } diff --git a/core/bin/zksync_core/src/eth_watch/tests.rs b/core/bin/zksync_core/src/eth_watch/tests.rs index 6d49d6577a..a8c28f9d8b 100644 --- a/core/bin/zksync_core/src/eth_watch/tests.rs +++ b/core/bin/zksync_core/src/eth_watch/tests.rs @@ -3,6 +3,9 @@ use std::collections::HashMap; use web3::types::{Address, BlockNumber}; +use zksync_api_types::v02::pagination::{ + ApiEither, 
PaginationDirection, PaginationQuery, PendingOpsRequest, +}; use zksync_types::{ AccountId, Deposit, FullExit, NewTokenEvent, Nonce, PriorityOp, RegisterNFTFactoryEvent, TokenId, ZkSyncPriorityOp, @@ -129,46 +132,48 @@ async fn test_operation_queues() { let from_addr = [1u8; 20].into(); let to_addr = [2u8; 20].into(); - client - .add_operations(&[ - PriorityOp { - serial_id: 0, - data: ZkSyncPriorityOp::Deposit(Deposit { - from: from_addr, - token: TokenId(0), - amount: Default::default(), - to: to_addr, - }), - deadline_block: 0, - eth_hash: [2; 32].into(), - eth_block: 4, - }, - PriorityOp { - serial_id: 1, - data: ZkSyncPriorityOp::Deposit(Deposit { - from: Default::default(), - token: TokenId(0), - amount: Default::default(), - to: Default::default(), - }), - deadline_block: 0, - eth_hash: [3; 32].into(), - eth_block: 3, - }, - PriorityOp { - serial_id: 2, - data: ZkSyncPriorityOp::FullExit(FullExit { - account_id: AccountId(1), - eth_address: from_addr, - token: TokenId(0), - is_legacy: false, - }), - deadline_block: 0, - eth_block: 4, - eth_hash: [4; 32].into(), - }, - ]) - .await; + let priority_ops = vec![ + PriorityOp { + serial_id: 0, + data: ZkSyncPriorityOp::Deposit(Deposit { + from: from_addr, + token: TokenId(0), + amount: Default::default(), + to: to_addr, + }), + deadline_block: 0, + eth_hash: [2; 32].into(), + eth_block: 4, + eth_block_index: Some(1), + }, + PriorityOp { + serial_id: 1, + data: ZkSyncPriorityOp::Deposit(Deposit { + from: Default::default(), + token: TokenId(0), + amount: Default::default(), + to: Default::default(), + }), + deadline_block: 0, + eth_hash: [3; 32].into(), + eth_block: 3, + eth_block_index: Some(1), + }, + PriorityOp { + serial_id: 2, + data: ZkSyncPriorityOp::FullExit(FullExit { + account_id: AccountId(1), + eth_address: from_addr, + token: TokenId(0), + }), + deadline_block: 0, + eth_block: 4, + eth_hash: [4; 32].into(), + eth_block_index: Some(2), + }, + ]; + + client.add_operations(&priority_ops).await; let mut 
watcher = create_watcher(client); watcher.poll_eth_node().await.unwrap(); @@ -186,17 +191,37 @@ async fn test_operation_queues() { assert_eq!(unconfirmed_queue[1].serial_id, 2); priority_queues.get(&1).unwrap(); - watcher.find_ongoing_op_by_hash(&[2u8; 32]).unwrap(); + watcher + .find_ongoing_op_by_eth_hash(H256::from_slice(&[2u8; 32])) + .unwrap(); // Make sure that the old behavior of the pending deposits getter has not changed. let deposits = watcher.get_ongoing_deposits_for(to_addr); assert_eq!(deposits.len(), 1); - // Check that the new pending operations getter shows only deposits with the same `from` address. - let ops = watcher.get_ongoing_ops_for(from_addr); - - assert_eq!(ops[0].serial_id, 0); - assert_eq!(ops[1].serial_id, 2); - assert!(watcher.get_ongoing_ops_for(to_addr).is_empty()); + // Check that the new pending operations getter shows only deposits with the same `to` address. + let ops = watcher.get_ongoing_ops_for(PaginationQuery { + from: PendingOpsRequest { + address: to_addr, + account_id: Some(AccountId(1)), + serial_id: ApiEither::from(0), + }, + limit: 2, + direction: PaginationDirection::Newer, + }); + assert_eq!(ops.list[0].tx_hash, priority_ops[0].tx_hash()); + assert_eq!(ops.list[1].tx_hash, priority_ops[2].tx_hash()); + assert!(watcher + .get_ongoing_ops_for(PaginationQuery { + from: PendingOpsRequest { + address: from_addr, + account_id: Some(AccountId(0)), + serial_id: ApiEither::from(0) + }, + limit: 3, + direction: PaginationDirection::Newer + }) + .list + .is_empty()); } /// This test simulates the situation when eth watch module did not poll Ethereum node for some time @@ -221,6 +246,7 @@ async fn test_operation_queues_time_lag() { deadline_block: 0, eth_hash: [2; 32].into(), eth_block: 1, // <- First operation goes to the first block. 
+ eth_block_index: Some(1), }, PriorityOp { serial_id: 1, @@ -233,6 +259,7 @@ async fn test_operation_queues_time_lag() { deadline_block: 0, eth_hash: [3; 32].into(), eth_block: 100, // <-- Note 100th block, it will set the network block to 100. + eth_block_index: Some(1), }, PriorityOp { serial_id: 2, @@ -245,6 +272,7 @@ async fn test_operation_queues_time_lag() { deadline_block: 0, eth_hash: [3; 32].into(), eth_block: 110, // <-- This operation will get to the unconfirmed queue. + eth_block_index: Some(1), }, ]) .await; @@ -288,6 +316,7 @@ async fn test_restore_and_poll() { deadline_block: 0, eth_hash: [2; 32].into(), eth_block: 4, + eth_block_index: Some(1), }, PriorityOp { serial_id: 1, @@ -300,6 +329,7 @@ async fn test_restore_and_poll() { deadline_block: 0, eth_hash: [3; 32].into(), eth_block: 3, + eth_block_index: Some(1), }, ]) .await; @@ -319,6 +349,7 @@ async fn test_restore_and_poll() { deadline_block: 0, eth_hash: [2; 32].into(), eth_block: 5, + eth_block_index: Some(1), }, PriorityOp { serial_id: 4, @@ -331,6 +362,7 @@ async fn test_restore_and_poll() { deadline_block: 0, eth_hash: [3; 32].into(), eth_block: 5, + eth_block_index: Some(2), }, ]) .await; @@ -342,7 +374,9 @@ async fn test_restore_and_poll() { assert_eq!(unconfirmed_queue.len(), 2); assert_eq!(unconfirmed_queue[0].serial_id, 3); priority_queues.get(&1).unwrap(); - watcher.find_ongoing_op_by_hash(&[2u8; 32]).unwrap(); + watcher + .find_ongoing_op_by_eth_hash(H256::from_slice(&[2u8; 32])) + .unwrap(); let deposits = watcher.get_ongoing_deposits_for([2u8; 20].into()); assert_eq!(deposits.len(), 1); } @@ -364,6 +398,7 @@ async fn test_restore_and_poll_time_lag() { deadline_block: 0, eth_hash: [2; 32].into(), eth_block: 1, + eth_block_index: Some(1), }, PriorityOp { serial_id: 1, @@ -376,6 +411,7 @@ async fn test_restore_and_poll_time_lag() { deadline_block: 0, eth_hash: [3; 32].into(), eth_block: 100, + eth_block_index: Some(1), }, ]) .await; diff --git 
a/core/bin/zksync_core/src/private_api.rs b/core/bin/zksync_core/src/private_api.rs index e4819bfe35..5cf24d4c4d 100644 --- a/core/bin/zksync_core/src/private_api.rs +++ b/core/bin/zksync_core/src/private_api.rs @@ -13,9 +13,14 @@ use futures::{ channel::{mpsc, oneshot}, sink::SinkExt, }; -use std::thread; +use serde::Deserialize; +use std::{str::FromStr, thread}; +use zksync_api_types::{ + v02::pagination::{ApiEither, PaginationDirection, PaginationQuery, PendingOpsRequest}, + PriorityOpLookupQuery, +}; use zksync_config::configs::api::PrivateApi; -use zksync_types::{tx::TxEthSignature, Address, SignedZkSyncTx, H256}; +use zksync_types::{tx::TxEthSignature, AccountId, Address, SignedZkSyncTx}; use zksync_utils::panic_notify::ThreadPanicNotify; #[derive(Debug, Clone)] @@ -94,15 +99,40 @@ async fn unconfirmed_deposits( Ok(HttpResponse::Ok().json(response)) } -/// Obtains information about unconfirmed operations known for a certain address. -#[actix_web::get("/unconfirmed_ops/{address}")] +#[derive(Debug, Deserialize)] +struct PendingOpsFlattenRequest { + pub address: Address, + pub account_id: Option, + pub serial_id: String, + pub limit: u32, + pub direction: PaginationDirection, +} + +/// Obtains information about unconfirmed operations known for a certain account. +/// Pending deposits can be matched only with addresses, +/// while pending full exits can be matched only with account ids. +/// If the account isn't created yet it doesn't have an id +/// but we can still find pending deposits for its address that is why account_id is Option. +#[actix_web::get("/unconfirmed_ops")] async fn unconfirmed_ops( data: web::Data, - web::Path(address): web::Path
, + web::Query(params): web::Query, ) -> actix_web::Result { let (sender, receiver) = oneshot::channel(); + // Serializing enum query parameters doesn't work, so parse it separately. + let serial_id = ApiEither::from_str(¶ms.serial_id) + .map_err(|_| HttpResponse::InternalServerError().finish())?; + let query = PaginationQuery { + from: PendingOpsRequest { + address: params.address, + account_id: params.account_id, + serial_id, + }, + limit: params.limit, + direction: params.direction, + }; let item = EthWatchRequest::GetUnconfirmedOps { - address, + query, resp: sender, }; let mut eth_watch_sender = data.eth_watch_req_sender.clone(); @@ -118,16 +148,25 @@ async fn unconfirmed_ops( Ok(HttpResponse::Ok().json(response)) } -/// Obtains information about unconfirmed deposits known for a certain address. -#[actix_web::get("/unconfirmed_op/{tx_hash}")] +/// Returns information about unconfirmed operation. +#[actix_web::post("/unconfirmed_op")] async fn unconfirmed_op( data: web::Data, - web::Path(eth_hash): web::Path, + web::Json(query): web::Json, ) -> actix_web::Result { let (sender, receiver) = oneshot::channel(); - let item = EthWatchRequest::GetUnconfirmedOpByHash { - eth_hash: eth_hash.as_ref().to_vec(), - resp: sender, + let item = match query { + PriorityOpLookupQuery::ByEthHash(eth_hash) => EthWatchRequest::GetUnconfirmedOpByEthHash { + eth_hash, + resp: sender, + }, + PriorityOpLookupQuery::BySyncHash(tx_hash) => EthWatchRequest::GetUnconfirmedOpByTxHash { + tx_hash, + resp: sender, + }, + PriorityOpLookupQuery::ByAnyHash(hash) => { + EthWatchRequest::GetUnconfirmedOpByAnyHash { hash, resp: sender } + } }; let mut eth_watch_sender = data.eth_watch_req_sender.clone(); eth_watch_sender diff --git a/core/bin/zksync_core/src/state_keeper/tests.rs b/core/bin/zksync_core/src/state_keeper/tests.rs index 90e5a16e3c..7c3fc6fb6e 100644 --- a/core/bin/zksync_core/src/state_keeper/tests.rs +++ b/core/bin/zksync_core/src/state_keeper/tests.rs @@ -194,6 +194,7 @@ pub fn 
create_deposit(token: TokenId, amount: impl Into) -> PriorityOp deadline_block: 0, eth_hash: H256::zero(), eth_block: 0, + eth_block_index: None, } } diff --git a/core/bin/zksync_eth_sender/src/tests/test_data.rs b/core/bin/zksync_eth_sender/src/tests/test_data.rs index 04344f7ab1..955d4acf81 100644 --- a/core/bin/zksync_eth_sender/src/tests/test_data.rs +++ b/core/bin/zksync_eth_sender/src/tests/test_data.rs @@ -37,6 +37,7 @@ fn gen_aggregated_operation( deadline_block: 0, eth_hash: H256::zero(), eth_block: 0, + eth_block_index: None, }, op: ZkSyncOp::FullExit(Box::new(FullExitOp { priority_op, diff --git a/core/bin/zksync_forced_exit_requests/src/core_interaction_wrapper.rs b/core/bin/zksync_forced_exit_requests/src/core_interaction_wrapper.rs index 7399961656..0debcf4c69 100644 --- a/core/bin/zksync_forced_exit_requests/src/core_interaction_wrapper.rs +++ b/core/bin/zksync_forced_exit_requests/src/core_interaction_wrapper.rs @@ -73,7 +73,8 @@ impl CoreInteractionWrapper for MempoolCoreInteractionWrapper { let sender_state = account_schema .last_committed_state_for_account(account_id) - .await?; + .await? 
+ .1; Ok(sender_state.map(|state| state.nonce)) } diff --git a/core/bin/zksync_witness_generator/src/tests/prover_server.rs b/core/bin/zksync_witness_generator/src/tests/prover_server.rs index 91b1317517..68b09460e4 100644 --- a/core/bin/zksync_witness_generator/src/tests/prover_server.rs +++ b/core/bin/zksync_witness_generator/src/tests/prover_server.rs @@ -172,6 +172,7 @@ pub async fn get_test_block() -> Block { deadline_block: 2, eth_hash: H256::zero(), eth_block: 10, + eth_block_index: Some(1), }, block_index: 1, created_at: chrono::Utc::now(), diff --git a/core/lib/api_client/Cargo.toml b/core/lib/api_client/Cargo.toml index db175f5356..b6d5fa2675 100644 --- a/core/lib/api_client/Cargo.toml +++ b/core/lib/api_client/Cargo.toml @@ -10,9 +10,11 @@ keywords = ["blockchain", "zksync"] categories = ["cryptography"] [dependencies] -zksync_types = { path = "../../lib/types", version = "1.0" } -zksync_utils = { path = "../../lib/utils", version = "1.0" } -zksync_crypto = { path = "../../lib/crypto", version = "1.0" } +zksync_api_types = { path = "../api_types", version = "1.0" } +zksync_types = { path = "../types", version = "1.0" } +zksync_utils = { path = "../utils", version = "1.0" } +zksync_crypto = { path = "../crypto", version = "1.0" } +zksync_config = { path = "../config", version = "1.0" } serde = "1.0.90" serde_json = "1.0.0" diff --git a/core/lib/api_client/src/rest/v1/client.rs b/core/lib/api_client/src/rest/client.rs similarity index 81% rename from core/lib/api_client/src/rest/v1/client.rs rename to core/lib/api_client/src/rest/client.rs index 733934ef81..56400e5f75 100644 --- a/core/lib/api_client/src/rest/v1/client.rs +++ b/core/lib/api_client/src/rest/client.rs @@ -5,19 +5,6 @@ use reqwest::StatusCode; use serde::{de::DeserializeOwned, ser::Serialize}; use thiserror::Error; -// Public uses -pub use super::{ - accounts::{ - AccountInfo, AccountQuery, AccountReceipts, AccountState, DepositingBalances, - DepositingFunds, - }, - blocks::{BlockInfo, 
TransactionInfo}, - config::Contracts, - operations::{PriorityOpData, PriorityOpQuery, PriorityOpReceipt}, - tokens::TokenPriceKind, - transactions::{Receipt, TxData}, - Pagination, -}; // Local uses use super::error::ErrorBody; @@ -52,8 +39,6 @@ pub struct Client { url: String, } -const API_V1_SCOPE: &str = "/api/v1/"; - impl Client { /// Creates a new REST API client with the specified Url. pub fn new(url: String) -> Self { @@ -67,11 +52,6 @@ impl Client { [&self.url, scope, method].concat() } - /// Constructs GET request for the specified method. - pub(crate) fn get(&self, method: impl AsRef) -> ClientRequestBuilder { - self.get_with_scope(API_V1_SCOPE, method) - } - pub(crate) fn get_with_scope( &self, scope: impl AsRef, @@ -84,11 +64,6 @@ impl Client { } } - /// Constructs POST request for the specified method. - pub(crate) fn post(&self, method: impl AsRef) -> ClientRequestBuilder { - self.post_with_scope(API_V1_SCOPE, method) - } - pub(crate) fn post_with_scope( &self, scope: impl AsRef, diff --git a/core/lib/api_client/src/rest/v1/error.rs b/core/lib/api_client/src/rest/error.rs similarity index 100% rename from core/lib/api_client/src/rest/v1/error.rs rename to core/lib/api_client/src/rest/error.rs diff --git a/core/lib/api_client/src/rest/forced_exit_requests/mod.rs b/core/lib/api_client/src/rest/forced_exit_requests/mod.rs index eb38fb4c3f..d2add9ff2d 100644 --- a/core/lib/api_client/src/rest/forced_exit_requests/mod.rs +++ b/core/lib/api_client/src/rest/forced_exit_requests/mod.rs @@ -12,8 +12,7 @@ use zksync_utils::BigUintSerdeAsRadix10Str; use num::BigUint; // Local uses -use crate::rest::v1::Client; -use crate::rest::v1::ClientResult; +use crate::rest::client::{Client, Result as ClientResult}; // Data transfer objects. 
#[derive(Serialize, Deserialize, PartialEq, Debug)] diff --git a/core/lib/api_client/src/rest/mod.rs b/core/lib/api_client/src/rest/mod.rs index cf50baa86b..19e2508ac3 100644 --- a/core/lib/api_client/src/rest/mod.rs +++ b/core/lib/api_client/src/rest/mod.rs @@ -1,2 +1,4 @@ +pub mod client; +pub mod error; pub mod forced_exit_requests; -pub mod v1; +pub mod v02; diff --git a/core/lib/api_client/src/rest/v02/account.rs b/core/lib/api_client/src/rest/v02/account.rs new file mode 100644 index 0000000000..f62617dcc8 --- /dev/null +++ b/core/lib/api_client/src/rest/v02/account.rs @@ -0,0 +1,59 @@ +use crate::rest::client::{Client, Result}; + +use zksync_api_types::v02::{ + pagination::{ApiEither, PaginationQuery}, + Response, +}; +use zksync_types::{tx::TxHash, SerialId}; + +impl Client { + pub async fn account_info( + &self, + account_id_or_address: &str, + state_type: &str, + ) -> Result { + self.get_with_scope( + super::API_V02_SCOPE, + &format!("accounts/{}/{}", account_id_or_address, state_type), + ) + .send() + .await + } + + pub async fn account_full_info(&self, account_id_or_address: &str) -> Result { + self.get_with_scope( + super::API_V02_SCOPE, + &format!("accounts/{}", account_id_or_address), + ) + .send() + .await + } + + pub async fn account_txs( + &self, + pagination_query: &PaginationQuery>, + account_id_or_address: &str, + ) -> Result { + self.get_with_scope( + super::API_V02_SCOPE, + &format!("accounts/{}/transactions", account_id_or_address), + ) + .query(&pagination_query) + .send() + .await + } + + pub async fn account_pending_txs( + &self, + pagination_query: &PaginationQuery>, + account_id_or_address: &str, + ) -> Result { + self.get_with_scope( + super::API_V02_SCOPE, + &format!("accounts/{}/transactions/pending", account_id_or_address), + ) + .query(pagination_query) + .send() + .await + } +} diff --git a/core/lib/api_client/src/rest/v02/block.rs b/core/lib/api_client/src/rest/v02/block.rs new file mode 100644 index 0000000000..56ac513860 --- 
/dev/null +++ b/core/lib/api_client/src/rest/v02/block.rs @@ -0,0 +1,39 @@ +use crate::rest::client::{Client, Result}; + +use zksync_api_types::v02::{ + pagination::{ApiEither, PaginationQuery}, + Response, +}; +use zksync_types::{tx::TxHash, BlockNumber}; + +impl Client { + pub async fn block_by_position(&self, block_position: &str) -> Result { + self.get_with_scope(super::API_V02_SCOPE, &format!("blocks/{}", block_position)) + .send() + .await + } + + pub async fn block_transactions( + &self, + pagination_query: &PaginationQuery>, + block_position: &str, + ) -> Result { + self.get_with_scope( + super::API_V02_SCOPE, + &format!("blocks/{}/transactions", block_position), + ) + .query(&pagination_query) + .send() + .await + } + + pub async fn block_pagination( + &self, + pagination_query: &PaginationQuery>, + ) -> Result { + self.get_with_scope(super::API_V02_SCOPE, "blocks") + .query(pagination_query) + .send() + .await + } +} diff --git a/core/lib/api_client/src/rest/v02/config.rs b/core/lib/api_client/src/rest/v02/config.rs new file mode 100644 index 0000000000..64e31d817d --- /dev/null +++ b/core/lib/api_client/src/rest/v02/config.rs @@ -0,0 +1,10 @@ +use crate::rest::client::{Client, Result}; +use zksync_api_types::v02::Response; + +impl Client { + pub async fn config(&self) -> Result { + self.get_with_scope(super::API_V02_SCOPE, "config") + .send() + .await + } +} diff --git a/core/lib/api_client/src/rest/v02/fee.rs b/core/lib/api_client/src/rest/v02/fee.rs new file mode 100644 index 0000000000..650a9e3b4f --- /dev/null +++ b/core/lib/api_client/src/rest/v02/fee.rs @@ -0,0 +1,39 @@ +// Local uses +use crate::rest::client::{Client, Result}; +use zksync_api_types::v02::{ + fee::{ApiTxFeeTypes, BatchFeeRequest, TxFeeRequest, TxInBatchFeeRequest}, + Response, +}; +use zksync_types::{Address, TokenLike}; + +impl Client { + pub async fn get_txs_fee( + &self, + tx_type: ApiTxFeeTypes, + address: Address, + token_like: TokenLike, + ) -> Result { + 
self.post_with_scope(super::API_V02_SCOPE, "fee") + .body(&TxFeeRequest { + tx_type, + address, + token_like, + }) + .send() + .await + } + + pub async fn get_batch_fee( + &self, + transactions: Vec, + token_like: TokenLike, + ) -> Result { + self.post_with_scope(super::API_V02_SCOPE, "fee/batch") + .body(&BatchFeeRequest { + transactions, + token_like, + }) + .send() + .await + } +} diff --git a/core/lib/api_client/src/rest/v02/mod.rs b/core/lib/api_client/src/rest/v02/mod.rs new file mode 100644 index 0000000000..14f7deb4bc --- /dev/null +++ b/core/lib/api_client/src/rest/v02/mod.rs @@ -0,0 +1,9 @@ +pub mod account; +pub mod block; +pub mod config; +pub mod fee; +pub mod status; +pub mod token; +pub mod transaction; + +const API_V02_SCOPE: &str = "/api/v0.2/"; diff --git a/core/lib/api_client/src/rest/v02/status.rs b/core/lib/api_client/src/rest/v02/status.rs new file mode 100644 index 0000000000..8f5aa57b93 --- /dev/null +++ b/core/lib/api_client/src/rest/v02/status.rs @@ -0,0 +1,10 @@ +use crate::rest::client::{Client, Result}; +use zksync_api_types::v02::Response; + +impl Client { + pub async fn status(&self) -> Result { + self.get_with_scope(super::API_V02_SCOPE, "networkStatus") + .send() + .await + } +} diff --git a/core/lib/api_client/src/rest/v02/token.rs b/core/lib/api_client/src/rest/v02/token.rs new file mode 100644 index 0000000000..ecd28fa284 --- /dev/null +++ b/core/lib/api_client/src/rest/v02/token.rs @@ -0,0 +1,33 @@ +use crate::rest::client::{Client, Result}; +use zksync_api_types::v02::{ + pagination::{ApiEither, PaginationQuery}, + Response, +}; +use zksync_types::{TokenId, TokenLike}; + +impl Client { + pub async fn token_pagination( + &self, + pagination_query: &PaginationQuery>, + ) -> Result { + self.get_with_scope(super::API_V02_SCOPE, "tokens") + .query(&pagination_query) + .send() + .await + } + + pub async fn token_by_id(&self, token: &TokenLike) -> Result { + self.get_with_scope(super::API_V02_SCOPE, &format!("tokens/{}", token)) + 
.send() + .await + } + + pub async fn token_price(&self, token: &TokenLike, token_id_or_usd: &str) -> Result { + self.get_with_scope( + super::API_V02_SCOPE, + &format!("tokens/{}/priceIn/{}", token, token_id_or_usd), + ) + .send() + .await + } +} diff --git a/core/lib/api_client/src/rest/v02/transaction.rs b/core/lib/api_client/src/rest/v02/transaction.rs new file mode 100644 index 0000000000..e9ab2da261 --- /dev/null +++ b/core/lib/api_client/src/rest/v02/transaction.rs @@ -0,0 +1,59 @@ +use crate::rest::client::{Client, Result}; +use zksync_api_types::v02::{ + transaction::{IncomingTx, IncomingTxBatch}, + Response, +}; +use zksync_types::tx::{ + EthBatchSignatures, TxEthSignature, TxEthSignatureVariant, TxHash, ZkSyncTx, +}; + +impl Client { + pub async fn submit_tx( + &self, + tx: ZkSyncTx, + signature: TxEthSignatureVariant, + ) -> Result { + self.post_with_scope(super::API_V02_SCOPE, "transactions") + .body(&IncomingTx { tx, signature }) + .send() + .await + } + + pub async fn submit_batch( + &self, + txs: Vec, + signature: EthBatchSignatures, + ) -> Result { + self.post_with_scope(super::API_V02_SCOPE, "transactions/batches") + .body(&IncomingTxBatch { txs, signature }) + .send() + .await + } + + pub async fn tx_status(&self, tx_hash: TxHash) -> Result { + self.get_with_scope( + super::API_V02_SCOPE, + &format!("transactions/{}", tx_hash.to_string()), + ) + .send() + .await + } + + pub async fn tx_data(&self, tx_hash: TxHash) -> Result { + self.get_with_scope( + super::API_V02_SCOPE, + &format!("transactions/{}/data", tx_hash.to_string()), + ) + .send() + .await + } + + pub async fn get_batch(&self, batch_hash: TxHash) -> Result { + self.get_with_scope( + super::API_V02_SCOPE, + &format!("transactions/batches/{}", batch_hash.to_string()), + ) + .send() + .await + } +} diff --git a/core/lib/api_client/src/rest/v1/accounts.rs b/core/lib/api_client/src/rest/v1/accounts.rs index 5a2137b80f..e69de29bb2 100644 --- a/core/lib/api_client/src/rest/v1/accounts.rs +++ 
b/core/lib/api_client/src/rest/v1/accounts.rs @@ -1,365 +0,0 @@ -//! Accounts API client implementation - -// Built-in uses -use std::{ - collections::{BTreeMap, HashMap}, - fmt::Display, - str::FromStr, -}; - -// External uses -use serde::{Deserialize, Serialize}; - -// Workspace uses -use zksync_types::{ - tx::TxHash, AccountId, Address, BlockNumber, Nonce, PriorityOp, PubKeyHash, TokenId, H256, -}; -use zksync_utils::{remove_prefix, BigUintSerdeWrapper}; - -// Local uses -use super::{ - client::{Client, ClientError}, - transactions::Receipt, -}; - -// Data transfer objects - -/// Account search query. -#[derive(Debug, Serialize, Copy, Clone, PartialEq, Eq, Ord, PartialOrd, Hash)] -#[serde(untagged, rename_all = "camelCase")] -pub enum AccountQuery { - /// Search account by ID. - Id(AccountId), - /// Search account by address. - Address(Address), -} - -#[derive(Debug, Deserialize, Serialize, Clone, PartialEq, Default)] -#[serde(rename_all = "camelCase")] -pub struct NFT { - id: TokenId, - content_hash: H256, - creator_id: AccountId, - creator_address: Address, - serial_id: u32, - address: Address, - symbol: String, -} - -impl From for NFT { - fn from(val: zksync_types::NFT) -> Self { - Self { - id: val.id, - content_hash: val.content_hash, - creator_id: val.creator_id, - creator_address: val.creator_address, - serial_id: val.serial_id, - address: val.address, - symbol: val.symbol, - } - } -} - -#[derive(Debug, Deserialize, Serialize, Clone, PartialEq, Default)] -#[serde(rename_all = "camelCase")] -pub struct ApiNFT { - id: TokenId, - content_hash: H256, - creator_id: AccountId, - creator_address: Address, - serial_id: u32, - address: Address, - symbol: String, - current_factory: Address, - withdrawn_factory: Option
, -} - -impl From for ApiNFT { - fn from(val: zksync_types::tokens::ApiNFT) -> Self { - Self { - id: val.id, - content_hash: val.content_hash, - creator_id: val.creator_id, - creator_address: val.creator_address, - serial_id: val.serial_id, - address: val.address, - symbol: val.symbol, - current_factory: val.current_factory, - withdrawn_factory: val.withdrawn_factory, - } - } -} - -/// Account state at the time of the zkSync block commit or verification. -/// This means that each account has various states. -#[derive(Debug, Deserialize, Serialize, Clone, PartialEq, Default)] -#[serde(rename_all = "camelCase")] -pub struct AccountState { - /// Account wallet balances. - pub balances: BTreeMap, - pub nfts: HashMap, - pub minted_nfts: HashMap, - /// zkSync account nonce. - pub nonce: Nonce, - /// Hash of the account's owner public key. - pub pub_key_hash: PubKeyHash, -} - -/// Pending amount for the deposit. -#[derive(Debug, Deserialize, Serialize, Clone, PartialEq, Default)] -#[serde(rename_all = "camelCase")] -pub struct DepositingFunds { - /// Amount in wei. - pub amount: BigUintSerdeWrapper, - /// The greatest block number among all the deposits for a certain token. - pub expected_accept_block: BlockNumber, -} - -/// Depositing balances -#[derive(Debug, Deserialize, Serialize, Clone, PartialEq, Default)] -#[serde(rename_all = "camelCase")] -pub struct DepositingBalances { - /// The amount of deposits by token symbols. - pub balances: BTreeMap, -} - -/// Account summary info in the zkSync network. -#[derive(Debug, Deserialize, Serialize, Clone, PartialEq)] -#[serde(rename_all = "camelCase")] -pub struct AccountInfo { - /// Account address. - pub address: Address, - /// Unique identifier of the account in the zkSync network. - pub id: AccountId, - /// Account state in accordance with the actual committed block. - pub committed: AccountState, - /// Account state in accordance with the actual verified block. 
- pub verified: AccountState, - /// Unconfirmed account deposits. - pub depositing: DepositingBalances, -} - -/// The unique transaction location, which is describes by a pair: -/// (block number, transaction index in it). -#[derive(Debug, Deserialize, Serialize, Clone, Copy, PartialEq)] -#[serde(rename_all = "camelCase")] -pub struct TxLocation { - /// The block containing the transaction. - pub block: BlockNumber, - /// Transaction index in block. Absent for rejected transactions. - pub index: Option, -} - -/// Account receipts search options. -#[derive(Debug, Clone, Copy, PartialEq)] -pub enum AccountReceipts { - /// Search for older receipts starting from a given location. - Older(TxLocation), - /// Search for newer receipts starting from a given location. - Newer(TxLocation), - /// Search for latest receipts. - Latest, -} - -impl AccountReceipts { - pub fn newer_than(block: BlockNumber, index: Option) -> Self { - Self::Newer(TxLocation { block, index }) - } - - pub fn older_than(block: BlockNumber, index: Option) -> Self { - Self::Older(TxLocation { block, index }) - } -} - -/// Direction to perform search of transactions to. -#[derive(Debug, Deserialize, Serialize, Copy, Clone, PartialEq)] -#[serde(rename_all = "camelCase")] -pub enum SearchDirection { - /// Find transactions older than specified one. - Older, - /// Find transactions newer than specified one. 
- Newer, -} - -#[derive(Debug, Deserialize, Serialize, Clone, Copy, PartialEq)] -#[serde(rename_all = "camelCase")] -pub struct AccountReceiptsQuery { - pub block: Option, - pub index: Option, - pub direction: Option, - pub limit: BlockNumber, -} - -#[derive(Debug, Deserialize, Serialize, Clone, PartialEq)] -#[serde(rename_all = "camelCase")] -pub struct AccountTxReceipt { - pub index: Option, - #[serde(flatten)] - pub receipt: Receipt, - pub hash: TxHash, -} - -#[derive(Debug, Deserialize, Serialize, Clone, PartialEq)] -#[serde(rename_all = "camelCase")] -pub struct AccountOpReceipt { - pub index: u32, - #[serde(flatten)] - pub receipt: Receipt, - pub hash: H256, -} - -#[derive(Debug, Deserialize, Serialize, Clone, PartialEq)] -#[serde(rename_all = "camelCase")] -pub struct PendingAccountOpReceipt { - pub eth_block: u64, - pub hash: H256, -} - -impl From for AccountQuery { - fn from(v: AccountId) -> Self { - Self::Id(v) - } -} - -impl From
for AccountQuery { - fn from(v: Address) -> Self { - Self::Address(v) - } -} - -impl Display for AccountQuery { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - AccountQuery::Id(id) => id.fmt(f), - AccountQuery::Address(address) => write!(f, "{:x}", address), - } - } -} - -impl FromStr for AccountQuery { - type Err = String; - - fn from_str(s: &str) -> Result { - if let Ok(id) = s.parse() { - return Ok(Self::Id(AccountId(id))); - } - - let s = remove_prefix(s); - s.parse().map(Self::Address).map_err(|e| e.to_string()) - } -} - -impl Display for SearchDirection { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - SearchDirection::Older => "older".fmt(f), - SearchDirection::Newer => "newer".fmt(f), - } - } -} - -impl FromStr for SearchDirection { - type Err = String; - - fn from_str(s: &str) -> Result { - match s { - "older" => Ok(Self::Older), - "newer" => Ok(Self::Newer), - other => Err(format!("Unknown search direction: {}", other)), - } - } -} - -impl AccountReceiptsQuery { - pub fn new(from: AccountReceipts, limit: u32) -> Self { - match from { - AccountReceipts::Older(location) => { - Self::from_parts(location, SearchDirection::Older, limit) - } - - AccountReceipts::Newer(location) => { - Self::from_parts(location, SearchDirection::Newer, limit) - } - - AccountReceipts::Latest => Self::from_parts( - TxLocation { - // TODO: use `zksync_storage::MAX_BLOCK_NUMBER` instead - block: BlockNumber(u32::MAX), - index: None, - }, - SearchDirection::Older, - limit, - ), - } - } - - fn from_parts(location: TxLocation, direction: SearchDirection, limit: u32) -> Self { - Self { - direction: Some(direction), - block: Some(location.block), - index: location.index, - limit: BlockNumber(limit), - } - } -} - -impl PendingAccountOpReceipt { - pub fn from_priority_op(op: PriorityOp) -> Self { - Self { - eth_block: op.eth_block, - hash: op.eth_hash, - } - } -} - -/// Accounts API part. 
-impl Client { - /// Gets account information - pub async fn account_info( - &self, - account: impl Into, - ) -> Result, ClientError> { - let account = account.into(); - - self.get(&format!("accounts/{}", account)).send().await - } - - pub async fn account_tx_receipts( - &self, - account: impl Into, - from: AccountReceipts, - limit: u32, - ) -> Result, ClientError> { - let account = account.into(); - - self.get(&format!("accounts/{}/transactions/receipts", account)) - .query(&AccountReceiptsQuery::new(from, limit)) - .send() - .await - } - - pub async fn account_op_receipts( - &self, - account: impl Into, - from: AccountReceipts, - limit: u32, - ) -> Result, ClientError> { - let account = account.into(); - - self.get(&format!("accounts/{}/operations/receipts", account)) - .query(&AccountReceiptsQuery::new(from, limit)) - .send() - .await - } - - pub async fn account_pending_ops( - &self, - account: impl Into, - ) -> Result, ClientError> { - let account = account.into(); - - self.get(&format!("accounts/{}/operations/pending", account)) - .send() - .await - } -} diff --git a/core/lib/api_client/src/rest/v1/blocks.rs b/core/lib/api_client/src/rest/v1/blocks.rs deleted file mode 100644 index 08dadca155..0000000000 --- a/core/lib/api_client/src/rest/v1/blocks.rs +++ /dev/null @@ -1,77 +0,0 @@ -//! Blocks part of API implementation. - -// Built-in uses - -// External uses -use chrono::{DateTime, Utc}; -use serde::{Deserialize, Serialize}; -use serde_json::Value; - -// Workspace uses -use zksync_crypto::{serialization::FrSerde, Fr}; -use zksync_types::{tx::TxHash, BlockNumber}; - -// Local uses -use super::{ - client::{self, Client}, - Pagination, -}; - -// Data transfer objects. 
- -#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)] -#[serde(rename_all = "camelCase")] -pub struct BlockInfo { - pub block_number: BlockNumber, - #[serde(with = "FrSerde")] - pub new_state_root: Fr, - pub block_size: u64, - pub commit_tx_hash: Option, - pub verify_tx_hash: Option, - pub committed_at: DateTime, - pub verified_at: Option>, -} - -#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] -#[serde(rename_all = "camelCase")] -pub struct TransactionInfo { - pub tx_hash: TxHash, - pub block_number: BlockNumber, - pub op: Value, - pub success: Option, - pub fail_reason: Option, - pub created_at: DateTime, -} - -/// Blocks API part. -impl Client { - /// Returns information about block with the specified number or null if block doesn't exist. - pub async fn block_by_id( - &self, - block_number: BlockNumber, - ) -> client::Result> { - self.get(&format!("blocks/{}", *block_number)).send().await - } - - /// Returns information about transactions of the block with the specified number. - pub async fn block_transactions( - &self, - block_number: BlockNumber, - ) -> client::Result> { - self.get(&format!("blocks/{}/transactions", *block_number)) - .send() - .await - } - - /// Returns information about several blocks in a range. - pub async fn blocks_range( - &self, - from: Pagination, - limit: u32, - ) -> client::Result> { - self.get("blocks") - .query(&from.into_query(limit)) - .send() - .await - } -} diff --git a/core/lib/api_client/src/rest/v1/config.rs b/core/lib/api_client/src/rest/v1/config.rs deleted file mode 100644 index 1bd60f91cb..0000000000 --- a/core/lib/api_client/src/rest/v1/config.rs +++ /dev/null @@ -1,35 +0,0 @@ -//! Config part of API implementation. - -// Built-in uses - -// External uses -use serde::{Deserialize, Serialize}; - -// Workspace uses -use zksync_types::Address; - -// Local uses -use super::client::{self, Client}; - -// Data transfer objects. 
- -#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)] -#[serde(rename_all = "camelCase")] -pub struct Contracts { - pub contract: Address, -} - -/// Configuration API part. -impl Client { - pub async fn contracts(&self) -> client::Result { - self.get("config/contracts").send().await - } - - pub async fn deposit_confirmations(&self) -> client::Result { - self.get("config/deposit_confirmations").send().await - } - - pub async fn network(&self) -> client::Result { - self.get("config/network").send().await - } -} diff --git a/core/lib/api_client/src/rest/v1/mod.rs b/core/lib/api_client/src/rest/v1/mod.rs deleted file mode 100644 index eeb6b01ebf..0000000000 --- a/core/lib/api_client/src/rest/v1/mod.rs +++ /dev/null @@ -1,180 +0,0 @@ -//! First stable API implementation client. - -// External uses -use serde::{Deserialize, Serialize}; - -// Workspace uses -use zksync_types::BlockNumber; - -// Public uses -pub use self::{ - blocks::{BlockInfo, TransactionInfo}, - client::{Client, ClientError, Result as ClientResult}, - config::Contracts, - error::ErrorBody, - operations::{PriorityOpData, PriorityOpQuery, PriorityOpQueryError, PriorityOpReceipt}, - search::BlockSearchQuery, - tokens::{TokenPriceKind, TokenPriceQuery}, - transactions::{ - FastProcessingQuery, IncomingTx, IncomingTxBatch, IncomingTxBatchForFee, IncomingTxForFee, - Receipt, TxData, - }, -}; - -// Local uses -pub mod accounts; -mod blocks; -mod client; -mod config; -mod error; -mod operations; -mod search; -mod tokens; -mod transactions; - -/// Maximum limit value in the requests. 
-pub const MAX_LIMIT: u32 = 100; - -/// Internal pagination query representation in according to spec: -/// -/// `?limit=..&[before={id}|after={id}]` where: -/// -/// - `limit` parameter is required -/// - if `before=#id` is set; returns `limit` objects before object with `id` (not including `id`) -/// - if `after=#id` is set; returns `limit` objects after object with `id` (not including `id`) -/// - if neither is set; returns last `limit` objects -#[derive(Debug, Serialize, Deserialize, Copy, Clone, PartialEq, Default)] -pub struct PaginationQuery { - before: Option, - after: Option, - limit: u32, -} - -/// Pagination request parameter. -/// -/// Used together with the limit parameter to perform pagination. -#[derive(Debug, Serialize, Deserialize, Copy, Clone, PartialEq)] -pub enum Pagination { - /// Request to return some items before specified (not including itself). - Before(BlockNumber), - /// Request to return some items after specified (not including itself) - After(BlockNumber), - /// Request to return some last items. - Last, -} - -#[derive(Debug)] -pub struct PaginationQueryError { - pub detail: String, -} - -impl PaginationQueryError { - fn with_detail(detail: String) -> Self { - Self { detail } - } -} - -impl PaginationQuery { - /// Parses the original query into a pair `(pagination, limit)`. 
- pub fn into_inner(self) -> Result<(Pagination, u32), PaginationQueryError> { - let (pagination, limit) = match self { - Self { - before: Some(before), - after: None, - limit, - } => Ok((Pagination::Before(before), limit)), - - Self { - before: None, - after: Some(after), - limit, - } => Ok((Pagination::After(after), limit)), - - Self { - before: None, - after: None, - limit, - } => Ok((Pagination::Last, limit)), - - _ => Err(PaginationQueryError::with_detail( - "Pagination query contains both `before` and `after` values.".into(), - )), - }?; - - if limit == 0 { - return Err(PaginationQueryError::with_detail( - "Limit should be greater than zero".into(), - )); - } - - if limit > MAX_LIMIT { - return Err(PaginationQueryError::with_detail(format!( - "Limit should be lower than {}", - MAX_LIMIT - ))); - } - - Ok((pagination, limit)) - } -} - -impl Pagination { - /// Converts `(pagination, limit)` pair into the `(max, limit)` pair to perform database queries. - /// - /// # Panics - /// - /// - if limit is zero. - pub fn into_max(self, limit: u32) -> Result, PaginationQueryError> { - assert!(limit > 0, "Limit should be greater than zero"); - - match self { - Pagination::Before(before) => { - if *before < 1 { - return Err(PaginationQueryError::with_detail( - "Before should be greater than zero".into(), - )); - } - - Ok(Some(BlockNumber(*before - 1))) - } - Pagination::After(after) => Ok(Some(BlockNumber(*after + limit + 1))), - Pagination::Last => Ok(None), - } - } - - /// Converts `(pagination, limit)` pair into the query. 
- fn into_query(self, limit: u32) -> PaginationQuery { - match self { - Pagination::Before(before) => PaginationQuery { - before: Some(before), - limit, - ..PaginationQuery::default() - }, - Pagination::After(after) => PaginationQuery { - after: Some(after), - limit, - ..PaginationQuery::default() - }, - Pagination::Last => PaginationQuery { - limit, - ..PaginationQuery::default() - }, - } - } -} - -#[test] -fn pagination_before_max_limit() { - let pagination = Pagination::Before(BlockNumber(10)); - - let max = pagination.into_max(10).unwrap(); - assert_eq!(max, Some(BlockNumber(9))) -} - -#[test] -fn pagination_after_max_limit() { - let pagination = Pagination::After(BlockNumber(10)); - - let max = pagination.into_max(10).unwrap(); - assert_eq!(max, Some(BlockNumber(21))) -} diff --git a/core/lib/api_client/src/rest/v1/operations.rs b/core/lib/api_client/src/rest/v1/operations.rs deleted file mode 100644 index 3db767fa7b..0000000000 --- a/core/lib/api_client/src/rest/v1/operations.rs +++ /dev/null @@ -1,123 +0,0 @@ -//! Operations part of API implementation. - -// Built-in uses -use std::{fmt::Display, str::FromStr}; - -// External uses -use serde::{Deserialize, Serialize}; - -// Local uses -use super::{ - client::{Client, ClientError}, - transactions::Receipt, -}; - -// Workspace uses -use zksync_types::{ZkSyncOp, H256}; - -// Data transfer objects. - -/// Priority op search query. -#[derive(Debug, Serialize, Copy, Clone, PartialEq, Eq, Ord, PartialOrd, Hash)] -#[serde(untagged, rename_all = "camelCase")] -pub enum PriorityOpQuery { - /// Search priority operation by serial ID. - Id(u64), - /// Search priority operation by hash. 
- Hash(H256), -} - -#[derive(Debug, Deserialize, Serialize, Clone, PartialEq)] -#[serde(rename_all = "camelCase")] -pub struct PriorityOpReceipt { - #[serde(flatten)] - pub status: Receipt, - pub index: Option, -} - -#[derive(Debug, Deserialize, Serialize, Clone)] -#[serde(rename_all = "camelCase")] -pub struct PriorityOpData { - pub data: ZkSyncOp, - pub eth_hash: H256, - pub serial_id: u64, -} - -impl From for PriorityOpQuery { - fn from(v: u64) -> Self { - Self::Id(v) - } -} - -impl From for PriorityOpQuery { - fn from(v: H256) -> Self { - Self::Hash(v) - } -} - -impl Display for PriorityOpQuery { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Self::Id(id) => id.fmt(f), - Self::Hash(hash) => write!(f, "{:x}", hash), - } - } -} - -impl FromStr for PriorityOpQuery { - type Err = String; - - fn from_str(s: &str) -> Result { - if let Ok(id) = s.parse::() { - return Ok(Self::Id(id)); - } - - s.parse::().map(Self::Hash).map_err(|e| e.to_string()) - } -} - -#[derive(Debug)] -pub struct PriorityOpQueryError { - pub detail: String, -} - -impl PriorityOpQueryError { - fn with_detail(detail: String) -> Self { - Self { detail } - } -} - -impl PriorityOpQuery { - /// Additional parser because actix-web doesn't understand enums in path extractor. - pub fn from_path(path: String) -> Result { - path.parse().map_err(|err| { - PriorityOpQueryError::with_detail(format!( - "Must be specified either a serial ID or a priority operation hash: {}", - err - )) - }) - } -} - -/// Operations API part. -impl Client { - /// Gets priority operation receipt. - pub async fn priority_op( - &self, - query: impl Into, - ) -> Result, ClientError> { - self.get(&format!("operations/{}", query.into())) - .send() - .await - } - - /// Gets priority operation receipt. 
- pub async fn priority_op_data( - &self, - query: impl Into, - ) -> Result, ClientError> { - self.get(&format!("operations/{}/data", query.into())) - .send() - .await - } -} diff --git a/core/lib/api_client/src/rest/v1/search.rs b/core/lib/api_client/src/rest/v1/search.rs deleted file mode 100644 index 7804b76911..0000000000 --- a/core/lib/api_client/src/rest/v1/search.rs +++ /dev/null @@ -1,67 +0,0 @@ -//! Search part of API implementation. - -// Built-in uses - -// External uses -use serde::{Deserialize, Serialize}; - -// Workspace uses -use zksync_crypto::{convert::FeConvert, Fr}; -use zksync_types::{tx::TxHash, BlockNumber}; - -// Local uses -use super::{ - blocks::BlockInfo, - client::{self, Client}, -}; - -// Data transfer objects. - -#[derive(Debug, Deserialize, Serialize, Clone, PartialEq)] -#[serde(rename_all = "camelCase")] -pub struct BlockSearchQuery { - pub query: String, -} - -impl From for BlockSearchQuery { - /// Convert the block number into the search query. - fn from(inner: BlockNumber) -> Self { - Self { - query: inner.to_string(), - } - } -} - -impl From for BlockSearchQuery { - /// Converts the state root hash of the block into the search query. - fn from(inner: Fr) -> Self { - Self { - query: inner.to_hex(), - } - } -} - -impl From for BlockSearchQuery { - /// Converts the commit/verify Ethereum transaction hash into the search query. - fn from(inner: TxHash) -> Self { - Self { - // Serialize without prefix. - query: hex::encode(inner), - } - } -} - -/// Search API part. -impl Client { - /// Performs a block search with an uncertain query, which can be either of: - /// - /// - Hash of commit/verify Ethereum transaction for the block. - /// - The state root hash of the block. - /// - The number of the block. 
- pub async fn search_block( - &self, - query: impl Into, - ) -> client::Result> { - self.get("search").query(&query.into()).send().await - } -} diff --git a/core/lib/api_client/src/rest/v1/tokens.rs b/core/lib/api_client/src/rest/v1/tokens.rs deleted file mode 100644 index a897c52db1..0000000000 --- a/core/lib/api_client/src/rest/v1/tokens.rs +++ /dev/null @@ -1,51 +0,0 @@ -//! Tokens part of API implementation. - -// Built-in uses - -// External uses -use bigdecimal::BigDecimal; -use serde::{Deserialize, Serialize}; - -// Workspace uses -use zksync_types::{Token, TokenLike}; - -// Local uses -use super::client::{self, Client}; - -// Data transfer objects. - -#[derive(Debug, Deserialize, Serialize, Copy, Clone, PartialEq)] -#[serde(rename_all = "camelCase")] -pub enum TokenPriceKind { - Currency, - Token, -} - -#[derive(Debug, Deserialize, Serialize, Copy, Clone, PartialEq)] -#[serde(rename_all = "camelCase")] -pub struct TokenPriceQuery { - #[serde(rename = "in")] - pub kind: TokenPriceKind, -} - -/// Tokens API part. -impl Client { - pub async fn tokens(&self) -> client::Result> { - self.get("tokens").send().await - } - - pub async fn token_by_id(&self, token: &TokenLike) -> client::Result> { - self.get(&format!("tokens/{}", token)).send().await - } - - pub async fn token_price( - &self, - token: &TokenLike, - kind: TokenPriceKind, - ) -> client::Result> { - self.get(&format!("tokens/{}/price", token)) - .query(&TokenPriceQuery { kind }) - .send() - .await - } -} diff --git a/core/lib/api_client/src/rest/v1/transactions.rs b/core/lib/api_client/src/rest/v1/transactions.rs index 43bfe063a2..e69de29bb2 100644 --- a/core/lib/api_client/src/rest/v1/transactions.rs +++ b/core/lib/api_client/src/rest/v1/transactions.rs @@ -1,210 +0,0 @@ -//! Transactions part of API implementation. 
- -// Built-in uses - -// External uses -use serde::{Deserialize, Serialize}; - -// Workspace uses -use zksync_types::{ - tx::{EthBatchSignatures, EthSignData, TxEthSignatureVariant, TxHash}, - Address, BatchFee, BlockNumber, Fee, SignedZkSyncTx, TokenLike, TxFeeTypes, ZkSyncTx, -}; - -// Local uses -use super::{client::Client, client::ClientError, Pagination}; - -// Data transfer objects. - -#[derive(Debug, Serialize, Deserialize, PartialEq, Clone, Copy)] -#[serde(rename_all = "camelCase")] -pub struct FastProcessingQuery { - pub fast_processing: Option, -} - -/// This structure has the same layout as [`SignedZkSyncTx`], -/// the only difference is that it uses "camelCase" for serialization. -/// -/// [`SignedZkSyncTx`]: zksync_types::SignedZkSyncTx -#[derive(Debug, Serialize, Deserialize, Clone)] -#[serde(rename_all = "camelCase")] -pub struct TxData { - /// Underlying zkSync transaction. - pub tx: ZkSyncTx, - /// Tuple of the Ethereum signature and the message - /// which user should have signed with their private key. - /// Can be `None` if the Ethereum signature is not required. - pub eth_sign_data: Option, -} - -/// This struct has the same layout as `SignedZkSyncTx`, expect that it used -/// `TxEthSignature` directly instead of `EthSignData`. -#[derive(Debug, Serialize, Deserialize, Clone)] -#[serde(rename_all = "camelCase")] -pub struct IncomingTx { - pub tx: ZkSyncTx, - pub signature: TxEthSignatureVariant, -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -#[serde(rename_all = "camelCase")] -pub struct IncomingTxForFee { - pub tx_type: TxFeeTypes, - pub address: Address, - pub token_like: TokenLike, -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -#[serde(rename_all = "camelCase")] -pub struct IncomingTxBatchForFee { - pub tx_types: Vec, - pub addresses: Vec
, - pub token_like: TokenLike, -} - -#[derive(Debug, Serialize, Deserialize, Clone)] -#[serde(rename_all = "camelCase")] -pub struct IncomingTxBatch { - pub txs: Vec, - pub signature: EthBatchSignatures, -} - -/// Transaction (or priority operation) receipt. -#[derive(Debug, Deserialize, Serialize, Clone, PartialEq)] -#[serde(tag = "status", rename_all = "camelCase")] -pub enum Receipt { - /// The transaction is awaiting execution in the memorypool. - Pending, - /// The transaction has been executed, but the block containing this transaction has not - /// yet been committed. - Executed, - /// The block which contains this transaction has been committed. - Committed { block: BlockNumber }, - /// The block which contains this transaction has been verified. - Verified { block: BlockNumber }, - /// The transaction has been rejected for some reasons. - Rejected { reason: Option }, -} - -impl From for SignedZkSyncTx { - fn from(inner: TxData) -> Self { - Self { - tx: inner.tx, - eth_sign_data: inner.eth_sign_data, - } - } -} - -impl From for TxData { - fn from(inner: SignedZkSyncTx) -> Self { - Self { - tx: inner.tx, - eth_sign_data: inner.eth_sign_data, - } - } -} - -/// Transactions API part. -impl Client { - /// Sends a new transaction to the memory pool. - pub async fn submit_tx( - &self, - tx: ZkSyncTx, - signature: TxEthSignatureVariant, - fast_processing: Option, - ) -> Result { - self.post("transactions/submit") - .query(&FastProcessingQuery { fast_processing }) - .body(&IncomingTx { tx, signature }) - .send() - .await - } - - /// Get fee for single transaction. - pub async fn get_txs_fee( - &self, - tx_type: TxFeeTypes, - address: Address, - token_like: TokenLike, - ) -> Result { - self.post("transactions/fee") - .body(&IncomingTxForFee { - tx_type, - address, - token_like, - }) - .send() - .await - } - - /// Get txs fee for batch. - pub async fn get_batched_txs_fee( - &self, - tx_types: Vec, - addresses: Vec
, - token_like: TokenLike, - ) -> Result { - self.post("transactions/fee/batch") - .body(&IncomingTxBatchForFee { - tx_types, - addresses, - token_like, - }) - .send() - .await - } - - /// Sends a new transactions batch to the memory pool. - pub async fn submit_tx_batch( - &self, - txs: Vec, - signature: EthBatchSignatures, - ) -> Result, ClientError> { - self.post("transactions/submit/batch") - .body(&IncomingTxBatch { txs, signature }) - .send() - .await - } - - /// Gets actual transaction receipt. - pub async fn tx_status(&self, tx_hash: TxHash) -> Result, ClientError> { - self.get(&format!("transactions/{}", tx_hash.to_string())) - .send() - .await - } - - /// Gets transaction content. - pub async fn tx_data(&self, tx_hash: TxHash) -> Result, ClientError> { - self.get(&format!("transactions/{}/data", tx_hash.to_string())) - .send() - .await - } - - /// Gets transaction receipt by ID. - pub async fn tx_receipt_by_id( - &self, - tx_hash: TxHash, - receipt_id: u32, - ) -> Result, ClientError> { - self.get(&format!( - "transactions/{}/receipts/{}", - tx_hash.to_string(), - receipt_id - )) - .send() - .await - } - - /// Gets transaction receipts. 
- pub async fn tx_receipts( - &self, - tx_hash: TxHash, - from: Pagination, - limit: u32, - ) -> Result, ClientError> { - self.get(&format!("transactions/{}/receipts", tx_hash.to_string())) - .query(&from.into_query(limit)) - .send() - .await - } -} diff --git a/core/lib/api_types/Cargo.toml b/core/lib/api_types/Cargo.toml new file mode 100644 index 0000000000..801a473ebb --- /dev/null +++ b/core/lib/api_types/Cargo.toml @@ -0,0 +1,25 @@ +[package] +name = "zksync_api_types" +version = "1.0.0" +edition = "2018" +authors = ["The Matter Labs Team "] +homepage = "https://zksync.io/" +repository = "https://github.com/matter-labs/zksync" +license = "Apache-2.0" +keywords = ["blockchain", "zksync"] +categories = ["cryptography"] + +[dependencies] +zksync_types = { path = "../types", version = "1.0" } +zksync_utils = { path = "../utils", version = "1.0" } +zksync_crypto = { path = "../crypto", version = "1.0" } +zksync_config = { path = "../config", version = "1.0" } + +serde = "1.0" +serde_json = "1.0" +chrono = { version = "0.4", features = ["serde", "rustc-serialize"] } +hex = "0.4" +num = "0.3" +bigdecimal = { version = "0.2.0", features = ["serde"]} +either = "1.6.1" +thiserror = "1.0" diff --git a/core/lib/api_types/src/lib.rs b/core/lib/api_types/src/lib.rs new file mode 100644 index 0000000000..a45f74a7bd --- /dev/null +++ b/core/lib/api_types/src/lib.rs @@ -0,0 +1,16 @@ +pub use either::Either; +use serde::{Deserialize, Serialize}; +use zksync_types::{tx::TxHash, H256}; + +pub mod v02; + +/// Combined identifier of the priority operations for the lookup. +#[derive(Debug, Serialize, Deserialize)] +pub enum PriorityOpLookupQuery { + /// Query priority operation using zkSync hash, which is calculated based on the priority operation metadata. + BySyncHash(TxHash), + /// Query priority operation using the corresponding Ethereum transaction hash. + ByEthHash(H256), + /// Query priority operation using any of both hashes. 
+ ByAnyHash(TxHash), +} diff --git a/core/lib/api_types/src/v02/account.rs b/core/lib/api_types/src/v02/account.rs new file mode 100644 index 0000000000..d0a34dbee3 --- /dev/null +++ b/core/lib/api_types/src/v02/account.rs @@ -0,0 +1,36 @@ +use serde::{Deserialize, Serialize}; +use std::collections::BTreeMap; +use zksync_types::{AccountId, Address, BlockNumber, Nonce, PubKeyHash}; +use zksync_utils::BigUintSerdeWrapper; + +#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)] +#[serde(rename_all = "camelCase")] +pub struct AccountState { + pub committed: Option, + pub finalized: Option, +} + +#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)] +#[serde(rename_all = "camelCase")] +pub struct Account { + pub account_id: AccountId, + pub address: Address, + pub nonce: Nonce, + pub pub_key_hash: PubKeyHash, + pub last_update_in_block: BlockNumber, + pub balances: BTreeMap, + pub account_type: Option, +} + +#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)] +#[serde(rename_all = "camelCase")] +pub enum AccountAddressOrId { + Address(Address), + Id(AccountId), +} + +#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)] +pub enum EthAccountType { + Owned, + CREATE2, +} diff --git a/core/lib/api_types/src/v02/block.rs b/core/lib/api_types/src/v02/block.rs new file mode 100644 index 0000000000..2b38b75307 --- /dev/null +++ b/core/lib/api_types/src/v02/block.rs @@ -0,0 +1,25 @@ +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use zksync_crypto::{serialization::FrSerde, Fr}; +use zksync_types::{BlockNumber, H256}; + +#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, Clone, Copy)] +#[serde(rename_all = "camelCase")] +pub enum BlockStatus { + Committed, + Finalized, +} + +#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)] +#[serde(rename_all = "camelCase")] +pub struct BlockInfo { + pub block_number: BlockNumber, + #[serde(with = "FrSerde")] + pub new_state_root: Fr, + pub block_size: u64, + pub commit_tx_hash: Option, + 
pub verify_tx_hash: Option, + pub committed_at: DateTime, + pub finalized_at: Option>, + pub status: BlockStatus, +} diff --git a/core/lib/api_types/src/v02/fee.rs b/core/lib/api_types/src/v02/fee.rs new file mode 100644 index 0000000000..598587f47e --- /dev/null +++ b/core/lib/api_types/src/v02/fee.rs @@ -0,0 +1,82 @@ +use num::BigUint; +use serde::{Deserialize, Serialize}; +use zksync_types::{tokens::ChangePubKeyFeeTypeArg, Address, BatchFee, Fee, TokenLike, TxFeeTypes}; +use zksync_utils::BigUintSerdeAsRadix10Str; + +#[derive(Debug, Serialize, Deserialize, Clone)] +#[serde(rename_all = "camelCase")] +pub struct ApiFee { + #[serde(with = "BigUintSerdeAsRadix10Str")] + pub gas_fee: BigUint, + #[serde(with = "BigUintSerdeAsRadix10Str")] + pub zkp_fee: BigUint, + #[serde(with = "BigUintSerdeAsRadix10Str")] + pub total_fee: BigUint, +} + +impl From for ApiFee { + fn from(fee: Fee) -> Self { + ApiFee { + gas_fee: fee.gas_fee, + zkp_fee: fee.zkp_fee, + total_fee: fee.total_fee, + } + } +} + +impl From for ApiFee { + fn from(fee: BatchFee) -> Self { + ApiFee { + gas_fee: fee.gas_fee, + zkp_fee: fee.zkp_fee, + total_fee: fee.total_fee, + } + } +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub enum ApiTxFeeTypes { + /// Fee for the `Withdraw` transaction. + Withdraw, + /// Fee for the `Withdraw` operation that requires fast processing. + FastWithdraw, + /// Fee for the `Transfer` operation. + Transfer, + /// Fee for the `ChangePubKey` operation. + ChangePubKey(ChangePubKeyFeeTypeArg), + /// Fee for the `ForcedExit` transaction. 
+ ForcedExit, +} + +impl From for TxFeeTypes { + fn from(fee_type: ApiTxFeeTypes) -> TxFeeTypes { + match fee_type { + ApiTxFeeTypes::Withdraw | ApiTxFeeTypes::ForcedExit => TxFeeTypes::Withdraw, + ApiTxFeeTypes::FastWithdraw => TxFeeTypes::FastWithdraw, + ApiTxFeeTypes::Transfer => TxFeeTypes::Transfer, + ApiTxFeeTypes::ChangePubKey(cpk_arg) => TxFeeTypes::ChangePubKey(cpk_arg), + } + } +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +#[serde(rename_all = "camelCase")] +pub struct TxFeeRequest { + pub tx_type: ApiTxFeeTypes, + pub address: Address, + pub token_like: TokenLike, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +#[serde(rename_all = "camelCase")] +pub struct TxInBatchFeeRequest { + pub tx_type: ApiTxFeeTypes, + pub address: Address, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +#[serde(rename_all = "camelCase")] +pub struct BatchFeeRequest { + pub transactions: Vec, + pub token_like: TokenLike, +} diff --git a/core/lib/api_types/src/v02/mod.rs b/core/lib/api_types/src/v02/mod.rs new file mode 100644 index 0000000000..65f5073786 --- /dev/null +++ b/core/lib/api_types/src/v02/mod.rs @@ -0,0 +1,51 @@ +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use serde_json::Value; +use std::collections::HashMap; +use zksync_types::network::Network; + +pub mod account; +pub mod block; +pub mod fee; +pub mod pagination; +pub mod status; +pub mod token; +pub mod transaction; + +#[derive(Deserialize, Serialize, Debug, Clone, Copy, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub enum ZksyncVersion { + ContractV4, +} + +#[derive(Debug, Serialize, Deserialize, Clone, Copy)] +#[serde(rename_all = "camelCase")] +pub enum ApiVersion { + V02, +} + +#[derive(Debug, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub enum ResultStatus { + Success, + Error, +} + +#[derive(Debug, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct Request { + pub network: Network, + pub api_version: ApiVersion, 
+ pub resource: String, + pub args: HashMap, + pub timestamp: DateTime, +} + +#[derive(Debug, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct Response { + pub request: Request, + pub status: ResultStatus, + pub error: Option, + pub result: Option, +} diff --git a/core/lib/api_types/src/v02/pagination.rs b/core/lib/api_types/src/v02/pagination.rs new file mode 100644 index 0000000000..f7960f1ddc --- /dev/null +++ b/core/lib/api_types/src/v02/pagination.rs @@ -0,0 +1,139 @@ +use either::Either; +use serde::{Deserialize, Serialize, Serializer}; +use std::str::FromStr; +use thiserror::Error; +use zksync_types::{tx::TxHash, AccountId, Address, BlockNumber, SerialId}; + +pub const MAX_LIMIT: u32 = 100; + +#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, Clone, Copy)] +#[serde(rename_all = "camelCase")] +pub enum PaginationDirection { + Newer, + Older, +} + +/// The struct for defining `latest` option in pagination query +#[derive(Debug)] +pub struct Latest; + +impl Serialize for Latest { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + String::serialize(&"latest".to_string(), serializer) + } +} + +#[derive(Debug, Error, PartialEq)] +#[error("Cannot parse `from` query parameter: {0}")] +pub struct UnknownFromParameter(pub String); + +#[derive(Debug, Serialize)] +#[serde(transparent)] +pub struct ApiEither { + #[serde(with = "either::serde_untagged")] + pub inner: Either, +} + +impl FromStr for ApiEither { + type Err = UnknownFromParameter; + + fn from_str(s: &str) -> Result { + match s { + "latest" => Ok(ApiEither { + inner: Either::Right(Latest), + }), + _ => { + if let Ok(value) = T::from_str(s) { + Ok(ApiEither::from(value)) + } else { + Err(UnknownFromParameter(s.to_string())) + } + } + } + } +} + +impl From for ApiEither { + fn from(value: T) -> ApiEither { + ApiEither { + inner: Either::Left(value), + } + } +} + +pub fn parse_query( + query: PaginationQuery, +) -> Result>, UnknownFromParameter> { + let from 
= FromStr::from_str(&query.from)?; + Ok(PaginationQuery { + from, + limit: query.limit, + direction: query.direction, + }) +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct PaginationQuery { + pub from: Id, + pub limit: u32, + pub direction: PaginationDirection, +} + +#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub struct PaginationDetails { + pub from: F, + pub limit: u32, + pub direction: PaginationDirection, + pub count: u32, +} + +#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub struct Paginated { + pub list: Vec, + pub pagination: PaginationDetails, +} + +impl Paginated { + pub fn new( + list: Vec, + from: F, + limit: u32, + direction: PaginationDirection, + count: u32, + ) -> Self { + Self { + list, + pagination: PaginationDetails { + from, + limit, + direction, + count, + }, + } + } +} + +#[derive(Debug, Serialize)] +pub struct BlockAndTxHash { + pub block_number: BlockNumber, + pub tx_hash: ApiEither, +} + +#[derive(Debug, Serialize)] +pub struct PendingOpsRequest { + pub address: Address, + pub account_id: Option, + pub serial_id: ApiEither, +} + +#[derive(Debug, Serialize)] +pub struct AccountTxsRequest { + pub address: Address, + pub tx_hash: ApiEither, +} diff --git a/core/lib/api_types/src/v02/status.rs b/core/lib/api_types/src/v02/status.rs new file mode 100644 index 0000000000..c10b7ddff0 --- /dev/null +++ b/core/lib/api_types/src/v02/status.rs @@ -0,0 +1,11 @@ +use serde::{Deserialize, Serialize}; +use zksync_types::BlockNumber; + +#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub struct NetworkStatus { + pub last_committed: BlockNumber, + pub finalized: BlockNumber, + pub total_transactions: u32, + pub mempool_size: u32, +} diff --git a/core/lib/api_types/src/v02/token.rs b/core/lib/api_types/src/v02/token.rs new file mode 100644 index 0000000000..f56887f660 --- 
/dev/null +++ b/core/lib/api_types/src/v02/token.rs @@ -0,0 +1,35 @@ +use bigdecimal::BigDecimal; +use serde::{Deserialize, Serialize}; +use zksync_types::{Address, Token, TokenId}; + +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub struct ApiToken { + pub id: TokenId, + pub address: Address, + pub symbol: String, + pub decimals: u8, + pub enabled_for_fees: bool, +} + +#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub struct TokenPrice { + pub token_id: TokenId, + pub token_symbol: String, + pub price_in: String, + pub decimals: u8, + pub price: BigDecimal, +} + +impl ApiToken { + pub fn from_token_and_eligibility(token: Token, eligibility: bool) -> Self { + ApiToken { + id: token.id, + address: token.address, + symbol: token.symbol, + decimals: token.decimals, + enabled_for_fees: eligibility, + } + } +} diff --git a/core/lib/api_types/src/v02/transaction.rs b/core/lib/api_types/src/v02/transaction.rs new file mode 100644 index 0000000000..86d39c3c3c --- /dev/null +++ b/core/lib/api_types/src/v02/transaction.rs @@ -0,0 +1,245 @@ +use super::block::BlockStatus; +use chrono::{DateTime, Utc}; +use num::BigUint; +use serde::{Deserialize, Serialize}; +use zksync_types::tx::TxEthSignatureVariant; +use zksync_types::{ + tx::{ + ChangePubKey, Close, EthBatchSignatures, ForcedExit, Transfer, TxEthSignature, TxHash, + Withdraw, + }, + AccountId, Address, BlockNumber, EthBlockId, SerialId, TokenId, ZkSyncOp, ZkSyncPriorityOp, + ZkSyncTx, H256, +}; +use zksync_utils::{BigUintSerdeAsRadix10Str, ZeroPrefixHexSerde}; + +#[derive(Debug, Serialize, Deserialize, Clone)] +#[serde(rename_all = "camelCase")] +pub struct IncomingTxBatch { + pub txs: Vec, + pub signature: EthBatchSignatures, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +#[serde(rename_all = "camelCase")] +pub struct IncomingTx { + pub tx: ZkSyncTx, + pub signature: TxEthSignatureVariant, +} + +#[derive(Debug, 
Serialize, Deserialize, PartialEq, Eq, Clone, Copy)] +#[serde(rename_all = "camelCase")] +pub enum TxInBlockStatus { + Queued, + Committed, + Finalized, + Rejected, +} + +impl From for TxInBlockStatus { + fn from(status: BlockStatus) -> Self { + match status { + BlockStatus::Committed => TxInBlockStatus::Committed, + BlockStatus::Finalized => TxInBlockStatus::Finalized, + } + } +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct TxData { + pub tx: Transaction, + pub eth_signature: Option, +} + +#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub struct L1Receipt { + pub status: TxInBlockStatus, + pub eth_block: EthBlockId, + pub rollup_block: Option, + pub id: SerialId, +} + +#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub struct L2Receipt { + #[serde(serialize_with = "ZeroPrefixHexSerde::serialize")] + pub tx_hash: TxHash, + pub rollup_block: Option, + pub status: TxInBlockStatus, + pub fail_reason: Option, +} + +#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)] +#[serde(untagged)] +pub enum Receipt { + L1(L1Receipt), + L2(L2Receipt), +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct Transaction { + #[serde(serialize_with = "ZeroPrefixHexSerde::serialize")] + pub tx_hash: TxHash, + pub block_number: Option, + pub op: TransactionData, + pub status: TxInBlockStatus, + pub fail_reason: Option, + pub created_at: Option>, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(untagged)] +pub enum TransactionData { + L1(L1Transaction), + L2(L2Transaction), +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(tag = "type")] +pub enum L2Transaction { + Transfer(Box), + Withdraw(Box), + #[doc(hidden)] + Close(Box), + ChangePubKey(Box), + ForcedExit(Box), +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ForcedExitData { + 
#[serde(flatten)] + pub tx: ForcedExit, + pub eth_tx_hash: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct WithdrawData { + #[serde(flatten)] + pub tx: Withdraw, + pub eth_tx_hash: Option, +} + +#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)] +#[serde(tag = "type")] +pub enum L1Transaction { + Deposit(ApiDeposit), + FullExit(ApiFullExit), +} + +impl L1Transaction { + pub fn from_executed_op( + op: ZkSyncOp, + eth_hash: H256, + id: SerialId, + tx_hash: TxHash, + ) -> Option { + match op { + ZkSyncOp::Deposit(deposit) => Some(Self::Deposit(ApiDeposit { + from: deposit.priority_op.from, + token_id: deposit.priority_op.token, + amount: deposit.priority_op.amount, + to: deposit.priority_op.to, + account_id: Some(deposit.account_id), + eth_hash, + id, + tx_hash, + })), + ZkSyncOp::FullExit(deposit) => Some(Self::FullExit(ApiFullExit { + token_id: deposit.priority_op.token, + account_id: deposit.priority_op.account_id, + eth_hash, + id, + tx_hash, + })), + _ => None, + } + } + + pub fn from_pending_op( + op: ZkSyncPriorityOp, + eth_hash: H256, + id: SerialId, + tx_hash: TxHash, + ) -> Self { + match op { + ZkSyncPriorityOp::Deposit(deposit) => Self::Deposit(ApiDeposit { + from: deposit.from, + token_id: deposit.token, + amount: deposit.amount, + to: deposit.to, + account_id: None, + eth_hash, + id, + tx_hash, + }), + ZkSyncPriorityOp::FullExit(deposit) => Self::FullExit(ApiFullExit { + token_id: deposit.token, + account_id: deposit.account_id, + eth_hash, + id, + tx_hash, + }), + } + } +} + +#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub struct ApiDeposit { + pub from: Address, + pub token_id: TokenId, + #[serde(with = "BigUintSerdeAsRadix10Str")] + pub amount: BigUint, + pub to: Address, + pub account_id: Option, + pub eth_hash: H256, + pub id: SerialId, + #[serde(serialize_with = "ZeroPrefixHexSerde::serialize")] + pub tx_hash: TxHash, +} + 
+#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub struct ApiFullExit { + pub account_id: AccountId, + pub token_id: TokenId, + pub eth_hash: H256, + pub id: SerialId, + #[serde(serialize_with = "ZeroPrefixHexSerde::serialize")] + pub tx_hash: TxHash, +} + +#[derive(Clone, Debug, Serialize, Deserialize, Default, Ord, PartialOrd, Eq, PartialEq, Hash)] +pub struct TxHashSerializeWrapper( + #[serde(serialize_with = "ZeroPrefixHexSerde::serialize")] pub TxHash, +); + +#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub struct SubmitBatchResponse { + pub transaction_hashes: Vec, + #[serde(serialize_with = "ZeroPrefixHexSerde::serialize")] + pub batch_hash: TxHash, +} + +#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub struct ApiTxBatch { + #[serde(serialize_with = "ZeroPrefixHexSerde::serialize")] + pub batch_hash: TxHash, + pub transaction_hashes: Vec, + pub created_at: DateTime, + pub batch_status: BatchStatus, +} + +#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub struct BatchStatus { + pub updated_at: DateTime, + pub last_state: TxInBlockStatus, +} diff --git a/core/lib/storage/Cargo.toml b/core/lib/storage/Cargo.toml index c700cb5026..209326776c 100644 --- a/core/lib/storage/Cargo.toml +++ b/core/lib/storage/Cargo.toml @@ -14,6 +14,7 @@ default = [] db_test = [] [dependencies] +zksync_api_types = { path = "../api_types", version = "1.0" } zksync_types = { path = "../types", version = "1.0" } zksync_crypto = { path = "../crypto", version = "1.0" } zksync_utils = { path = "../utils", version = "1.0" } diff --git a/core/lib/storage/migrations/2021-04-12-144535_priority_ops_and_batches_hash/down.sql b/core/lib/storage/migrations/2021-04-12-144535_priority_ops_and_batches_hash/down.sql new file mode 100644 index 0000000000..dc54ecc518 --- /dev/null +++ 
b/core/lib/storage/migrations/2021-04-12-144535_priority_ops_and_batches_hash/down.sql @@ -0,0 +1,5 @@ +ALTER TABLE executed_priority_operations + DROP COLUMN IF EXISTS eth_block_index; +ALTER TABLE executed_priority_operations + DROP COLUMN IF EXISTS tx_hash; +DROP TABLE IF EXISTS txs_batches_hashes; diff --git a/core/lib/storage/migrations/2021-04-12-144535_priority_ops_and_batches_hash/up.sql b/core/lib/storage/migrations/2021-04-12-144535_priority_ops_and_batches_hash/up.sql new file mode 100644 index 0000000000..f5f297ffb5 --- /dev/null +++ b/core/lib/storage/migrations/2021-04-12-144535_priority_ops_and_batches_hash/up.sql @@ -0,0 +1,42 @@ +ALTER TABLE executed_priority_operations + ADD eth_block_index bigint; +ALTER TABLE executed_priority_operations + ADD tx_hash bytea NOT NULL DEFAULT ''::bytea; +-- Calculates hashes for existing priority ops. +UPDATE executed_priority_operations + SET tx_hash = sha256(eth_hash::bytea || int8send(eth_block)::bytea || int8send(0)::bytea); + +CREATE TABLE txs_batches_hashes ( + batch_id BIGSERIAL PRIMARY KEY, + batch_hash bytea NOT NULL +); +-- Calculates hashes for existing batches. It gets transactions from +-- executed_transactions table for every batch_id that exists in db and calculates sha256 +-- of their tx_hashes concat. 
+DO $$ +DECLARE +max_batch_id bigint; +agg_hash bytea; +rec record; +BEGIN + SELECT MAX(batch_id) FROM executed_transactions + WHERE batch_id IS NOT NULL + INTO max_batch_id; + IF max_batch_id IS NOT NULL THEN + FOR i IN 0..max_batch_id + LOOP + agg_hash = ''; + FOR rec in SELECT tx_hash FROM executed_transactions + WHERE batch_id = i + ORDER BY created_at ASC, block_index ASC + LOOP + agg_hash = agg_hash || rec.tx_hash; + END LOOP; + IF length(agg_hash) != 0 THEN + INSERT INTO txs_batches_hashes (batch_id, batch_hash) + VALUES (i, sha256(agg_hash)); + END IF; + END LOOP; + END IF; +END; +$$; diff --git a/core/lib/storage/migrations/2021-06-04-114545_hash_indices_for_txs_tables/down.sql b/core/lib/storage/migrations/2021-06-04-114545_hash_indices_for_txs_tables/down.sql new file mode 100644 index 0000000000..8e3528f890 --- /dev/null +++ b/core/lib/storage/migrations/2021-06-04-114545_hash_indices_for_txs_tables/down.sql @@ -0,0 +1,17 @@ +CREATE INDEX IF NOT EXISTS executed_transactions_hash_index + ON "executed_transactions" USING btree (tx_hash); +DROP INDEX IF EXISTS executed_transactions_tx_hash_idx; + +CREATE INDEX IF NOT EXISTS executed_priority_operations_from_account_index + ON "executed_priority_operations" USING btree (from_account); +CREATE INDEX IF NOT EXISTS executed_priority_operations_to_account_index + ON "executed_priority_operations" USING btree (to_account); +CREATE INDEX IF NOT EXISTS executed_priority_operations_eth_hash_index + ON "executed_priority_operations" USING btree (eth_hash); +DROP INDEX IF EXISTS executed_priority_operations_from_account_idx; +DROP INDEX IF EXISTS executed_priority_operations_to_account_idx; +DROP INDEX IF EXISTS executed_priority_operations_eth_hash_idx; + +CREATE INDEX IF NOT EXISTS mempool_txs_hash_index + ON "mempool_txs" USING btree (tx_hash); +DROP INDEX IF EXISTS mempool_txs_tx_hash_idx; diff --git a/core/lib/storage/migrations/2021-06-04-114545_hash_indices_for_txs_tables/up.sql 
b/core/lib/storage/migrations/2021-06-04-114545_hash_indices_for_txs_tables/up.sql new file mode 100644 index 0000000000..09f8eab1e4 --- /dev/null +++ b/core/lib/storage/migrations/2021-06-04-114545_hash_indices_for_txs_tables/up.sql @@ -0,0 +1,17 @@ +DROP INDEX IF EXISTS executed_transactions_hash_index; +CREATE INDEX IF NOT EXISTS executed_transactions_tx_hash_idx + ON "executed_transactions" USING hash (tx_hash); + +DROP INDEX IF EXISTS executed_priority_operations_from_account_index; +DROP INDEX IF EXISTS executed_priority_operations_to_account_index; +DROP INDEX IF EXISTS executed_priority_operations_eth_hash_index; +CREATE INDEX IF NOT EXISTS executed_priority_operations_from_account_idx + ON "executed_priority_operations" USING hash (from_account); +CREATE INDEX IF NOT EXISTS executed_priority_operations_to_account_idx + ON "executed_priority_operations" USING hash (to_account); +CREATE INDEX IF NOT EXISTS executed_priority_operations_eth_hash_idx + ON "executed_priority_operations" USING hash (eth_hash); + +DROP INDEX IF EXISTS mempool_txs_hash_index; +CREATE INDEX IF NOT EXISTS mempool_txs_tx_hash_idx + ON "mempool_txs" USING hash (tx_hash); diff --git a/core/lib/storage/sqlx-data.json b/core/lib/storage/sqlx-data.json index 9362c5053e..22fc4d3f77 100644 --- a/core/lib/storage/sqlx-data.json +++ b/core/lib/storage/sqlx-data.json @@ -42,65 +42,6 @@ ] } }, - "016aecf9d717ed60a6b650315e6209463f60f9ccfe842fddb97795bf55746fe9": { - "query": "\n WITH block_details AS (\n WITH aggr_comm AS (\n SELECT \n aggregate_operations.created_at, \n eth_operations.final_hash, \n commit_aggregated_blocks_binding.block_number \n FROM aggregate_operations\n INNER JOIN commit_aggregated_blocks_binding ON aggregate_operations.id = commit_aggregated_blocks_binding.op_id\n INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id\n INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id\n WHERE 
aggregate_operations.confirmed = true \n )\n , aggr_exec as (\n SELECT \n aggregate_operations.created_at, \n eth_operations.final_hash, \n execute_aggregated_blocks_binding.block_number \n FROM aggregate_operations\n INNER JOIN execute_aggregated_blocks_binding ON aggregate_operations.id = execute_aggregated_blocks_binding.op_id\n INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id\n INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id\n WHERE aggregate_operations.confirmed = true \n )\n SELECT\n blocks.number AS details_block_number,\n committed.final_hash AS commit_tx_hash,\n verified.final_hash AS verify_tx_hash\n FROM blocks\n INNER JOIN aggr_comm committed ON blocks.number = committed.block_number\n LEFT JOIN aggr_exec verified ON blocks.number = verified.block_number\n )\n SELECT\n block_number, \n block_index as \"block_index?\",\n tx_hash,\n success,\n fail_reason as \"fail_reason?\",\n details.commit_tx_hash as \"commit_tx_hash?\",\n details.verify_tx_hash as \"verify_tx_hash?\"\n FROM executed_transactions\n LEFT JOIN block_details details ON details.details_block_number = executed_transactions.block_number\n WHERE (\n (primary_account_address = $1 OR from_account = $1 OR to_account = $1)\n AND (\n block_number = $2 AND (\n COALESCE(block_index, -1) >= $3\n ) OR (\n block_number > $2\n )\n )\n )\n ORDER BY block_number ASC, COALESCE(block_index, -1) ASC\n LIMIT $4\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "block_number", - "type_info": "Int8" - }, - { - "ordinal": 1, - "name": "block_index?", - "type_info": "Int4" - }, - { - "ordinal": 2, - "name": "tx_hash", - "type_info": "Bytea" - }, - { - "ordinal": 3, - "name": "success", - "type_info": "Bool" - }, - { - "ordinal": 4, - "name": "fail_reason?", - "type_info": "Text" - }, - { - "ordinal": 5, - "name": "commit_tx_hash?", - "type_info": "Bytea" - }, - { - "ordinal": 6, - "name": "verify_tx_hash?", - 
"type_info": "Bytea" - } - ], - "parameters": { - "Left": [ - "Bytea", - "Int8", - "Int4", - "Int8" - ] - }, - "nullable": [ - false, - true, - false, - false, - true, - true, - true - ] - } - }, "04069d09246f16a6d03be04decaa05456556dc05b964adea34742af0eaef91aa": { "query": "\n SELECT * FROM tokens\n WHERE symbol = $1\n LIMIT 1\n ", "describe": { @@ -161,6 +102,20 @@ "nullable": [] } }, + "0713d87afe5e398f68014f617cbef4653110ddda1d2cd793a2095bb113478231": { + "query": "\n INSERT INTO nft_factory ( creator_id, factory_address, creator_address )\n VALUES ( $1, $2, $3 )\n ON CONFLICT ( creator_id )\n DO UPDATE\n SET factory_address = $2\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int4", + "Text", + "Text" + ] + }, + "nullable": [] + } + }, "088013a67d0b8118980a606386ff38b394a26abfed0f209d17a6a583a297679b": { "query": "\n SELECT * FROM account_creates\n WHERE account_id = $1 AND block_number > $2\n ", "describe": { @@ -387,6 +342,63 @@ "nullable": [] } }, + "0e08e4712d3e2b359bde63476ff591939e97e877e7d5bcf1eb65345969c9ff21": { + "query": "\n WITH aggr_comm AS (\n SELECT \n aggregate_operations.created_at, \n eth_operations.final_hash, \n commit_aggregated_blocks_binding.block_number \n FROM aggregate_operations\n INNER JOIN commit_aggregated_blocks_binding ON aggregate_operations.id = commit_aggregated_blocks_binding.op_id\n INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id\n INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id\n WHERE aggregate_operations.confirmed = true \n ),\n aggr_exec as (\n SELECT \n aggregate_operations.created_at, \n eth_operations.final_hash, \n execute_aggregated_blocks_binding.block_number \n FROM aggregate_operations\n INNER JOIN execute_aggregated_blocks_binding ON aggregate_operations.id = execute_aggregated_blocks_binding.op_id\n INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id\n 
INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id\n WHERE aggregate_operations.confirmed = true \n )\n SELECT\n blocks.number AS \"block_number!\",\n blocks.root_hash AS \"new_state_root!\",\n blocks.block_size AS \"block_size!\",\n committed.final_hash AS \"commit_tx_hash?\",\n verified.final_hash AS \"verify_tx_hash?\",\n committed.created_at AS \"committed_at!\",\n verified.created_at AS \"verified_at?\"\n FROM blocks\n INNER JOIN aggr_comm committed ON blocks.number = committed.block_number\n LEFT JOIN aggr_exec verified ON blocks.number = verified.block_number\n WHERE\n blocks.number <= $1\n ORDER BY blocks.number DESC\n LIMIT $2;\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "block_number!", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "new_state_root!", + "type_info": "Bytea" + }, + { + "ordinal": 2, + "name": "block_size!", + "type_info": "Int8" + }, + { + "ordinal": 3, + "name": "commit_tx_hash?", + "type_info": "Bytea" + }, + { + "ordinal": 4, + "name": "verify_tx_hash?", + "type_info": "Bytea" + }, + { + "ordinal": 5, + "name": "committed_at!", + "type_info": "Timestamptz" + }, + { + "ordinal": 6, + "name": "verified_at?", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Int8", + "Int8" + ] + }, + "nullable": [ + false, + false, + false, + true, + true, + false, + false + ] + } + }, "0e390d0f58d24733d76253da2e4d9c9a0f5c96702d164fe3ad64af8aec43ee49": { "query": "\n SELECT * FROM account_balance_updates\n WHERE account_id = $1 AND block_number > $2\n ", "describe": { @@ -528,6 +540,77 @@ "nullable": [] } }, + "11ea80c9fc30de34793305ef362d3257d52fdfab3988caa8f8c90d2234ed70ff": { + "query": "\n SELECT * FROM tokens\n WHERE id >= $1 and is_nft = false\n ORDER BY id ASC\n LIMIT $2\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "address", + "type_info": "Text" + }, + { + "ordinal": 2, + 
"name": "symbol", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "decimals", + "type_info": "Int2" + }, + { + "ordinal": 4, + "name": "is_nft", + "type_info": "Bool" + } + ], + "parameters": { + "Left": [ + "Int4", + "Int8" + ] + }, + "nullable": [ + false, + false, + false, + false, + false + ] + } + }, + "1401ea10d9e110da48aac1ebfa7aeb855c273adf34f6ee92b0fdaaf7de603049": { + "query": "\n SELECT tx_hash, created_at\n FROM mempool_txs\n INNER JOIN txs_batches_hashes\n ON txs_batches_hashes.batch_id = mempool_txs.batch_id\n WHERE batch_hash = $1\n ORDER BY id ASC\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "tx_hash", + "type_info": "Text" + }, + { + "ordinal": 1, + "name": "created_at", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Bytea" + ] + }, + "nullable": [ + false, + false + ] + } + }, "1453c487619584da255ac032a521e5813934324f443d07d77cbf894e071202b5": { "query": "SELECT * FROM mint_nft_updates", "describe": { @@ -732,65 +815,6 @@ "nullable": [] } }, - "1a3122983ff3dc5c9a1b6e2b5d68f10e93f9db6aac216c105157048ea5b802ed": { - "query": "\n WITH block_details AS (\n WITH aggr_comm AS (\n SELECT \n aggregate_operations.created_at, \n eth_operations.final_hash, \n commit_aggregated_blocks_binding.block_number \n FROM aggregate_operations\n INNER JOIN commit_aggregated_blocks_binding ON aggregate_operations.id = commit_aggregated_blocks_binding.op_id\n INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id\n INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id\n WHERE aggregate_operations.confirmed = true \n )\n , aggr_exec as (\n SELECT \n aggregate_operations.created_at, \n eth_operations.final_hash, \n execute_aggregated_blocks_binding.block_number \n FROM aggregate_operations\n INNER JOIN execute_aggregated_blocks_binding ON aggregate_operations.id = execute_aggregated_blocks_binding.op_id\n INNER JOIN eth_aggregated_ops_binding 
ON aggregate_operations.id = eth_aggregated_ops_binding.op_id\n INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id\n WHERE aggregate_operations.confirmed = true \n )\n SELECT\n blocks.number AS details_block_number,\n committed.final_hash AS commit_tx_hash,\n verified.final_hash AS verify_tx_hash\n FROM blocks\n INNER JOIN aggr_comm committed ON blocks.number = committed.block_number\n LEFT JOIN aggr_exec verified ON blocks.number = verified.block_number\n )\n SELECT\n block_number, \n block_index as \"block_index?\",\n tx_hash,\n success,\n fail_reason as \"fail_reason?\",\n details.commit_tx_hash as \"commit_tx_hash?\",\n details.verify_tx_hash as \"verify_tx_hash?\"\n FROM executed_transactions\n LEFT JOIN block_details details ON details.details_block_number = executed_transactions.block_number\n WHERE (\n (primary_account_address = $1 OR from_account = $1 OR to_account = $1)\n AND (\n block_number = $2 AND (\n COALESCE(block_index, -1) <= $3\n ) OR (\n block_number < $2\n )\n )\n )\n ORDER BY block_number DESC, COALESCE(block_index, -1) DESC\n LIMIT $4\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "block_number", - "type_info": "Int8" - }, - { - "ordinal": 1, - "name": "block_index?", - "type_info": "Int4" - }, - { - "ordinal": 2, - "name": "tx_hash", - "type_info": "Bytea" - }, - { - "ordinal": 3, - "name": "success", - "type_info": "Bool" - }, - { - "ordinal": 4, - "name": "fail_reason?", - "type_info": "Text" - }, - { - "ordinal": 5, - "name": "commit_tx_hash?", - "type_info": "Bytea" - }, - { - "ordinal": 6, - "name": "verify_tx_hash?", - "type_info": "Bytea" - } - ], - "parameters": { - "Left": [ - "Bytea", - "Int8", - "Int4", - "Int8" - ] - }, - "nullable": [ - false, - true, - false, - false, - true, - true, - true - ] - } - }, "1a8ff6100bfc7521b3728c817a4014355e09d6ca1c251bbcee6f7cf013b6800d": { "query": "SELECT * FROM mint_nft_updates WHERE block_number > $1 AND block_number <= $2 ", "describe": { 
@@ -876,17 +900,67 @@ "nullable": [] } }, - "1e491f4afb54c10a9e4f2ea467bd7f219e7a32bdf741691cb6f350d50caae417": { - "query": "\n UPDATE forced_exit_requests\n SET fulfilled_at = $1\n WHERE id = $2\n ", + "1d4884ae73b5c60662e2d778b43230a9fa2c7b683402bb67966428f70db2d900": { + "query": "\n WITH transactions AS (\n SELECT\n '0x' || encode(tx_hash, 'hex') as tx_hash,\n tx as op,\n block_number,\n success,\n fail_reason,\n created_at\n FROM executed_transactions\n WHERE block_number = $1\n ), priority_ops AS (\n SELECT\n '0x' || encode(eth_hash, 'hex') as tx_hash,\n operation as op,\n block_number,\n true as success,\n Null as fail_reason,\n created_at\n FROM executed_priority_operations\n WHERE block_number = $1\n ), everything AS (\n SELECT * FROM transactions\n UNION ALL\n SELECT * FROM priority_ops\n )\n SELECT\n tx_hash as \"tx_hash!\",\n block_number as \"block_number!\",\n op as \"op!\",\n success as \"success!\",\n fail_reason as \"fail_reason?\",\n created_at as \"created_at!\"\n FROM everything\n ORDER BY created_at DESC\n ", "describe": { - "columns": [], - "parameters": { - "Left": [ - "Timestamptz", - "Int8" - ] - }, - "nullable": [] + "columns": [ + { + "ordinal": 0, + "name": "tx_hash!", + "type_info": "Text" + }, + { + "ordinal": 1, + "name": "block_number!", + "type_info": "Int8" + }, + { + "ordinal": 2, + "name": "op!", + "type_info": "Jsonb" + }, + { + "ordinal": 3, + "name": "success!", + "type_info": "Bool" + }, + { + "ordinal": 4, + "name": "fail_reason?", + "type_info": "Text" + }, + { + "ordinal": 5, + "name": "created_at!", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + null, + null, + null, + null, + null, + null + ] + } + }, + "1e491f4afb54c10a9e4f2ea467bd7f219e7a32bdf741691cb6f350d50caae417": { + "query": "\n UPDATE forced_exit_requests\n SET fulfilled_at = $1\n WHERE id = $2\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Timestamptz", + "Int8" + ] + }, + 
"nullable": [] } }, "222e3946401772e3f6e0d9ce9909e8e7ac2dc830c5ecfcd522f56b3bf70fd679": { @@ -1188,27 +1262,6 @@ ] } }, - "285c1453d6e486c92a2b9b73f75c17ac00f0ca553d2b9e9a689e0da9e7471482": { - "query": "INSERT INTO executed_priority_operations (block_number, block_index, operation, from_account, to_account, priority_op_serialid, deadline_block, eth_hash, eth_block, created_at)\n VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)\n ON CONFLICT (priority_op_serialid)\n DO NOTHING", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8", - "Int4", - "Jsonb", - "Bytea", - "Bytea", - "Int8", - "Int8", - "Bytea", - "Int8", - "Timestamptz" - ] - }, - "nullable": [] - } - }, "28f120a906bc5fd893293d391913ac53ed79855274b85979a0cb38c3307e9ee9": { "query": "SELECT * FROM eth_operations WHERE id <= $1 ORDER BY ID DESC LIMIT 1", "describe": { @@ -1271,6 +1324,56 @@ ] } }, + "29d2ac9094d660ffa445ed8787f303e35deefc41b403b23557f6a0850cdbf4c6": { + "query": "\n SELECT * FROM nft\n WHERE creator_account_id = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "token_id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "creator_account_id", + "type_info": "Int4" + }, + { + "ordinal": 2, + "name": "creator_address", + "type_info": "Bytea" + }, + { + "ordinal": 3, + "name": "serial_id", + "type_info": "Int4" + }, + { + "ordinal": 4, + "name": "address", + "type_info": "Bytea" + }, + { + "ordinal": 5, + "name": "content_hash", + "type_info": "Bytea" + } + ], + "parameters": { + "Left": [ + "Int4" + ] + }, + "nullable": [ + false, + false, + false, + false, + false, + false + ] + } + }, "2e92926816053cda2de6d571867a625fab5bb9668840db94bd18c411f96dc39b": { "query": "SELECT * FROM blocks WHERE number = $1", "describe": { @@ -1345,6 +1448,32 @@ ] } }, + "311eb879affeff3c877d978d0a7ac443d904edbc606a213ecc5959dbedc16b3e": { + "query": "SELECT created_at, block_number FROM executed_priority_operations\n WHERE tx_hash = $1", + "describe": { + "columns": 
[ + { + "ordinal": 0, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 1, + "name": "block_number", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Bytea" + ] + }, + "nullable": [ + false, + false + ] + } + }, "3538961dd16f0eb374b50b33cae9a656426720c7fdf5d26ac406f44f47692e01": { "query": "SELECT COUNT(*) FROM executed_transactions WHERE success = true", "describe": { @@ -1536,6 +1665,26 @@ "nullable": [] } }, + "43469652ff4f68b9990157c9eefb0cce5347db70e6c00df12d9fb23f701bbafd": { + "query": "\n SELECT MAX(block_number)\n FROM(\n SELECT block_number FROM account_balance_updates\n WHERE account_id = $1\n UNION ALL\n SELECT block_number FROM account_creates\n WHERE account_id = $1\n UNION ALL\n SELECT block_number FROM account_pubkey_updates\n WHERE account_id = $1\n ) as subquery\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "max", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + null + ] + } + }, "439d0083a3b98066071cde5909969b4e9ce744bc1bfa761116c6fb5bcc356075": { "query": "DELETE FROM account_balance_updates WHERE block_number > $1", "describe": { @@ -1548,41 +1697,47 @@ "nullable": [] } }, - "4469f85caafd8e489247f5a16d567910a113975fb5911622e40440b09eac7e4f": { - "query": "DELETE FROM account_pubkey_updates WHERE block_number > $1", + "444fd4dfaf4e0fdf1675c857b17feb5cf1f02ea0eb3e1e7bb60deb093fadb19f": { + "query": "\n WITH transactions AS (\n SELECT tx_hash, created_at, block_index\n FROM executed_transactions\n WHERE block_number = $1\n ), priority_ops AS (\n SELECT tx_hash, created_at, block_index\n FROM executed_priority_operations\n WHERE block_number = $1\n ), everything AS (\n SELECT * FROM transactions\n UNION ALL\n SELECT * FROM priority_ops\n )\n SELECT\n tx_hash as \"tx_hash!\"\n FROM everything\n ORDER BY created_at DESC, block_index DESC\n LIMIT 1\n ", "describe": { - "columns": [], + "columns": [ + { + "ordinal": 0, + "name": "tx_hash!", + 
"type_info": "Bytea" + } + ], "parameters": { "Left": [ "Int8" ] }, - "nullable": [] + "nullable": [ + null + ] } }, - "44b276fda62734e9c9d9853f493340265116ab7f13599674d27aafe3d3887391": { - "query": "UPDATE eth_operations \n SET last_used_gas_price = $1, last_deadline_block = $2\n WHERE id = $3", + "4469f85caafd8e489247f5a16d567910a113975fb5911622e40440b09eac7e4f": { + "query": "DELETE FROM account_pubkey_updates WHERE block_number > $1", "describe": { "columns": [], "parameters": { "Left": [ - "Numeric", - "Int8", "Int8" ] }, "nullable": [] } }, - "44c99a84f28659435e9cae90670179cf2e49b789abeee4754a8f830a2eead483": { - "query": "\n INSERT INTO nft_factory ( creator_id, factory_address, creator_address )\n VALUES ( $1, $2, $3 )\n ON CONFLICT ( creator_id ) \n DO UPDATE \n SET factory_address = $2 \n ", + "44b276fda62734e9c9d9853f493340265116ab7f13599674d27aafe3d3887391": { + "query": "UPDATE eth_operations \n SET last_used_gas_price = $1, last_deadline_block = $2\n WHERE id = $3", "describe": { "columns": [], "parameters": { "Left": [ - "Int4", - "Text", - "Text" + "Numeric", + "Int8", + "Int8" ] }, "nullable": [] @@ -1715,6 +1870,70 @@ ] } }, + "49b2154dd103b5aee0ee03aa964e8e7eb6b1dff1abe69f63c06a8c06ea6d3ece": { + "query": "\n WITH transactions AS (\n SELECT\n tx_hash,\n tx as op,\n block_number,\n created_at,\n success,\n fail_reason,\n Null::bytea as eth_hash,\n Null::bigint as priority_op_serialid,\n block_index\n FROM executed_transactions\n WHERE (from_account = $1 OR to_account = $1 OR primary_account_address = $1)\n AND created_at >= $2\n ), priority_ops AS (\n SELECT\n tx_hash,\n operation as op,\n block_number,\n created_at,\n true as success,\n Null as fail_reason,\n eth_hash,\n priority_op_serialid,\n block_index\n FROM executed_priority_operations\n WHERE (from_account = $1 OR to_account = $1) AND created_at >= $2\n ), everything AS (\n SELECT * FROM transactions\n UNION ALL\n SELECT * FROM priority_ops\n )\n SELECT\n tx_hash as \"tx_hash!\",\n 
block_number as \"block_number!\",\n op as \"op!\",\n created_at as \"created_at!\",\n success as \"success!\",\n fail_reason as \"fail_reason?\",\n eth_hash as \"eth_hash?\",\n priority_op_serialid as \"priority_op_serialid?\"\n FROM everything\n ORDER BY created_at ASC, block_index ASC\n LIMIT $3\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "tx_hash!", + "type_info": "Bytea" + }, + { + "ordinal": 1, + "name": "block_number!", + "type_info": "Int8" + }, + { + "ordinal": 2, + "name": "op!", + "type_info": "Jsonb" + }, + { + "ordinal": 3, + "name": "created_at!", + "type_info": "Timestamptz" + }, + { + "ordinal": 4, + "name": "success!", + "type_info": "Bool" + }, + { + "ordinal": 5, + "name": "fail_reason?", + "type_info": "Text" + }, + { + "ordinal": 6, + "name": "eth_hash?", + "type_info": "Bytea" + }, + { + "ordinal": 7, + "name": "priority_op_serialid?", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Bytea", + "Timestamptz", + "Int8" + ] + }, + "nullable": [ + null, + null, + null, + null, + null, + null, + null, + null + ] + } + }, "4a0bc713a57201aa894b96acdb462c03d3ad63cf4fbc8a14b9ac5e2e02121207": { "query": "\n SELECT * FROM ticker_market_volume\n WHERE token_id = $1\n LIMIT 1\n ", "describe": { @@ -1911,13 +2130,133 @@ "nullable": [] } }, - "4fc97e18f8e63d63d3a52db84ddd38243a865011e69a60061af37ebc2a8f1566": { - "query": "SELECT * FROM complete_withdrawals_transactions\n WHERE pending_withdrawals_queue_start_index <= $1\n AND $1 < pending_withdrawals_queue_end_index\n LIMIT 1\n ", + "4de513de380df55d42b5e799020c9dbe46a94671b53d5d4d822fec7c83accf56": { + "query": "\n WITH transaction AS (\n SELECT\n tx_hash,\n block_number,\n success,\n fail_reason,\n Null::bigint as eth_block,\n Null::bigint as priority_op_serialid\n FROM executed_transactions\n WHERE tx_hash = $1\n ), priority_op AS (\n SELECT\n tx_hash,\n block_number,\n true as success,\n Null as fail_reason,\n eth_block,\n priority_op_serialid\n FROM 
executed_priority_operations\n WHERE tx_hash = $1 OR eth_hash = $1\n ), mempool_tx AS (\n SELECT\n decode(tx_hash, 'hex'),\n Null::bigint as block_number,\n Null::boolean as success,\n Null as fail_reason,\n Null::bigint as eth_block,\n Null::bigint as priority_op_serialid\n FROM mempool_txs\n WHERE tx_hash = $2\n ),\n everything AS (\n SELECT * FROM transaction\n UNION ALL\n SELECT * FROM priority_op\n UNION ALL\n SELECT * FROM mempool_tx\n )\n SELECT\n tx_hash as \"tx_hash!\",\n block_number as \"block_number?\",\n success as \"success?\",\n fail_reason as \"fail_reason?\",\n eth_block as \"eth_block?\",\n priority_op_serialid as \"priority_op_serialid?\"\n FROM everything\n ", "describe": { "columns": [ { "ordinal": 0, - "name": "tx_hash", + "name": "tx_hash!", + "type_info": "Bytea" + }, + { + "ordinal": 1, + "name": "block_number?", + "type_info": "Int8" + }, + { + "ordinal": 2, + "name": "success?", + "type_info": "Bool" + }, + { + "ordinal": 3, + "name": "fail_reason?", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "eth_block?", + "type_info": "Int8" + }, + { + "ordinal": 5, + "name": "priority_op_serialid?", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Bytea", + "Text" + ] + }, + "nullable": [ + null, + null, + null, + null, + null, + null + ] + } + }, + "4ef355dbbaaf749ea6ef1a9d447b7528877d81ecb56d41d41f99b69a46d738c8": { + "query": "\n WITH transaction AS (\n SELECT\n tx_hash,\n tx as op,\n block_number,\n created_at,\n success,\n fail_reason,\n Null::bytea as eth_hash,\n Null::bigint as priority_op_serialid,\n eth_sign_data\n FROM executed_transactions\n WHERE tx_hash = $1\n ), priority_op AS (\n SELECT\n tx_hash,\n operation as op,\n block_number,\n created_at,\n true as success,\n Null as fail_reason,\n eth_hash,\n priority_op_serialid,\n Null::jsonb as eth_sign_data\n FROM executed_priority_operations\n WHERE tx_hash = $1 OR eth_hash = $1\n ), mempool_tx AS (\n SELECT\n decode(tx_hash, 'hex'),\n tx as op,\n Null::bigint 
as block_number,\n created_at,\n Null::boolean as success,\n Null as fail_reason,\n Null::bytea as eth_hash,\n Null::bigint as priority_op_serialid,\n eth_sign_data\n FROM mempool_txs\n WHERE tx_hash = $2\n ),\n everything AS (\n SELECT * FROM transaction\n UNION ALL\n SELECT * FROM priority_op\n UNION ALL\n SELECT * FROM mempool_tx\n )\n SELECT\n tx_hash as \"tx_hash!\",\n op as \"op!\",\n block_number as \"block_number?\",\n created_at as \"created_at!\",\n success as \"success?\",\n fail_reason as \"fail_reason?\",\n eth_hash as \"eth_hash?\",\n priority_op_serialid as \"priority_op_serialid?\",\n eth_sign_data as \"eth_sign_data?\"\n FROM everything\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "tx_hash!", + "type_info": "Bytea" + }, + { + "ordinal": 1, + "name": "op!", + "type_info": "Jsonb" + }, + { + "ordinal": 2, + "name": "block_number?", + "type_info": "Int8" + }, + { + "ordinal": 3, + "name": "created_at!", + "type_info": "Timestamptz" + }, + { + "ordinal": 4, + "name": "success?", + "type_info": "Bool" + }, + { + "ordinal": 5, + "name": "fail_reason?", + "type_info": "Text" + }, + { + "ordinal": 6, + "name": "eth_hash?", + "type_info": "Bytea" + }, + { + "ordinal": 7, + "name": "priority_op_serialid?", + "type_info": "Int8" + }, + { + "ordinal": 8, + "name": "eth_sign_data?", + "type_info": "Jsonb" + } + ], + "parameters": { + "Left": [ + "Bytea", + "Text" + ] + }, + "nullable": [ + null, + null, + null, + null, + null, + null, + null, + null, + null + ] + } + }, + "4fc97e18f8e63d63d3a52db84ddd38243a865011e69a60061af37ebc2a8f1566": { + "query": "SELECT * FROM complete_withdrawals_transactions\n WHERE pending_withdrawals_queue_start_index <= $1\n AND $1 < pending_withdrawals_queue_end_index\n LIMIT 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "tx_hash", "type_info": "Bytea" }, { @@ -1943,6 +2282,26 @@ ] } }, + "50007e206cca6a31ce868c1f626c601b548c236823b47bdf4b0399c5084973a0": { + "query": "SELECT count(*) as 
\"count!\" FROM executed_transactions WHERE block_number = $1", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "count!", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + null + ] + } + }, "502e94a5b03c686539721f133998c66fa53f50a620167666d2e1b6084d3832b9": { "query": "\n SELECT * FROM forced_exit_requests\n WHERE fulfilled_at IS NULL AND created_at = (\n SELECT MIN(created_at) FROM forced_exit_requests\n WHERE fulfilled_at IS NULL\n )\n LIMIT 1\n ", "describe": { @@ -2128,6 +2487,27 @@ "nullable": [] } }, + "58d4d06fc0d3bc68286c7ec33f5e080a150461479dd08361bfe0e650a0015f0d": { + "query": "\n SELECT token_id\n FROM ticker_market_volume\n WHERE token_id = ANY($1) AND market_volume >= $2\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "token_id", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [ + "Int4Array", + "Numeric" + ] + }, + "nullable": [ + false + ] + } + }, "59c4e0d8255c2e4dd6eece1b24245daf3414d4f15b6cba7b369dc1ac32bed018": { "query": "\n SELECT * FROM accounts\n WHERE id = $1\n ", "describe": { @@ -2213,23 +2593,48 @@ "nullable": [] } }, - "60cf573e253358218a6319233221e8c2ff0561fd7ffbf8339a11a4509d955442": { - "query": "SELECT count(*) from mempool_txs\n WHERE tx_hash = $1", + "5e6bcb09720f9722091c80e4d90828b515d0398f9020abd4a328cfeca6c88b3a": { + "query": "\n SELECT * FROM tokens\n WHERE id <= $1\n ORDER BY id DESC\n LIMIT $2\n ", "describe": { "columns": [ { "ordinal": 0, - "name": "count", - "type_info": "Int8" + "name": "id", + "type_info": "Int4" + }, + { + "ordinal": 1, + "name": "address", + "type_info": "Text" + }, + { + "ordinal": 2, + "name": "symbol", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "decimals", + "type_info": "Int2" + }, + { + "ordinal": 4, + "name": "is_nft", + "type_info": "Bool" } ], "parameters": { "Left": [ - "Text" + "Int4", + "Int8" ] }, "nullable": [ - null + false, + false, + false, + false, + false ] } }, @@ -2286,6 +2691,16 
@@ "ordinal": 9, "name": "created_at", "type_info": "Timestamptz" + }, + { + "ordinal": 10, + "name": "eth_block_index", + "type_info": "Int8" + }, + { + "ordinal": 11, + "name": "tx_hash", + "type_info": "Bytea" } ], "parameters": { @@ -2303,6 +2718,8 @@ false, false, false, + false, + true, false ] } @@ -2719,6 +3136,44 @@ "nullable": [] } }, + "7d22d9facba43b954a6ffbccffaee54feab17317910247d7752e0d59dcf3af9a": { + "query": "\n SELECT tx_hash, created_at, success, block_number\n FROM executed_transactions\n INNER JOIN txs_batches_hashes\n ON txs_batches_hashes.batch_id = COALESCE(executed_transactions.batch_id, 0)\n WHERE batch_hash = $1\n ORDER BY created_at ASC, block_index ASC\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "tx_hash", + "type_info": "Bytea" + }, + { + "ordinal": 1, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 2, + "name": "success", + "type_info": "Bool" + }, + { + "ordinal": 3, + "name": "block_number", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Bytea" + ] + }, + "nullable": [ + false, + false, + false, + false + ] + } + }, "7dfa76c3e12c301dc3d7fbf820ecf0be45e0b1c5f01ce13f7cdc1a82880804c1": { "query": "\n SELECT * FROM forced_exit_requests\n WHERE id = $1\n LIMIT 1\n ", "describe": { @@ -2842,6 +3297,26 @@ "nullable": [] } }, + "839caf265f3e87a43a788d8fc321ec8d3ada6987d46ce1179683aefb0bb1e789": { + "query": "SELECT COUNT(*) from mempool_txs\n WHERE tx_hash = $1", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "count", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Text" + ] + }, + "nullable": [ + null + ] + } + }, "83cc9ff843c9dd1c974b651f5ed1e0c6bea94454db1d6f01b8fdf556cdd77d81": { "query": "DELETE FROM mempool_txs\n WHERE tx_hash = $1", "describe": { @@ -2898,16 +3373,73 @@ "nullable": [] } }, - "86a1592862553cfb07b950a5f4547a650ee40ba774ddb367d8e84b5e8166cbea": { - "query": "UPDATE prover_job_queue SET last_block = $1 WHERE last_block > $1", 
+ "860cebd02464f314a5d2f7f9708beff689cce8891d8727189318732765f60a88": { + "query": "\n WITH aggr_comm AS (\n SELECT \n aggregate_operations.created_at, \n eth_operations.final_hash, \n commit_aggregated_blocks_binding.block_number \n FROM aggregate_operations\n INNER JOIN commit_aggregated_blocks_binding ON aggregate_operations.id = commit_aggregated_blocks_binding.op_id\n INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id\n INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id\n WHERE aggregate_operations.confirmed = true \n ),\n aggr_exec as (\n SELECT \n aggregate_operations.created_at, \n eth_operations.final_hash, \n execute_aggregated_blocks_binding.block_number \n FROM aggregate_operations\n INNER JOIN execute_aggregated_blocks_binding ON aggregate_operations.id = execute_aggregated_blocks_binding.op_id\n INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id\n INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id\n WHERE aggregate_operations.confirmed = true \n )\n SELECT\n blocks.number AS \"block_number!\",\n blocks.root_hash AS \"new_state_root!\",\n blocks.block_size AS \"block_size!\",\n committed.final_hash AS \"commit_tx_hash?\",\n verified.final_hash AS \"verify_tx_hash?\",\n committed.created_at AS \"committed_at!\",\n verified.created_at AS \"verified_at?\"\n FROM blocks\n INNER JOIN aggr_comm committed ON blocks.number = committed.block_number\n LEFT JOIN aggr_exec verified ON blocks.number = verified.block_number\n WHERE false\n OR committed.final_hash = $1\n OR verified.final_hash = $1\n OR blocks.root_hash = $1\n OR blocks.number = $2\n ORDER BY blocks.number DESC\n LIMIT 1;\n ", "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8" - ] - }, - "nullable": [] + "columns": [ + { + "ordinal": 0, + "name": "block_number!", + "type_info": "Int8" + }, + { + "ordinal": 1, + 
"name": "new_state_root!", + "type_info": "Bytea" + }, + { + "ordinal": 2, + "name": "block_size!", + "type_info": "Int8" + }, + { + "ordinal": 3, + "name": "commit_tx_hash?", + "type_info": "Bytea" + }, + { + "ordinal": 4, + "name": "verify_tx_hash?", + "type_info": "Bytea" + }, + { + "ordinal": 5, + "name": "committed_at!", + "type_info": "Timestamptz" + }, + { + "ordinal": 6, + "name": "verified_at?", + "type_info": "Timestamptz" + } + ], + "parameters": { + "Left": [ + "Bytea", + "Int8" + ] + }, + "nullable": [ + false, + false, + false, + true, + true, + false, + false + ] + } + }, + "86a1592862553cfb07b950a5f4547a650ee40ba774ddb367d8e84b5e8166cbea": { + "query": "UPDATE prover_job_queue SET last_block = $1 WHERE last_block > $1", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [] } }, "8a039b0bae78afb5d106d84f7d136be17670909814f92a8e8070ba99a9aea21c": { @@ -3059,6 +3591,83 @@ ] } }, + "8cc434d8801cbe1f957e54a29b0aa49182bd5b693d24b5c74c34290ed5768389": { + "query": "INSERT INTO txs_batches_hashes VALUES($1, $2)", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Bytea" + ] + }, + "nullable": [] + } + }, + "8e69d6ddc76fddeb674420cd6aa48c2911c74110917c9833d97911f4988c0755": { + "query": "\n WITH transactions AS (\n SELECT\n tx_hash,\n tx as op,\n block_number,\n created_at,\n success,\n fail_reason,\n Null::bytea as eth_hash,\n Null::bigint as priority_op_serialid,\n block_index\n FROM executed_transactions\n WHERE (from_account = $1 OR to_account = $1 OR primary_account_address = $1)\n AND created_at <= $2\n ), priority_ops AS (\n SELECT\n tx_hash,\n operation as op,\n block_number,\n created_at,\n true as success,\n Null as fail_reason,\n eth_hash,\n priority_op_serialid,\n block_index\n FROM executed_priority_operations\n WHERE (from_account = $1 OR to_account = $1) AND created_at <= $2\n ), everything AS (\n SELECT * FROM transactions\n UNION ALL\n SELECT * FROM priority_ops\n )\n 
SELECT\n tx_hash as \"tx_hash!\",\n block_number as \"block_number!\",\n op as \"op!\",\n created_at as \"created_at!\",\n success as \"success!\",\n fail_reason as \"fail_reason?\",\n eth_hash as \"eth_hash?\",\n priority_op_serialid as \"priority_op_serialid?\"\n FROM everything\n ORDER BY created_at DESC, block_index DESC\n LIMIT $3\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "tx_hash!", + "type_info": "Bytea" + }, + { + "ordinal": 1, + "name": "block_number!", + "type_info": "Int8" + }, + { + "ordinal": 2, + "name": "op!", + "type_info": "Jsonb" + }, + { + "ordinal": 3, + "name": "created_at!", + "type_info": "Timestamptz" + }, + { + "ordinal": 4, + "name": "success!", + "type_info": "Bool" + }, + { + "ordinal": 5, + "name": "fail_reason?", + "type_info": "Text" + }, + { + "ordinal": 6, + "name": "eth_hash?", + "type_info": "Bytea" + }, + { + "ordinal": 7, + "name": "priority_op_serialid?", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Bytea", + "Timestamptz", + "Int8" + ] + }, + "nullable": [ + null, + null, + null, + null, + null, + null, + null, + null + ] + } + }, "8eb8865ba9f727bf86cbb3713903241b60e61b02b200fcf60483c99ff7cdc57c": { "query": "INSERT INTO data_restore_rollup_block_ops (block_num, operation)\n SELECT $1, u.operation\n FROM UNNEST ($2::jsonb[])\n AS u(operation)", "describe": { @@ -3144,6 +3753,26 @@ ] } }, + "931b39aa534358963d02950c0821a1b28c4354db0d0dfc90a110a546549ef690": { + "query": "SELECT count(*) as \"count!\" FROM executed_priority_operations WHERE block_number = $1", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "count!", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + null + ] + } + }, "93fe4dceacf4e052ad807068272dc768eab33513e6c1e1ac62d2f989b1a26eee": { "query": "\n INSERT INTO eth_operations (op_type, nonce, last_deadline_block, last_used_gas_price, raw_tx)\n VALUES ($1, $2, $3, $4, $5)\n RETURNING id\n ", "describe": { @@ 
-3178,56 +3807,6 @@ "nullable": [] } }, - "9600a226f271e0939eb5fd0327a48e3f1dbbdfb673e91287c474c827cdea83ee": { - "query": "\n SELECT * FROM nft \n WHERE creator_account_id = $1\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "token_id", - "type_info": "Int4" - }, - { - "ordinal": 1, - "name": "creator_account_id", - "type_info": "Int4" - }, - { - "ordinal": 2, - "name": "creator_address", - "type_info": "Bytea" - }, - { - "ordinal": 3, - "name": "serial_id", - "type_info": "Int4" - }, - { - "ordinal": 4, - "name": "address", - "type_info": "Bytea" - }, - { - "ordinal": 5, - "name": "content_hash", - "type_info": "Bytea" - } - ], - "parameters": { - "Left": [ - "Int4" - ] - }, - "nullable": [ - false, - false, - false, - false, - false, - false - ] - } - }, "961bbd13b54a7d53a58dfef2e60ce77752ff30807226edc69a47487b994b1468": { "query": "\n INSERT INTO tokens ( id, address, symbol, decimals, is_nft )\n VALUES ( $1, $2, $3, $4, $5 )\n ", "describe": { @@ -3256,6 +3835,27 @@ "nullable": [] } }, + "981fb627ef58081884cfa08f32153f356807406dafa291692276ba1f6459b855": { + "query": "\n SELECT COUNT(*) as \"count!\" FROM executed_transactions\n WHERE block_number <= $1 AND (from_account = $2 OR to_account = $2 OR primary_account_address = $2)\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "count!", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int8", + "Bytea" + ] + }, + "nullable": [ + null + ] + } + }, "98f87793202531586603307eab53987f75f4e07614af8706e6180413f808a1b4": { "query": "INSERT INTO txs_batches_signatures VALUES($1, $2)", "describe": { @@ -3279,6 +3879,24 @@ "nullable": [] } }, + "99b1aad6f25729e9189706d99c87b8487788b2de0a4ed7915d4f49daf37b62dc": { + "query": "SELECT COUNT(*) from mempool_txs", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "count", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + null + ] + } + }, 
"9aeeb5e20f4f34d4b4e1987f1bf0a23ee931f12da071b134225069d32c1896de": { "query": "SELECT * FROM pending_block\n ORDER BY number DESC\n LIMIT 1", "describe": { @@ -3332,9 +3950,73 @@ "describe": { "columns": [], "parameters": { - "Left": [] + "Left": [] + }, + "nullable": [] + } + }, + "9fb67f0d0bc8387201e5358ca011da4b6d4d48c38c5de3f628c9818804c01376": { + "query": "\n WITH transactions AS (\n SELECT\n tx_hash,\n tx as op,\n block_number,\n created_at,\n success,\n fail_reason,\n Null::bytea as eth_hash,\n Null::bigint as priority_op_serialid,\n block_index\n FROM executed_transactions\n WHERE block_number = $1 AND created_at <= $2\n ), priority_ops AS (\n SELECT\n tx_hash,\n operation as op,\n block_number,\n created_at,\n true as success,\n Null as fail_reason,\n eth_hash,\n priority_op_serialid,\n block_index\n FROM executed_priority_operations\n WHERE block_number = $1 AND created_at <= $2\n ), everything AS (\n SELECT * FROM transactions\n UNION ALL\n SELECT * FROM priority_ops\n )\n SELECT\n tx_hash as \"tx_hash!\",\n block_number as \"block_number!\",\n op as \"op!\",\n created_at as \"created_at!\",\n success as \"success!\",\n fail_reason as \"fail_reason?\",\n eth_hash as \"eth_hash?\",\n priority_op_serialid as \"priority_op_serialid?\"\n FROM everything\n ORDER BY created_at DESC, block_index DESC\n LIMIT $3\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "tx_hash!", + "type_info": "Bytea" + }, + { + "ordinal": 1, + "name": "block_number!", + "type_info": "Int8" + }, + { + "ordinal": 2, + "name": "op!", + "type_info": "Jsonb" + }, + { + "ordinal": 3, + "name": "created_at!", + "type_info": "Timestamptz" + }, + { + "ordinal": 4, + "name": "success!", + "type_info": "Bool" + }, + { + "ordinal": 5, + "name": "fail_reason?", + "type_info": "Text" + }, + { + "ordinal": 6, + "name": "eth_hash?", + "type_info": "Bytea" + }, + { + "ordinal": 7, + "name": "priority_op_serialid?", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + 
"Int8", + "Timestamptz", + "Int8" + ] }, - "nullable": [] + "nullable": [ + null, + null, + null, + null, + null, + null, + null, + null + ] } }, "9fbf3d0ae8610fb464ac74ff989860eb913f4bfb14790373021ef456b671ed96": { @@ -3490,6 +4172,70 @@ "nullable": [] } }, + "a36020585959caaf7354f66c84a5df7e796e95c5759256aaf861dd60597071f3": { + "query": "\n WITH transactions AS (\n SELECT\n tx_hash,\n tx as op,\n block_number,\n created_at,\n success,\n fail_reason,\n Null::bytea as eth_hash,\n Null::bigint as priority_op_serialid,\n block_index\n FROM executed_transactions\n WHERE block_number = $1 AND created_at >= $2\n ), priority_ops AS (\n SELECT\n tx_hash,\n operation as op,\n block_number,\n created_at,\n true as success,\n Null as fail_reason,\n eth_hash,\n priority_op_serialid,\n block_index\n FROM executed_priority_operations\n WHERE block_number = $1 AND created_at >= $2\n ), everything AS (\n SELECT * FROM transactions\n UNION ALL\n SELECT * FROM priority_ops\n )\n SELECT\n tx_hash as \"tx_hash!\",\n block_number as \"block_number!\",\n op as \"op!\",\n created_at as \"created_at!\",\n success as \"success!\",\n fail_reason as \"fail_reason?\",\n eth_hash as \"eth_hash?\",\n priority_op_serialid as \"priority_op_serialid?\"\n FROM everything\n ORDER BY created_at ASC, block_index ASC\n LIMIT $3\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "tx_hash!", + "type_info": "Bytea" + }, + { + "ordinal": 1, + "name": "block_number!", + "type_info": "Int8" + }, + { + "ordinal": 2, + "name": "op!", + "type_info": "Jsonb" + }, + { + "ordinal": 3, + "name": "created_at!", + "type_info": "Timestamptz" + }, + { + "ordinal": 4, + "name": "success!", + "type_info": "Bool" + }, + { + "ordinal": 5, + "name": "fail_reason?", + "type_info": "Text" + }, + { + "ordinal": 6, + "name": "eth_hash?", + "type_info": "Bytea" + }, + { + "ordinal": 7, + "name": "priority_op_serialid?", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int8", + "Timestamptz", + 
"Int8" + ] + }, + "nullable": [ + null, + null, + null, + null, + null, + null, + null, + null + ] + } + }, "a4969ac155106f1d8dd9b305e71ce36b3ee39adf75574d40e123a617a502ffe4": { "query": "INSERT INTO executed_transactions (block_number, block_index, tx, operation, tx_hash, from_account, to_account, success, fail_reason, primary_account_address, nonce, created_at, eth_sign_data, batch_id)\n VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14)\n ON CONFLICT (tx_hash)\n DO NOTHING", "describe": { @@ -3542,6 +4288,47 @@ "nullable": [] } }, + "a8db70ecce6347665cfbc4dd9f43bb99d6681ad811b14ebb77ca57b685d92630": { + "query": "\n SELECT COUNT(*) as \"count!\" FROM executed_priority_operations\n WHERE block_number <= $1 AND (from_account = $2 OR to_account = $2)\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "count!", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Int8", + "Bytea" + ] + }, + "nullable": [ + null + ] + } + }, + "a9e416d1cf63d47318cf8da7904b62101a9fa413f5b646b832cd9114a6693158": { + "query": "\n WITH transactions AS (\n SELECT tx_hash, created_at, block_index\n FROM executed_transactions\n WHERE from_account = $1 OR to_account = $1 OR primary_account_address = $1\n ), priority_ops AS (\n SELECT tx_hash, created_at, block_index\n FROM executed_priority_operations\n WHERE from_account = $1 OR to_account = $1\n ), everything AS (\n SELECT * FROM transactions\n UNION ALL\n SELECT * FROM priority_ops\n )\n SELECT\n tx_hash as \"tx_hash!\"\n FROM everything\n ORDER BY created_at DESC, block_index DESC\n LIMIT 1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "tx_hash!", + "type_info": "Bytea" + } + ], + "parameters": { + "Left": [ + "Bytea" + ] + }, + "nullable": [ + null + ] + } + }, "aaaf2bcea738151db11f6152772516a46ef7d23ae885936094226b837369ee3c": { "query": "DELETE FROM mempool_txs\n WHERE tx_hash = ANY($1)", "describe": { @@ -3622,6 +4409,47 @@ ] } }, + 
"abbdfc0983adb46d2051a4f6f2c0c46d17196bf312e1bb4747bf5b07f73d53d1": { + "query": "INSERT INTO executed_priority_operations (block_number, block_index, operation, from_account, to_account,\n priority_op_serialid, deadline_block, eth_hash, eth_block, created_at, eth_block_index, tx_hash)\n VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)\n ON CONFLICT (priority_op_serialid)\n DO NOTHING", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Int8", + "Int4", + "Jsonb", + "Bytea", + "Bytea", + "Int8", + "Int8", + "Bytea", + "Int8", + "Timestamptz", + "Int8", + "Bytea" + ] + }, + "nullable": [] + } + }, + "b1b0fd6eab9edcac470d79302d6da97ad73181c41fe313b08c7a6bfa3b0ccd11": { + "query": "SELECT MAX(id) FROM tokens", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "max", + "type_info": "Int4" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + null + ] + } + }, "b1c528c67d3c2ecea86e3ba1b2407cb4ee72149d66be0498be1c1162917c065d": { "query": "INSERT INTO block_witness (block, witness)\n VALUES ($1, $2)\n ON CONFLICT (block)\n DO NOTHING", "describe": { @@ -3793,63 +4621,6 @@ "nullable": [] } }, - "ba155dc95f19a097d1a16bf35f23371872f72dfb618cb871693752be93fed472": { - "query": "\n WITH aggr_comm AS (\n SELECT \n aggregate_operations.created_at, \n eth_operations.final_hash, \n commit_aggregated_blocks_binding.block_number \n FROM aggregate_operations\n INNER JOIN commit_aggregated_blocks_binding ON aggregate_operations.id = commit_aggregated_blocks_binding.op_id\n INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id\n INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id\n WHERE aggregate_operations.confirmed = true \n )\n ,aggr_exec as (\n SELECT \n aggregate_operations.created_at, \n eth_operations.final_hash, \n execute_aggregated_blocks_binding.block_number \n FROM aggregate_operations\n INNER JOIN execute_aggregated_blocks_binding ON 
aggregate_operations.id = execute_aggregated_blocks_binding.op_id\n INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id\n INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id\n WHERE aggregate_operations.confirmed = true \n )\n SELECT\n blocks.number AS \"block_number!\",\n blocks.root_hash AS \"new_state_root!\",\n blocks.block_size AS \"block_size!\",\n committed.final_hash AS \"commit_tx_hash?\",\n verified.final_hash AS \"verify_tx_hash?\",\n committed.created_at AS \"committed_at!\",\n verified.created_at AS \"verified_at?\"\n FROM blocks\n INNER JOIN aggr_comm committed ON blocks.number = committed.block_number\n LEFT JOIN aggr_exec verified ON blocks.number = verified.block_number\n WHERE\n blocks.number <= $1\n ORDER BY blocks.number DESC\n LIMIT $2;\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "block_number!", - "type_info": "Int8" - }, - { - "ordinal": 1, - "name": "new_state_root!", - "type_info": "Bytea" - }, - { - "ordinal": 2, - "name": "block_size!", - "type_info": "Int8" - }, - { - "ordinal": 3, - "name": "commit_tx_hash?", - "type_info": "Bytea" - }, - { - "ordinal": 4, - "name": "verify_tx_hash?", - "type_info": "Bytea" - }, - { - "ordinal": 5, - "name": "committed_at!", - "type_info": "Timestamptz" - }, - { - "ordinal": 6, - "name": "verified_at?", - "type_info": "Timestamptz" - } - ], - "parameters": { - "Left": [ - "Int8", - "Int8" - ] - }, - "nullable": [ - false, - false, - false, - true, - true, - false, - false - ] - } - }, "baaaff359564c5d1094fcf2650d53cf9dcac5d50fc3a549c6cff53dd472350f7": { "query": "\n SELECT * FROM ticker_price\n WHERE token_id = $1\n LIMIT 1\n ", "describe": { @@ -3949,67 +4720,17 @@ false ] } - }, - "bcb77615d5418437f8ef3a4b035ee320c2fb3f15467e8c7a89ecc1d743e24c18": { - "query": "DELETE FROM aggregate_operations WHERE from_block > $1", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int8" - ] - }, - 
"nullable": [] - } - }, - "be887d91df5cb45059e7ac1a857e79829b42b931cc7d9f086536c7ec1f096b75": { - "query": "\n WITH transactions AS (\n SELECT\n '0x' || encode(tx_hash, 'hex') as tx_hash,\n tx as op,\n block_number,\n success,\n fail_reason,\n created_at\n FROM executed_transactions\n WHERE block_number = $1\n ), priority_ops AS (\n SELECT\n '0x' || encode(eth_hash, 'hex') as tx_hash,\n operation as op,\n block_number,\n true as success,\n Null as fail_reason,\n created_at\n FROM executed_priority_operations\n WHERE block_number = $1\n ), everything AS (\n SELECT * FROM transactions\n UNION ALL\n SELECT * FROM priority_ops\n )\n SELECT\n tx_hash as \"tx_hash!\",\n block_number as \"block_number!\",\n op as \"op!\",\n success as \"success?\",\n fail_reason as \"fail_reason?\",\n created_at as \"created_at!\"\n FROM everything\n ORDER BY created_at DESC\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "tx_hash!", - "type_info": "Text" - }, - { - "ordinal": 1, - "name": "block_number!", - "type_info": "Int8" - }, - { - "ordinal": 2, - "name": "op!", - "type_info": "Jsonb" - }, - { - "ordinal": 3, - "name": "success?", - "type_info": "Bool" - }, - { - "ordinal": 4, - "name": "fail_reason?", - "type_info": "Text" - }, - { - "ordinal": 5, - "name": "created_at!", - "type_info": "Timestamptz" - } - ], + }, + "bcb77615d5418437f8ef3a4b035ee320c2fb3f15467e8c7a89ecc1d743e24c18": { + "query": "DELETE FROM aggregate_operations WHERE from_block > $1", + "describe": { + "columns": [], "parameters": { "Left": [ "Int8" ] }, - "nullable": [ - null, - null, - null, - null, - null, - null - ] + "nullable": [] } }, "bec05747dcfbf729bfd6e5d6aedf8da39f6d0d4ab5f0eae8dfed6c07adac1ba8": { @@ -4200,6 +4921,16 @@ "ordinal": 9, "name": "created_at", "type_info": "Timestamptz" + }, + { + "ordinal": 10, + "name": "eth_block_index", + "type_info": "Int8" + }, + { + "ordinal": 11, + "name": "tx_hash", + "type_info": "Bytea" } ], "parameters": { @@ -4217,6 +4948,8 @@ false, false, 
false, + false, + true, false ] } @@ -4428,39 +5161,24 @@ "nullable": [] } }, - "c7bc91425f35b3a77be36fe8ba80030445051a0bc2536fa4a0def7ac498fc5c2": { - "query": "INSERT INTO mempool_txs (tx_hash, tx, created_at, eth_sign_data)\n VALUES ($1, $2, $3, $4)", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text", - "Jsonb", - "Timestamptz", - "Jsonb" - ] - }, - "nullable": [] - } - }, - "c842454191f93c4ab02e9845294b575dfd48a8eaae85996c5e76a36be997f969": { - "query": "\n WITH block_details AS (\n WITH aggr_comm AS (\n SELECT \n aggregate_operations.created_at, \n eth_operations.final_hash, \n commit_aggregated_blocks_binding.block_number \n FROM aggregate_operations\n INNER JOIN commit_aggregated_blocks_binding ON aggregate_operations.id = commit_aggregated_blocks_binding.op_id\n INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id\n INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id\n WHERE aggregate_operations.confirmed = true \n )\n , aggr_exec as (\n SELECT \n aggregate_operations.created_at, \n eth_operations.final_hash, \n execute_aggregated_blocks_binding.block_number \n FROM aggregate_operations\n INNER JOIN execute_aggregated_blocks_binding ON aggregate_operations.id = execute_aggregated_blocks_binding.op_id\n INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id\n INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id\n WHERE aggregate_operations.confirmed = true \n )\n SELECT\n blocks.number AS details_block_number,\n committed.final_hash AS commit_tx_hash,\n verified.final_hash AS verify_tx_hash\n FROM blocks\n INNER JOIN aggr_comm committed ON blocks.number = committed.block_number\n LEFT JOIN aggr_exec verified ON blocks.number = verified.block_number\n )\n SELECT\n block_number, \n block_index,\n eth_hash,\n details.commit_tx_hash as \"commit_tx_hash?\",\n details.verify_tx_hash as 
\"verify_tx_hash?\"\n FROM executed_priority_operations\n LEFT JOIN block_details details ON details.details_block_number = executed_priority_operations.block_number\n WHERE (\n (from_account = $1 OR to_account = $1)\n AND (\n block_number = $2 AND (\n block_index >= $3\n ) OR (\n block_number > $2\n )\n )\n )\n ORDER BY block_number ASC, block_index ASC\n LIMIT $4\n ", + "c76bdef17043c7f22c968ae7a27b861ef5967d0e30d9e6c298e741c203eadd2e": { + "query": "\n WITH aggr_comm AS (\n SELECT \n aggregate_operations.created_at, \n eth_operations.final_hash, \n commit_aggregated_blocks_binding.block_number \n FROM aggregate_operations\n INNER JOIN commit_aggregated_blocks_binding ON aggregate_operations.id = commit_aggregated_blocks_binding.op_id\n INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id\n INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id\n WHERE aggregate_operations.confirmed = true \n ),\n aggr_exec as (\n SELECT \n aggregate_operations.created_at, \n eth_operations.final_hash, \n execute_aggregated_blocks_binding.block_number \n FROM aggregate_operations\n INNER JOIN execute_aggregated_blocks_binding ON aggregate_operations.id = execute_aggregated_blocks_binding.op_id\n INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id\n INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id\n WHERE aggregate_operations.confirmed = true \n )\n SELECT\n blocks.number AS \"block_number!\",\n blocks.root_hash AS \"new_state_root!\",\n blocks.block_size AS \"block_size!\",\n committed.final_hash AS \"commit_tx_hash?\",\n verified.final_hash AS \"verify_tx_hash?\",\n committed.created_at AS \"committed_at!\",\n verified.created_at AS \"verified_at?\"\n FROM blocks\n INNER JOIN aggr_comm committed ON blocks.number = committed.block_number\n LEFT JOIN aggr_exec verified ON blocks.number = verified.block_number\n WHERE\n 
blocks.number >= $1\n ORDER BY blocks.number ASC\n LIMIT $2;\n ", "describe": { "columns": [ { "ordinal": 0, - "name": "block_number", + "name": "block_number!", "type_info": "Int8" }, { "ordinal": 1, - "name": "block_index", - "type_info": "Int4" + "name": "new_state_root!", + "type_info": "Bytea" }, { "ordinal": 2, - "name": "eth_hash", - "type_info": "Bytea" + "name": "block_size!", + "type_info": "Int8" }, { "ordinal": 3, @@ -4471,13 +5189,21 @@ "ordinal": 4, "name": "verify_tx_hash?", "type_info": "Bytea" + }, + { + "ordinal": 5, + "name": "committed_at!", + "type_info": "Timestamptz" + }, + { + "ordinal": 6, + "name": "verified_at?", + "type_info": "Timestamptz" } ], "parameters": { "Left": [ - "Bytea", "Int8", - "Int4", "Int8" ] }, @@ -4486,10 +5212,27 @@ false, false, true, - true + true, + false, + false ] } }, + "c7bc91425f35b3a77be36fe8ba80030445051a0bc2536fa4a0def7ac498fc5c2": { + "query": "INSERT INTO mempool_txs (tx_hash, tx, created_at, eth_sign_data)\n VALUES ($1, $2, $3, $4)", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Text", + "Jsonb", + "Timestamptz", + "Jsonb" + ] + }, + "nullable": [] + } + }, "cb492484bab6e66f89a4d80649d3559566a681db153152a52449acf931a1d039": { "query": "SELECT * FROM block_witness WHERE block = $1", "describe": { @@ -5036,53 +5779,6 @@ "nullable": [] } }, - "e32e0ba9ec31e6e78de5972548dced78d2a6949ec723b71ce210627dbb92dfe4": { - "query": "\n WITH block_details AS (\n WITH aggr_comm AS (\n SELECT \n aggregate_operations.created_at, \n eth_operations.final_hash, \n commit_aggregated_blocks_binding.block_number \n FROM aggregate_operations\n INNER JOIN commit_aggregated_blocks_binding ON aggregate_operations.id = commit_aggregated_blocks_binding.op_id\n INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id\n INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id\n WHERE aggregate_operations.confirmed = true \n )\n , aggr_exec as 
(\n SELECT \n aggregate_operations.created_at, \n eth_operations.final_hash, \n execute_aggregated_blocks_binding.block_number \n FROM aggregate_operations\n INNER JOIN execute_aggregated_blocks_binding ON aggregate_operations.id = execute_aggregated_blocks_binding.op_id\n INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id\n INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id\n WHERE aggregate_operations.confirmed = true \n )\n SELECT\n blocks.number AS details_block_number,\n committed.final_hash AS commit_tx_hash,\n verified.final_hash AS verify_tx_hash\n FROM blocks\n INNER JOIN aggr_comm committed ON blocks.number = committed.block_number\n LEFT JOIN aggr_exec verified ON blocks.number = verified.block_number\n )\n SELECT\n block_number, \n block_index,\n eth_hash,\n details.commit_tx_hash as \"commit_tx_hash?\",\n details.verify_tx_hash as \"verify_tx_hash?\"\n FROM executed_priority_operations\n LEFT JOIN block_details details ON details.details_block_number = executed_priority_operations.block_number\n WHERE (\n (from_account = $1 OR to_account = $1)\n AND (\n block_number = $2 AND (\n block_index <= $3\n ) OR (\n block_number < $2\n )\n )\n )\n ORDER BY block_number DESC, block_index DESC\n LIMIT $4\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "block_number", - "type_info": "Int8" - }, - { - "ordinal": 1, - "name": "block_index", - "type_info": "Int4" - }, - { - "ordinal": 2, - "name": "eth_hash", - "type_info": "Bytea" - }, - { - "ordinal": 3, - "name": "commit_tx_hash?", - "type_info": "Bytea" - }, - { - "ordinal": 4, - "name": "verify_tx_hash?", - "type_info": "Bytea" - } - ], - "parameters": { - "Left": [ - "Bytea", - "Int8", - "Int4", - "Int8" - ] - }, - "nullable": [ - false, - false, - false, - true, - true - ] - } - }, "e3ee3cb9cbe8d05a635e71daea301cf6b2310f89f3d9f8fdabc28e7ebf8d3521": { "query": "\n INSERT INTO eth_account_types VALUES ( $1, $2 
)\n ON CONFLICT (account_id) DO UPDATE SET account_type = $2\n ", "describe": { @@ -5273,63 +5969,6 @@ ] } }, - "e7b1a3e830945cfe5c876255bbaa97dae409e1f642539ec898fd5dc3bb991bfc": { - "query": "\n WITH aggr_comm AS (\n SELECT \n aggregate_operations.created_at, \n eth_operations.final_hash, \n commit_aggregated_blocks_binding.block_number \n FROM aggregate_operations\n INNER JOIN commit_aggregated_blocks_binding ON aggregate_operations.id = commit_aggregated_blocks_binding.op_id\n INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id\n INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id\n WHERE aggregate_operations.confirmed = true \n )\n ,aggr_exec as (\n SELECT \n aggregate_operations.created_at, \n eth_operations.final_hash, \n execute_aggregated_blocks_binding.block_number \n FROM aggregate_operations\n INNER JOIN execute_aggregated_blocks_binding ON aggregate_operations.id = execute_aggregated_blocks_binding.op_id\n INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id\n INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id\n WHERE aggregate_operations.confirmed = true \n )\n SELECT\n blocks.number AS \"block_number!\",\n blocks.root_hash AS \"new_state_root!\",\n blocks.block_size AS \"block_size!\",\n committed.final_hash AS \"commit_tx_hash?\",\n verified.final_hash AS \"verify_tx_hash?\",\n committed.created_at AS \"committed_at!\",\n verified.created_at AS \"verified_at?\"\n FROM blocks\n INNER JOIN aggr_comm committed ON blocks.number = committed.block_number\n LEFT JOIN aggr_exec verified ON blocks.number = verified.block_number\n WHERE false\n OR committed.final_hash = $1\n OR verified.final_hash = $1\n OR blocks.root_hash = $1\n OR blocks.number = $2\n ORDER BY blocks.number DESC\n LIMIT 1;\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "block_number!", - "type_info": "Int8" - 
}, - { - "ordinal": 1, - "name": "new_state_root!", - "type_info": "Bytea" - }, - { - "ordinal": 2, - "name": "block_size!", - "type_info": "Int8" - }, - { - "ordinal": 3, - "name": "commit_tx_hash?", - "type_info": "Bytea" - }, - { - "ordinal": 4, - "name": "verify_tx_hash?", - "type_info": "Bytea" - }, - { - "ordinal": 5, - "name": "committed_at!", - "type_info": "Timestamptz" - }, - { - "ordinal": 6, - "name": "verified_at?", - "type_info": "Timestamptz" - } - ], - "parameters": { - "Left": [ - "Bytea", - "Int8" - ] - }, - "nullable": [ - false, - false, - false, - true, - true, - false, - false - ] - } - }, "e99d990d2d9b1c6068efb623634d6d6cf49a3c7ec33a5a916b7ddaa745e24c9b": { "query": "\n SELECT * FROM prover_job_queue\n WHERE job_status = $1\n ORDER BY (job_priority, id, first_block)\n LIMIT 1\n ", "describe": { @@ -5753,6 +6392,16 @@ "ordinal": 9, "name": "created_at", "type_info": "Timestamptz" + }, + { + "ordinal": 10, + "name": "eth_block_index", + "type_info": "Int8" + }, + { + "ordinal": 11, + "name": "tx_hash", + "type_info": "Bytea" } ], "parameters": { @@ -5770,6 +6419,8 @@ false, false, false, + false, + true, false ] } @@ -5830,48 +6481,6 @@ ] } }, - "f491f570438dd3a3ecd5988d0682751448723b41f1bd17198cc60d7f5d9aeb11": { - "query": "\n SELECT * FROM tokens WHERE is_nft = false\n ORDER BY id ASC\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Int4" - }, - { - "ordinal": 1, - "name": "address", - "type_info": "Text" - }, - { - "ordinal": 2, - "name": "symbol", - "type_info": "Text" - }, - { - "ordinal": 3, - "name": "decimals", - "type_info": "Int2" - }, - { - "ordinal": 4, - "name": "is_nft", - "type_info": "Bool" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - false, - false, - false, - false, - false - ] - } - }, "f4aaa302a20921ae9ff490ac1a86083c49ee4a9afacf0faeb76aa8e1549f2fe7": { "query": "SELECT * FROM account_creates WHERE block_number > $1 AND block_number <= $2 ", "describe": { @@ -5976,6 
+6585,32 @@ ] } }, + "f8f2208c71cbf2d42de633222bb888c773a47b82dd0095b76ad38f535d74fdce": { + "query": "SELECT created_at, block_number FROM executed_transactions\n WHERE tx_hash = $1", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "created_at", + "type_info": "Timestamptz" + }, + { + "ordinal": 1, + "name": "block_number", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [ + "Bytea" + ] + }, + "nullable": [ + false, + false + ] + } + }, "fabb011dfd474fd56c71b7fb1707bbe586e66f9a45deac15b486845ba5c87979": { "query": "SELECT * FROM mint_nft_updates WHERE block_number <= $1", "describe": { diff --git a/core/lib/storage/src/chain/account/mod.rs b/core/lib/storage/src/chain/account/mod.rs index 8341594800..c32a881b46 100644 --- a/core/lib/storage/src/chain/account/mod.rs +++ b/core/lib/storage/src/chain/account/mod.rs @@ -3,7 +3,7 @@ use std::time::Instant; // External imports use sqlx::Acquire; // Workspace imports -use zksync_types::{Account, AccountId, AccountUpdates, Address, TokenId}; +use zksync_types::{Account, AccountId, AccountUpdates, Address, BlockNumber, TokenId}; // Local imports use self::records::*; use crate::diff::StorageAccountDiff; @@ -76,21 +76,19 @@ impl<'a, 'c> AccountSchema<'a, 'c> { account_id: AccountId, ) -> QueryResult { let start = Instant::now(); - // Load committed & verified states, and return them. - let committed = self + let mut transaction = self.0.start_transaction().await?; + let (verified_state, committed_state) = transaction + .chain() + .account_schema() .last_committed_state_for_account(account_id) - .await? - .map(|a| (account_id, a)); - let verified = self - .last_verified_state_for_account(account_id) - .await? 
- .map(|a| (account_id, a)); + .await?; + transaction.commit().await?; metrics::histogram!("sql.chain.account.account_state_by_id", start.elapsed()); Ok(StoredAccountState { - committed, - verified, + committed: committed_state.map(|a| (account_id, a)), + verified: verified_state.1.map(|a| (account_id, a)), }) } @@ -119,10 +117,11 @@ impl<'a, 'c> AccountSchema<'a, 'c> { /// Loads the last committed (e.g. just added but no necessarily verified) state for /// account given its ID. + /// Returns both verified and committed states. pub async fn last_committed_state_for_account( &mut self, account_id: AccountId, - ) -> QueryResult> { + ) -> QueryResult<((i64, Option), Option)> { let start = Instant::now(); let mut transaction = self.0.start_transaction().await?; @@ -211,7 +210,7 @@ impl<'a, 'c> AccountSchema<'a, 'c> { let account_state = account_diff .into_iter() .map(|(_, upd)| upd) - .fold(account, Account::apply_update); + .fold(account.clone(), Account::apply_update); transaction.commit().await?; @@ -219,7 +218,7 @@ impl<'a, 'c> AccountSchema<'a, 'c> { "sql.chain.account.last_committed_state_for_account", start.elapsed() ); - Ok(account_state) + Ok(((last_block, account), account_state)) } /// Loads the last verified state for the account (i.e. the one obtained in the last block @@ -238,7 +237,7 @@ impl<'a, 'c> AccountSchema<'a, 'c> { } /// Obtains the last verified state of the account. - async fn account_and_last_block( + pub async fn account_and_last_block( &mut self, account_id: AccountId, ) -> QueryResult<(i64, Option)> { @@ -275,7 +274,7 @@ impl<'a, 'c> AccountSchema<'a, 'c> { let nfts: Vec = sqlx::query_as!( StorageNFT, " - SELECT * FROM nft + SELECT * FROM nft WHERE creator_account_id = $1 ", *account_id as i32 @@ -341,6 +340,41 @@ impl<'a, 'c> AccountSchema<'a, 'c> { Ok(address) } + /// Obtains the last committed block that affects the account. 
+ pub async fn last_committed_block_with_update_for_acc( + &mut self, + account_id: AccountId, + ) -> QueryResult { + let start = Instant::now(); + + let block_number = sqlx::query!( + " + SELECT MAX(block_number) + FROM( + SELECT block_number FROM account_balance_updates + WHERE account_id = $1 + UNION ALL + SELECT block_number FROM account_creates + WHERE account_id = $1 + UNION ALL + SELECT block_number FROM account_pubkey_updates + WHERE account_id = $1 + ) as subquery + ", + i64::from(*account_id), + ) + .fetch_one(self.0.conn()) + .await? + .max + .unwrap_or(0); + + metrics::histogram!( + "sql.chain.account.last_committed_block_with_update_for_acc", + start.elapsed() + ); + Ok(BlockNumber(block_number as u32)) + } + // This method does not have metrics, since it is used only for the // migration for the nft regenesis. // Remove this function once the regenesis is complete and the tool is not diff --git a/core/lib/storage/src/chain/account/records.rs b/core/lib/storage/src/chain/account/records.rs index 1226bcb268..586bad8868 100644 --- a/core/lib/storage/src/chain/account/records.rs +++ b/core/lib/storage/src/chain/account/records.rs @@ -1,3 +1,5 @@ +// Workspace imports +use zksync_api_types::v02::account::EthAccountType as ApiEthAccountType; // External imports use sqlx::{types::BigDecimal, FromRow}; use zksync_types::{AccountId, Address, TokenId, H256, NFT}; @@ -87,6 +89,15 @@ pub enum EthAccountType { CREATE2, } +impl From for ApiEthAccountType { + fn from(account_type: EthAccountType) -> ApiEthAccountType { + match account_type { + EthAccountType::Owned => ApiEthAccountType::Owned, + EthAccountType::CREATE2 => ApiEthAccountType::CREATE2, + } + } +} + #[derive(Debug, Clone, FromRow)] pub struct StorageAccountType { pub account_id: i64, diff --git a/core/lib/storage/src/chain/block/conversion.rs b/core/lib/storage/src/chain/block/conversion.rs index 08142a5626..1fe264d9bc 100644 --- a/core/lib/storage/src/chain/block/conversion.rs +++ 
b/core/lib/storage/src/chain/block/conversion.rs @@ -6,18 +6,21 @@ use std::convert::TryFrom; // External imports // Workspace imports +use zksync_api_types::v02::transaction::{ + L1Transaction, Transaction, TransactionData, TxInBlockStatus, +}; use zksync_types::{ - Action, ActionType, Operation, SignedZkSyncTx, H256, - { - block::{ExecutedPriorityOp, ExecutedTx}, - BlockNumber, PriorityOp, ZkSyncOp, ZkSyncTx, - }, + aggregated_operations::AggregatedOperation, + block::{ExecutedPriorityOp, ExecutedTx}, + tx::TxHash, + Action, ActionType, BlockNumber, Operation, PriorityOp, SignedZkSyncTx, ZkSyncOp, ZkSyncTx, + H256, }; // Local imports use crate::chain::operations::records::StoredAggregatedOperation; use crate::{ chain::{ - block::BlockSchema, + block::{records::TransactionItem, BlockSchema}, operations::records::{ NewExecutedPriorityOperation, NewExecutedTransaction, StoredExecutedPriorityOperation, StoredExecutedTransaction, StoredOperation, @@ -26,7 +29,6 @@ use crate::{ prover::ProverSchema, QueryResult, StorageActionType, StorageProcessor, }; -use zksync_types::aggregated_operations::AggregatedOperation; impl StoredOperation { pub async fn into_op(self, conn: &mut StorageProcessor<'_>) -> QueryResult { @@ -88,6 +90,7 @@ impl StoredExecutedPriorityOperation { deadline_block: self.deadline_block as u64, eth_hash: H256::from_slice(&self.eth_hash), eth_block: self.eth_block as u64, + eth_block_index: self.eth_block_index.map(|index| index as u64), }, op: franklin_op, block_index: self.block_index as u32, @@ -102,6 +105,7 @@ impl NewExecutedPriorityOperation { block: BlockNumber, ) -> Self { let operation = serde_json::to_value(&exec_prior_op.op).unwrap(); + let tx_hash = exec_prior_op.priority_op.tx_hash().as_ref().to_vec(); let (from_account, to_account) = match exec_prior_op.op { ZkSyncOp::Deposit(deposit) => (deposit.priority_op.from, deposit.priority_op.to), @@ -126,6 +130,11 @@ impl NewExecutedPriorityOperation { eth_hash: 
exec_prior_op.priority_op.eth_hash.as_bytes().to_vec(), eth_block: exec_prior_op.priority_op.eth_block as i64, created_at: exec_prior_op.created_at, + eth_block_index: exec_prior_op + .priority_op + .eth_block_index + .map(|index| index as i64), + tx_hash, } } } @@ -209,3 +218,37 @@ impl StoredAggregatedOperation { ) } } + +impl TransactionItem { + pub fn transaction_from_item(item: TransactionItem, is_block_finalized: bool) -> Transaction { + let tx_hash = TxHash::from_slice(&item.tx_hash).unwrap(); + let block_number = Some(BlockNumber(item.block_number as u32)); + let status = if item.success { + if is_block_finalized { + TxInBlockStatus::Finalized + } else { + TxInBlockStatus::Committed + } + } else { + TxInBlockStatus::Rejected + }; + let op = if let Some(eth_hash) = item.eth_hash { + let eth_hash = H256::from_slice(ð_hash); + let id = item.priority_op_serialid.unwrap() as u64; + let operation: ZkSyncOp = serde_json::from_value(item.op).unwrap(); + TransactionData::L1( + L1Transaction::from_executed_op(operation, eth_hash, id, tx_hash).unwrap(), + ) + } else { + TransactionData::L2(serde_json::from_value(item.op).unwrap()) + }; + Transaction { + tx_hash, + block_number, + op, + status, + fail_reason: item.fail_reason, + created_at: Some(item.created_at), + } + } +} diff --git a/core/lib/storage/src/chain/block/mod.rs b/core/lib/storage/src/chain/block/mod.rs index d3a26a923a..3a36262556 100644 --- a/core/lib/storage/src/chain/block/mod.rs +++ b/core/lib/storage/src/chain/block/mod.rs @@ -2,6 +2,13 @@ use std::time::{Instant, SystemTime, UNIX_EPOCH}; // External imports // Workspace imports +use zksync_api_types::{ + v02::{ + pagination::{BlockAndTxHash, PaginationDirection, PaginationQuery}, + transaction::Transaction, + }, + Either, +}; use zksync_basic_types::{H256, U256}; use zksync_crypto::convert::FeConvert; use zksync_types::{ @@ -13,7 +20,7 @@ use zksync_types::{ // Local imports use self::records::{ AccountTreeCache, BlockTransactionItem, StorageBlock, 
StorageBlockDetails, - StorageBlockMetadata, StoragePendingBlock, + StorageBlockMetadata, StoragePendingBlock, TransactionItem, }; use crate::{ chain::account::records::EthAccountType, @@ -27,7 +34,7 @@ use crate::{ QueryResult, StorageProcessor, }; -mod conversion; +pub(crate) mod conversion; pub mod records; /// Block schema is a primary sidechain storage controller. @@ -231,7 +238,7 @@ impl<'a, 'c> BlockSchema<'a, 'c> { tx_hash as "tx_hash!", block_number as "block_number!", op as "op!", - success as "success?", + success as "success!", fail_reason as "fail_reason?", created_at as "created_at!" FROM everything @@ -309,8 +316,8 @@ impl<'a, 'c> BlockSchema<'a, 'c> { Ok(executed_operations) } - /// Loads the block headers for the given amount of blocks. - pub async fn load_block_range( + /// Loads the block headers for the given amount of blocks in the descending order. + pub async fn load_block_range_desc( &mut self, max_block: BlockNumber, limit: u32, @@ -336,8 +343,8 @@ impl<'a, 'c> BlockSchema<'a, 'c> { INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id WHERE aggregate_operations.confirmed = true - ) - ,aggr_exec as ( + ), + aggr_exec as ( SELECT aggregate_operations.created_at, eth_operations.final_hash, @@ -373,6 +380,87 @@ impl<'a, 'c> BlockSchema<'a, 'c> { Ok(details) } + /// Loads the block headers for the given amount of blocks in the ascending order. 
+ pub async fn load_block_range_asc( + &mut self, + min_block: BlockNumber, + limit: u32, + ) -> QueryResult> { + let start = Instant::now(); + // This query does the following: + // - joins the `operations` and `eth_tx_hashes` (using the intermediate `eth_ops_binding` table) + // tables to collect the data: + // block number, ethereum transaction hash, action type and action creation timestamp; + // - joins the `blocks` table with result of the join twice: once for committed operations + // and verified operations; + // - collects the {limit} blocks in the ascending order with the data gathered above. + let details = sqlx::query_as!( + StorageBlockDetails, + r#" + WITH aggr_comm AS ( + SELECT + aggregate_operations.created_at, + eth_operations.final_hash, + commit_aggregated_blocks_binding.block_number + FROM aggregate_operations + INNER JOIN commit_aggregated_blocks_binding ON aggregate_operations.id = commit_aggregated_blocks_binding.op_id + INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id + INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id + WHERE aggregate_operations.confirmed = true + ), + aggr_exec as ( + SELECT + aggregate_operations.created_at, + eth_operations.final_hash, + execute_aggregated_blocks_binding.block_number + FROM aggregate_operations + INNER JOIN execute_aggregated_blocks_binding ON aggregate_operations.id = execute_aggregated_blocks_binding.op_id + INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id + INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id + WHERE aggregate_operations.confirmed = true + ) + SELECT + blocks.number AS "block_number!", + blocks.root_hash AS "new_state_root!", + blocks.block_size AS "block_size!", + committed.final_hash AS "commit_tx_hash?", + verified.final_hash AS "verify_tx_hash?", + committed.created_at AS "committed_at!", + verified.created_at 
AS "verified_at?" + FROM blocks + INNER JOIN aggr_comm committed ON blocks.number = committed.block_number + LEFT JOIN aggr_exec verified ON blocks.number = verified.block_number + WHERE + blocks.number >= $1 + ORDER BY blocks.number ASC + LIMIT $2; + "#, + i64::from(*min_block), + i64::from(limit) + ).fetch_all(self.0.conn()) + .await?; + + metrics::histogram!("sql.chain.block.load_block_range_asc", start.elapsed()); + Ok(details) + } + + /// Loads the block headers for the given pagination query + pub async fn load_block_page( + &mut self, + query: &PaginationQuery, + ) -> QueryResult> { + let details = match query.direction { + PaginationDirection::Newer => { + self.load_block_range_asc(query.from, query.limit).await? + } + PaginationDirection::Older => { + self.load_block_range_desc(query.from, query.limit).await? + } + }; + + Ok(details) + } + /// Helper method for `find_block_by_height_or_hash`. It checks whether /// provided string can be interpreted like a hash, and if so, returns the /// hexadecimal string without prefix. @@ -450,8 +538,8 @@ impl<'a, 'c> BlockSchema<'a, 'c> { INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id WHERE aggregate_operations.confirmed = true - ) - ,aggr_exec as ( + ), + aggr_exec as ( SELECT aggregate_operations.created_at, eth_operations.final_hash, @@ -558,6 +646,16 @@ impl<'a, 'c> BlockSchema<'a, 'c> { result } + pub async fn is_block_finalized(&mut self, block_number: BlockNumber) -> QueryResult { + let last_finalized_block = self + .0 + .chain() + .block_schema() + .get_last_verified_confirmed_block() + .await?; + Ok(block_number <= last_finalized_block) + } + /// Helper method for retrieving pending blocks from the database. 
async fn load_storage_pending_block(&mut self) -> QueryResult> { let start = Instant::now(); @@ -931,6 +1029,204 @@ impl<'a, 'c> BlockSchema<'a, 'c> { Ok(self.save_block(block).await?) } + /// Retrieves both L1 and L2 operations stored in the block for the given pagination query + pub async fn get_block_transactions_page( + &mut self, + query: &PaginationQuery, + ) -> QueryResult>> { + let start = Instant::now(); + let mut transaction = self.0.start_transaction().await?; + + let tx_hash = match query.from.tx_hash.inner { + Either::Left(tx_hash) => tx_hash, + Either::Right(_) => { + if let Some(tx_hash) = transaction + .chain() + .operations_ext_schema() + .get_block_last_tx_hash(query.from.block_number) + .await? + { + tx_hash + } else { + return Ok(Some(Vec::new())); + } + } + }; + let created_at_and_block = transaction + .chain() + .operations_ext_schema() + .get_tx_created_at_and_block_number(tx_hash) + .await?; + let block_txs = if let Some((time_from, block_number)) = created_at_and_block { + if block_number == query.from.block_number { + let raw_txs: Vec = match query.direction { + PaginationDirection::Newer => { + sqlx::query_as!( + TransactionItem, + r#" + WITH transactions AS ( + SELECT + tx_hash, + tx as op, + block_number, + created_at, + success, + fail_reason, + Null::bytea as eth_hash, + Null::bigint as priority_op_serialid, + block_index + FROM executed_transactions + WHERE block_number = $1 AND created_at >= $2 + ), priority_ops AS ( + SELECT + tx_hash, + operation as op, + block_number, + created_at, + true as success, + Null as fail_reason, + eth_hash, + priority_op_serialid, + block_index + FROM executed_priority_operations + WHERE block_number = $1 AND created_at >= $2 + ), everything AS ( + SELECT * FROM transactions + UNION ALL + SELECT * FROM priority_ops + ) + SELECT + tx_hash as "tx_hash!", + block_number as "block_number!", + op as "op!", + created_at as "created_at!", + success as "success!", + fail_reason as "fail_reason?", + eth_hash as 
"eth_hash?", + priority_op_serialid as "priority_op_serialid?" + FROM everything + ORDER BY created_at ASC, block_index ASC + LIMIT $3 + "#, + i64::from(*block_number), + time_from, + i64::from(query.limit), + ) + .fetch_all(transaction.conn()) + .await? + } + PaginationDirection::Older => { + sqlx::query_as!( + TransactionItem, + r#" + WITH transactions AS ( + SELECT + tx_hash, + tx as op, + block_number, + created_at, + success, + fail_reason, + Null::bytea as eth_hash, + Null::bigint as priority_op_serialid, + block_index + FROM executed_transactions + WHERE block_number = $1 AND created_at <= $2 + ), priority_ops AS ( + SELECT + tx_hash, + operation as op, + block_number, + created_at, + true as success, + Null as fail_reason, + eth_hash, + priority_op_serialid, + block_index + FROM executed_priority_operations + WHERE block_number = $1 AND created_at <= $2 + ), everything AS ( + SELECT * FROM transactions + UNION ALL + SELECT * FROM priority_ops + ) + SELECT + tx_hash as "tx_hash!", + block_number as "block_number!", + op as "op!", + created_at as "created_at!", + success as "success!", + fail_reason as "fail_reason?", + eth_hash as "eth_hash?", + priority_op_serialid as "priority_op_serialid?" + FROM everything + ORDER BY created_at DESC, block_index DESC + LIMIT $3 + "#, + i64::from(*block_number), + time_from, + i64::from(query.limit), + ) + .fetch_all(transaction.conn()) + .await? 
+ } + }; + let is_block_finalized = transaction + .chain() + .block_schema() + .is_block_finalized(block_number) + .await?; + let txs: Vec = raw_txs + .into_iter() + .map(|tx| TransactionItem::transaction_from_item(tx, is_block_finalized)) + .collect(); + Some(txs) + } else { + None + } + } else { + None + }; + transaction.commit().await?; + + metrics::histogram!( + "sql.chain.block.get_block_transactions_page", + start.elapsed() + ); + Ok(block_txs) + } + + /// Returns count of both L1 and L2 operations stored in the block + pub async fn get_block_transactions_count( + &mut self, + block_number: BlockNumber, + ) -> QueryResult { + let start = Instant::now(); + let mut transaction = self.0.start_transaction().await?; + + let tx_count = sqlx::query!( + r#"SELECT count(*) as "count!" FROM executed_transactions WHERE block_number = $1"#, + i64::from(*block_number) + ) + .fetch_one(transaction.conn()) + .await? + .count; + let priority_op_count = sqlx::query!( + r#"SELECT count(*) as "count!" FROM executed_priority_operations WHERE block_number = $1"#, + i64::from(*block_number) + ) + .fetch_one(transaction.conn()) + .await? 
+ .count; + transaction.commit().await?; + + metrics::histogram!( + "sql.chain.block.get_block_transactions_count", + start.elapsed() + ); + Ok((tx_count + priority_op_count) as u32) + } + // Removes blocks with number greater than `last_block` pub async fn remove_blocks(&mut self, last_block: BlockNumber) -> QueryResult<()> { let start = Instant::now(); diff --git a/core/lib/storage/src/chain/block/records.rs b/core/lib/storage/src/chain/block/records.rs index 07e679a5cb..6128979246 100644 --- a/core/lib/storage/src/chain/block/records.rs +++ b/core/lib/storage/src/chain/block/records.rs @@ -62,11 +62,23 @@ pub struct BlockTransactionItem { pub tx_hash: String, pub block_number: i64, pub op: Value, - pub success: Option, + pub success: bool, pub fail_reason: Option, pub created_at: DateTime, } +#[derive(Debug, Serialize, Deserialize, FromRow, PartialEq)] +pub struct TransactionItem { + pub tx_hash: Vec, + pub block_number: i64, + pub op: Value, + pub created_at: DateTime, + pub success: bool, + pub fail_reason: Option, + pub eth_hash: Option>, + pub priority_op_serialid: Option, +} + #[derive(Debug, Clone, Serialize, Deserialize)] pub struct AccountTreeCache { pub block: i64, diff --git a/core/lib/storage/src/chain/mempool/mod.rs b/core/lib/storage/src/chain/mempool/mod.rs index 048dfc0229..c1b587f31b 100644 --- a/core/lib/storage/src/chain/mempool/mod.rs +++ b/core/lib/storage/src/chain/mempool/mod.rs @@ -1,15 +1,18 @@ // Built-in deps -use std::{collections::VecDeque, convert::TryFrom, time::Instant}; +use std::{collections::VecDeque, convert::TryFrom, str::FromStr, time::Instant}; // External imports use itertools::Itertools; // Workspace imports +use zksync_api_types::v02::transaction::{ + ApiTxBatch, BatchStatus, TxHashSerializeWrapper, TxInBlockStatus, +}; use zksync_types::{ mempool::SignedTxVariant, tx::{TxEthSignature, TxHash}, BlockNumber, SignedZkSyncTx, }; // Local imports -use self::records::MempoolTx; +use self::records::{MempoolTx, QueuedBatchTx}; 
use crate::{QueryResult, StorageProcessor}; pub mod records; @@ -137,13 +140,16 @@ impl<'a, 'c> MempoolSchema<'a, 'c> { anyhow::bail!("Cannot insert an empty batch"); } + let mut transaction = self.0.start_transaction().await?; + let tx_hashes: Vec = txs.iter().map(|tx| tx.tx.hash()).collect(); + // The first transaction of the batch would be inserted manually // batch_id of the inserted transaction would be the id of this batch // Will be unique cause batch_id is bigserial // Special case: batch_id == 0 <==> transaction is not a part of some batch (uses in `insert_tx` function) let batch_id = { let first_tx_data = txs[0].clone(); - let tx_hash = hex::encode(first_tx_data.hash().as_ref()); + let tx_hash = hex::encode(tx_hashes[0].as_ref()); let tx = serde_json::to_value(&first_tx_data.tx) .expect("Unserializable TX provided to the database"); let eth_sign_data = first_tx_data @@ -159,7 +165,7 @@ impl<'a, 'c> MempoolSchema<'a, 'c> { chrono::Utc::now(), eth_sign_data, ) - .execute(self.0.conn()) + .execute(transaction.conn()) .await?; sqlx::query_as!( @@ -168,19 +174,19 @@ impl<'a, 'c> MempoolSchema<'a, 'c> { ORDER BY batch_id DESC LIMIT 1", ) - .fetch_optional(self.0.conn()) + .fetch_optional(transaction.conn()) .await? .ok_or_else(|| anyhow::format_err!("Can't get maximal batch_id from mempool_txs"))? 
.batch_id }; // Processing of all batch transactions, except the first - let mut tx_hashes = Vec::with_capacity(txs.len()); + let mut tx_hashes_strs = Vec::with_capacity(txs.len()); let mut tx_values = Vec::with_capacity(txs.len()); let mut txs_sign_data = Vec::with_capacity(txs.len()); - for tx_data in txs[1..].iter() { - tx_hashes.push(hex::encode(tx_data.hash().as_ref())); + for (tx_data, tx_hash) in txs[1..].iter().zip(tx_hashes[1..].iter()) { + tx_hashes_strs.push(hex::encode(tx_hash.as_ref())); tx_values.push( serde_json::to_value(&tx_data.tx) .expect("Unserializable TX provided to the database"), @@ -198,13 +204,13 @@ impl<'a, 'c> MempoolSchema<'a, 'c> { SELECT u.tx_hash, u.tx, u.eth_sign_data, $4, $5 FROM UNNEST ($1::text[], $2::jsonb[], $3::jsonb[]) AS u(tx_hash, tx, eth_sign_data)", - &tx_hashes, + &tx_hashes_strs, &tx_values, &txs_sign_data, chrono::Utc::now(), batch_id ) - .execute(self.0.conn()) + .execute(transaction.conn()) .await?; // If there're signatures for the whole batch, store them too. @@ -215,10 +221,21 @@ impl<'a, 'c> MempoolSchema<'a, 'c> { batch_id, signature ) - .execute(self.0.conn()) + .execute(transaction.conn()) .await?; } + let batch_hash = TxHash::batch_hash(&tx_hashes); + sqlx::query!( + "INSERT INTO txs_batches_hashes VALUES($1, $2)", + batch_id, + batch_hash.as_ref() + ) + .execute(transaction.conn()) + .await?; + + transaction.commit().await?; + metrics::histogram!("sql.chain.mempool.insert_batch", start.elapsed()); Ok(batch_id) } @@ -290,7 +307,7 @@ impl<'a, 'c> MempoolSchema<'a, 'c> { let tx_hash = hex::encode(tx_hash.as_ref()); let row = sqlx::query!( - "SELECT count(*) from mempool_txs + "SELECT COUNT(*) from mempool_txs WHERE tx_hash = $1", &tx_hash ) @@ -304,10 +321,23 @@ impl<'a, 'c> MempoolSchema<'a, 'c> { Ok(contains) } - /// Returns zkSync transaction with thr given hash. + /// Returns zkSync transaction with the given hash. 
pub async fn get_tx(&mut self, tx_hash: TxHash) -> QueryResult> { let start = Instant::now(); + let mempool_tx = self.get_mempool_tx(tx_hash).await?; + + metrics::histogram!("sql.chain", start.elapsed(), "mempool" => "get_tx"); + mempool_tx + .map(SignedZkSyncTx::try_from) + .transpose() + .map_err(anyhow::Error::from) + } + + /// Returns mempool transaction as it is stored in the database. + pub async fn get_mempool_tx(&mut self, tx_hash: TxHash) -> QueryResult> { + let start = Instant::now(); + let tx_hash = hex::encode(tx_hash.as_ref()); let mempool_tx = sqlx::query_as!( @@ -320,10 +350,7 @@ impl<'a, 'c> MempoolSchema<'a, 'c> { .await?; metrics::histogram!("sql.chain", start.elapsed(), "mempool" => "get_tx"); - mempool_tx - .map(SignedZkSyncTx::try_from) - .transpose() - .map_err(anyhow::Error::from) + Ok(mempool_tx) } /// Removes transactions that are already committed. @@ -376,6 +403,65 @@ impl<'a, 'c> MempoolSchema<'a, 'c> { Ok(()) } + /// Returns mempool size. + pub async fn get_mempool_size(&mut self) -> QueryResult { + let start = Instant::now(); + + let size = sqlx::query!("SELECT COUNT(*) from mempool_txs") + .fetch_one(self.0.conn()) + .await? + .count; + + metrics::histogram!("sql.chain", start.elapsed(), "mempool" => "get_mempool_size"); + Ok(size.unwrap_or(0) as u32) + } + + /// Get info about batch in mempool. 
+ pub async fn get_queued_batch_info( + &mut self, + batch_hash: TxHash, + ) -> QueryResult> { + let start = Instant::now(); + + let batch_data = sqlx::query_as!( + QueuedBatchTx, + r#" + SELECT tx_hash, created_at + FROM mempool_txs + INNER JOIN txs_batches_hashes + ON txs_batches_hashes.batch_id = mempool_txs.batch_id + WHERE batch_hash = $1 + ORDER BY id ASC + "#, + batch_hash.as_ref() + ) + .fetch_all(self.0.conn()) + .await?; + let result = if !batch_data.is_empty() { + let created_at = batch_data[0].created_at; + let transaction_hashes: Vec = batch_data + .iter() + .map(|tx| { + TxHashSerializeWrapper(TxHash::from_str(&format!("0x{}", tx.tx_hash)).unwrap()) + }) + .collect(); + Some(ApiTxBatch { + batch_hash, + transaction_hashes, + created_at, + batch_status: BatchStatus { + updated_at: created_at, + last_state: TxInBlockStatus::Queued, + }, + }) + } else { + None + }; + + metrics::histogram!("sql.chain", start.elapsed(), "mempool" => "get_queued_batch_info"); + Ok(result) + } + // Returns executed txs back to mempool for blocks with number greater than `last_block` pub async fn return_executed_txs_to_mempool( &mut self, diff --git a/core/lib/storage/src/chain/mempool/records.rs b/core/lib/storage/src/chain/mempool/records.rs index 29106dcc77..42ba036426 100644 --- a/core/lib/storage/src/chain/mempool/records.rs +++ b/core/lib/storage/src/chain/mempool/records.rs @@ -33,3 +33,9 @@ impl TryFrom for SignedZkSyncTx { }) } } + +#[derive(Debug, FromRow, PartialEq)] +pub struct QueuedBatchTx { + pub tx_hash: String, + pub created_at: DateTime, +} diff --git a/core/lib/storage/src/chain/operations/mod.rs b/core/lib/storage/src/chain/operations/mod.rs index 58de6ff012..3270a752e8 100644 --- a/core/lib/storage/src/chain/operations/mod.rs +++ b/core/lib/storage/src/chain/operations/mod.rs @@ -116,8 +116,8 @@ impl<'a, 'c> OperationsSchema<'a, 'c> { Ok(op) } - /// Retrieves priority operation from the database given its hash. 
- pub async fn get_executed_priority_operation_by_hash( + /// Retrieves priority operation from the database by its eth_hash. + pub async fn get_executed_priority_operation_by_eth_hash( &mut self, eth_hash: &[u8], ) -> QueryResult> { @@ -131,7 +131,7 @@ impl<'a, 'c> OperationsSchema<'a, 'c> { .await?; metrics::histogram!( - "sql.chain.operations.get_executed_priority_operation_by_hash", + "sql.chain.operations.get_executed_priority_operation_by_eth_hash", start.elapsed() ); Ok(op) @@ -265,9 +265,11 @@ impl<'a, 'c> OperationsSchema<'a, 'c> { operation: NewExecutedPriorityOperation, ) -> QueryResult<()> { let start = Instant::now(); + sqlx::query!( - "INSERT INTO executed_priority_operations (block_number, block_index, operation, from_account, to_account, priority_op_serialid, deadline_block, eth_hash, eth_block, created_at) - VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) + "INSERT INTO executed_priority_operations (block_number, block_index, operation, from_account, to_account, + priority_op_serialid, deadline_block, eth_hash, eth_block, created_at, eth_block_index, tx_hash) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12) ON CONFLICT (priority_op_serialid) DO NOTHING", operation.block_number, @@ -280,6 +282,8 @@ impl<'a, 'c> OperationsSchema<'a, 'c> { operation.eth_hash, operation.eth_block, operation.created_at, + operation.eth_block_index, + operation.tx_hash, ) .execute(self.0.conn()) .await?; diff --git a/core/lib/storage/src/chain/operations/records.rs b/core/lib/storage/src/chain/operations/records.rs index 3ecd6f6d27..43c49ae35f 100644 --- a/core/lib/storage/src/chain/operations/records.rs +++ b/core/lib/storage/src/chain/operations/records.rs @@ -15,7 +15,7 @@ pub struct StoredOperation { pub confirmed: bool, } -#[derive(Debug, Clone, FromRow)] +#[derive(Debug, Clone, FromRow, PartialEq)] pub struct StoredExecutedPriorityOperation { pub block_number: i64, pub block_index: i32, @@ -27,6 +27,9 @@ pub struct StoredExecutedPriorityOperation { pub 
eth_hash: Vec, pub eth_block: i64, pub created_at: DateTime, + /// This field must be optional because of backward compatibility. + pub eth_block_index: Option, + pub tx_hash: Vec, } #[derive(Debug, Clone, FromRow)] @@ -65,6 +68,9 @@ pub struct NewExecutedPriorityOperation { pub eth_hash: Vec, pub eth_block: i64, pub created_at: DateTime, + /// This field must be optional because of backward compatibility. + pub eth_block_index: Option, + pub tx_hash: Vec, } #[derive(Debug, Clone)] diff --git a/core/lib/storage/src/chain/operations_ext/conversion.rs b/core/lib/storage/src/chain/operations_ext/conversion.rs new file mode 100644 index 0000000000..d9aa2b38fe --- /dev/null +++ b/core/lib/storage/src/chain/operations_ext/conversion.rs @@ -0,0 +1,143 @@ +// Built-in deps +// External imports +// Workspace imports +use zksync_api_types::v02::transaction::{ + ForcedExitData, L1Receipt, L1Transaction, L2Receipt, L2Transaction, Receipt, Transaction, + TransactionData, TxData, TxInBlockStatus, WithdrawData, +}; +use zksync_types::{ + tx::{EthSignData, TxHash}, + BlockNumber, EthBlockId, ZkSyncOp, ZkSyncTx, H256, +}; +// Local imports +use super::records::{StorageTxData, StorageTxReceipt}; + +impl StorageTxReceipt { + pub fn receipt_from_storage_receipt( + receipt: StorageTxReceipt, + is_block_finalized: Option, + ) -> Receipt { + if receipt.block_number.is_some() { + let status = if receipt.success.unwrap() { + if is_block_finalized.unwrap() { + TxInBlockStatus::Finalized + } else { + TxInBlockStatus::Committed + } + } else { + TxInBlockStatus::Rejected + }; + if receipt.eth_block.is_some() { + Receipt::L1(L1Receipt { + status, + eth_block: EthBlockId(receipt.eth_block.unwrap() as u64), + rollup_block: receipt + .block_number + .map(|number| BlockNumber(number as u32)), + id: receipt.priority_op_serialid.unwrap() as u64, + }) + } else { + Receipt::L2(L2Receipt { + status, + tx_hash: TxHash::from_slice(&receipt.tx_hash).unwrap(), + rollup_block: receipt + .block_number + 
.map(|number| BlockNumber(number as u32)), + fail_reason: receipt.fail_reason, + }) + } + } else { + Receipt::L2(L2Receipt { + status: TxInBlockStatus::Queued, + tx_hash: TxHash::from_slice(&receipt.tx_hash).unwrap(), + rollup_block: None, + fail_reason: None, + }) + } + } +} + +impl StorageTxData { + pub fn tx_data_from_zksync_tx( + tx: ZkSyncTx, + complete_withdrawals_tx_hash: Option, + ) -> TransactionData { + let tx = match tx { + ZkSyncTx::ChangePubKey(tx) => L2Transaction::ChangePubKey(tx), + ZkSyncTx::Close(tx) => L2Transaction::Close(tx), + ZkSyncTx::ForcedExit(tx) => L2Transaction::ForcedExit(Box::new(ForcedExitData { + tx: *tx, + eth_tx_hash: complete_withdrawals_tx_hash, + })), + ZkSyncTx::Transfer(tx) => L2Transaction::Transfer(tx), + ZkSyncTx::Withdraw(tx) => L2Transaction::Withdraw(Box::new(WithdrawData { + tx: *tx, + eth_tx_hash: complete_withdrawals_tx_hash, + })), + ZkSyncTx::MintNFT(_) => unimplemented!(), + ZkSyncTx::Swap(_) => unimplemented!(), + ZkSyncTx::WithdrawNFT(_) => unimplemented!(), + }; + TransactionData::L2(tx) + } + + pub fn data_from_storage_data( + data: StorageTxData, + is_block_finalized: Option, + complete_withdrawals_tx_hash: Option, + ) -> TxData { + let tx_hash = TxHash::from_slice(&data.tx_hash).unwrap(); + let tx = if data.block_number.is_some() { + let block_number = data.block_number.map(|number| BlockNumber(number as u32)); + let status = if data.success.unwrap() { + if is_block_finalized.unwrap() { + TxInBlockStatus::Finalized + } else { + TxInBlockStatus::Committed + } + } else { + TxInBlockStatus::Rejected + }; + + let op = if data.eth_hash.is_some() { + let operation: ZkSyncOp = serde_json::from_value(data.op).unwrap(); + let eth_hash = H256::from_slice(&data.eth_hash.unwrap()); + let id = data.priority_op_serialid.unwrap() as u64; + TransactionData::L1( + L1Transaction::from_executed_op(operation, eth_hash, id, tx_hash).unwrap(), + ) + } else { + Self::tx_data_from_zksync_tx( + 
serde_json::from_value(data.op).unwrap(), + complete_withdrawals_tx_hash, + ) + }; + Transaction { + tx_hash, + block_number, + op, + status, + fail_reason: data.fail_reason, + created_at: Some(data.created_at), + } + } else { + let tx_data = Self::tx_data_from_zksync_tx( + serde_json::from_value(data.op).unwrap(), + complete_withdrawals_tx_hash, + ); + Transaction { + tx_hash, + block_number: None, + op: tx_data, + status: TxInBlockStatus::Queued, + fail_reason: None, + created_at: Some(data.created_at), + } + }; + let eth_signature = data.eth_sign_data.map(|eth_sign_data| { + let eth_sign_data: EthSignData = serde_json::from_value(eth_sign_data).unwrap(); + eth_sign_data.signature.to_string() + }); + TxData { tx, eth_signature } + } +} diff --git a/core/lib/storage/src/chain/operations_ext/mod.rs b/core/lib/storage/src/chain/operations_ext/mod.rs index 401278babf..ebe7b008b7 100644 --- a/core/lib/storage/src/chain/operations_ext/mod.rs +++ b/core/lib/storage/src/chain/operations_ext/mod.rs @@ -5,21 +5,37 @@ use std::time::Instant; use chrono::{DateTime, Utc}; // Workspace imports +use zksync_api_types::{ + v02::{ + pagination::{AccountTxsRequest, PaginationDirection, PaginationQuery}, + transaction::{ + ApiTxBatch, BatchStatus, Receipt, Transaction, TxData, TxHashSerializeWrapper, + TxInBlockStatus, + }, + }, + Either, +}; use zksync_crypto::params; -use zksync_types::aggregated_operations::AggregatedActionType; -use zksync_types::{Address, BlockNumber, TokenId}; +use zksync_types::{ + aggregated_operations::AggregatedActionType, + {tx::TxHash, Address, BlockNumber, TokenId}, +}; // Local imports use self::records::{ - AccountCreatedAt, AccountOpReceiptResponse, AccountTxReceiptResponse, - PriorityOpReceiptResponse, TransactionsHistoryItem, TxByHashResponse, TxReceiptResponse, + AccountCreatedAt, InBlockBatchTx, PriorityOpReceiptResponse, StorageTxData, StorageTxReceipt, + TransactionsHistoryItem, TxByHashResponse, TxReceiptResponse, }; use crate::{ - 
chain::operations::{records::StoredExecutedPriorityOperation, OperationsSchema}, + chain::{ + block::records::TransactionItem, + operations::{records::StoredExecutedPriorityOperation, OperationsSchema}, + }, tokens::TokensSchema, QueryResult, StorageProcessor, }; +pub(crate) mod conversion; pub mod records; /// Direction to perform search of transactions to. @@ -72,6 +88,208 @@ impl<'a, 'c> OperationsExtSchema<'a, 'c> { result } + pub async fn tx_receipt_api_v02(&mut self, hash: &[u8]) -> QueryResult> { + let start = Instant::now(); + let mut transaction = self.0.start_transaction().await?; + let hash_str = hex::encode(hash); + let receipt: Option = sqlx::query_as!( + StorageTxReceipt, + r#" + WITH transaction AS ( + SELECT + tx_hash, + block_number, + success, + fail_reason, + Null::bigint as eth_block, + Null::bigint as priority_op_serialid + FROM executed_transactions + WHERE tx_hash = $1 + ), priority_op AS ( + SELECT + tx_hash, + block_number, + true as success, + Null as fail_reason, + eth_block, + priority_op_serialid + FROM executed_priority_operations + WHERE tx_hash = $1 OR eth_hash = $1 + ), mempool_tx AS ( + SELECT + decode(tx_hash, 'hex'), + Null::bigint as block_number, + Null::boolean as success, + Null as fail_reason, + Null::bigint as eth_block, + Null::bigint as priority_op_serialid + FROM mempool_txs + WHERE tx_hash = $2 + ), + everything AS ( + SELECT * FROM transaction + UNION ALL + SELECT * FROM priority_op + UNION ALL + SELECT * FROM mempool_tx + ) + SELECT + tx_hash as "tx_hash!", + block_number as "block_number?", + success as "success?", + fail_reason as "fail_reason?", + eth_block as "eth_block?", + priority_op_serialid as "priority_op_serialid?" 
+ FROM everything + "#, + hash, + &hash_str + ) + .fetch_optional(transaction.conn()) + .await?; + + let result = if let Some(receipt) = receipt { + let is_block_finalized = if let Some(block_number) = receipt.block_number { + Some( + transaction + .chain() + .block_schema() + .is_block_finalized(BlockNumber(block_number as u32)) + .await?, + ) + } else { + None + }; + Some(StorageTxReceipt::receipt_from_storage_receipt( + receipt, + is_block_finalized, + )) + } else { + None + }; + + transaction.commit().await?; + metrics::histogram!( + "sql.chain.operations_ext.tx_receipt_api_v02", + start.elapsed() + ); + Ok(result) + } + + pub async fn tx_data_api_v02(&mut self, hash: &[u8]) -> QueryResult> { + let start = Instant::now(); + let mut transaction = self.0.start_transaction().await?; + let hash_str = hex::encode(hash); + let data: Option = sqlx::query_as!( + StorageTxData, + r#" + WITH transaction AS ( + SELECT + tx_hash, + tx as op, + block_number, + created_at, + success, + fail_reason, + Null::bytea as eth_hash, + Null::bigint as priority_op_serialid, + eth_sign_data + FROM executed_transactions + WHERE tx_hash = $1 + ), priority_op AS ( + SELECT + tx_hash, + operation as op, + block_number, + created_at, + true as success, + Null as fail_reason, + eth_hash, + priority_op_serialid, + Null::jsonb as eth_sign_data + FROM executed_priority_operations + WHERE tx_hash = $1 OR eth_hash = $1 + ), mempool_tx AS ( + SELECT + decode(tx_hash, 'hex'), + tx as op, + Null::bigint as block_number, + created_at, + Null::boolean as success, + Null as fail_reason, + Null::bytea as eth_hash, + Null::bigint as priority_op_serialid, + eth_sign_data + FROM mempool_txs + WHERE tx_hash = $2 + ), + everything AS ( + SELECT * FROM transaction + UNION ALL + SELECT * FROM priority_op + UNION ALL + SELECT * FROM mempool_tx + ) + SELECT + tx_hash as "tx_hash!", + op as "op!", + block_number as "block_number?", + created_at as "created_at!", + success as "success?", + fail_reason as 
"fail_reason?", + eth_hash as "eth_hash?", + priority_op_serialid as "priority_op_serialid?", + eth_sign_data as "eth_sign_data?" + FROM everything + "#, + hash, + &hash_str + ) + .fetch_optional(transaction.conn()) + .await?; + + let result = if let Some(data) = data { + let complete_withdrawals_tx_hash = if let Some(tx_type) = data.op.get("type") { + let tx_type = tx_type.as_str().unwrap(); + if tx_type == "Withdraw" || tx_type == "ForcedExit" { + transaction + .chain() + .operations_schema() + .eth_tx_for_withdrawal(&TxHash::from_slice(&data.tx_hash).unwrap()) + .await? + } else { + None + } + } else { + None + }; + + let is_block_finalized = if let Some(block_number) = data.block_number { + Some( + transaction + .chain() + .block_schema() + .is_block_finalized(BlockNumber(block_number as u32)) + .await?, + ) + } else { + None + }; + + Some(StorageTxData::data_from_storage_data( + data, + is_block_finalized, + complete_withdrawals_tx_hash, + )) + } else { + None + }; + + transaction.commit().await?; + metrics::histogram!("sql.chain.operations_ext.tx_data_api_v02", start.elapsed()); + Ok(result) + } + pub async fn get_priority_op_receipt( &mut self, op_id: u32, @@ -269,7 +487,7 @@ impl<'a, 'c> OperationsExtSchema<'a, 'c> { let start = Instant::now(); // TODO: Maybe move the transformations to api_server (ZKS-114)? let tx: Option = OperationsSchema(self.0) - .get_executed_priority_operation_by_hash(hash) + .get_executed_priority_operation_by_eth_hash(hash) .await?; let result = if let Some(tx) = tx { @@ -719,321 +937,419 @@ impl<'a, 'c> OperationsExtSchema<'a, 'c> { Ok(tx_history) } - /// Loads the range of transaction receipts applied to the given account address - /// starting from the specified transaction location. Transaction location is defined - /// by the (`block_number`, `block index`) pair. This method can be used to get receipts - /// "older" than some location or "newer" than one. 
- /// - /// The response for "newer" receipts is sorted in ascending order by position and for "older" - /// ones in descending order. - pub async fn get_account_transactions_receipts( + pub async fn get_account_transactions( &mut self, - address: Address, - block_number: u64, - block_index: Option, - direction: SearchDirection, - limit: u64, - ) -> QueryResult> { + query: &PaginationQuery, + ) -> QueryResult>> { let start = Instant::now(); - - let block_number = block_number as i64; - let block_index = block_index.map(|x| x as i32).unwrap_or(-1); - - let receipts: Vec<_> = match direction { - SearchDirection::Newer => { - sqlx::query_as!( - AccountTxReceiptResponse, - r#" - WITH block_details AS ( - WITH aggr_comm AS ( - SELECT - aggregate_operations.created_at, - eth_operations.final_hash, - commit_aggregated_blocks_binding.block_number - FROM aggregate_operations - INNER JOIN commit_aggregated_blocks_binding ON aggregate_operations.id = commit_aggregated_blocks_binding.op_id - INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id - INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id - WHERE aggregate_operations.confirmed = true - ) - , aggr_exec as ( - SELECT - aggregate_operations.created_at, - eth_operations.final_hash, - execute_aggregated_blocks_binding.block_number - FROM aggregate_operations - INNER JOIN execute_aggregated_blocks_binding ON aggregate_operations.id = execute_aggregated_blocks_binding.op_id - INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id - INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id - WHERE aggregate_operations.confirmed = true - ) - SELECT - blocks.number AS details_block_number, - committed.final_hash AS commit_tx_hash, - verified.final_hash AS verify_tx_hash - FROM blocks - INNER JOIN aggr_comm committed ON blocks.number = committed.block_number - LEFT JOIN 
aggr_exec verified ON blocks.number = verified.block_number - ) - SELECT - block_number, - block_index as "block_index?", - tx_hash, - success, - fail_reason as "fail_reason?", - details.commit_tx_hash as "commit_tx_hash?", - details.verify_tx_hash as "verify_tx_hash?" - FROM executed_transactions - LEFT JOIN block_details details ON details.details_block_number = executed_transactions.block_number - WHERE ( - (primary_account_address = $1 OR from_account = $1 OR to_account = $1) - AND ( - block_number = $2 AND ( - COALESCE(block_index, -1) >= $3 - ) OR ( - block_number > $2 + let mut transaction = self.0.start_transaction().await?; + let tx_hash = match query.from.tx_hash.inner { + Either::Left(tx_hash) => tx_hash, + Either::Right(_) => { + if let Some(tx_hash) = transaction + .chain() + .operations_ext_schema() + .get_account_last_tx_hash(query.from.address) + .await? + { + tx_hash + } else { + return Ok(Some(Vec::new())); + } + } + }; + let created_at_and_block = transaction + .chain() + .operations_ext_schema() + .get_tx_created_at_and_block_number(tx_hash) + .await?; + let txs = if let Some((time_from, _)) = created_at_and_block { + let raw_txs: Vec = match query.direction { + PaginationDirection::Newer => { + sqlx::query_as!( + TransactionItem, + r#" + WITH transactions AS ( + SELECT + tx_hash, + tx as op, + block_number, + created_at, + success, + fail_reason, + Null::bytea as eth_hash, + Null::bigint as priority_op_serialid, + block_index + FROM executed_transactions + WHERE (from_account = $1 OR to_account = $1 OR primary_account_address = $1) + AND created_at >= $2 + ), priority_ops AS ( + SELECT + tx_hash, + operation as op, + block_number, + created_at, + true as success, + Null as fail_reason, + eth_hash, + priority_op_serialid, + block_index + FROM executed_priority_operations + WHERE (from_account = $1 OR to_account = $1) AND created_at >= $2 + ), everything AS ( + SELECT * FROM transactions + UNION ALL + SELECT * FROM priority_ops ) - ) + SELECT + 
tx_hash as "tx_hash!", + block_number as "block_number!", + op as "op!", + created_at as "created_at!", + success as "success!", + fail_reason as "fail_reason?", + eth_hash as "eth_hash?", + priority_op_serialid as "priority_op_serialid?" + FROM everything + ORDER BY created_at ASC, block_index ASC + LIMIT $3 + "#, + query.from.address.as_bytes(), + time_from, + i64::from(query.limit), ) - ORDER BY block_number ASC, COALESCE(block_index, -1) ASC - LIMIT $4 - "#, - address.as_bytes(), - block_number, - block_index, - limit as i64, - ).fetch_all(self.0.conn()) - .await? - }, - - SearchDirection::Older => { - sqlx::query_as!( - AccountTxReceiptResponse, - r#" - WITH block_details AS ( - WITH aggr_comm AS ( - SELECT - aggregate_operations.created_at, - eth_operations.final_hash, - commit_aggregated_blocks_binding.block_number - FROM aggregate_operations - INNER JOIN commit_aggregated_blocks_binding ON aggregate_operations.id = commit_aggregated_blocks_binding.op_id - INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id - INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id - WHERE aggregate_operations.confirmed = true - ) - , aggr_exec as ( - SELECT - aggregate_operations.created_at, - eth_operations.final_hash, - execute_aggregated_blocks_binding.block_number - FROM aggregate_operations - INNER JOIN execute_aggregated_blocks_binding ON aggregate_operations.id = execute_aggregated_blocks_binding.op_id - INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id - INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id - WHERE aggregate_operations.confirmed = true - ) - SELECT - blocks.number AS details_block_number, - committed.final_hash AS commit_tx_hash, - verified.final_hash AS verify_tx_hash - FROM blocks - INNER JOIN aggr_comm committed ON blocks.number = committed.block_number - LEFT JOIN aggr_exec verified ON 
blocks.number = verified.block_number - ) - SELECT - block_number, - block_index as "block_index?", - tx_hash, - success, - fail_reason as "fail_reason?", - details.commit_tx_hash as "commit_tx_hash?", - details.verify_tx_hash as "verify_tx_hash?" - FROM executed_transactions - LEFT JOIN block_details details ON details.details_block_number = executed_transactions.block_number - WHERE ( - (primary_account_address = $1 OR from_account = $1 OR to_account = $1) - AND ( - block_number = $2 AND ( - COALESCE(block_index, -1) <= $3 - ) OR ( - block_number < $2 + .fetch_all(transaction.conn()) + .await? + } + PaginationDirection::Older => { + sqlx::query_as!( + TransactionItem, + r#" + WITH transactions AS ( + SELECT + tx_hash, + tx as op, + block_number, + created_at, + success, + fail_reason, + Null::bytea as eth_hash, + Null::bigint as priority_op_serialid, + block_index + FROM executed_transactions + WHERE (from_account = $1 OR to_account = $1 OR primary_account_address = $1) + AND created_at <= $2 + ), priority_ops AS ( + SELECT + tx_hash, + operation as op, + block_number, + created_at, + true as success, + Null as fail_reason, + eth_hash, + priority_op_serialid, + block_index + FROM executed_priority_operations + WHERE (from_account = $1 OR to_account = $1) AND created_at <= $2 + ), everything AS ( + SELECT * FROM transactions + UNION ALL + SELECT * FROM priority_ops ) - ) + SELECT + tx_hash as "tx_hash!", + block_number as "block_number!", + op as "op!", + created_at as "created_at!", + success as "success!", + fail_reason as "fail_reason?", + eth_hash as "eth_hash?", + priority_op_serialid as "priority_op_serialid?" + FROM everything + ORDER BY created_at DESC, block_index DESC + LIMIT $3 + "#, + query.from.address.as_bytes(), + time_from, + i64::from(query.limit), ) - ORDER BY block_number DESC, COALESCE(block_index, -1) DESC - LIMIT $4 - "#, - address.as_bytes(), - block_number, - block_index, - limit as i64, - ).fetch_all(self.0.conn()) - .await? 
- } + .fetch_all(transaction.conn()) + .await? + } + }; + let last_finalized = transaction + .chain() + .block_schema() + .get_last_verified_confirmed_block() + .await?; + let txs: Vec = raw_txs + .into_iter() + .map(|tx| { + if tx.block_number as u32 <= *last_finalized { + TransactionItem::transaction_from_item(tx, true) + } else { + TransactionItem::transaction_from_item(tx, false) + } + }) + .collect(); + Some(txs) + } else { + None }; + transaction.commit().await?; metrics::histogram!( - "sql.chain.operations_ext.get_account_transactions_receipts", + "sql.chain.operations_ext.get_account_transactions", start.elapsed() ); - Ok(receipts) + Ok(txs) } - /// Loads the range of priority operation receipts applied to the given account address - /// starting from the specified operation location. Transaction location is defined - /// by the (`block_number`, `block index`) pair. This method can be used to get receipts - /// "older" than some location or "newer" than one. - /// - /// The response for "newer" receipts is sorted in ascending order by position and for "older" - /// ones in descending order. 
- pub async fn get_account_operations_receipts( + pub async fn get_account_last_tx_hash( &mut self, address: Address, - block_number: u64, - block_index: u32, - direction: SearchDirection, - limit: u64, - ) -> QueryResult> { + ) -> QueryResult> { let start = Instant::now(); - - let block_number = block_number as i64; - let block_index = block_index as i32; - - let receipts: Vec<_> = match direction { - SearchDirection::Newer => { - sqlx::query_as!( - AccountOpReceiptResponse, - r#" - WITH block_details AS ( - WITH aggr_comm AS ( - SELECT - aggregate_operations.created_at, - eth_operations.final_hash, - commit_aggregated_blocks_binding.block_number - FROM aggregate_operations - INNER JOIN commit_aggregated_blocks_binding ON aggregate_operations.id = commit_aggregated_blocks_binding.op_id - INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id - INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id - WHERE aggregate_operations.confirmed = true - ) - , aggr_exec as ( - SELECT - aggregate_operations.created_at, - eth_operations.final_hash, - execute_aggregated_blocks_binding.block_number - FROM aggregate_operations - INNER JOIN execute_aggregated_blocks_binding ON aggregate_operations.id = execute_aggregated_blocks_binding.op_id - INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id - INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id - WHERE aggregate_operations.confirmed = true - ) - SELECT - blocks.number AS details_block_number, - committed.final_hash AS commit_tx_hash, - verified.final_hash AS verify_tx_hash - FROM blocks - INNER JOIN aggr_comm committed ON blocks.number = committed.block_number - LEFT JOIN aggr_exec verified ON blocks.number = verified.block_number - ) - SELECT - block_number, - block_index, - eth_hash, - details.commit_tx_hash as "commit_tx_hash?", - details.verify_tx_hash as 
"verify_tx_hash?" + let record = sqlx::query!( + r#" + WITH transactions AS ( + SELECT tx_hash, created_at, block_index + FROM executed_transactions + WHERE from_account = $1 OR to_account = $1 OR primary_account_address = $1 + ), priority_ops AS ( + SELECT tx_hash, created_at, block_index FROM executed_priority_operations - LEFT JOIN block_details details ON details.details_block_number = executed_priority_operations.block_number - WHERE ( - (from_account = $1 OR to_account = $1) - AND ( - block_number = $2 AND ( - block_index >= $3 - ) OR ( - block_number > $2 - ) - ) - ) - ORDER BY block_number ASC, block_index ASC - LIMIT $4 - "#, - address.as_bytes(), - block_number, - block_index, - limit as i64, - ).fetch_all(self.0.conn()) - .await? - }, - - SearchDirection::Older => { - sqlx::query_as!( - AccountOpReceiptResponse, - r#" - WITH block_details AS ( - WITH aggr_comm AS ( - SELECT - aggregate_operations.created_at, - eth_operations.final_hash, - commit_aggregated_blocks_binding.block_number - FROM aggregate_operations - INNER JOIN commit_aggregated_blocks_binding ON aggregate_operations.id = commit_aggregated_blocks_binding.op_id - INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id - INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id - WHERE aggregate_operations.confirmed = true - ) - , aggr_exec as ( - SELECT - aggregate_operations.created_at, - eth_operations.final_hash, - execute_aggregated_blocks_binding.block_number - FROM aggregate_operations - INNER JOIN execute_aggregated_blocks_binding ON aggregate_operations.id = execute_aggregated_blocks_binding.op_id - INNER JOIN eth_aggregated_ops_binding ON aggregate_operations.id = eth_aggregated_ops_binding.op_id - INNER JOIN eth_operations ON eth_operations.id = eth_aggregated_ops_binding.eth_op_id - WHERE aggregate_operations.confirmed = true - ) - SELECT - blocks.number AS details_block_number, - committed.final_hash AS 
commit_tx_hash, - verified.final_hash AS verify_tx_hash - FROM blocks - INNER JOIN aggr_comm committed ON blocks.number = committed.block_number - LEFT JOIN aggr_exec verified ON blocks.number = verified.block_number - ) - SELECT - block_number, - block_index, - eth_hash, - details.commit_tx_hash as "commit_tx_hash?", - details.verify_tx_hash as "verify_tx_hash?" + WHERE from_account = $1 OR to_account = $1 + ), everything AS ( + SELECT * FROM transactions + UNION ALL + SELECT * FROM priority_ops + ) + SELECT + tx_hash as "tx_hash!" + FROM everything + ORDER BY created_at DESC, block_index DESC + LIMIT 1 + "#, + address.as_bytes(), + ) + .fetch_optional(self.0.conn()) + .await?; + + metrics::histogram!( + "sql.chain.operations_ext.get_account_last_tx_hash", + start.elapsed() + ); + Ok(record.map(|record| TxHash::from_slice(&record.tx_hash).unwrap())) + } + + pub async fn get_block_last_tx_hash( + &mut self, + block_number: BlockNumber, + ) -> QueryResult> { + let start = Instant::now(); + let record = sqlx::query!( + r#" + WITH transactions AS ( + SELECT tx_hash, created_at, block_index + FROM executed_transactions + WHERE block_number = $1 + ), priority_ops AS ( + SELECT tx_hash, created_at, block_index FROM executed_priority_operations - LEFT JOIN block_details details ON details.details_block_number = executed_priority_operations.block_number - WHERE ( - (from_account = $1 OR to_account = $1) - AND ( - block_number = $2 AND ( - block_index <= $3 - ) OR ( - block_number < $2 - ) - ) - ) - ORDER BY block_number DESC, block_index DESC - LIMIT $4 - "#, - address.as_bytes(), - block_number, - block_index, - limit as i64, - ).fetch_all(self.0.conn()) - .await? - } + WHERE block_number = $1 + ), everything AS ( + SELECT * FROM transactions + UNION ALL + SELECT * FROM priority_ops + ) + SELECT + tx_hash as "tx_hash!" 
+ FROM everything + ORDER BY created_at DESC, block_index DESC + LIMIT 1 + "#, + i64::from(*block_number) + ) + .fetch_optional(self.0.conn()) + .await?; + + metrics::histogram!( + "sql.chain.operations_ext.get_block_last_tx_hash", + start.elapsed() + ); + Ok(record.map(|record| TxHash::from_slice(&record.tx_hash).unwrap())) + } + + pub async fn get_account_transactions_count(&mut self, address: Address) -> QueryResult { + let start = Instant::now(); + let mut transaction = self.0.start_transaction().await?; + let last_committed = transaction + .chain() + .block_schema() + .get_last_committed_confirmed_block() + .await?; + let tx_count = sqlx::query!( + r#" + SELECT COUNT(*) as "count!" FROM executed_transactions + WHERE block_number <= $1 AND (from_account = $2 OR to_account = $2 OR primary_account_address = $2) + "#, + i64::from(*last_committed), + address.as_bytes() + ) + .fetch_one(transaction.conn()) + .await? + .count; + + let priority_op_count = sqlx::query!( + r#" + SELECT COUNT(*) as "count!" FROM executed_priority_operations + WHERE block_number <= $1 AND (from_account = $2 OR to_account = $2) + "#, + i64::from(*last_committed), + address.as_bytes() + ) + .fetch_one(transaction.conn()) + .await? + .count; + transaction.commit().await?; + + metrics::histogram!( + "sql.chain.operations_ext.get_account_transactions_count", + start.elapsed() + ); + Ok((tx_count + priority_op_count) as u32) + } + + /// Returns `created_at` and `block_number` fields for transaction with given hash. 
+ pub async fn get_tx_created_at_and_block_number( + &mut self, + tx_hash: TxHash, + ) -> QueryResult, BlockNumber)>> { + let start = Instant::now(); + let mut transaction = self.0.start_transaction().await?; + + let record = sqlx::query!( + "SELECT created_at, block_number FROM executed_transactions + WHERE tx_hash = $1", + tx_hash.as_ref() + ) + .fetch_optional(transaction.conn()) + .await?; + + let result = if let Some(record) = record { + Some((record.created_at, BlockNumber(record.block_number as u32))) + } else { + let record = sqlx::query!( + "SELECT created_at, block_number FROM executed_priority_operations + WHERE tx_hash = $1", + tx_hash.as_ref() + ) + .fetch_optional(transaction.conn()) + .await?; + + record.map(|record| (record.created_at, BlockNumber(record.block_number as u32))) }; + transaction.commit().await?; metrics::histogram!( - "sql.chain.operations_ext.get_account_operations_receipts", + "sql.chain.block.get_tx_created_at_and_block_number", start.elapsed() ); - Ok(receipts) + Ok(result) + } + + pub async fn get_in_block_batch_info( + &mut self, + batch_hash: TxHash, + ) -> QueryResult> { + let start = Instant::now(); + let mut transaction = self.0.start_transaction().await?; + + let batch_data: Vec = sqlx::query_as!( + InBlockBatchTx, + r#" + SELECT tx_hash, created_at, success, block_number + FROM executed_transactions + INNER JOIN txs_batches_hashes + ON txs_batches_hashes.batch_id = COALESCE(executed_transactions.batch_id, 0) + WHERE batch_hash = $1 + ORDER BY created_at ASC, block_index ASC + "#, + batch_hash.as_ref() + ) + .fetch_all(transaction.conn()) + .await?; + let result = if !batch_data.is_empty() { + let created_at = batch_data[0].created_at; + let transaction_hashes: Vec = batch_data + .iter() + .map(|tx| TxHashSerializeWrapper(TxHash::from_slice(&tx.tx_hash).unwrap())) + .collect(); + let block_number = BlockNumber(batch_data[0].block_number as u32); + let batch_status = if batch_data[0].success { + if let Some(op) = transaction 
+ .chain() + .operations_schema() + .get_stored_aggregated_operation( + block_number, + AggregatedActionType::ExecuteBlocks, + ) + .await + { + BatchStatus { + updated_at: op.created_at, + last_state: TxInBlockStatus::Finalized, + } + } else { + BatchStatus { + updated_at: created_at, + last_state: TxInBlockStatus::Committed, + } + } + } else { + BatchStatus { + updated_at: created_at, + last_state: TxInBlockStatus::Rejected, + } + }; + Some(ApiTxBatch { + batch_hash, + transaction_hashes, + created_at, + batch_status, + }) + } else { + None + }; + transaction.commit().await?; + + metrics::histogram!("sql.chain.block.get_in_block_batch_info", start.elapsed()); + Ok(result) + } + + pub async fn get_batch_info(&mut self, batch_hash: TxHash) -> QueryResult> { + let start = Instant::now(); + let mut transaction = self.0.start_transaction().await?; + + let result = if let Some(batch_info) = transaction + .chain() + .operations_ext_schema() + .get_in_block_batch_info(batch_hash) + .await? + { + Some(batch_info) + } else { + transaction + .chain() + .mempool_schema() + .get_queued_batch_info(batch_hash) + .await? + }; + transaction.commit().await?; + + metrics::histogram!("sql.chain.block.get_batch_info", start.elapsed()); + Ok(result) } } diff --git a/core/lib/storage/src/chain/operations_ext/records.rs b/core/lib/storage/src/chain/operations_ext/records.rs index 9007288ad2..5619ec0a37 100644 --- a/core/lib/storage/src/chain/operations_ext/records.rs +++ b/core/lib/storage/src/chain/operations_ext/records.rs @@ -8,7 +8,6 @@ use serde::{Deserialize, Serialize}; use serde_json::value::Value; use sqlx::FromRow; // Workspace imports - // Local imports use crate::prover::records::ProverRun; @@ -141,3 +140,33 @@ pub struct AccountOpReceiptResponse { /// given priority operation. 
pub verify_tx_hash: Option>, } + +#[derive(Debug, FromRow, PartialEq)] +pub struct InBlockBatchTx { + pub tx_hash: Vec, + pub created_at: DateTime, + pub success: bool, + pub block_number: i64, +} + +#[derive(Debug, FromRow, PartialEq)] +pub struct StorageTxReceipt { + pub tx_hash: Vec, + pub block_number: Option, + pub success: Option, + pub fail_reason: Option, + pub eth_block: Option, + pub priority_op_serialid: Option, +} + +pub struct StorageTxData { + pub tx_hash: Vec, + pub block_number: Option, + pub op: Value, + pub success: Option, + pub fail_reason: Option, + pub created_at: DateTime, + pub eth_hash: Option>, + pub priority_op_serialid: Option, + pub eth_sign_data: Option, +} diff --git a/core/lib/storage/src/event/mod.rs b/core/lib/storage/src/event/mod.rs index d3518580c5..8923c1c021 100644 --- a/core/lib/storage/src/event/mod.rs +++ b/core/lib/storage/src/event/mod.rs @@ -121,7 +121,7 @@ impl<'a, 'c> EventSchema<'a, 'c> { let block_details = transaction .chain() .block_schema() - .load_block_range(block_number, 1) + .load_block_range_desc(block_number, 1) .await?; // If there're no block details for the given block number, // ignore the event. Since the `eth_sender` is currently diff --git a/core/lib/storage/src/tests/chain/accounts.rs b/core/lib/storage/src/tests/chain/accounts.rs index 6f1913ee86..b2e666ef4b 100644 --- a/core/lib/storage/src/tests/chain/accounts.rs +++ b/core/lib/storage/src/tests/chain/accounts.rs @@ -47,8 +47,18 @@ async fn stored_accounts(mut storage: StorageProcessor<'_>) -> QueryResult<()> { let block_size = 100; - let accounts = AccountMap::default(); + let (last_finalized, _) = AccountSchema(&mut storage) + .account_and_last_block(AccountId(1)) + .await?; + let last_committed = AccountSchema(&mut storage) + .last_committed_block_with_update_for_acc(AccountId(1)) + .await?; + assert_eq!(last_finalized, 0); + assert_eq!(*last_committed, 0); + // Create several accounts. 
+ let accounts = AccountMap::default(); + let (mut accounts_block, mut updates_block) = apply_random_updates(accounts, &mut rng); let mut nft_updates = vec![]; @@ -81,6 +91,16 @@ async fn stored_accounts(mut storage: StorageProcessor<'_>) -> QueryResult<()> { // Get the accounts by their addresses. for (account_id, account) in accounts_block.iter() { let mut account = account.clone(); + + let (last_finalized, _) = AccountSchema(&mut storage) + .account_and_last_block(*account_id) + .await?; + let last_committed = AccountSchema(&mut storage) + .last_committed_block_with_update_for_acc(*account_id) + .await?; + assert_eq!(last_finalized, 0); + assert_eq!(*last_committed, 1); + let account_state = AccountSchema(&mut storage) .account_state_by_address(account.address) .await?; @@ -108,7 +128,8 @@ async fn stored_accounts(mut storage: StorageProcessor<'_>) -> QueryResult<()> { assert_eq!( AccountSchema(&mut storage) .last_committed_state_for_account(*account_id) - .await?, + .await? + .1, Some(got_account) ); @@ -141,6 +162,15 @@ async fn stored_accounts(mut storage: StorageProcessor<'_>) -> QueryResult<()> { // After that all the accounts should have a verified state. 
for (account_id, account) in accounts_block { + let (last_finalized, _) = AccountSchema(&mut storage) + .account_and_last_block(account_id) + .await?; + let last_committed = AccountSchema(&mut storage) + .last_committed_block_with_update_for_acc(account_id) + .await?; + assert_eq!(last_finalized, 1); + assert_eq!(*last_committed, 1); + let account_state = AccountSchema(&mut storage) .account_state_by_id(account_id) .await?; diff --git a/core/lib/storage/src/tests/chain/block.rs b/core/lib/storage/src/tests/chain/block.rs index db37f07cd9..b2af7ade6c 100644 --- a/core/lib/storage/src/tests/chain/block.rs +++ b/core/lib/storage/src/tests/chain/block.rs @@ -1,11 +1,19 @@ // External imports // Workspace imports +use zksync_api_types::v02::pagination::{ + ApiEither, BlockAndTxHash, PaginationDirection, PaginationQuery, +}; use zksync_crypto::{convert::FeConvert, rand::XorShiftRng}; use zksync_types::{ - aggregated_operations::AggregatedActionType, helpers::apply_updates, tx::ChangePubKeyType, + aggregated_operations::AggregatedActionType, + helpers::apply_updates, + tx::{ChangePubKeyType, TxHash}, AccountId, AccountMap, AccountUpdate, AccountUpdates, BlockNumber, TokenId, H256, }; // Local imports +use super::operations_ext::{ + commit_block, commit_schema_data, setup::TransactionsHistoryTestSetup, verify_block, +}; use crate::{ chain::{ block::{records::StorageBlockDetails, BlockSchema}, @@ -328,12 +336,12 @@ async fn test_find_block_by_height_or_hash(mut storage: StorageProcessor<'_>) -> Ok(()) } -/// Checks that `load_block_range` method loads the range of blocks correctly. +/// Checks that `load_block_page` method loads the range of blocks correctly. 
#[db_test] -async fn test_block_range(mut storage: StorageProcessor<'_>) -> QueryResult<()> { - /// Loads the block range and checks that every block in the response is +async fn test_block_page(mut storage: StorageProcessor<'_>) -> QueryResult<()> { + /// Loads the block range in desc order and checks that every block in the response is /// equal to the one obtained from `find_block_by_height_or_hash` method. - async fn check_block_range( + async fn check_block_range_desc( storage: &mut StorageProcessor<'_>, max_block: BlockNumber, limit: u32, @@ -344,8 +352,16 @@ async fn test_block_range(mut storage: StorageProcessor<'_>) -> QueryResult<()> BlockNumber(1) }; let block_range = BlockSchema(storage) - .load_block_range(max_block, limit) + .load_block_range_desc(max_block, limit) + .await?; + let block_page = BlockSchema(storage) + .load_block_page(&PaginationQuery { + from: max_block, + limit, + direction: PaginationDirection::Older, + }) .await?; + assert_eq!(block_range, block_page); // Go in the reversed order, since the blocks themselves are ordered backwards. for (idx, block_number) in (*start_block..=*max_block).rev().enumerate() { let expected = BlockSchema(storage) @@ -364,6 +380,47 @@ async fn test_block_range(mut storage: StorageProcessor<'_>) -> QueryResult<()> Ok(()) } + /// Loads the block range in asc order and checks that every block in the response is + /// equal to the one obtained from `find_block_by_height_or_hash` method. 
+ async fn check_block_range_asc( + storage: &mut StorageProcessor<'_>, + min_block: BlockNumber, + limit: u32, + max_block: BlockNumber, + ) -> QueryResult<()> { + let last_block = if *max_block >= *min_block + limit { + min_block + limit - 1 + } else { + max_block + }; + let block_range = BlockSchema(storage) + .load_block_range_asc(min_block, limit) + .await?; + let block_page = BlockSchema(storage) + .load_block_page(&PaginationQuery { + from: min_block, + limit, + direction: PaginationDirection::Newer, + }) + .await?; + assert_eq!(block_range, block_page); + for (idx, block_number) in (*min_block..=*last_block).enumerate() { + let expected = BlockSchema(storage) + .find_block_by_height_or_hash(block_number.to_string()) + .await + .unwrap_or_else(|| { + panic!( + "Can't load the existing block with the index {}", + block_number + ) + }); + let got = &block_range[idx]; + assert_eq!(got, &expected); + } + + Ok(()) + } + // Below lies the initialization of the data for the test. let mut rng = create_rng(); @@ -469,10 +526,26 @@ async fn test_block_range(mut storage: StorageProcessor<'_>) -> QueryResult<()> (n_commited_block_number, 1), (n_commited_block_number, 0), (n_commited_block_number, 100), + (n_verified_block_number + 1, n_verified), ]; for (max_block, limit) in test_vector { - check_block_range(&mut storage, max_block, limit).await?; + check_block_range_desc(&mut storage, max_block, limit).await?; + } + + let test_vector = vec![ + (BlockNumber(1), n_committed), + (BlockNumber(1), n_verified + 1), + (BlockNumber(2), n_verified + 1), + (n_verified_block_number + 1, n_committed - n_verified), + (BlockNumber(2), 0), + (BlockNumber(2), 1), + (BlockNumber(2), 3), + (BlockNumber(2), 10), + ]; + + for (max_block, limit) in test_vector { + check_block_range_asc(&mut storage, max_block, limit, n_commited_block_number).await?; } Ok(()) @@ -592,7 +665,7 @@ async fn unconfirmed_transaction(mut storage: StorageProcessor<'_>) -> QueryResu .is_none()); let block_range = 
BlockSchema(&mut storage) - .load_block_range(BlockNumber(n_committed), 100) + .load_block_range_desc(BlockNumber(n_committed), 100) .await?; assert_eq!(block_range.len(), n_commited_confirmed as usize); @@ -930,6 +1003,39 @@ async fn test_operations_counter(mut storage: StorageProcessor<'_>) -> QueryResu Ok(()) } +/// Checks that `get_block_status_and_last_updated` method works correctly. +#[db_test] +async fn test_is_block_finalized(mut storage: StorageProcessor<'_>) -> QueryResult<()> { + let block_number = BlockNumber(1); + + let result = storage + .chain() + .block_schema() + .is_block_finalized(BlockNumber(1)) + .await?; + assert!(!result); + + commit_block(&mut storage, block_number).await?; + + let result = storage + .chain() + .block_schema() + .is_block_finalized(BlockNumber(1)) + .await?; + assert!(!result); + + verify_block(&mut storage, block_number).await?; + + let result = storage + .chain() + .block_schema() + .is_block_finalized(BlockNumber(1)) + .await?; + assert!(result); + + Ok(()) +} + /// Check that blocks are removed correctly. #[db_test] async fn test_remove_blocks(mut storage: StorageProcessor<'_>) -> QueryResult<()> { @@ -993,6 +1099,135 @@ async fn test_remove_pending_block(mut storage: StorageProcessor<'_>) -> QueryRe Ok(()) } +/// Checks that `get_block_transactions_page` method works correctly. 
+#[db_test] +async fn test_get_block_transactions_page(mut storage: StorageProcessor<'_>) -> QueryResult<()> { + let mut setup = TransactionsHistoryTestSetup::new(); + setup.add_block(1); + setup.add_block(2); + commit_schema_data(&mut storage, &setup).await?; + + let tx_hashes = vec![ + setup.get_tx_hash(0, 0), + setup.get_tx_hash(0, 1), + setup.get_tx_hash(0, 2), + setup.get_tx_hash(0, 3), + setup.get_tx_hash(0, 4), + setup.get_tx_hash(0, 5), + setup.get_tx_hash(0, 6), + ]; + + for (tx_hash, limit, direction, expected, test_name) in vec![ + ( + tx_hashes[0], + 5, + PaginationDirection::Newer, + tx_hashes[0..5].to_vec(), + "First 5 txs", + ), + ( + tx_hashes[0], + 1, + PaginationDirection::Newer, + tx_hashes[0..1].to_vec(), + "1 tx (newer)", + ), + ( + tx_hashes[1], + 5, + PaginationDirection::Newer, + tx_hashes[1..6].to_vec(), + "Middle 5 txs (newer)", + ), + ( + tx_hashes[5], + 100, + PaginationDirection::Newer, + tx_hashes[5..].to_vec(), + "Big limit (newer)", + ), + ( + tx_hashes[6], + 5, + PaginationDirection::Older, + tx_hashes[2..=6].iter().rev().cloned().collect(), + "Last 5 txs", + ), + ( + tx_hashes[6], + 1, + PaginationDirection::Older, + tx_hashes[6..=6].iter().rev().cloned().collect(), + "1 tx (older)", + ), + ( + tx_hashes[5], + 5, + PaginationDirection::Older, + tx_hashes[1..=5].iter().rev().cloned().collect(), + "Middle 5 txs (older)", + ), + ( + tx_hashes[5], + 100, + PaginationDirection::Older, + tx_hashes[..=5].iter().rev().cloned().collect(), + "Big limit (older)", + ), + ] { + let actual: Vec = storage + .chain() + .block_schema() + .get_block_transactions_page(&PaginationQuery { + from: BlockAndTxHash { + block_number: BlockNumber(1), + tx_hash: ApiEither::from(tx_hash), + }, + limit, + direction, + }) + .await? 
+ .unwrap() + .into_iter() + .map(|tx| tx.tx_hash) + .collect(); + assert_eq!(actual, expected, "\"{}\", failed", test_name); + } + + // Check that it returns None for unknown tx_hash + setup.add_block(3); + let result = storage + .chain() + .block_schema() + .get_block_transactions_page(&PaginationQuery { + from: BlockAndTxHash { + block_number: BlockNumber(3), + tx_hash: ApiEither::from(setup.get_tx_hash(2, 0)), + }, + limit: 1, + direction: PaginationDirection::Newer, + }) + .await?; + assert!(result.is_none()); + + // Check that it returns None if block of `from` tx differs from `block_number` + let result = storage + .chain() + .block_schema() + .get_block_transactions_page(&PaginationQuery { + from: BlockAndTxHash { + block_number: BlockNumber(2), + tx_hash: ApiEither::from(setup.get_tx_hash(0, 0)), + }, + limit: 1, + direction: PaginationDirection::Newer, + }) + .await?; + assert!(result.is_none()); + + Ok(()) +} + /// Check that account tree cache is removed correctly. #[db_test] async fn test_remove_account_tree_cache(mut storage: StorageProcessor<'_>) -> QueryResult<()> { diff --git a/core/lib/storage/src/tests/chain/mempool.rs b/core/lib/storage/src/tests/chain/mempool.rs index 5d35aaf228..40addef1ec 100644 --- a/core/lib/storage/src/tests/chain/mempool.rs +++ b/core/lib/storage/src/tests/chain/mempool.rs @@ -3,6 +3,7 @@ use zksync_crypto::rand::{Rng, SeedableRng, XorShiftRng}; // Workspace imports use zksync_types::{ mempool::SignedTxVariant, + tx::TxHash, tx::{ChangePubKey, Transfer, Withdraw}, AccountId, Address, BlockNumber, Nonce, SignedZkSyncTx, TokenId, ZkSyncTx, }; @@ -13,6 +14,7 @@ use crate::{ chain::{ mempool::MempoolSchema, operations::{records::NewExecutedTransaction, OperationsSchema}, + operations_ext::OperationsExtSchema, }, QueryResult, StorageProcessor, }; @@ -348,6 +350,32 @@ async fn contains_and_get_tx(mut storage: StorageProcessor<'_>) -> QueryResult<( Ok(()) } +/// Checks that batch is got from mempool correctly +#[db_test] +async 
fn test_get_batch_info_from_mempool(mut storage: StorageProcessor<'_>) -> QueryResult<()> { + let txs = gen_transfers(5); + MempoolSchema(&mut storage) + .insert_batch(&txs, Vec::new()) + .await?; + + let tx_hashes: Vec = txs.into_iter().map(|tx| tx.hash()).collect(); + let batch_hash = TxHash::batch_hash(&tx_hashes); + + let batch = OperationsExtSchema(&mut storage) + .get_batch_info(batch_hash) + .await? + .unwrap(); + + let actual_tx_hashes: Vec = batch + .transaction_hashes + .into_iter() + .map(|tx_hash| tx_hash.0) + .collect(); + assert_eq!(actual_tx_hashes, tx_hashes); + + Ok(()) +} + /// Checks that returning executed txs to mempool works correctly. #[db_test] async fn test_return_executed_txs_to_mempool(mut storage: StorageProcessor<'_>) -> QueryResult<()> { diff --git a/core/lib/storage/src/tests/chain/operations.rs b/core/lib/storage/src/tests/chain/operations.rs index cb97b1e7bd..4e6433d87d 100644 --- a/core/lib/storage/src/tests/chain/operations.rs +++ b/core/lib/storage/src/tests/chain/operations.rs @@ -104,6 +104,8 @@ async fn executed_priority_operations(mut storage: StorageProcessor<'_>) -> Quer eth_hash: vec![0xDE, 0xAD, 0xBE, 0xEF], eth_block: 10, created_at: chrono::Utc::now(), + tx_hash: Default::default(), + eth_block_index: Some(1), }; OperationsSchema(&mut storage) .store_executed_priority_op(executed_tx.clone()) @@ -162,6 +164,8 @@ async fn duplicated_operations(mut storage: StorageProcessor<'_>) -> QueryResult eth_hash: vec![0xDE, 0xAD, 0xBE, 0xEF], eth_block: 10, created_at: chrono::Utc::now(), + tx_hash: Default::default(), + eth_block_index: Some(1), }; // Save the same operations twice. @@ -371,6 +375,50 @@ async fn remove_rejected_transactions(mut storage: StorageProcessor<'_>) -> Quer Ok(()) } +/// Checks that getting executed priority operation by `eth_hash` is working correctly. 
+#[db_test] +async fn priority_ops_hashes(mut storage: StorageProcessor<'_>) -> QueryResult<()> { + let executed_priority_op = NewExecutedPriorityOperation { + block_number: 1, + block_index: 1, + operation: Default::default(), + from_account: Default::default(), + to_account: Default::default(), + priority_op_serialid: 1, + deadline_block: 100, + eth_hash: vec![0xAA, 0xAA, 0xAA, 0xAA], + eth_block: 10, + created_at: chrono::Utc::now(), + tx_hash: vec![0xBB, 0xBB, 0xBB, 0xBB], + eth_block_index: Some(1), + }; + // Store executed priority op and try to get it by `eth_hash`. + storage + .chain() + .operations_schema() + .store_executed_priority_op(executed_priority_op.clone()) + .await?; + let op_by_eth_hash = storage + .chain() + .operations_schema() + .get_executed_priority_operation_by_eth_hash(&executed_priority_op.eth_hash) + .await?; + assert_eq!( + op_by_eth_hash.unwrap().priority_op_serialid, + executed_priority_op.priority_op_serialid + ); + + // Checks that it doesn't find unexisting operation + let op = storage + .chain() + .operations_schema() + .get_executed_priority_operation_by_eth_hash(&[0xDE, 0xAD, 0xBE, 0xEF]) + .await?; + assert!(op.is_none()); + + Ok(()) +} + /// Checks if executed_priority_operations are removed correctly. 
#[db_test] async fn test_remove_executed_priority_operations( @@ -389,6 +437,8 @@ async fn test_remove_executed_priority_operations( eth_hash: vec![0xDE, 0xAD, 0xBE, 0xEF], eth_block: 10, created_at: chrono::Utc::now(), + eth_block_index: Some(1), + tx_hash: Default::default(), }; OperationsSchema(&mut storage) .store_executed_priority_op(executed_priority_op) diff --git a/core/lib/storage/src/tests/chain/operations_ext/mod.rs b/core/lib/storage/src/tests/chain/operations_ext/mod.rs index d779ac486b..513a3e0910 100644 --- a/core/lib/storage/src/tests/chain/operations_ext/mod.rs +++ b/core/lib/storage/src/tests/chain/operations_ext/mod.rs @@ -2,16 +2,21 @@ use std::collections::HashMap; // External imports // Workspace imports -use zksync_types::aggregated_operations::AggregatedActionType; +use zksync_api_types::v02::{ + pagination::{AccountTxsRequest, ApiEither, PaginationDirection, PaginationQuery}, + transaction::{Receipt, TxInBlockStatus}, +}; +use zksync_types::{ + aggregated_operations::{AggregatedActionType, AggregatedOperation}, + tx::TxHash, + BlockNumber, ExecutedOperations, +}; // Local imports use self::setup::TransactionsHistoryTestSetup; use crate::{ chain::block::BlockSchema, chain::operations::OperationsSchema, - chain::operations_ext::{ - records::{AccountOpReceiptResponse, AccountTxReceiptResponse}, - SearchDirection, - }, + chain::operations_ext::SearchDirection, test_data::{ dummy_ethereum_tx_hash, gen_sample_block, gen_unique_aggregated_operation, BLOCK_SIZE_CHUNKS, @@ -20,13 +25,11 @@ use crate::{ tokens::StoreTokenError, QueryResult, StorageProcessor, }; -use zksync_types::aggregated_operations::AggregatedOperation; -use zksync_types::BlockNumber; -mod setup; +pub mod setup; /// Commits the data from the test setup to the database. 
-async fn commit_schema_data( +pub async fn commit_schema_data( storage: &mut StorageProcessor<'_>, setup: &TransactionsHistoryTestSetup, ) -> QueryResult<()> { @@ -73,36 +76,34 @@ async fn confirm_eth_op( Ok(()) } -// Make first block committed and verified. -async fn update_blocks_status(mut storage: &mut StorageProcessor<'_>) -> QueryResult<()> { +pub async fn commit_block( + mut storage: &mut StorageProcessor<'_>, + block_number: BlockNumber, +) -> QueryResult<()> { // Required since we use `EthereumSchema` in this test. storage.ethereum_schema().initialize_eth_data().await?; - // Make first block committed. BlockSchema(&mut storage) .save_block(gen_sample_block( - BlockNumber(1), + block_number, BLOCK_SIZE_CHUNKS, Default::default(), )) .await?; OperationsSchema(&mut storage) .store_aggregated_action(gen_unique_aggregated_operation( - BlockNumber(1), + block_number, AggregatedActionType::CommitBlocks, BLOCK_SIZE_CHUNKS, )) .await?; let (id, aggregated_op) = OperationsSchema(&mut storage) - .get_aggregated_op_that_affects_block( - AggregatedActionType::CommitBlocks, - BlockNumber(1_u32), - ) + .get_aggregated_op_that_affects_block(AggregatedActionType::CommitBlocks, block_number) .await? .unwrap(); storage .chain() .state_schema() - .commit_state_update(BlockNumber(1), &[], 0) + .commit_state_update(block_number, &[], 0) .await?; confirm_eth_op( storage, @@ -111,19 +112,22 @@ async fn update_blocks_status(mut storage: &mut StorageProcessor<'_>) -> QueryRe ) .await?; - // Make first block verified. 
+ Ok(()) +} + +pub async fn verify_block( + mut storage: &mut StorageProcessor<'_>, + block_number: BlockNumber, +) -> QueryResult<()> { OperationsSchema(&mut storage) .store_aggregated_action(gen_unique_aggregated_operation( - BlockNumber(1), + block_number, AggregatedActionType::ExecuteBlocks, BLOCK_SIZE_CHUNKS, )) .await?; let (id, op) = OperationsSchema(&mut storage) - .get_aggregated_op_that_affects_block( - AggregatedActionType::ExecuteBlocks, - BlockNumber(1_u32), - ) + .get_aggregated_op_that_affects_block(AggregatedActionType::ExecuteBlocks, block_number) .await? .unwrap(); confirm_eth_op(storage, (id, op), AggregatedActionType::ExecuteBlocks).await?; @@ -131,36 +135,6 @@ async fn update_blocks_status(mut storage: &mut StorageProcessor<'_>) -> QueryRe Ok(()) } -#[derive(Debug, Copy, Clone, PartialEq)] -struct ReceiptRequest { - block_number: u64, - block_index: Option, - limit: u64, - direction: SearchDirection, -} - -#[derive(Debug, Copy, Clone, PartialEq)] -struct ReceiptLocation { - block_number: i64, - block_index: Option, -} - -impl ReceiptLocation { - fn from_tx(item: AccountTxReceiptResponse) -> Self { - Self { - block_number: item.block_number, - block_index: item.block_index, - } - } - - fn from_op(item: AccountOpReceiptResponse) -> Self { - Self { - block_number: item.block_number, - block_index: Some(item.block_index), - } - } -} - /// Here we take the account transactions using `get_account_transactions` and /// check `get_account_transactions_history` to match obtained results. 
#[db_test] @@ -337,258 +311,145 @@ async fn get_account_transactions_history_from( Ok(()) } -/// Checks that all the transaction receipts related to account address can be loaded -/// with the `get_account_transactions_receipts` method and the result will be +pub struct ReceiptRequest { + tx_hash: TxHash, + direction: PaginationDirection, + limit: u32, +} + +/// Checks that all the transaction related to account address can be loaded +/// with the `get_account_transactions` method and the result will be /// same as expected. #[db_test] -async fn get_account_transactions_receipts(mut storage: StorageProcessor<'_>) -> QueryResult<()> { +async fn get_account_transactions(mut storage: StorageProcessor<'_>) -> QueryResult<()> { let mut setup = TransactionsHistoryTestSetup::new(); + let from = setup.from_zksync_account.address; + let to = setup.to_zksync_account.address; setup.add_block(1); setup.add_block_with_rejected_op(2); + // Check that it doesn't return not committed txs. + let txs = storage + .chain() + .operations_ext_schema() + .get_account_transactions(&PaginationQuery { + from: AccountTxsRequest { + address: from, + tx_hash: ApiEither::from(setup.get_tx_hash(0, 0)), + }, + limit: 1, + direction: PaginationDirection::Newer, + }) + .await?; + assert!(txs.is_none()); + // execute_operation commit_schema_data(&mut storage, &setup).await?; - let from = setup.from_zksync_account.address; - let to = setup.to_zksync_account.address; + // Make blocks committed + commit_block(&mut storage, BlockNumber(1)).await?; + commit_block(&mut storage, BlockNumber(2)).await?; + let test_data = vec![ ( "Get first five transactions.", ReceiptRequest { - block_number: 0, - block_index: None, - direction: SearchDirection::Newer, + tx_hash: setup.get_tx_hash(0, 0), + direction: PaginationDirection::Newer, limit: 5, }, vec![ - ReceiptLocation { - block_number: 1, - block_index: Some(1), - }, - ReceiptLocation { - block_number: 1, - block_index: Some(2), - }, - ReceiptLocation { - 
block_number: 1, - block_index: Some(3), - }, - ReceiptLocation { - block_number: 1, - block_index: Some(4), - }, - ReceiptLocation { - block_number: 1, - block_index: Some(5), - }, + setup.get_tx_hash(0, 0), + setup.get_tx_hash(0, 1), + setup.get_tx_hash(0, 2), + setup.get_tx_hash(0, 3), + setup.get_tx_hash(0, 4), ], ), ( "Get a single transaction. (newer)", ReceiptRequest { - block_number: 1, - block_index: Some(2), - direction: SearchDirection::Newer, + tx_hash: setup.get_tx_hash(0, 2), + direction: PaginationDirection::Newer, limit: 1, }, - vec![ReceiptLocation { - block_number: 1, - block_index: Some(2), - }], + vec![setup.get_tx_hash(0, 2)], ), ( - "Get a failed transaction. (newer)", - ReceiptRequest { - block_number: 2, - block_index: None, - direction: SearchDirection::Newer, - limit: 1, - }, - vec![ReceiptLocation { - block_number: 2, - block_index: None, - }], - ), - ( - "Get some transations from the next block.", + "Get five transactions from some index.", ReceiptRequest { - block_number: 1, - block_index: Some(100), - direction: SearchDirection::Newer, + tx_hash: setup.get_tx_hash(0, 4), + direction: PaginationDirection::Newer, limit: 5, }, vec![ - ReceiptLocation { - block_number: 2, - block_index: None, - }, - ReceiptLocation { - block_number: 2, - block_index: Some(1), - }, - ReceiptLocation { - block_number: 2, - block_index: Some(2), - }, - ReceiptLocation { - block_number: 2, - block_index: Some(3), - }, - ReceiptLocation { - block_number: 2, - block_index: Some(4), - }, + setup.get_tx_hash(0, 4), + setup.get_tx_hash(0, 5), + setup.get_tx_hash(0, 6), + setup.get_tx_hash(1, 0), + setup.get_tx_hash(1, 1), ], ), ( - "Get five transactions from some index.", + "Limit is more than number of txs. 
(Newer)", ReceiptRequest { - block_number: 1, - block_index: Some(3), - direction: SearchDirection::Newer, + tx_hash: setup.get_tx_hash(1, 5), + direction: PaginationDirection::Newer, limit: 5, }, - vec![ - ReceiptLocation { - block_number: 1, - block_index: Some(3), - }, - ReceiptLocation { - block_number: 1, - block_index: Some(4), - }, - ReceiptLocation { - block_number: 1, - block_index: Some(5), - }, - ReceiptLocation { - block_number: 2, - block_index: None, - }, - ReceiptLocation { - block_number: 2, - block_index: Some(1), - }, - ], + vec![setup.get_tx_hash(1, 5), setup.get_tx_hash(1, 6)], ), // Older search direction ( "Get last five transactions.", ReceiptRequest { - block_number: i64::MAX as u64, - block_index: Some(i32::MAX as u32), - direction: SearchDirection::Older, + tx_hash: setup.get_tx_hash(1, 6), + direction: PaginationDirection::Older, limit: 5, }, vec![ - ReceiptLocation { - block_number: 2, - block_index: Some(4), - }, - ReceiptLocation { - block_number: 2, - block_index: Some(3), - }, - ReceiptLocation { - block_number: 2, - block_index: Some(2), - }, - ReceiptLocation { - block_number: 2, - block_index: Some(1), - }, - ReceiptLocation { - block_number: 2, - block_index: None, - }, + setup.get_tx_hash(1, 6), + setup.get_tx_hash(1, 5), + setup.get_tx_hash(1, 4), + setup.get_tx_hash(1, 3), + setup.get_tx_hash(1, 2), ], ), ( - "Get a single transaction (older).", + "Get a single transaction. (older)", ReceiptRequest { - block_number: 1, - block_index: Some(2), - direction: SearchDirection::Older, + tx_hash: setup.get_tx_hash(0, 2), + direction: PaginationDirection::Older, limit: 1, }, - vec![ReceiptLocation { - block_number: 1, - block_index: Some(2), - }], + vec![setup.get_tx_hash(0, 2)], ), ( - "Get a failed transaction. 
(older)", + "Get some transactions from the previous block.", ReceiptRequest { - block_number: 2, - block_index: None, - direction: SearchDirection::Older, - limit: 1, - }, - vec![ReceiptLocation { - block_number: 2, - block_index: None, - }], - ), - ( - "Get some transations from the previous block.", - ReceiptRequest { - block_number: 2, - block_index: None, - direction: SearchDirection::Older, + tx_hash: setup.get_tx_hash(1, 2), + direction: PaginationDirection::Older, limit: 5, }, vec![ - ReceiptLocation { - block_number: 2, - block_index: None, - }, - ReceiptLocation { - block_number: 1, - block_index: Some(5), - }, - ReceiptLocation { - block_number: 1, - block_index: Some(4), - }, - ReceiptLocation { - block_number: 1, - block_index: Some(3), - }, - ReceiptLocation { - block_number: 1, - block_index: Some(2), - }, + setup.get_tx_hash(1, 2), + setup.get_tx_hash(1, 1), + setup.get_tx_hash(1, 0), + setup.get_tx_hash(0, 6), + setup.get_tx_hash(0, 5), ], ), ( - "Get five transactions up to some index.", + "Limit is more than number of txs. 
(Older)", ReceiptRequest { - block_number: 2, - block_index: Some(2), - direction: SearchDirection::Older, + tx_hash: setup.get_tx_hash(0, 2), + direction: PaginationDirection::Older, limit: 5, }, vec![ - ReceiptLocation { - block_number: 2, - block_index: Some(2), - }, - ReceiptLocation { - block_number: 2, - block_index: Some(1), - }, - ReceiptLocation { - block_number: 2, - block_index: None, - }, - ReceiptLocation { - block_number: 1, - block_index: Some(5), - }, - ReceiptLocation { - block_number: 1, - block_index: Some(4), - }, + setup.get_tx_hash(0, 2), + setup.get_tx_hash(0, 1), + setup.get_tx_hash(0, 0), ], ), ]; @@ -597,292 +458,488 @@ async fn get_account_transactions_receipts(mut storage: StorageProcessor<'_>) -> let items = storage .chain() .operations_ext_schema() - .get_account_transactions_receipts( - from, - request.block_number, - request.block_index, - request.direction, - request.limit, - ) + .get_account_transactions(&PaginationQuery { + from: AccountTxsRequest { + address: from, + tx_hash: ApiEither::from(request.tx_hash), + }, + limit: request.limit, + direction: request.direction, + }) .await?; - - let actual_resp = items - .into_iter() - .map(ReceiptLocation::from_tx) - .collect::>(); + let actual_resp: Vec = items.unwrap().into_iter().map(|tx| tx.tx_hash).collect(); assert_eq!(actual_resp, expected_resp, "\"{}\", failed", test_name); } - // Make first block committed and verified - update_blocks_status(&mut storage).await?; + let failed_tx = storage + .chain() + .operations_ext_schema() + .get_account_transactions(&PaginationQuery { + from: AccountTxsRequest { + address: from, + tx_hash: ApiEither::from(setup.get_tx_hash(1, 2)), + }, + limit: 1, + direction: PaginationDirection::Newer, + }) + .await? 
+ .unwrap(); + assert_eq!(failed_tx[0].status, TxInBlockStatus::Rejected); - let receipts = storage + verify_block(&mut storage, BlockNumber(1)).await?; + let txs = storage .chain() .operations_ext_schema() - .get_account_transactions_receipts(from, 1, Some(1), SearchDirection::Newer, 1) - .await?; + .get_account_transactions(&PaginationQuery { + from: AccountTxsRequest { + address: from, + tx_hash: ApiEither::from(setup.get_tx_hash(0, 6)), + }, + limit: 2, + direction: PaginationDirection::Newer, + }) + .await? + .unwrap(); + assert_eq!(txs[0].status, TxInBlockStatus::Finalized); + assert_eq!(txs[1].status, TxInBlockStatus::Committed); - // Check that `commit_tx_hash` and `verify_tx_hash` now exist. - let reciept = receipts.into_iter().next().unwrap(); - assert!(reciept.commit_tx_hash.is_some()); - assert!(reciept.verify_tx_hash.is_some()); // Make sure that the receiver see the same receipts. - let receipts = storage + let from_txs = storage + .chain() + .operations_ext_schema() + .get_account_transactions(&PaginationQuery { + from: AccountTxsRequest { + address: from, + tx_hash: ApiEither::from(setup.get_tx_hash(0, 2)), + }, + limit: 1, + direction: PaginationDirection::Newer, + }) + .await? + .unwrap(); + let to_txs = storage .chain() .operations_ext_schema() - .get_account_transactions_receipts(to, 1, Some(1), SearchDirection::Newer, 1) + .get_account_transactions(&PaginationQuery { + from: AccountTxsRequest { + address: to, + tx_hash: ApiEither::from(setup.get_tx_hash(0, 2)), + }, + limit: 1, + direction: PaginationDirection::Newer, + }) + .await? 
+ .unwrap(); + let from_txs_hashes: Vec = from_txs.into_iter().map(|tx| tx.tx_hash).collect(); + let to_txs_hashes: Vec = to_txs.into_iter().map(|tx| tx.tx_hash).collect(); + assert_eq!(from_txs_hashes, to_txs_hashes); + + Ok(()) +} + +/// Test `get_tx_created_at_and_block_number` method +#[db_test] +async fn get_tx_created_at_and_block_number(mut storage: StorageProcessor<'_>) -> QueryResult<()> { + let mut setup = TransactionsHistoryTestSetup::new(); + setup.add_block(1); + commit_schema_data(&mut storage, &setup).await?; + + // Get priority op created_at and block_number + let tx_hash = setup.get_tx_hash(0, 0); + let result = storage + .chain() + .operations_ext_schema() + .get_tx_created_at_and_block_number(tx_hash) .await?; - assert_eq!( - storage + assert!(result.is_some()); + assert_eq!(result.unwrap().1, BlockNumber(1)); + + // Get transaction created_at and block_number + let tx_hash = setup.get_tx_hash(0, 1); + let result = storage + .chain() + .operations_ext_schema() + .get_tx_created_at_and_block_number(tx_hash) + .await?; + assert!(result.is_some()); + assert_eq!(result.unwrap().1, BlockNumber(1)); + + // Try to get unexisting tx + setup.add_block(2); + let tx_hash = setup.get_tx_hash(1, 0); + let result = storage + .chain() + .operations_ext_schema() + .get_tx_created_at_and_block_number(tx_hash) + .await?; + assert!(result.is_none()); + + Ok(()) +} + +/// Test `get_batch_info` method +#[db_test] +async fn get_batch_info(mut storage: StorageProcessor<'_>) -> QueryResult<()> { + let mut setup = TransactionsHistoryTestSetup::new(); + + // `batch_id` will be added after we insert batch into mempool. 
+ setup.add_block_with_batch(1, true); + setup.add_block_with_batch(2, false); + + for i in 0..2 { + let txs: Vec<_> = setup.blocks[i] + .block_transactions + .iter() + .map(|tx| tx.get_executed_tx().unwrap().signed_tx.clone()) + .collect(); + let batch_id = storage .chain() - .operations_ext_schema() - .get_account_transactions_receipts(to, 1, Some(1), SearchDirection::Older, 1) - .await?, - receipts + .mempool_schema() + .insert_batch(&txs, Vec::new()) + .await?; + setup.blocks[i] + .block_transactions + .iter_mut() + .for_each(|tx| match tx { + ExecutedOperations::Tx(tx) => { + tx.batch_id = Some(batch_id); + } + _ => unreachable!(), + }); + } + + // Get batch from mempool + let tx_hashes = vec![ + setup.get_tx_hash(0, 0), + setup.get_tx_hash(0, 1), + setup.get_tx_hash(0, 2), + ]; + let batch_hash = TxHash::batch_hash(&tx_hashes); + let batch_info = storage + .chain() + .operations_ext_schema() + .get_batch_info(batch_hash) + .await? + .unwrap(); + + let actual_tx_hashes: Vec = batch_info + .transaction_hashes + .into_iter() + .map(|tx_hash| tx_hash.0) + .collect(); + assert_eq!(batch_info.batch_hash, batch_hash); + assert_eq!(actual_tx_hashes, tx_hashes); + assert_eq!(batch_info.batch_status.last_state, TxInBlockStatus::Queued); + + // Get batch from queued block. + commit_schema_data(&mut storage, &setup).await?; + storage.chain().mempool_schema().collect_garbage().await?; + + let batch_info = storage + .chain() + .operations_ext_schema() + .get_batch_info(batch_hash) + .await? + .unwrap(); + + let actual_tx_hashes: Vec = batch_info + .transaction_hashes + .into_iter() + .map(|tx_hash| tx_hash.0) + .collect(); + assert_eq!(batch_info.batch_hash, batch_hash); + assert_eq!(actual_tx_hashes, tx_hashes); + assert_eq!( + batch_info.batch_status.last_state, + TxInBlockStatus::Committed + ); + + // Get batch from committed block. 
+ commit_block(&mut storage, BlockNumber(1)).await?; + + let batch_info = storage + .chain() + .operations_ext_schema() + .get_batch_info(batch_hash) + .await? + .unwrap(); + assert_eq!( + batch_info.batch_status.last_state, + TxInBlockStatus::Committed + ); + + // Get batch from finalized block. + verify_block(&mut storage, BlockNumber(1)).await?; + let batch_info = storage + .chain() + .operations_ext_schema() + .get_batch_info(batch_hash) + .await? + .unwrap(); + assert_eq!( + batch_info.batch_status.last_state, + TxInBlockStatus::Finalized + ); + + // Get failed batch. + let tx_hashes = vec![ + setup.get_tx_hash(1, 0), + setup.get_tx_hash(1, 1), + setup.get_tx_hash(1, 2), + ]; + let batch_hash = TxHash::batch_hash(&tx_hashes); + let batch_info = storage + .chain() + .operations_ext_schema() + .get_batch_info(batch_hash) + .await? + .unwrap(); + assert_eq!( + batch_info.batch_status.last_state, + TxInBlockStatus::Rejected ); Ok(()) } -/// Checks that all the operations receipts related to account address can be loaded -/// with the `get_account_operations_receipts` method and the result will be -/// same as expected. 
+/// Test `get_account_transactions_count` method #[db_test] -async fn get_account_operations_receipts(mut storage: StorageProcessor<'_>) -> QueryResult<()> { +async fn account_transactions_count(mut storage: StorageProcessor<'_>) -> QueryResult<()> { let mut setup = TransactionsHistoryTestSetup::new(); setup.add_block(1); - setup.add_block_with_rejected_op(2); + commit_schema_data(&mut storage, &setup).await?; - // execute_operation + let count_before_commit = storage + .chain() + .operations_ext_schema() + .get_account_transactions_count(setup.from_zksync_account.address) + .await?; + assert_eq!(count_before_commit, 0); + + commit_block(&mut storage, BlockNumber(1)).await?; + + let count_after_commit = storage + .chain() + .operations_ext_schema() + .get_account_transactions_count(setup.from_zksync_account.address) + .await?; + assert_eq!(count_after_commit, 7); + + Ok(()) +} + +/// Test `get_account_last_tx_hash` method +#[db_test] +async fn account_last_tx_hash(mut storage: StorageProcessor<'_>) -> QueryResult<()> { + let mut setup = TransactionsHistoryTestSetup::new(); + + // Checks that it returns None for unexisting account + let last_tx_hash = storage + .chain() + .operations_ext_schema() + .get_account_last_tx_hash(setup.from_zksync_account.address) + .await?; + assert!(last_tx_hash.is_none()); + + setup.add_block(1); commit_schema_data(&mut storage, &setup).await?; - let from = setup.from_zksync_account.address; - let to = setup.to_zksync_account.address; - let test_data = vec![ - ( - "Get first five operations.", - ReceiptRequest { - block_number: 0, - block_index: Some(0), - direction: SearchDirection::Newer, - limit: 5, - }, - vec![ - ReceiptLocation { - block_number: 1, - block_index: Some(0), - }, - ReceiptLocation { - block_number: 1, - block_index: Some(6), - }, - ReceiptLocation { - block_number: 2, - block_index: Some(0), - }, - ReceiptLocation { - block_number: 2, - block_index: Some(6), - }, - ], - ), - ( - "Get a single operation. 
(newer)", - ReceiptRequest { - block_number: 1, - block_index: Some(0), - direction: SearchDirection::Newer, - limit: 1, - }, - vec![ReceiptLocation { - block_number: 1, - block_index: Some(0), - }], - ), - ( - "Get some operations from the next block.", - ReceiptRequest { - block_number: 1, - block_index: Some(100), - direction: SearchDirection::Newer, - limit: 5, - }, - vec![ - ReceiptLocation { - block_number: 2, - block_index: Some(0), - }, - ReceiptLocation { - block_number: 2, - block_index: Some(6), - }, - ], - ), - ( - "Get five operations from some index.", - ReceiptRequest { - block_number: 1, - block_index: Some(3), - direction: SearchDirection::Newer, - limit: 5, - }, - vec![ - ReceiptLocation { - block_number: 1, - block_index: Some(6), - }, - ReceiptLocation { - block_number: 2, - block_index: Some(0), - }, - ReceiptLocation { - block_number: 2, - block_index: Some(6), - }, - ], - ), - // Older search direction - ( - "Get last five operations.", - ReceiptRequest { - block_number: i64::MAX as u64, - block_index: Some(i32::MAX as u32), - direction: SearchDirection::Older, - limit: 5, - }, - vec![ - ReceiptLocation { - block_number: 2, - block_index: Some(6), - }, - ReceiptLocation { - block_number: 2, - block_index: Some(0), - }, - ReceiptLocation { - block_number: 1, - block_index: Some(6), - }, - ReceiptLocation { - block_number: 1, - block_index: Some(0), - }, - ], - ), - ( - "Get a single operation (older).", - ReceiptRequest { - block_number: 1, - block_index: Some(6), - direction: SearchDirection::Older, - limit: 1, - }, - vec![ReceiptLocation { - block_number: 1, - block_index: Some(6), - }], - ), - ( - "Get some operations from the previous block.", - ReceiptRequest { - block_number: 2, - block_index: Some(0), - direction: SearchDirection::Older, - limit: 5, - }, - vec![ - ReceiptLocation { - block_number: 2, - block_index: Some(0), - }, - ReceiptLocation { - block_number: 1, - block_index: Some(6), - }, - ReceiptLocation { - block_number: 1, - 
block_index: Some(0), - }, - ], - ), - ( - "Get five operations up to some index.", - ReceiptRequest { - block_number: 2, - block_index: Some(10), - direction: SearchDirection::Older, - limit: 5, - }, - vec![ - ReceiptLocation { - block_number: 2, - block_index: Some(6), - }, - ReceiptLocation { - block_number: 2, - block_index: Some(0), - }, - ReceiptLocation { - block_number: 1, - block_index: Some(6), - }, - ReceiptLocation { - block_number: 1, - block_index: Some(0), - }, - ], - ), - ]; + let last_tx_hash = storage + .chain() + .operations_ext_schema() + .get_account_last_tx_hash(setup.from_zksync_account.address) + .await?; + assert_eq!(last_tx_hash, Some(setup.get_tx_hash(0, 6))); - for (test_name, request, expected_resp) in test_data { - let items = storage - .chain() - .operations_ext_schema() - .get_account_operations_receipts( - from, - request.block_number, - request.block_index.unwrap(), - request.direction, - request.limit, - ) - .await?; + Ok(()) +} - let actual_resp = items - .into_iter() - .map(ReceiptLocation::from_op) - .collect::>(); +/// Test `get_block_last_tx_hash` method +#[db_test] +async fn block_last_tx_hash(mut storage: StorageProcessor<'_>) -> QueryResult<()> { + let mut setup = TransactionsHistoryTestSetup::new(); - assert_eq!(actual_resp, expected_resp, "\"{}\", failed", test_name); + // Checks that it returns None for unexisting block + let last_tx_hash = storage + .chain() + .operations_ext_schema() + .get_block_last_tx_hash(BlockNumber(1)) + .await?; + assert!(last_tx_hash.is_none()); + + setup.add_block(1); + commit_schema_data(&mut storage, &setup).await?; + + let last_tx_hash = storage + .chain() + .operations_ext_schema() + .get_block_last_tx_hash(BlockNumber(1)) + .await?; + assert_eq!(last_tx_hash, Some(setup.get_tx_hash(0, 6))); + Ok(()) +} + +/// Test `tx_receipt_api_v02` method +#[db_test] +async fn tx_receipt(mut storage: StorageProcessor<'_>) -> QueryResult<()> { + let mut setup = TransactionsHistoryTestSetup::new(); + + 
// Checks that it returns None for unexisting tx + let receipt = storage + .chain() + .operations_ext_schema() + .tx_receipt_api_v02(&[0xDE, 0xAD, 0xBE, 0xEF]) + .await?; + assert!(receipt.is_none()); + + setup.add_block(1); + commit_schema_data(&mut storage, &setup).await?; + + // Test receipt for L1 op. + let (expected_id, eth_hash) = match setup.blocks[0].block_transactions[0].clone() { + ExecutedOperations::PriorityOp(op) => (op.priority_op.serial_id, op.priority_op.eth_hash), + ExecutedOperations::Tx(_) => { + panic!("Should be L1 op") + } + }; + + let l1_receipt_by_tx_hash = storage + .chain() + .operations_ext_schema() + .tx_receipt_api_v02(setup.get_tx_hash(0, 0).as_ref()) + .await?; + match l1_receipt_by_tx_hash.unwrap() { + Receipt::L1(receipt) => { + assert_eq!(receipt.id, expected_id); + } + Receipt::L2(_) => { + panic!("Should be L1 receipt"); + } } - // Make first block committed and verified - update_blocks_status(&mut storage).await?; + let l1_receipt_by_eth_hash = storage + .chain() + .operations_ext_schema() + .tx_receipt_api_v02(eth_hash.as_ref()) + .await?; + match l1_receipt_by_eth_hash.unwrap() { + Receipt::L1(receipt) => { + assert_eq!(receipt.id, expected_id); + } + Receipt::L2(_) => { + panic!("Should be L1 receipt"); + } + } - let receipts = storage + // Test receipt for executed L2 tx. + let l2_receipt = storage .chain() .operations_ext_schema() - .get_account_operations_receipts(from, 1, 0, SearchDirection::Newer, 1) + .tx_receipt_api_v02(setup.get_tx_hash(0, 2).as_ref()) .await?; + match l2_receipt.unwrap() { + Receipt::L2(receipt) => { + assert_eq!(receipt.tx_hash, setup.get_tx_hash(0, 2)); + } + Receipt::L1(_) => { + panic!("Should be L2 receipt"); + } + } - // Check that `commit_tx_hash` and `verify_tx_hash` now exist. - let reciept = receipts.into_iter().next().unwrap(); - assert!(reciept.commit_tx_hash.is_some()); - assert!(reciept.verify_tx_hash.is_some()); - // Make sure that the receiver see the same receipts. 
- let receipts = storage + // Test receipt for tx from mempool. + setup.add_block(2); + let tx = match setup.blocks[1].block_transactions[2].clone() { + ExecutedOperations::Tx(tx) => tx.signed_tx, + ExecutedOperations::PriorityOp(_) => { + panic!("Should be L2 tx") + } + }; + storage.chain().mempool_schema().insert_tx(&tx).await?; + let l2_receipt = storage + .chain() + .operations_ext_schema() + .tx_receipt_api_v02(tx.hash().as_ref()) + .await?; + match l2_receipt.unwrap() { + Receipt::L2(receipt) => { + assert_eq!(receipt.tx_hash, tx.hash()); + } + Receipt::L1(_) => { + panic!("Should be L2 receipt"); + } + } + + Ok(()) +} + +/// Test `tx_data_api_v02` method +#[db_test] +async fn tx_data(mut storage: StorageProcessor<'_>) -> QueryResult<()> { + let mut setup = TransactionsHistoryTestSetup::new(); + + // Checks that it returns None for unexisting tx + let data = storage + .chain() + .operations_ext_schema() + .tx_data_api_v02(&[0xDE, 0xAD, 0xBE, 0xEF]) + .await?; + assert!(data.is_none()); + + setup.add_block(1); + commit_schema_data(&mut storage, &setup).await?; + + // Test data for L1 op. + let eth_hash = match setup.blocks[0].block_transactions[0].clone() { + ExecutedOperations::PriorityOp(op) => op.priority_op.eth_hash, + ExecutedOperations::Tx(_) => { + panic!("Should be L1 op") + } + }; + + let l1_data_by_tx_hash = storage .chain() .operations_ext_schema() - .get_account_operations_receipts(to, 1, 0, SearchDirection::Newer, 1) + .tx_data_api_v02(setup.get_tx_hash(0, 0).as_ref()) .await?; assert_eq!( - storage - .chain() - .operations_ext_schema() - .get_account_operations_receipts(to, 1, 0, SearchDirection::Older, 1) - .await?, - receipts + l1_data_by_tx_hash.unwrap().tx.tx_hash, + setup.get_tx_hash(0, 0) + ); + + let l1_data_by_eth_hash = storage + .chain() + .operations_ext_schema() + .tx_data_api_v02(eth_hash.as_ref()) + .await?; + assert_eq!( + l1_data_by_eth_hash.unwrap().tx.tx_hash, + setup.get_tx_hash(0, 0) ); + // Test data for executed L2 tx. 
+ let l2_data = storage + .chain() + .operations_ext_schema() + .tx_data_api_v02(setup.get_tx_hash(0, 2).as_ref()) + .await?; + assert_eq!(l2_data.unwrap().tx.tx_hash, setup.get_tx_hash(0, 2)); + + // Test data for tx from mempool. + setup.add_block(2); + let tx = match setup.blocks[1].block_transactions[2].clone() { + ExecutedOperations::Tx(tx) => tx.signed_tx, + ExecutedOperations::PriorityOp(_) => { + panic!("Should be L2 tx") + } + }; + storage.chain().mempool_schema().insert_tx(&tx).await?; + let l2_data = storage + .chain() + .operations_ext_schema() + .tx_data_api_v02(tx.hash().as_ref()) + .await?; + assert_eq!(l2_data.unwrap().tx.tx_hash, tx.hash()); + Ok(()) } diff --git a/core/lib/storage/src/tests/chain/operations_ext/setup.rs b/core/lib/storage/src/tests/chain/operations_ext/setup.rs index 7f25c79f90..3687bd9ae7 100644 --- a/core/lib/storage/src/tests/chain/operations_ext/setup.rs +++ b/core/lib/storage/src/tests/chain/operations_ext/setup.rs @@ -4,15 +4,15 @@ use chrono::{DateTime, Duration, Utc}; use num::BigUint; // Workspace imports use zksync_basic_types::H256; -use zksync_crypto::franklin_crypto::bellman::pairing::ff::Field; -use zksync_crypto::Fr; +use zksync_crypto::{franklin_crypto::bellman::pairing::ff::Field, Fr}; use zksync_test_account::ZkSyncAccount; -use zksync_types::block::{Block, ExecutedOperations, ExecutedPriorityOp, ExecutedTx}; -use zksync_types::operations::{ChangePubKeyOp, ZkSyncOp}; -use zksync_types::priority_ops::PriorityOp; use zksync_types::{ - tx::ChangePubKeyType, AccountId, Address, BlockNumber, CloseOp, Deposit, DepositOp, FullExit, - FullExitOp, Token, TokenId, TransferOp, TransferToNewOp, WithdrawOp, + block::{Block, ExecutedOperations, ExecutedPriorityOp, ExecutedTx}, + operations::{ChangePubKeyOp, ZkSyncOp}, + priority_ops::PriorityOp, + tx::{ChangePubKeyType, TxHash}, + AccountId, Address, BlockNumber, CloseOp, Deposit, DepositOp, FullExit, FullExitOp, Token, + TokenId, TransferOp, TransferToNewOp, WithdrawOp, }; // 
Local imports @@ -59,6 +59,13 @@ impl TransactionsHistoryTestSetup { } } + pub fn get_tx_hash(&self, block_number: usize, block_index: usize) -> TxHash { + match &self.blocks[block_number].block_transactions[block_index] { + ExecutedOperations::PriorityOp(op) => op.priority_op.tx_hash(), + ExecutedOperations::Tx(tx) => tx.signed_tx.hash(), + } + } + pub fn add_block(&mut self, block_id: u32) { let prior_op_unique_serial_id = u64::from(block_id * 2); let executed_deposit_op = self.create_deposit_op(prior_op_unique_serial_id, block_id, 0); @@ -72,12 +79,12 @@ impl TransactionsHistoryTestSetup { let operations = vec![ executed_deposit_op, - executed_full_exit_op, executed_transfer_to_new_op, executed_transfer_op, - executed_withdraw_op, executed_close_op, executed_change_pubkey_op, + executed_withdraw_op, + executed_full_exit_op, ]; let block = Block::new( @@ -109,12 +116,12 @@ impl TransactionsHistoryTestSetup { let operations = vec![ executed_deposit_op, - executed_full_exit_op, executed_transfer_to_new_op, rejected_transfer_op, - executed_withdraw_op, executed_close_op, executed_change_pubkey_op, + executed_withdraw_op, + executed_full_exit_op, ]; let block = Block::new( @@ -133,6 +140,34 @@ impl TransactionsHistoryTestSetup { self.blocks.push(block); } + pub fn add_block_with_batch(&mut self, block_id: u32, success: bool) { + let block_indexes = if success { + vec![Some(0), Some(1), Some(2)] + } else { + vec![None, None, None] + }; + let transfer_op_0 = self.create_transfer_tx(block_indexes[0]); + let transfer_op_1 = self.create_transfer_tx(block_indexes[1]); + let transfer_op_2 = self.create_transfer_tx(block_indexes[2]); + + let operations = vec![transfer_op_0, transfer_op_1, transfer_op_2]; + + let block = Block::new( + BlockNumber(block_id), + Fr::zero(), + AccountId(0), + operations, + (0, 0), // Not important + 100, + 1_000_000.into(), // Not important + 1_500_000.into(), // Not important + Default::default(), + 0, + ); + + self.blocks.push(block); + } + fn 
create_deposit_op( &mut self, serial_id: u64, @@ -158,6 +193,7 @@ impl TransactionsHistoryTestSetup { &hex::decode(format!("{:0>64}", format!("{}{}", block, block_index))).unwrap(), ), eth_block: 10, + eth_block_index: Some(1), }, op: deposit_op, block_index, @@ -196,6 +232,7 @@ impl TransactionsHistoryTestSetup { &hex::decode(format!("{:0>64}", format!("{}{}", block, block_index))).unwrap(), ), eth_block: 11, + eth_block_index: Some(1), }, op: full_exit_op, block_index, diff --git a/core/lib/storage/src/tokens/mod.rs b/core/lib/storage/src/tokens/mod.rs index 059f0a1c41..0ff3b81395 100644 --- a/core/lib/storage/src/tokens/mod.rs +++ b/core/lib/storage/src/tokens/mod.rs @@ -1,11 +1,12 @@ // Built-in deps -use std::collections::HashMap; +use std::collections::{HashMap, HashSet}; use std::time::Instant; // External imports use num::{rational::Ratio, BigUint}; use thiserror::Error; // Workspace imports +use zksync_api_types::v02::pagination::{PaginationDirection, PaginationQuery}; use zksync_types::{ tokens::ApiNFT, AccountId, Address, Token, TokenId, TokenLike, TokenPrice, NFT, }; @@ -121,17 +122,24 @@ impl<'a, 'c> TokensSchema<'a, 'c> { Ok(()) } - /// Loads all the stored tokens from the database. - /// Alongside with the tokens added via `store_token` method, the default `ETH` token - /// is returned. - pub async fn load_tokens(&mut self) -> QueryResult> { + /// Loads tokens from the database starting from the given id with the given limit in the ascending order. 
+ pub async fn load_tokens_asc( + &mut self, + from: TokenId, + limit: Option, + ) -> QueryResult> { let start = Instant::now(); + let limit = limit.map(i64::from); let tokens = sqlx::query_as!( DbToken, r#" - SELECT * FROM tokens WHERE is_nft = false + SELECT * FROM tokens + WHERE id >= $1 and is_nft = false ORDER BY id ASC + LIMIT $2 "#, + from.0 as u32, + limit ) .fetch_all(self.0.conn()) .await?; @@ -143,11 +151,66 @@ impl<'a, 'c> TokensSchema<'a, 'c> { (token.id, token) }) .collect(); + metrics::histogram!("sql.token.load_tokens_asc", start.elapsed()); + Ok(result) + } + + /// Loads tokens from the database starting from the given id with the given limit in the descending order. + pub async fn load_tokens_desc( + &mut self, + from: TokenId, + limit: Option, + ) -> QueryResult> { + let start = Instant::now(); + let limit = limit.map(i64::from); + let tokens = sqlx::query_as!( + DbToken, + r#" + SELECT * FROM tokens + WHERE id <= $1 + ORDER BY id DESC + LIMIT $2 + "#, + from.0 as u32, + limit + ) + .fetch_all(self.0.conn()) + .await?; - metrics::histogram!("sql.token.load_tokens", start.elapsed()); + let result = tokens + .into_iter() + .map(|t| { + let token: Token = t.into(); + (token.id, token) + }) + .collect(); + metrics::histogram!("sql.token.load_tokens_desc", start.elapsed()); Ok(result) } + /// Loads all the stored tokens from the database. + /// Alongside with the tokens added via `store_token` method, the default `ETH` token + /// is returned. + pub async fn load_tokens(&mut self) -> QueryResult> { + self.load_tokens_asc(TokenId(0), None).await + } + + /// Loads tokens for the given pagination query + pub async fn load_token_page( + &mut self, + query: &PaginationQuery, + ) -> QueryResult> { + let tokens = match query.direction { + PaginationDirection::Newer => { + self.load_tokens_asc(query.from, Some(query.limit)).await? + } + PaginationDirection::Older => { + self.load_tokens_desc(query.from, Some(query.limit)).await? 
+ } + }; + Ok(tokens) + } + /// Loads all the stored tokens, which have market_volume (ticker_market_volume table) /// not less than parameter (min_market_volume) pub async fn load_tokens_by_market_volume( @@ -183,8 +246,40 @@ impl<'a, 'c> TokensSchema<'a, 'c> { result } - /// Gets the last used token ID from Database. - pub async fn get_last_token_id(&mut self) -> QueryResult { + /// Filters out tokens whose market volume is less than the specified limit (min_market_volume). + pub async fn filter_tokens_by_market_volume( + &mut self, + tokens_to_check: Vec, + min_market_volume: &Ratio, + ) -> QueryResult> { + let start = Instant::now(); + let tokens_to_check: Vec = tokens_to_check.into_iter().map(|id| *id as i32).collect(); + let tokens = sqlx::query!( + r#" + SELECT token_id + FROM ticker_market_volume + WHERE token_id = ANY($1) AND market_volume >= $2 + "#, + &tokens_to_check, + ratio_to_big_decimal(min_market_volume, STORED_USD_PRICE_PRECISION) + ) + .fetch_all(self.0.conn()) + .await?; + + let result = Ok(tokens + .into_iter() + .map(|t| TokenId(t.token_id as u32)) + .collect()); + + metrics::histogram!( + "sql.token.load_token_ids_that_enabled_for_fees", + start.elapsed() + ); + result + } + + /// Get the number of tokens from Database + pub async fn get_count(&mut self) -> QueryResult { let start = Instant::now(); let last_token_id = sqlx::query!( r#" @@ -193,11 +288,11 @@ impl<'a, 'c> TokensSchema<'a, 'c> { ) .fetch_optional(self.0.conn()) .await? 
- .map(|token| token.id as u32) + .map(|token| token.id) .unwrap_or(0); metrics::histogram!("sql.token.get_last_token_id", start.elapsed()); - Ok(TokenId(last_token_id)) + Ok(last_token_id as u32) } pub async fn get_nft(&mut self, token_id: TokenId) -> QueryResult> { @@ -396,6 +491,19 @@ impl<'a, 'c> TokensSchema<'a, 'c> { Ok(()) } + pub async fn get_last_token_id(&mut self) -> QueryResult { + let start = Instant::now(); + + let token_id = sqlx::query!("SELECT MAX(id) FROM tokens") + .fetch_one(self.0.conn()) + .await? + .max + .unwrap_or(0); + + metrics::histogram!("sql.token.get_last_token_id", start.elapsed()); + Ok(TokenId(token_id as u32)) + } + pub async fn store_nft_factory( &mut self, creator_id: AccountId, @@ -407,9 +515,9 @@ impl<'a, 'c> TokensSchema<'a, 'c> { r#" INSERT INTO nft_factory ( creator_id, factory_address, creator_address ) VALUES ( $1, $2, $3 ) - ON CONFLICT ( creator_id ) - DO UPDATE - SET factory_address = $2 + ON CONFLICT ( creator_id ) + DO UPDATE + SET factory_address = $2 "#, creator_id.0 as i32, address_to_stored_string(&factory_address), diff --git a/core/lib/types/src/fee.rs b/core/lib/types/src/fee.rs index cefc6a8fd1..39e53d2f30 100644 --- a/core/lib/types/src/fee.rs +++ b/core/lib/types/src/fee.rs @@ -2,7 +2,7 @@ use num::rational::Ratio; use num::BigUint; use serde::{Deserialize, Serialize}; -use crate::helpers::{closest_packable_fee_amount, pack_fee_amount, unpack_fee_amount}; +use crate::helpers::{pack_fee_amount, unpack_fee_amount}; use crate::tokens::ChangePubKeyFeeTypeArg; use zksync_utils::{round_precision, BigUintSerdeAsRadix10Str}; @@ -42,15 +42,29 @@ pub struct Fee { #[derive(Debug, Serialize, Deserialize, Clone)] #[serde(rename_all = "camelCase")] pub struct BatchFee { + #[serde(with = "BigUintSerdeAsRadix10Str")] + pub gas_fee: BigUint, + #[serde(with = "BigUintSerdeAsRadix10Str")] + pub zkp_fee: BigUint, + #[serde(with = "BigUintSerdeAsRadix10Str")] + pub total_fee: BigUint, +} + +#[derive(Debug, Serialize, 
Deserialize, Clone)] +#[serde(rename_all = "camelCase")] +pub struct TotalFee { #[serde(with = "BigUintSerdeAsRadix10Str")] pub total_fee: BigUint, } impl BatchFee { - pub fn new(zkp_fee: &Ratio, gas_fee: &Ratio) -> BatchFee { - let (_, _, mut total_fee) = total_fee(zkp_fee, gas_fee); - total_fee = closest_packable_fee_amount(&total_fee); - BatchFee { total_fee } + pub fn new(zkp_fee: Ratio, gas_fee: Ratio) -> Self { + let (zkp_fee, gas_fee, total_fee) = total_fee(&zkp_fee, &gas_fee); + Self { + gas_fee, + zkp_fee, + total_fee, + } } } diff --git a/core/lib/types/src/lib.rs b/core/lib/types/src/lib.rs index 8f788cd6fa..8bf9491dc3 100644 --- a/core/lib/types/src/lib.rs +++ b/core/lib/types/src/lib.rs @@ -62,7 +62,7 @@ mod tests; pub use self::account::{Account, AccountUpdate, PubKeyHash}; pub use self::block::{ExecutedOperations, ExecutedPriorityOp, ExecutedTx}; -pub use self::fee::{BatchFee, Fee, OutputFeeType}; +pub use self::fee::{BatchFee, Fee, OutputFeeType, TotalFee}; pub use self::operations::{ ChangePubKeyOp, DepositOp, ForcedExitOp, FullExitOp, MintNFTOp, SwapOp, TransferOp, TransferToNewOp, WithdrawNFTOp, WithdrawOp, ZkSyncOp, diff --git a/core/lib/types/src/priority_ops/mod.rs b/core/lib/types/src/priority_ops/mod.rs index 18bdc2a3af..31656b4b4c 100644 --- a/core/lib/types/src/priority_ops/mod.rs +++ b/core/lib/types/src/priority_ops/mod.rs @@ -2,6 +2,7 @@ use ethabi::{decode, ParamType}; use num::{BigUint, ToPrimitive}; +use parity_crypto::digest::sha256; use serde::{Deserialize, Serialize}; use std::convert::{TryFrom, TryInto}; use zksync_basic_types::{Address, Log, H256, U256}; @@ -13,6 +14,7 @@ use zksync_utils::BigUintSerdeAsRadix10Str; use super::{ operations::{DepositOp, FullExitOp}, + tx::TxHash, utils::h256_as_vec, AccountId, SerialId, TokenId, }; @@ -357,6 +359,9 @@ pub struct PriorityOp { pub eth_hash: H256, /// Block in which Ethereum transaction was included. pub eth_block: u64, + /// Transaction index in Ethereum block. 
+ /// This field must be optional because of backward compatibility. + pub eth_block_index: Option, } impl TryFrom for PriorityOp { @@ -416,6 +421,7 @@ impl TryFrom for PriorityOp { .block_number .expect("Event block number is missing") .as_u64(), + eth_block_index: event.transaction_index.map(|index| index.as_u64()), }) } } @@ -426,4 +432,16 @@ impl PriorityOp { queue_entries.iter().map(|priority_op| &priority_op.data), ) } + + pub fn tx_hash(&self) -> TxHash { + let mut bytes = Vec::with_capacity(48); + bytes.extend_from_slice(self.eth_hash.as_bytes()); + bytes.extend_from_slice(&self.eth_block.to_be_bytes()); + bytes.extend_from_slice(&self.eth_block_index.unwrap_or(0).to_be_bytes()); + + let hash = sha256(&bytes); + let mut out = [0u8; 32]; + out.copy_from_slice(&hash); + TxHash { data: out } + } } diff --git a/core/lib/types/src/priority_ops/tests.rs b/core/lib/types/src/priority_ops/tests.rs index 2a1c270b0a..65e7b1e04a 100644 --- a/core/lib/types/src/priority_ops/tests.rs +++ b/core/lib/types/src/priority_ops/tests.rs @@ -16,6 +16,7 @@ mod backward_compatibility { deadline_block: u64, eth_hash: Vec, eth_block: u64, + eth_block_index: u64, } fn old_value() -> OldPriorityOp { @@ -31,6 +32,7 @@ mod backward_compatibility { deadline_block: 100, eth_hash: vec![2; 32], eth_block: 0, + eth_block_index: 0, } } diff --git a/core/lib/types/src/tests/utils.rs b/core/lib/types/src/tests/utils.rs index 6961817547..fda250b590 100644 --- a/core/lib/types/src/tests/utils.rs +++ b/core/lib/types/src/tests/utils.rs @@ -16,6 +16,7 @@ pub fn create_full_exit_op() -> ExecutedOperations { deadline_block: 0, eth_hash: H256::zero(), eth_block: 0, + eth_block_index: None, }, op: ZkSyncOp::FullExit(Box::new(FullExitOp { priority_op, diff --git a/core/lib/types/src/tx/primitives/eth_signature.rs b/core/lib/types/src/tx/primitives/eth_signature.rs index 7188020daa..a4ea0af2ca 100644 --- a/core/lib/types/src/tx/primitives/eth_signature.rs +++ 
b/core/lib/types/src/tx/primitives/eth_signature.rs @@ -1,5 +1,6 @@ use crate::tx::{EIP1271Signature, PackedEthSignature}; use serde::{Deserialize, Serialize}; +use std::fmt::{Display, Formatter}; /// Representation of the signature secured by L1. /// May be either a signature generated via Ethereum private key @@ -55,3 +56,14 @@ impl TxEthSignatureVariant { } } } + +impl Display for TxEthSignature { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + Self::EthereumSignature(sign) => { + write!(f, "0x{}", hex::encode(sign.serialize_packed())) + } + Self::EIP1271Signature(sign) => write!(f, "0x{}", hex::encode(sign.0.clone())), + } + } +} diff --git a/core/lib/types/src/tx/primitives/tx_hash.rs b/core/lib/types/src/tx/primitives/tx_hash.rs index 92049bd47f..201ba82939 100644 --- a/core/lib/types/src/tx/primitives/tx_hash.rs +++ b/core/lib/types/src/tx/primitives/tx_hash.rs @@ -1,3 +1,4 @@ +use parity_crypto::digest::sha256; use serde::{Deserialize, Deserializer, Serialize, Serializer}; use std::{convert::TryInto, str::FromStr}; use thiserror::Error; @@ -23,6 +24,11 @@ impl TxHash { Some(out) } } + + pub fn batch_hash(tx_hashes: &[TxHash]) -> TxHash { + let bytes: Vec = tx_hashes.iter().flat_map(AsRef::as_ref).cloned().collect(); + TxHash::from_slice(&*sha256(&bytes)).unwrap() + } } impl AsRef<[u8]> for TxHash { @@ -41,10 +47,14 @@ impl FromStr for TxHash { type Err = TxHashDecodeError; fn from_str(s: &str) -> Result { - if !s.starts_with("sync-tx:") { + let s = if let Some(s) = s.strip_prefix("0x") { + s + } else if let Some(s) = s.strip_prefix("sync-tx:") { + s + } else { return Err(TxHashDecodeError::PrefixError); - } - let bytes = hex::decode(&s[8..])?; + }; + let bytes = hex::decode(&s)?; if bytes.len() != 32 { return Err(TxHashDecodeError::IncorrectHashLength); } @@ -56,7 +66,7 @@ impl FromStr for TxHash { #[derive(Debug, Error)] pub enum TxHashDecodeError { - #[error("TxHash should start with sync-tx:")] + #[error("TxHash should start 
with 0x or sync-tx:")] PrefixError, #[error("Cannot decode Hex: {0}")] DecodeHex(#[from] hex::FromHexError), diff --git a/core/lib/utils/src/serde_wrappers.rs b/core/lib/utils/src/serde_wrappers.rs index 13bb0eff76..f4ec937237 100644 --- a/core/lib/utils/src/serde_wrappers.rs +++ b/core/lib/utils/src/serde_wrappers.rs @@ -150,7 +150,7 @@ pub struct BytesToHexSerde

{ } impl BytesToHexSerde

{ - pub fn serialize(value: &[u8], serializer: S) -> Result + pub fn serialize(value: impl AsRef<[u8]>, serializer: S) -> Result where S: Serializer, { diff --git a/core/tests/loadtest/src/api/data_pool.rs b/core/tests/loadtest/src/api/data_pool.rs index d644244e80..58c9a04add 100644 --- a/core/tests/loadtest/src/api/data_pool.rs +++ b/core/tests/loadtest/src/api/data_pool.rs @@ -11,7 +11,6 @@ use rand::{thread_rng, Rng}; use tokio::sync::{RwLock, RwLockReadGuard, RwLockWriteGuard}; // Workspace uses -use zksync_api_client::rest::v1::Pagination; use zksync_types::{tx::TxHash, AccountId, Address, BlockNumber, PriorityOp, ZkSyncPriorityOp}; // Local uses @@ -130,22 +129,6 @@ impl ApiDataPoolInner { self.random_tx_location().0 } - /// Generates a random pagination block range. - pub fn random_block_range(&self) -> (Pagination, u32) { - let mut rng = thread_rng(); - - let block_number = self.random_block(); - let pagination = match rng.gen_range(0, 3) { - 0 => Pagination::Before(block_number), - 1 => Pagination::After(block_number), - 2 => Pagination::Last, - _ => unreachable!(), - }; - - let limit = rng.gen_range(1, MAX_REQUEST_LIMIT as u32 + 1); - (pagination, limit) - } - /// Generates a random transaction identifier (block number, position in block). pub fn random_tx_location(&self) -> (BlockNumber, usize) { let from = *self.blocks.keys().next().unwrap(); diff --git a/core/tests/loadtest/src/api/mod.rs b/core/tests/loadtest/src/api/mod.rs index a8432912bc..dc27d6149b 100644 --- a/core/tests/loadtest/src/api/mod.rs +++ b/core/tests/loadtest/src/api/mod.rs @@ -25,7 +25,6 @@ use crate::{ mod data_pool; mod old_rest_api_tests; -mod rest_api_v1_tests; mod sdk_tests; // TODO: Make it configurable (ZKS-104). 
@@ -191,7 +190,6 @@ pub fn run(monitor: Monitor) -> (ApiTestsFuture, CancellationToken) { let mut builder = ApiTestsBuilder::new(token.clone()); builder = sdk_tests::wire_tests(builder, &monitor); builder = old_rest_api_tests::wire_tests(builder, &monitor); - builder = rest_api_v1_tests::wire_tests(builder, &monitor); let report = builder.run().await; vlog::info!("API tests finished"); diff --git a/core/tests/loadtest/src/api/rest_api_v1_tests.rs b/core/tests/loadtest/src/api/rest_api_v1_tests.rs deleted file mode 100644 index 7cf2c4bdce..0000000000 --- a/core/tests/loadtest/src/api/rest_api_v1_tests.rs +++ /dev/null @@ -1,280 +0,0 @@ -//! Tests for the relevant API methods declared in the -//! `core/bin/zksync_api/src/api_server/rest/v1` module. - -// Built-in uses -use std::str::FromStr; - -// External uses -use futures::prelude::*; -use rand::{thread_rng, Rng}; - -// Workspace uses -use zksync_api_client::rest::v1::{ - accounts::{AccountQuery, AccountReceipts}, - Client, TokenPriceKind, MAX_LIMIT, -}; -use zksync_config::test_config::TestConfig; -use zksync_types::{Address, TokenId, TokenLike}; - -// Local uses -use super::{ApiDataPool, ApiTestsBuilder}; -use crate::monitor::Monitor; - -struct RestApiTestsBuilder<'a> { - inner: ApiTestsBuilder<'a>, - monitor: &'a Monitor, - client: Client, -} - -impl<'a> RestApiTestsBuilder<'a> { - fn new(inner: ApiTestsBuilder<'a>, monitor: &'a Monitor) -> Self { - let rest_api_url = TestConfig::load().api.rest_api_url; - let client = Client::new(rest_api_url); - - Self { - inner, - monitor, - client, - } - } - - fn append(self, category: &str, factory: F) -> Self - where - F: Fn(Client, &'a Monitor) -> Fut + Send + 'a, - Fut: Future> + Send + 'a, - { - let monitor = self.monitor; - let client = self.client.clone(); - - let category = format!("rest/v1/{}", category); - let inner = self - .inner - .append(&category, move || factory(client.clone(), monitor)); - - Self { - inner, - monitor, - client: self.client, - } - } - - fn 
into_inner(self) -> ApiTestsBuilder<'a> { - self.inner - } -} - -fn random_token(tokens: &[TokenLike]) -> TokenLike { - let mut rng = thread_rng(); - - let index = rng.gen_range(0, tokens.len()); - tokens[index].clone() -} - -async fn random_account_query(pool: &ApiDataPool) -> AccountQuery { - let (address, account_id); - // We should only use accounts with the settled account ID. - let mut attempts: u32 = 0; - loop { - let inner = pool.read().await; - let data = inner.random_address(); - if let Some(id) = data.1.account_id { - address = data.0; - account_id = id; - break; - } - - attempts += 1; - if attempts >= MAX_LIMIT { - unreachable!( - "Unable to find the appropriate account {} attempts.", - MAX_LIMIT - ); - } - } - - if thread_rng().gen::() { - AccountQuery::Id(account_id) - } else { - AccountQuery::Address(address) - } -} - -async fn random_account_receipts_query(pool: &ApiDataPool) -> AccountReceipts { - let location = pool.read().await.random_tx_location(); - match thread_rng().gen_range(0, 3) { - 0 => AccountReceipts::older_than(location.0, Some(location.1 as u32)), - 1 => AccountReceipts::newer_than(location.0, Some(location.1 as u32)), - 2 => AccountReceipts::Latest, - _ => unreachable!(), - } -} - -pub fn wire_tests<'a>(builder: ApiTestsBuilder<'a>, monitor: &'a Monitor) -> ApiTestsBuilder<'a> { - let builder = RestApiTestsBuilder::new(builder, monitor); - - // Prebuilt token-like requests - let tokens = [ - // Ethereum. - TokenLike::Id(TokenId(0)), - TokenLike::Symbol("ETH".to_string()), - TokenLike::Address(Address::default()), - // PHNX, see rest/v1/test_utils.rs - TokenLike::Id(TokenId(1)), - TokenLike::Symbol("PHNX".to_string()), - TokenLike::Address(Address::from_str("38A2fDc11f526Ddd5a607C1F251C065f40fBF2f7").unwrap()), - ]; - - builder - // accounts endpoints. 
- .append("accounts/info", |client, monitor| async move { - let address = random_account_query(&monitor.api_data_pool).await; - client.account_info(address).await?; - Ok(()) - }) - .append( - "accounts/transactions/receipts", - |client, monitor| async move { - let address = random_account_query(&monitor.api_data_pool).await; - let receipts = random_account_receipts_query(&monitor.api_data_pool).await; - client - .account_tx_receipts(address, receipts, MAX_LIMIT) - .await?; - Ok(()) - }, - ) - .append( - "accounts/operations/receipts", - |client, monitor| async move { - let address = random_account_query(&monitor.api_data_pool).await; - let receipts = random_account_receipts_query(&monitor.api_data_pool).await; - client - .account_op_receipts(address, receipts, MAX_LIMIT) - .await?; - Ok(()) - }, - ) - .append( - "accounts/operations/pending_receipts", - |client, monitor| async move { - let address = random_account_query(&monitor.api_data_pool).await; - client.account_pending_ops(address).await?; - Ok(()) - }, - ) - // blocks endpoints. - .append("blocks/info", |client, monitor| async move { - let block_number = monitor.api_data_pool.read().await.random_block(); - client.block_by_id(block_number).await?; - Ok(()) - }) - .append("blocks/range", |client, monitor| async move { - let (pagination, limit) = monitor.api_data_pool.read().await.random_block_range(); - client.blocks_range(pagination, limit).await?; - Ok(()) - }) - .append("blocks/transactions", |client, monitor| async move { - let block_number = monitor.api_data_pool.read().await.random_block(); - client.block_transactions(block_number).await?; - Ok(()) - }) - // config endpoints. 
- .append("config/contracts", |client, _monitor| async move { - client.contracts().await?; - Ok(()) - }) - .append( - "config/deposit_confirmations", - |client, _monitor| async move { - client.deposit_confirmations().await?; - Ok(()) - }, - ) - .append("config/network", |client, _monitor| async move { - client.network().await?; - Ok(()) - }) - // operations endpoints. - .append( - "operations/receipt/by_serial_id", - |client, monitor| async move { - let op = monitor.api_data_pool.read().await.random_priority_op(); - client.priority_op(op.serial_id).await?; - Ok(()) - }, - ) - .append( - "operations/receipt/eth_hash", - |client, monitor| async move { - let op = monitor.api_data_pool.read().await.random_priority_op(); - client.priority_op(op.eth_hash).await?; - Ok(()) - }, - ) - .append( - "operations/data/by_serial_id", - |client, monitor| async move { - let op = monitor.api_data_pool.read().await.random_priority_op(); - client.priority_op_data(op.serial_id).await?; - Ok(()) - }, - ) - .append("operations/data/eth_hash", |client, monitor| async move { - let op = monitor.api_data_pool.read().await.random_priority_op(); - client.priority_op_data(op.eth_hash).await?; - Ok(()) - }) - // search endpoints. - .append("search", |client, monitor| async move { - let block_id = monitor.api_data_pool.read().await.random_block(); - client.search_block(block_id).await?; - Ok(()) - }) - // tokens endpoints. - .append("tokens/list", |client, _monitor| async move { - client.tokens().await?; - Ok(()) - }) - .append("tokens/by_id", { - let tokens = tokens.clone(); - move |client, _monitor| { - let tokens = tokens.clone(); - async move { - let token = random_token(&tokens); - client.token_by_id(&token).await?; - Ok(()) - } - } - }) - .append("tokens/price", move |client, _monitor| { - let tokens = tokens.clone(); - async move { - let token = random_token(&tokens); - client.token_price(&token, TokenPriceKind::Currency).await?; - Ok(()) - } - }) - // transactions enpoints. 
- .append("transactions/status", move |client, monitor| async move { - let tx_hash = monitor.api_data_pool.read().await.random_tx_hash(); - client.tx_status(tx_hash).await?; - Ok(()) - }) - .append("transactions/data", move |client, monitor| async move { - let tx_hash = monitor.api_data_pool.read().await.random_tx_hash(); - client.tx_data(tx_hash).await?; - Ok(()) - }) - .append("transactions/receipts", move |client, monitor| async move { - let tx_hash = monitor.api_data_pool.read().await.random_tx_hash(); - let range = monitor.api_data_pool.read().await.random_block_range().0; - client.tx_receipts(tx_hash, range, MAX_LIMIT).await?; - Ok(()) - }) - .append("transactions/receipt", move |client, monitor| async move { - let tx_hash = monitor.api_data_pool.read().await.random_tx_hash(); - let receipt_id = thread_rng().gen_range(0, MAX_LIMIT); - client.tx_receipt_by_id(tx_hash, receipt_id).await?; - Ok(()) - }) - .into_inner() -} diff --git a/core/tests/ts-tests/tests/api.test.ts b/core/tests/ts-tests/tests/api.test.ts index e0f4f78931..b2a46b101d 100644 --- a/core/tests/ts-tests/tests/api.test.ts +++ b/core/tests/ts-tests/tests/api.test.ts @@ -1,19 +1,20 @@ -import { Wallet } from 'zksync'; +import { Wallet, RestProvider, getDefaultRestProvider, types } from 'zksync'; import { Tester } from './tester'; import './priority-ops'; import './change-pub-key'; import './transfer'; import './withdraw'; import './forced-exit'; +import { expect } from 'chai'; import * as api from './api'; -describe('ZkSync REST API tests', () => { +describe('ZkSync REST API V0.1 tests', () => { let tester: Tester; let alice: Wallet; before('create tester and test wallets', async () => { - tester = await Tester.init('localhost', 'HTTP'); + tester = await Tester.init('localhost', 'HTTP', 'RPC'); alice = await tester.fundedWallet('1.0'); let bob = await tester.emptyWallet(); for (const token of ['ETH', 'DAI']) { @@ -62,3 +63,145 @@ describe('ZkSync REST API tests', () => { } }); }); + 
+describe('ZkSync REST API V0.2 tests', () => { + let tester: Tester; + let alice: Wallet; + let bob: Wallet; + let provider: RestProvider; + let lastTxHash: string; + let lastTxReceipt: types.TransactionReceipt; + + before('create tester and test wallets', async () => { + provider = await getDefaultRestProvider('localhost'); + tester = await Tester.init('localhost', 'HTTP', 'REST'); + alice = await tester.fundedWallet('1.0'); + bob = await tester.emptyWallet(); + for (const token of ['ETH']) { + const thousand = tester.syncProvider.tokenSet.parseToken(token, '1000'); + await tester.testDeposit(alice, token, thousand, true); + await tester.testChangePubKey(alice, token, false); + await tester.testTransfer(alice, bob, token, thousand.div(4)); + } + + const handle = await alice.syncTransfer({ + to: bob.address(), + token: 'ETH', + amount: alice.provider.tokenSet.parseToken('ETH', '1') + }); + lastTxHash = handle.txHash; + lastTxHash.replace('sync-tx:', '0x'); + lastTxReceipt = await handle.awaitReceipt(); + }); + + it('should check api v0.2 account scope', async () => { + const committedState = await provider.accountInfo(alice.address(), 'committed'); + const finalizedState = await provider.accountInfo(alice.address(), 'finalized'); + const fullState = await provider.accountFullInfo(alice.address()); + expect(fullState.committed, 'committed state differs').to.eql(committedState); + expect(fullState.finalized, 'finalized state differs').to.eql(finalizedState); + + const txs = await provider.accountTxs(alice.accountId!, { + from: lastTxHash, + limit: 10, + direction: 'older' + }); + const expected = 4; + expect( + txs.list.length, + `Endpoint returned incorrect number of transactions: ${txs.list.length}, expected ${expected}` + ).to.eql(expected); + expect(txs.list[0].txHash, 'Endpoint did not return first tx correctly').to.be.eql(lastTxHash); + + const accTxs = await provider.accountPendingTxs(alice.accountId!, { + from: 1, + limit: 10, + direction: 'newer' + }); + 
expect(accTxs).to.exist; + }); + + it('should check api v0.2 block scope', async () => { + const lastCommittedBlock = await provider.blockByPosition('lastCommitted'); + expect(lastCommittedBlock).to.exist; + + const expectedBlocks = 3; + const blocks = await provider.blockPagination({ + from: lastCommittedBlock.blockNumber, + limit: 3, + direction: 'older' + }); + expect( + blocks.list.length, + `Endpoint returned incorrect number of blocks: ${blocks.list.length}, expected ${expectedBlocks}` + ).to.eql(expectedBlocks); + + const expectedTxs = 1; + const blockTxs = await provider.blockTransactions(lastTxReceipt.block!.blockNumber, { + from: lastTxHash, + limit: 10, + direction: 'newer' + }); + expect( + blockTxs.list.length, + `Endpoint returned incorrect number of transactions: ${blockTxs.list.length}, expected ${expectedTxs}` + ).to.eql(expectedTxs); + }); + + it('should check api v0.2 config endpoint', async () => { + const config = await provider.config(); + expect(config.network === 'localhost').to.be.true; + }); + + it('should check api v0.2 fee scope', async () => { + const fee = await provider.getTransactionFee('Withdraw', alice.address(), 'ETH'); + expect(fee).to.exist; + const batchFee = await provider.getBatchFullFee( + [ + { txType: 'Transfer', address: alice.address() }, + { txType: 'Withdraw', address: alice.address() } + ], + 'ETH' + ); + expect(batchFee).to.exist; + }); + + it('should check api v0.2 network status endpoint', async () => { + const networkStatus = await provider.networkStatus(); + expect(networkStatus).to.exist; + }); + + it('should check api v0.2 token scope', async () => { + const tokens = await provider.tokenPagination({ + from: 0, + limit: 2, + direction: 'newer' + }); + expect(tokens.list.length).to.be.eql(2); + const firstToken = await provider.tokenByIdOrAddress('0x'.padEnd(42, '0')); + const secondToken = await provider.tokenByIdOrAddress(1); + expect(tokens.list[0]).to.be.eql(firstToken); + 
expect(tokens.list[1]).to.be.eql(secondToken); + }); + + it('should check api v0.2 transaction scope', async () => { + const apiReceipt = await provider.txStatus(lastTxHash); + expect(apiReceipt!.rollupBlock).to.exist; + + const txData = await provider.txData(lastTxHash); + expect(txData!.tx.op.type).to.eql('Transfer'); + + const batch = await alice + .batchBuilder() + .addTransfer({ to: bob.address(), token: 'ETH', amount: alice.provider.tokenSet.parseToken('ETH', '1') }) + .addTransfer({ to: bob.address(), token: 'ETH', amount: alice.provider.tokenSet.parseToken('ETH', '1') }) + .build('ETH'); + const submitBatchResponse = await provider.submitTxsBatchNew( + batch.txs.map((signedTx) => signedTx.tx), + [batch.signature] + ); + await provider.notifyAnyTransaction(submitBatchResponse.transactionHashes[0], 'COMMIT'); + const batchInfo = await provider.getBatch(submitBatchResponse.batchHash); + expect(batchInfo.batchHash).to.eql(submitBatchResponse.batchHash); + }); +}); diff --git a/core/tests/ts-tests/tests/change-pub-key.ts b/core/tests/ts-tests/tests/change-pub-key.ts index 9c2a15c074..2c6dff9ab2 100644 --- a/core/tests/ts-tests/tests/change-pub-key.ts +++ b/core/tests/ts-tests/tests/change-pub-key.ts @@ -34,5 +34,9 @@ Tester.prototype.testChangePubKey = async function (wallet: Wallet, feeToken: To const receipt = await changePubkeyHandle.awaitReceipt(); expect(receipt.success, `ChangePubKey transaction failed with a reason: ${receipt.failReason}`).to.be.true; expect(await wallet.isSigningKeySet(), 'ChangePubKey failed').to.be.true; + + const accountState = await wallet.getAccountState(); + expect(accountState.accountType, 'Incorrect account type').to.be.eql('Owned'); + this.runningFee = this.runningFee.add(fee); }; diff --git a/core/tests/ts-tests/tests/main.test.ts b/core/tests/ts-tests/tests/main.test.ts index 00b22a799e..45ac39dd26 100644 --- a/core/tests/ts-tests/tests/main.test.ts +++ b/core/tests/ts-tests/tests/main.test.ts @@ -22,8 +22,8 @@ const 
DEPOSIT_AMOUNT = TX_AMOUNT.mul(200); // prettier-ignore /// We don't want to run tests with all tokens, so we highlight basic operations such as: Deposit, Withdrawal, Forced Exit /// We want to check basic operations with all tokens, and other operations only if it's necessary -const TestSuite = (token: types.TokenSymbol, transport: 'HTTP' | 'WS', onlyBasic: boolean = false) => -describe(`ZkSync integration tests (token: ${token}, transport: ${transport})`, () => { +const TestSuite = (token: types.TokenSymbol, transport: 'HTTP' | 'WS', providerType: 'REST' | 'RPC', onlyBasic: boolean = false) => +describe(`ZkSync integration tests (token: ${token}, transport: ${transport}, provider: ${providerType})`, () => { let tester: Tester; let alice: Wallet; let bob: Wallet; @@ -36,7 +36,7 @@ describe(`ZkSync integration tests (token: ${token}, transport: ${transport})`, let nft: types.NFT; before('create tester and test wallets', async () => { - tester = await Tester.init('localhost', transport); + tester = await Tester.init('localhost', transport, providerType); alice = await tester.fundedWallet('5.0'); bob = await tester.emptyWallet(); chuck = await tester.emptyWallet(); @@ -119,8 +119,8 @@ describe(`ZkSync integration tests (token: ${token}, transport: ${transport})`, step('should test multi-transfers', async () => { await tester.testBatch(alice, bob, token, TX_AMOUNT); await tester.testIgnoredBatch(alice, bob, token, TX_AMOUNT); - await tester.testRejectedBatch(alice, bob, token, TX_AMOUNT); - await tester.testInvalidFeeBatch(alice, bob, token, TX_AMOUNT); + await tester.testRejectedBatch(alice, bob, token, TX_AMOUNT, providerType); + await tester.testInvalidFeeBatch(alice, bob, token, TX_AMOUNT, providerType); }); step('should test batch-builder', async () => { @@ -166,7 +166,7 @@ describe(`ZkSync integration tests (token: ${token}, transport: ${transport})`, step('should test multi-signers', async () => { // At this point, all these wallets already have their public 
keys set. await tester.testMultipleBatchSigners([alice, david, frank], token, TX_AMOUNT); - await tester.testMultipleWalletsWrongSignature(alice, david, token, TX_AMOUNT); + await tester.testMultipleWalletsWrongSignature(alice, david, token, TX_AMOUNT, providerType); }); step('should test backwards compatibility', async () => { @@ -208,7 +208,7 @@ describe(`ZkSync integration tests (token: ${token}, transport: ${transport})`, if (onlyBasic) { return; } - await tester.testWrongSignature(alice, bob, token, TX_AMOUNT); + await tester.testWrongSignature(alice, bob, token, TX_AMOUNT, providerType); }); describe('Full Exit tests', () => { @@ -283,6 +283,8 @@ describe(`ZkSync integration tests (token: ${token}, transport: ${transport})`, ethAuthType: 'CREATE2' }); await cpk.awaitReceipt(); + const accountState = await hilda.getAccountState(); + expect(accountState.accountType, 'Incorrect account type').to.be.eql('CREATE2'); }); step('should make transfers from create2 account', async () => { @@ -315,8 +317,10 @@ describe(`ZkSync integration tests (token: ${token}, transport: ${transport})`, } // here we have a signle eth signature for the whole batch await tester.testCreate2SignedBatchFail(hilda, david, token, TX_AMOUNT); - // here the only each individual transaction is signed - await tester.testCreate2BatchFail(hilda, david, token, TX_AMOUNT); + if(providerType === 'RPC') { + // REST provider always expects Ethereum signed message for the whole batch, skip this test. + await tester.testCreate2BatchFail(hilda, david, token, TX_AMOUNT); + } }); }); }); @@ -324,6 +328,7 @@ describe(`ZkSync integration tests (token: ${token}, transport: ${transport})`, // wBTC is chosen because it has decimals different from ETH (8 instead of 18). // Using this token will help us to detect decimals-related errors. 
const defaultERC20 = 'wBTC'; +const defaultProviderType = 'REST'; let tokenAndTransport = []; if (process.env.TEST_TRANSPORT) { @@ -334,7 +339,8 @@ if (process.env.TEST_TRANSPORT) { tokenAndTransport = [ { transport: envTransport, - token: envToken + token: envToken, + providerType: process.env.TEST_PROVIDER ? process.env.TEST_PROVIDER : defaultProviderType } ]; } else { @@ -343,7 +349,8 @@ if (process.env.TEST_TRANSPORT) { tokenAndTransport = [ { transport: envTransport, - token: defaultERC20 + token: defaultERC20, + providerType: process.env.TEST_PROVIDER ? process.env.TEST_PROVIDER : defaultProviderType } ]; } @@ -353,11 +360,25 @@ if (process.env.TEST_TRANSPORT) { { transport: 'HTTP', token: 'ETH', + providerType: 'RPC', + onlyBasic: true + }, + { + transport: 'HTTP', + token: defaultERC20, + providerType: 'RPC', + onlyBasic: false + }, + { + transport: 'HTTP', + token: 'ETH', + providerType: 'REST', onlyBasic: true }, { transport: 'HTTP', token: defaultERC20, + providerType: 'REST', onlyBasic: false } ]; @@ -365,5 +386,5 @@ if (process.env.TEST_TRANSPORT) { for (const input of tokenAndTransport) { // @ts-ignore - TestSuite(input.token, input.transport, input.onlyBasic); + TestSuite(input.token, input.transport, input.providerType, input.onlyBasic); } diff --git a/core/tests/ts-tests/tests/misc.ts b/core/tests/ts-tests/tests/misc.ts index 488265c2de..3f8a63ec7c 100644 --- a/core/tests/ts-tests/tests/misc.ts +++ b/core/tests/ts-tests/tests/misc.ts @@ -21,14 +21,32 @@ type TokenLike = types.TokenLike; declare module './tester' { interface Tester { - testWrongSignature(from: Wallet, to: Wallet, token: TokenLike, amount: BigNumber): Promise; + testWrongSignature( + from: Wallet, + to: Wallet, + token: TokenLike, + amount: BigNumber, + providerType: 'REST' | 'RPC' + ): Promise; testMultipleBatchSigners(wallets: Wallet[], token: TokenLike, amount: BigNumber): Promise; - testMultipleWalletsWrongSignature(from: Wallet, to: Wallet, token: TokenLike, amount: BigNumber): 
Promise; + testMultipleWalletsWrongSignature( + from: Wallet, + to: Wallet, + token: TokenLike, + amount: BigNumber, + providerType: 'REST' | 'RPC' + ): Promise; testBackwardCompatibleEthMessages(from: Wallet, to: Wallet, token: TokenLike, amount: BigNumber): Promise; } } -Tester.prototype.testWrongSignature = async function (from: Wallet, to: Wallet, token: TokenLike, amount: BigNumber) { +Tester.prototype.testWrongSignature = async function ( + from: Wallet, + to: Wallet, + token: TokenLike, + amount: BigNumber, + providerType: 'REST' | 'RPC' +) { const signedTransfer = await from.signSyncTransfer({ to: to.address(), token: token, @@ -48,7 +66,11 @@ Tester.prototype.testWrongSignature = async function (from: Wallet, to: Wallet, await from.provider.submitTx(signedTransfer.tx, fakeEthSignature); thrown = false; // this line should be unreachable } catch (e) { - expect(e.jrpcError.message).to.equal('Eth signature is incorrect'); + if (providerType === 'REST') { + expect(e.restError.message).to.equal('Transaction adding error: Eth signature is incorrect.'); + } else { + expect(e.jrpcError.message).to.equal('Eth signature is incorrect'); + } } expect(thrown, 'Sending tx with incorrect ETH signature must throw').to.be.true; @@ -66,7 +88,11 @@ Tester.prototype.testWrongSignature = async function (from: Wallet, to: Wallet, await from.provider.submitTx(signedWithdraw.tx, fakeEthSignature); thrown = false; // this line should be unreachable } catch (e) { - expect(e.jrpcError.message).to.equal('Eth signature is incorrect'); + if (providerType === 'REST') { + expect(e.restError.message).to.equal('Transaction adding error: Eth signature is incorrect.'); + } else { + expect(e.jrpcError.message).to.equal('Eth signature is incorrect'); + } } expect(thrown, 'Sending tx with incorrect ETH signature must throw').to.be.true; }; @@ -128,7 +154,8 @@ Tester.prototype.testMultipleWalletsWrongSignature = async function ( from: Wallet, to: Wallet, token: TokenLike, - amount: BigNumber + 
amount: BigNumber, + providerType: 'REST' | 'RPC' ) { const fee = await this.syncProvider.getTransactionsBatchFee( ['Transfer', 'Transfer'], @@ -168,7 +195,11 @@ Tester.prototype.testMultipleWalletsWrongSignature = async function ( await submitSignedTransactionsBatch(from.provider, batch, [ethSignature]); thrown = false; // this line should be unreachable } catch (e) { - expect(e.jrpcError.message).to.equal('Eth signature is incorrect'); + if (providerType === 'REST') { + expect(e.restError.message).to.equal('Transaction adding error: Eth signature is incorrect.'); + } else { + expect(e.jrpcError.message).to.equal('Eth signature is incorrect'); + } } expect(thrown, 'Sending batch with incorrect ETH signature must throw').to.be.true; }; diff --git a/core/tests/ts-tests/tests/tester.ts b/core/tests/ts-tests/tests/tester.ts index 9c96387c0a..8a72b2560a 100644 --- a/core/tests/ts-tests/tests/tester.ts +++ b/core/tests/ts-tests/tests/tester.ts @@ -28,7 +28,7 @@ export class Tester { constructor( public network: Network, public ethProvider: ethers.providers.Provider, - public syncProvider: zksync.Provider, + public syncProvider: zksync.SyncProvider, public ethWallet: ethers.Wallet, public syncWallet: zksync.Wallet ) { @@ -37,7 +37,10 @@ export class Tester { } // prettier-ignore - static async init(network: Network, transport: 'WS' | 'HTTP') { + static async init(network: Network, transport: 'WS' | 'HTTP', providerType: 'REST' | 'RPC') { + if(transport === 'WS' && providerType === 'REST') { + throw new Error('REST provider supports only HTTP transport'); + } // @ts-ignore let web3Url = process.env.ETH_CLIENT_WEB3_URL.split(",")[0]; const ethProvider = network == 'localhost' @@ -46,7 +49,9 @@ export class Tester { if (network == 'localhost') { ethProvider.pollingInterval = 100; } - const syncProvider = await zksync.getDefaultProvider(network, transport); + const syncProvider = providerType === 'REST' + ? 
await zksync.getDefaultRestProvider(network) + : await zksync.getDefaultProvider(network, transport); const ethWallet = ethers.Wallet.fromMnemonic( ethTestConfig.test_mnemonic as string, "m/44'/60'/0'/0/0" diff --git a/core/tests/ts-tests/tests/transfer.ts b/core/tests/ts-tests/tests/transfer.ts index 214b253dc9..c111aff6e2 100644 --- a/core/tests/ts-tests/tests/transfer.ts +++ b/core/tests/ts-tests/tests/transfer.ts @@ -12,8 +12,20 @@ declare module './tester' { testTransferNFT(from: Wallet, to: Wallet, feeToken: TokenLike): Promise; testBatch(from: Wallet, to: Wallet, token: TokenLike, amount: BigNumber): Promise; testIgnoredBatch(from: Wallet, to: Wallet, token: TokenLike, amount: BigNumber): Promise; - testRejectedBatch(from: Wallet, to: Wallet, token: TokenLike, amount: BigNumber): Promise; - testInvalidFeeBatch(from: Wallet, to: Wallet, token: TokenLike, amount: BigNumber): Promise; + testRejectedBatch( + from: Wallet, + to: Wallet, + token: TokenLike, + amount: BigNumber, + providerType: 'REST' | 'RPC' + ): Promise; + testInvalidFeeBatch( + from: Wallet, + to: Wallet, + token: TokenLike, + amount: BigNumber, + providerType: 'REST' | 'RPC' + ): Promise; } } @@ -152,7 +164,8 @@ Tester.prototype.testRejectedBatch = async function ( sender: Wallet, receiver: Wallet, token: types.TokenLike, - amount: BigNumber + amount: BigNumber, + providerType: 'REST' | 'RPC' ) { const tx = { to: receiver.address(), @@ -169,7 +182,13 @@ Tester.prototype.testRejectedBatch = async function ( } thrown = false; // this line should be unreachable } catch (e) { - expect(e.jrpcError.message).to.equal('Transactions batch summary fee is too low'); + if (providerType === 'REST') { + expect(e.restError.message).to.equal( + 'Transaction adding error: Transactions batch summary fee is too low.' 
+ ); + } else { + expect(e.jrpcError.message).to.equal('Transactions batch summary fee is too low'); + } } expect(thrown, 'Batch should have failed').to.be.true; }; @@ -180,7 +199,8 @@ Tester.prototype.testInvalidFeeBatch = async function ( sender: Wallet, receiver: Wallet, token: types.TokenLike, - amount: BigNumber + amount: BigNumber, + providerType: 'REST' | 'RPC' ) { // Ignore the second transfer. const fee = await this.syncProvider.getTransactionsBatchFee(['Transfer'], [receiver.address()], token); @@ -212,7 +232,13 @@ Tester.prototype.testInvalidFeeBatch = async function ( } thrown = false; // this line should be unreachable } catch (e) { - expect(e.jrpcError.message).to.equal('Transactions batch summary fee is too low'); + if (providerType === 'REST') { + expect(e.restError.message).to.equal( + 'Transaction adding error: Transactions batch summary fee is too low.' + ); + } else { + expect(e.jrpcError.message).to.equal('Transactions batch summary fee is too low'); + } } expect(thrown, 'Batch should have failed').to.be.true; }; diff --git a/core/tests/ts-tests/tests/withdrawal-helpers.test.ts b/core/tests/ts-tests/tests/withdrawal-helpers.test.ts index d54a6c62c6..51c22dcfd4 100644 --- a/core/tests/ts-tests/tests/withdrawal-helpers.test.ts +++ b/core/tests/ts-tests/tests/withdrawal-helpers.test.ts @@ -16,14 +16,16 @@ const TEST_CONFIG = loadTestConfig(true); // The token here should have the ERC20 implementation from RevertTransferERC20.sol const erc20Token = 'wBTC'; -describe('Withdrawal helpers tests', () => { +// prettier-ignore +const TestSuite = (providerType: 'REST' | 'RPC') => +describe(`Withdrawal helpers tests (provider: ${providerType})`, () => { let tester: Tester; let alice: Wallet; let bob: Wallet; let chuck: Wallet; before('create tester and test wallets', async () => { - tester = await Tester.init('localhost', 'HTTP'); + tester = await Tester.init('localhost', 'HTTP', providerType); alice = await tester.fundedWallet('10.0'); bob = await 
tester.fundedWallet('10.0'); chuck = await tester.emptyWallet(); @@ -76,3 +78,8 @@ describe('Withdrawal helpers tests', () => { ); }); }); + +for (const providerType of ['RPC', 'REST']) { + // @ts-ignore + TestSuite(providerType); +} diff --git a/core/tests/ts-tests/tests/withdrawal-helpers.ts b/core/tests/ts-tests/tests/withdrawal-helpers.ts index e4bcfe13d5..2117d09455 100644 --- a/core/tests/ts-tests/tests/withdrawal-helpers.ts +++ b/core/tests/ts-tests/tests/withdrawal-helpers.ts @@ -1,6 +1,6 @@ import { Tester } from './tester'; import { expect } from 'chai'; -import { Wallet, types, Provider, utils } from 'zksync'; +import { Wallet, types, utils, SyncProvider } from 'zksync'; import { BigNumber, ethers } from 'ethers'; import { Address } from 'zksync/build/types'; import { sleep } from 'zksync/build/utils'; @@ -26,7 +26,7 @@ declare module './tester' { } async function waitForOnchainWithdrawal( - syncProvider: Provider, + syncProvider: SyncProvider, hash: string, polling_interval: number = 200, polling_timeout: number = 35000 @@ -60,7 +60,7 @@ async function setRevertTransfer(ethWallet: ethers.Signer, tokenAddress: Address async function setRevert( ethWallet: ethers.Signer, - provider: Provider, + provider: SyncProvider, recipient: Address, token: TokenLike, value: boolean diff --git a/docker-compose-runner.yml b/docker-compose-runner.yml index 513cbf65f6..eb7c0513ff 100644 --- a/docker-compose-runner.yml +++ b/docker-compose-runner.yml @@ -12,6 +12,8 @@ services: dev-ticker: image: "matterlabs/dev-ticker:latest" + volumes: + - ./etc/tokens/:/etc/tokens dev-liquidity-token-watcher: image: "matterlabs/dev-liquidity-token-watcher:latest" diff --git a/docker-compose.yml b/docker-compose.yml index 96b2a4662a..e560a21a04 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -32,6 +32,8 @@ services: image: "matterlabs/dev-ticker:latest" ports: - "9876:9876" + volumes: + - ./etc/tokens/:/etc/tokens tesseracts: image: "adria0/tesseracts" command: --cfg 
/tesseracts.toml -vvv diff --git a/docker/dev-ticker/Dockerfile b/docker/dev-ticker/Dockerfile index da146d226a..d1bb4dcda5 100644 --- a/docker/dev-ticker/Dockerfile +++ b/docker/dev-ticker/Dockerfile @@ -15,4 +15,5 @@ RUN apt install openssl -y EXPOSE 9876 ENV RUST_LOG info COPY --from=builder /usr/src/zksync/target/release/dev-ticker-server /bin/ +COPY --from=builder /usr/src/zksync/etc/tokens /etc/tokens ENTRYPOINT ["dev-ticker-server"] diff --git a/docker/zk-environment/Dockerfile b/docker/zk-environment/Dockerfile index d23100a6ed..fc55fd95e7 100644 --- a/docker/zk-environment/Dockerfile +++ b/docker/zk-environment/Dockerfile @@ -10,13 +10,17 @@ RUN curl -sL https://deb.nodesource.com/setup_14.x | bash - RUN apt-get install -y nodejs RUN npm install -g yarn +# Install required node packages +RUN yarn global add aglio +RUN yarn global add dredd + # Install Rust and required cargo packages ENV RUSTUP_HOME=/usr/local/rustup \ CARGO_HOME=/usr/local/cargo \ PATH=/usr/local/cargo/bin:$PATH RUN curl https://sh.rustup.rs -sSf | bash -s -- -y RUN cargo install diesel_cli --no-default-features --features postgres -RUN cargo install --version=0.2.0 sqlx-cli +RUN cargo install --version=0.5.2 sqlx-cli # Install `solc` RUN curl -LO https://github.com/ethereum/solidity/releases/download/v0.5.16/solc-static-linux diff --git a/infrastructure/api-docs/.gitignore b/infrastructure/api-docs/.gitignore new file mode 100755 index 0000000000..a6d7570905 --- /dev/null +++ b/infrastructure/api-docs/.gitignore @@ -0,0 +1,4 @@ +index.html +documentation.apib +test.apib +/build diff --git a/infrastructure/api-docs/blueprint/groups/accounts.apib b/infrastructure/api-docs/blueprint/groups/accounts.apib new file mode 100755 index 0000000000..c1678aa2d6 --- /dev/null +++ b/infrastructure/api-docs/blueprint/groups/accounts.apib @@ -0,0 +1,58 @@ +# Group Accounts + +## api/v0.2/accounts [/accounts/{accountIdOrAddress}/{stateType}] + ++ Parameters + + accountIdOrAddress (required, string, `1`) 
... Account Id or address in the zkSync network + + stateType (required, "committed" | "finalized", `committed`) ... The type of account state which we want to get + +### Get account state [GET] +Returns the account state for a particular block + ++ Response 200 (application/json) + + Attributes + + request (Request, required) + + status: success (string, required) + + result (Account, required{{isResultNullable}}) + + error (Error, required, nullable) + +## api/v0.2/accounts/{accountIdOrAddress}/transactions [/accounts/{accountIdOrAddress}/transactions{?from,limit,direction}] + ++ Parameters + + accountIdOrAddress (required, string, `1`) ... Account Id or address in the zkSync network + + from (required, "latest" | string, `latest`) ... From which transaction to show + + limit (required, number, `2`) ... The limit of transactions to be shown + + direction (required, "newer" | "older", `older`) ... newer or older than the id of the `from` + + +### Get account transactions [GET] +Returns the transactions for a particular account according to the query params + ++ Response 200 (application/json) + + Attributes + + request (Request, required) + + status: success (string, required) + + result (object, required{{isResultNullable}}) + + pagination (PaginationFromHash, required) + + list (array[Transaction.InBlock], required) + + error (Error, required, nullable) + +## api/v0.2/accounts/{accountIdOrAddress}/transactions/pending [/accounts/{accountIdOrAddress}/transactions/pending{?from,limit,direction}] + ++ Parameters + + accountIdOrAddress (required, string, `1`) ... Account Id or address in the zkSync network + + from (required, "latest" | number, `100`) ... From which transaction to show + + limit (required, number, `2`) ... The limit of transactions to be shown + + direction (required, "newer" | "older", `older`) ... newer or older than the id of the `from` + +### Get pending priority operations [GET] +Get account pending priority operations. 
+ ++ Response 200 (application/json) + + Attributes + + request (Request, required) + + status: success (string, required) + + result (object, required{{isResultNullable}}) + + pagination (PaginationFromNumber, required) + + list (array[Transaction.InBlock.L1], required) + + error (Error, required, nullable) diff --git a/infrastructure/api-docs/blueprint/groups/batches.apib b/infrastructure/api-docs/blueprint/groups/batches.apib new file mode 100755 index 0000000000..045b020ca4 --- /dev/null +++ b/infrastructure/api-docs/blueprint/groups/batches.apib @@ -0,0 +1,34 @@ +# Group Batches + +## api/v0.2/transactions/batches [/transactions/batches] + +### Submit batch of transactions [POST] +Submit batch of transactions. + ++ Request (application/json) + + Attributes + + txs (array[Transaction.Incoming], required) + + signature (BatchSignature, required) + ++ Response 200 (application/json) + + Attributes + + request (Request, required) + + status: success (string, required) + + result (SubmitBatchResponse, required{{isResultNullable}}) + + error (Error, required, nullable) + + +## api/v0.2/transactions/batches/:txBatchHash [/transactions/batches/{txBatchHash}] + ++ Parameters + + txBatchHash (required, string, `{{txBatchHash}}`) ... 
the hash of the contents of the batch queried + +### Get transactions batch status [GET] +Get transactions batch status (latest receipt) + ++ Response 200 (application/json) + + Attributes + + request (Request, required) + + status: success (string, required) + + result (BatchStatus, required{{isResultNullable}}) + + error (Error, required, nullable) diff --git a/infrastructure/api-docs/blueprint/groups/blocks.apib b/infrastructure/api-docs/blueprint/groups/blocks.apib new file mode 100755 index 0000000000..698982cf42 --- /dev/null +++ b/infrastructure/api-docs/blueprint/groups/blocks.apib @@ -0,0 +1,55 @@ +# Group Blocks + +## api/v0.2/blocks [/blocks{?from,limit,direction}] + ++ Parameters + + from (required, "latest" | number, `latest`) ... From which transaction to show + + limit (required, number, `2`) ... The limit of transactions to be shown + + direction (required, "newer" | "older", `older`) ... newer or older than the id of the `from` + +### Get blocks range [GET] +Get blocks range + ++ Response 200 (application/json) + + Attributes + + request (Request, required) + + status: success (string, required) + + result (object, required{{isResultNullable}}) + + pagination (PaginationFromNumber, required) + + list (array[Block], required) + + error (Error, required, nullable) + +## api/v0.2/blocks/:block_number [/blocks/{blockNumber}] + ++ Parameters + + `blockNumber`: `lastCommitted` (required, BlockNumber) - a number of the block or `lastCommitted` for getting the last committed block or `lastFinalized` for getting the last finalized block + +### Get block details [GET] +Get block details + ++ Response 200 (application/json) + + Attributes + + request (Request, required) + + status: success (string, required) + + result (Block, required{{isResultNullable}}) + + error (Error, required, nullable) + +## api/v0.2/blocks/blockNumber/transactions [/blocks/{blockNumber}/transactions{?from,limit,direction}] + ++ Parameters + + `blockNumber`: `lastCommitted` (required, 
BlockNumber) - a number of the block or `lastCommitted` for getting the last committed block or `lastFinalized` for getting the last finalized block + + from (required, "latest" | string, `latest`) ... From which transaction to show + + limit (required, number, `2`) ... The limit of transactions to be shown + + direction (required, "newer" | "older", `older`) ... newer or older than the id of the `from` + +### Get block transactions [GET] +Get block transactions + ++ Response 200 (application/json) + + Attributes + + request (Request, required) + + status: success (string, required) + + result (object, required{{isResultNullable}}) + + pagination (PaginationFromHash, required) + + list (array[Transaction.InBlock], required) + + error (Error, required, nullable) diff --git a/infrastructure/api-docs/blueprint/groups/config.apib b/infrastructure/api-docs/blueprint/groups/config.apib new file mode 100755 index 0000000000..9124c4656f --- /dev/null +++ b/infrastructure/api-docs/blueprint/groups/config.apib @@ -0,0 +1,13 @@ +# Group Config + +## api/v0.2/config [/config] + +### Get config of network [GET] +Returns the config of network + ++ Response 200 (application/json) + + Attributes + + request (Request, required) + + status: success (string, required) + + result (Config, required{{isResultNullable}}) + + error (Error, required, nullable) diff --git a/infrastructure/api-docs/blueprint/groups/fee.apib b/infrastructure/api-docs/blueprint/groups/fee.apib new file mode 100755 index 0000000000..4e8f68adf2 --- /dev/null +++ b/infrastructure/api-docs/blueprint/groups/fee.apib @@ -0,0 +1,36 @@ +# Group Fees + +## api/v0.2/fee [/fee] + +### Get fee for a single transaction [POST] +Request fee for a single transaction. 
+ ++ Request (application/json) + + Attributes + + txType: Transfer (Fee.Type, required) + + address: 0xf33A2D61DD09541A8C9897D7236aDcCCC14Cf769 (string, required) + + tokenLike: ETH (Token.TokenLike, required) + ++ Response 200 (application/json) + + Attributes + + request (Request, required) + + status: success (string, required) + + result (Fee.Single, required{{isResultNullable}}) + + error (Error, required, nullable) + +## api/v0.2/fee/batch [/fee/batch] + +### Get fee for a transactions batch [POST] +Request fee for a transactions batch. + ++ Request (application/json) + + Attributes + + transactions (array[Fee.Type.with.Address], required) + + tokenLike: ETH (Token.TokenLike, required) + ++ Response 200 (application/json) + + Attributes + + request (Request, required) + + status: success (string, required) + + result (Fee.Batch, required{{isResultNullable}}) + + error (Error, required, nullable) diff --git a/infrastructure/api-docs/blueprint/groups/status.apib b/infrastructure/api-docs/blueprint/groups/status.apib new file mode 100755 index 0000000000..2cee10a9e1 --- /dev/null +++ b/infrastructure/api-docs/blueprint/groups/status.apib @@ -0,0 +1,13 @@ +# Group Network status + +## api/v0.2/networkStatus [/networkStatus] + +### Get network status [GET] +Returns the status of the network + ++ Response 200 (application/json) + + Attributes + + request (Request, required) + + status: success (string, required) + + result (NetworkStatus, required{{isResultNullable}}) + + error (Error, required, nullable) diff --git a/infrastructure/api-docs/blueprint/groups/tokens.apib b/infrastructure/api-docs/blueprint/groups/tokens.apib new file mode 100755 index 0000000000..5855308350 --- /dev/null +++ b/infrastructure/api-docs/blueprint/groups/tokens.apib @@ -0,0 +1,51 @@ +# Group Tokens + +## api/v0.2/tokens [/tokens{?from,limit,direction}] + ++ Parameters + + from (required, "latest" | number, `latest`) ... From which token id to show + + limit (required, number, `2`) ... 
The limit of tokens to be returned + + direction (required, "newer" | "older", `older`) ... newer or older than the id of the `from` + +### Get list of supported tokens [GET] +Returns the tokens that are currently supported by zkSync + ++ Response 200 (application/json) + + Attributes + + request (Request, required) + + status: success (string, required) + + result (object, required{{isResultNullable}}) + + pagination (PaginationFromNumber, required) + + list (array[Token.Token], required) + + error (Error, required, nullable) + +## api/v0.2/tokens/tokenIdOrAddress [/tokens/{tokenIdOrAddress}] + ++ Parameters + + tokenIdOrAddress (required, string, `2`) ... the id or address of the token in the zkSync network + +### Get token info [GET] +Get token info + ++ Response 200 (application/json) + + Attributes + + request (Request, required) + + status: success (string, required) + + result (Token.Token, required{{isResultNullable}}) + + error (Error, required, nullable) + +## api/v0.2/tokens/:tokenIdOrAddress/priceIn/:currency [/tokens/{tokenIdOrAddress}/priceIn/{currency}] + ++ Parameters + + tokenIdOrAddress (required, string, `2`) ... the id or address of the token in the zkSync network + + currency (required, `usd` | number, `usd`) ... 
either usd or any of the supported tokens in the zkSync network + +### Get token price [GET] +Get token price relative to another token + ++ Response 200 (application/json) + + Attributes + + request (Request, required) + + status: success (string, required) + + result (Token.Price, required{{isResultNullable}}) + + error (Error, required, nullable) diff --git a/infrastructure/api-docs/blueprint/groups/transactions.apib b/infrastructure/api-docs/blueprint/groups/transactions.apib new file mode 100755 index 0000000000..6c98c26511 --- /dev/null +++ b/infrastructure/api-docs/blueprint/groups/transactions.apib @@ -0,0 +1,49 @@ +# Group Transactions + +## api/v0.2/transactions [/transactions] + +### Submit transaction [POST] +Submit single transaction + ++ Request (application/json) + + Attributes + + tx (Transaction.Incoming, required) + + signature (TxEthSignature, optional) + ++ Response 200 (application/json) + + Attributes + + request (Request, required) + + status: success (string, required) + + result: 0xb092a0a2eabbf0b5b57d18e70d929899676cf3c0ad4df0f9492c3094ec4a8b19 (string, required{{isResultNullable}}) + + error (Error, required, nullable) + +## api/v0.2/transactions/:txHash [/transactions/{txHash}] + ++ Parameters + + txHash (required, string, `{{txHash}}`) ... hash of the transaction in the zkSync network + +### Get transaction receipt [GET] +Get transaction receipt + ++ Response 200 (application/json) + + Attributes + + request (Request, required) + + status: success (string, required) + + result (Receipt, required{{isResultNullable}}) + + error (Error, required, nullable) + + +## api/v0.2/transactions/:txHash/data [/transactions/{txHash}/data] + ++ Parameters + + txHash (required, string, `{{txHash}}`) ... 
hash of the transaction in the zkSync network + +### Get transaction data [GET] +Get transaction data + ++ Response 200 (application/json) + + Attributes + + request (Request, required) + + status: success (string, required) + + result (Transaction.Signed, required{{isResultNullable}}) + + error (Error, required, nullable) diff --git a/infrastructure/api-docs/blueprint/template.apib b/infrastructure/api-docs/blueprint/template.apib new file mode 100755 index 0000000000..546a38adfb --- /dev/null +++ b/infrastructure/api-docs/blueprint/template.apib @@ -0,0 +1,48 @@ +FORMAT: 1A + +# zkSync API v0.2 + +{{accountsEndpoints}} +{{batchesEndpoints}} +{{blocksEndpoints}} +{{configEndpoints}} +{{feeEndpoints}} +{{statusEndpoints}} +{{tokensEndpoints}} +{{transactionsEndpoints}} + +# Data Structures + +{{accountsTypes}} +{{batchesTypes}} +{{blocksTypes}} +{{configTypes}} +{{feeTypes}} +{{statusTypes}} +{{paginationTypes}} +{{receiptTypes}} +{{tokensTypes}} +{{transactionsTypes}} + +## TxState (enum) ++ queued ++ committed ++ finalized ++ rejected + +## BlockNumber (enum) ++ (number) ++ lastCommitted ++ lastFinalized + +## Request ++ network: localhost (Network, required) ++ apiVersion: v02 (fixed, required) ++ resource: /api/v0.2/... 
(string, required) ++ args (object, required) ++ timestamp: `2021-05-31T14:17:24.112536900Z` (string, required) + +## Error ++ errorType: errorType (string, required) ++ code: 0 (number, required) ++ message: message (string, required) diff --git a/infrastructure/api-docs/blueprint/types/accounts.apib b/infrastructure/api-docs/blueprint/types/accounts.apib new file mode 100755 index 0000000000..af65f8de3b --- /dev/null +++ b/infrastructure/api-docs/blueprint/types/accounts.apib @@ -0,0 +1,12 @@ +## Account (object) ++ accountId: 12 (number, required) ++ address: `0xc0f97CC918C9d6fA4E9fc6be61a6a06589D199b3` (string, required) ++ nonce: 6412 (number, required) ++ pubKeyHash: `sync:82b9eb68c6f7f80cecf49ee1a20acb8ae9ecd602` (string, required) ++ lastUpdateInBlock: 15001 (number, required) ++ balances (object, required) ++ accountType (Account.Type, required, nullable) + +## Account.Type (enum) +- Owned +- CREATE2 diff --git a/infrastructure/api-docs/blueprint/types/batches.apib b/infrastructure/api-docs/blueprint/types/batches.apib new file mode 100755 index 0000000000..5a9bd8e6df --- /dev/null +++ b/infrastructure/api-docs/blueprint/types/batches.apib @@ -0,0 +1,15 @@ +## SubmitBatchResponse (object) ++ batchHash: 0x6d6f7e07c85a770c6cb9f667c6b7d2d6335781f956a3a66503c87a56fd392e80 (string, required) ++ transactionHashes: [0xca1d902dc4aa0d401aded0f3c4c6e73c1aa68fbb661856af1a8874472f5043bb] (array[string], required) + +## BatchStatus (object) ++ batchHash: 0x6d6f7e07c85a770c6cb9f667c6b7d2d6335781f956a3a66503c87a56fd392e80 (string, required), ++ transactionHashes: [0xca1d902dc4aa0d401aded0f3c4c6e73c1aa68fbb661856af1a8874472f5043bb] (array[string], required) ++ createdAt: `2020-10-12T09:05:03.123416742` (string, required), ++ batchStatus (object, required) + + updatedAt: `2020-10-12T09:05:03.123416742` (string, required) + + lastState: queued (TxState, required) + +## BatchSignature (enum) +- (TxEthSignature) +- (array[TxEthSignature]) diff --git 
a/infrastructure/api-docs/blueprint/types/blocks.apib b/infrastructure/api-docs/blueprint/types/blocks.apib new file mode 100755 index 0000000000..bd74f31c14 --- /dev/null +++ b/infrastructure/api-docs/blueprint/types/blocks.apib @@ -0,0 +1,9 @@ +## Block (object) ++ blockNumber: 11109 (number, required), ++ newStateRoot: `0x61684c1bd4ac3f7843b6a20d4270d58dc4139d546ea4249424bc6c2ce0a48f92` (string, required), ++ blockSize: 110 (number, required), ++ commitTxHash: `0xc2541cf68c6f41a6bc55f9d6ba24816c79431942ca3762514f448540cfa475` (string, required, nullable), ++ verifyTxHash: `0x5e188571f82171b0dc313d9ff5433f10d03153a573e6c2ae5b4fbf683dd2a3` (string, required, nullable), ++ committedAt: `2020-10-12T12:05:03.123416742` (string, required) ++ finalizedAt: `2020-10-12T12:10:03.123416742` (string, required, nullable) ++ status: `committed` (string, required) diff --git a/infrastructure/api-docs/blueprint/types/config.apib b/infrastructure/api-docs/blueprint/types/config.apib new file mode 100755 index 0000000000..9ccfdbacba --- /dev/null +++ b/infrastructure/api-docs/blueprint/types/config.apib @@ -0,0 +1,6 @@ +## Config (object) ++ network: `mainnet` (Network, required) ++ contract: `0x5D8cec831732d6DA03677a33430796C4840728f7` (string, required) ++ govContract: `0x70e7fB5A89f9556F8fF9611bB6f42C8eAFDB6c93` (string, required) ++ depositConfirmations: 10 (number, required) ++ zksyncVersion: `contractV4` (string, required) diff --git a/infrastructure/api-docs/blueprint/types/fee.apib b/infrastructure/api-docs/blueprint/types/fee.apib new file mode 100755 index 0000000000..5451a61a82 --- /dev/null +++ b/infrastructure/api-docs/blueprint/types/fee.apib @@ -0,0 +1,35 @@ +## ChangePubKeyType (enum) +- Onchain +- ECDSA +- CREATE2 + +## ChangePubKeyFee (object) ++ ChangePubKey (ChangePubKeyType, required) + +## LegacyChangePubKeyFeeType (object) ++ onchainPubkeyAuth (boolean, required) + +## LegacyChangePubKeyFee (object) ++ ChangePubKey (LegacyChangePubKeyFeeType, required) + 
+## Fee.Type (enum) +- Transfer +- Withdraw +- FastWithdraw +- ForcedExit +- (ChangePubKeyFee) +- (LegacyChangePubKeyFee) + +## Fee.Single (object) ++ gasFee: `12000000000` (string, required), ++ zkpFee: `12000000` (string, required), ++ totalFee: `12012000000` (string, required), + +## Fee.Batch (object) ++ gasFee: `12000000000` (string, required), ++ zkpFee: `12000000` (string, required), ++ totalFee: `12012000000` (string, required), + +## Fee.Type.with.Address (object) ++ txType (Fee.Type, required) ++ address: `0xF659D25A06607Da53e62DAA5842499316A4e2548` (string, required) diff --git a/infrastructure/api-docs/blueprint/types/pagination.apib b/infrastructure/api-docs/blueprint/types/pagination.apib new file mode 100644 index 0000000000..35bfc8fa98 --- /dev/null +++ b/infrastructure/api-docs/blueprint/types/pagination.apib @@ -0,0 +1,11 @@ +## PaginationFromHash (object) ++ from: `0x2dcb7cb4288b84766a1bef4a1bd4ae9ac15ab817f06ad07efc4dac035bcfdb45` (string, required) ++ limit: 2 (number, required) ++ direction: `older` (string, required) ++ count: 268 (number, required) + +## PaginationFromNumber (object) ++ from: 5 (number, required) ++ limit: 2 (number, required) ++ direction: `older` (string, required) ++ count: 268 (number, required) diff --git a/infrastructure/api-docs/blueprint/types/receipt.apib b/infrastructure/api-docs/blueprint/types/receipt.apib new file mode 100755 index 0000000000..fe2f00e351 --- /dev/null +++ b/infrastructure/api-docs/blueprint/types/receipt.apib @@ -0,0 +1,20 @@ +## Receipt.L2 (object) ++ txHash: `0x466a9432e5337ee85deb9092526bb96377d316b9a1f0717ae4027798837fb85b` (string, required), ++ rollupBlock: 99812 (number, required, nullable), ++ status: committed (TxState, required), ++ failReason: null (string, required, nullable) + +## Receipt.L1 (object) ++ status: committed (L1Status, required), ++ ethBlock: 134300 (number, required), ++ rollupBlock: 99812 (number, required, nullable), ++ id: 12001 (number, required) + +## Receipt 
(enum) +- (Receipt.L1) +- (Receipt.L2) + +## L1Status (enum) ++ committed ++ finalized ++ queued diff --git a/infrastructure/api-docs/blueprint/types/status.apib b/infrastructure/api-docs/blueprint/types/status.apib new file mode 100755 index 0000000000..a868088e66 --- /dev/null +++ b/infrastructure/api-docs/blueprint/types/status.apib @@ -0,0 +1,11 @@ +## Network (enum) ++ mainnet ++ rinkeby ++ ropsten ++ localhost + +## NetworkStatus (object) +- lastCommitted: 1298 (number, required) +- finalized: 1296 (number, required) +- totalTransactions: 1200000 (number, required) +- mempoolSize: 123 (number, required) diff --git a/infrastructure/api-docs/blueprint/types/tokens.apib b/infrastructure/api-docs/blueprint/types/tokens.apib new file mode 100755 index 0000000000..6816b79a9c --- /dev/null +++ b/infrastructure/api-docs/blueprint/types/tokens.apib @@ -0,0 +1,17 @@ +## Token.Token (object) +- id: 12 (number, required) +- address: `0x0849D6ae02349352258Ca59c27bC6D3159A7b752` (string, required) +- symbol: `MLTT` (string, required) +- decimals: 18 (number, required) +- enabledForFees: true (boolean, required) + +## Token.TokenLike (enum) ++ (number) ++ (string) + +## Token.Price (object) +- tokenId: 12 (number, required) +- tokenSymbol: `MLTT` (string, required) +- priceIn: `USD` (string, required) +- decimals: 18 (number, required) +- price: `1.01` (string, required) diff --git a/infrastructure/api-docs/blueprint/types/transactions.apib b/infrastructure/api-docs/blueprint/types/transactions.apib new file mode 100755 index 0000000000..06f0aeb532 --- /dev/null +++ b/infrastructure/api-docs/blueprint/types/transactions.apib @@ -0,0 +1,174 @@ +## Transaction.InBlock (object) +- txHash: 0x732D0a2598ba5E9db4e5cfF36F86dF8dA88A959A (string, required) +- blockNumber: 12 (number, required) +- op (Transaction, required) +- status: rejected (TxState, required) +- failReason: Nonce mismatch (string, required, nullable) +- createdAt: `2018-12-12T01:02:03.123456789` (string, required, 
nullable) + +## Transaction.Signed (object) +- tx (Transaction.InBlock, required) +- ethSignature: 0xb71ef30467c91b779ccc07c6615ac5fcd2e8293847dda0a6d0c63c85e95120c812e3f1e1973fc79f1718f45554120ffeca99b21b6d424dca854e8191415d2ce91b (string, required, nullable) + +## Transaction.InBlock.L1 (object) +- txHash: 0x732D0a2598ba5E9db4e5cfF36F86dF8dA88A959A (string, required) +- blockNumber: 12 (number, required) +- op (Transaction.L1, required) +- status: rejected (TxState, required) +- failReason: Nonce mismatch (string, required, nullable) +- createdAt: `2018-12-12T01:02:03.123456789` (string, required, nullable) + +## Transaction.L2.Transfer (object) +- type: Transfer (string, fixed) +- to: 0xD3c62D2F7b6d4A63577F2415E55A6Aa6E1DbB9CA (string, required) +- fee: 12000000000000000000 (string, required) +- from: {{address}} (string, required) +- nonce: 12123 (number, required) +- token: 0 (number, required) +- amount: 17500000000000000 (string, required) +- accountId: {{accountId}} (number, required) +- validFrom: 0 (number, required, nullable) +- validUntil: 1239213821 (number, required, nullable) +- signature (L2Signature, required) + +## Transaction.L2.Withdraw (object) +- type: Withdraw (string, fixed) +- to: 0xD3c62D2F7b6d4A63577F2415E55A6Aa6E1DbB9CA (string, required) +- fee: 1200000000 (number, required) +- from: 0xD3c62D2F7b6d4A63577F2415E55A6Aa6E1DbB9CA (string, required) +- nonce: 12123 (number, required) +- tokenId: 10 (number, required) +- amount: 17500000000000000 (number, required) +- accountId: 12123 (number, required) +- validFrom: 0 (number, required, nullable) +- validUntil: 1239213821 (number, required, nullable) +- fast: false (boolean, required) +- signature (L2Signature, required) + +## Transaction.L2.WithdrawWithEthHash (object) +- type: Withdraw (string, fixed) +- to: 0xD3c62D2F7b6d4A63577F2415E55A6Aa6E1DbB9CA (string, required) +- fee: 1200000000 (number, required) +- from: 0xD3c62D2F7b6d4A63577F2415E55A6Aa6E1DbB9CA (string, required) +- nonce: 
12123 (number, required) +- tokenId: 10 (number, required) +- amount: 17500000000000000 (number, required) +- accountId: 12123 (number, required) +- validFrom: 0 (number, required, nullable) +- validUntil: 1239213821 (number, required, nullable) +- fast: false (boolean, required) +- signature (L2Signature, required) +- ethTxHash: 0xdda1287002282e1804af40a7c7373bd77cc99a2a27c88bf7908be45398e93148 (string, required, nullable) + +## Transaction.L2.ChangePubKey (object) +- type: ChangePubKey (string, fixed) +- accountId: 1201 (number, required) +- account: 0x3EEe181F789c6585B39904eBAeAb10ACf70f5F1a (string, required) +- newPkHash: 5fcf0bad911e8fdebd4fd79b0a832bc92a694e55 (string, required) +- feeTokenId: 6 (number, required) +- fee: 121000000000000 (number, required) +- nonce: 51 (number, required) +- ethAuthData: *ECDSAAuthType* (AuthType, optional) +- signature (L2Signature, required) +- ethSignature: 0xf0947f0a731ada6e09ca99d3aafb1469a3baaf602eab2f5661edd30d6386 (string, optional) +- validFrom: 0 (number, required, nullable) +- validUntil: 1239213821 (number, required, nullable) + +## Transaction.L2.ForcedExit (object) +- type: `ForcedExit` (string, fixed) +- initiatorAccountId: 12 (number, required) +- target: 0x38de1b4a24548d6ff66fa8e56448d9de09955b08 (string, required) +- nonce: 1001 (number, required) +- tokenId: 5 (number, required) +- fee: `12000000000` (string, required) +- validFrom: 0 (number, required) +- validUntil: 1239213821 (number, required) +- signature (L2Signature, required) + +## Transaction.L2.ForcedExitWithEthHash (object) +- type: `ForcedExit` (string, fixed) +- initiatorAccountId: 12 (number, required) +- target: 0x38de1b4a24548d6ff66fa8e56448d9de09955b08 (string, required) +- nonce: 1001 (number, required) +- tokenId: 5 (number, required) +- fee: `12000000000` (string, required) +- validFrom: `2018-12-12T01:02:03.123456789` (string, required) +- validUntil: `2018-12-12T09:02:03.123456789` (string, required) +- signature (L2Signature, 
required) +- ethTxHash: `0xdda1287002282e1804af40a7c7373bd77cc99a2a27c88bf7908be45398e93148` (string, required, nullable) + +## Transaction.Incoming (enum) +- (Transaction.L2.Transfer) +- (Transaction.L2.Withdraw) +- (Transaction.L2.ChangePubKey) +- (Transaction.L2.ForcedExit) + +## Transaction.L2 (enum) +- (Transaction.L2.Transfer) +- (Transaction.L2.WithdrawWithEthHash) +- (Transaction.L2.ChangePubKey) +- (Transaction.L2.ForcedExitWithEthHash) + +## Transaction.L1.Deposit +- type: `Deposit` (string, fixed) +- from: `0x555968cfc291aece5550822feb712d1cf66c57b4` (string, required) +- tokenId: 12 (number, required) +- amount: `18000000000000000` (string, required) +- to: `0x3f0d95988eb8538c74e609cac0c1ce56ff3d3179` (string, required) +- accountId: 1413 (number, required, nullable) +- ethHash: `0xdda1287002282e1804af40a7c7373bd77cc99a2a27c88bf7908be45398e93148` (string, required) +- id: 17002 (number, required) +- txHash: `0x2dcb7cb4288b847d8a1bef4a1bd4ae9ac15ab817f06ad07efc4dac035bcfdb45` (string, required) + +## Transaction.L1.FullExit +- type: `FullExit` (string, fixed) +- accountId: 123 (number, required) +- tokenId: 34 (number, required) +- ethHash: `0x1216aae3714e46a9efe0066ff5f3684c95ea9a680a4c39cd36e62b117cb1837c` (string, required) +- id: 1348 (number, required) +- txHash: `0xaf0612970ea1b7809886ab0cce398bb14524d700e4c7305c54c34b5f9f3e6f56` (string, required) + +## Transaction.L1 (enum) +- (Transaction.L1.Deposit) +- (Transaction.L1.FullExit) + +## Transaction (enum) +- (Transaction.L2.Transfer) +- (Transaction.L2.WithdrawWithEthHash) +- (Transaction.L2.ChangePubKey) +- (Transaction.L2.ForcedExitWithEthHash) +- (Transaction.L1.Deposit) +- (Transaction.L1.FullExit) + +## L2Signature (object) +- pubKey: {{pubKey}} (string, required) +- signature: {{l2Signature}} (string, required) + +## ECDSAAuthType (object) +- type: `ECDSA` (string, fixed) +- batchHash: `0xcf70011f7220e08cd58493bf04be43addb4aaf4ec6a4e05b50bbe05aa066d` (string, required) + +## OnchainAuthType 
(object) +- type: `Onchain` (string, fixed) + +## CREATE2AuthType (object) +- creatorAddress: `0xCd4eA9f4346b992e906a3672a2Ce6632760efb10` (string, required) +- saltArg: `0x81d79d8c78a01373a37cd2d0b5d109798f5c7a07c3bf20b4d5` (string, required) +- codeHash: `0x896a58a92659223a74dd447ee259607d3bac01f9b826dfce62293b8861f4e729` (string, required) + +## AuthType (enum) +- (ECDSAAuthType) +- (OnchainAuthType) +- (CREATE2AuthType) + +## EthereumSignature (object) +- type: `EthereumSignature` (string, fixed) +- signature: `{{ethereumSignature}}` (string, required) + +## EIP1271Signature (object) +- type: `EIP1271Signature` (string, fixed) +- signature: `0xc9eb71b736ef69192d90a6516c5d66af883f6bc9a1749ed98edff106bcd49594618644860be89b08725de834d3a312d409810ecb39e9e7a5a5a9d8d870b166ab1c` (string, required) + +## TxEthSignature (enum) +- (EthereumSignature) +- (EIP1271Signature) diff --git a/infrastructure/api-docs/dredd.yml b/infrastructure/api-docs/dredd.yml new file mode 100644 index 0000000000..82995ed387 --- /dev/null +++ b/infrastructure/api-docs/dredd.yml @@ -0,0 +1,32 @@ +color: true +dry-run: null +hookfiles: null +language: nodejs +require: null +server: null +server-wait: 10 +init: false +custom: {} +names: false +only: [] +reporter: [] +output: [] +header: [] +sorted: false +user: null +inline-errors: false +details: false +method: [] +loglevel: warning +path: [] +hooks-worker-timeout: 5000 +hooks-worker-connect-timeout: 1500 +hooks-worker-connect-retry: 500 +hooks-worker-after-connect-wait: 100 +hooks-worker-term-timeout: 5000 +hooks-worker-term-retry: 500 +hooks-worker-handler-host: 127.0.0.1 +hooks-worker-handler-port: 61321 +config: ./dredd.yml +blueprint: ./blueprint/test.apib +endpoint: 'http://127.0.0.1:3001/api/v0.2' diff --git a/infrastructure/api-docs/package.json b/infrastructure/api-docs/package.json new file mode 100644 index 0000000000..e9751c4ba3 --- /dev/null +++ b/infrastructure/api-docs/package.json @@ -0,0 +1,18 @@ +{ + "name": "api-docs", + 
"version": "1.0.0", + "license": "MIT", + "main": "build/index.js", + "private": true, + "dependencies": { + "ts-node": "^9.0.0", + "typescript": "^4.0.5", + "handlebars": "4.7.6", + "ethers": "^5.0.19", + "commander": "^6.0.0" + }, + "scripts": { + "build": "tsc", + "watch": "tsc --watch" + } +} diff --git a/infrastructure/api-docs/src/compile.ts b/infrastructure/api-docs/src/compile.ts new file mode 100644 index 0000000000..9b089ddae9 --- /dev/null +++ b/infrastructure/api-docs/src/compile.ts @@ -0,0 +1,159 @@ +import * as fs from 'fs'; +import * as path from 'path'; +import * as handlebars from 'handlebars'; +import * as zksync from 'zksync'; +import * as ethers from 'ethers'; + +export function getDirPath() { + return path.join(process.env.ZKSYNC_HOME as string, 'infrastructure/api-docs'); +} + +function pasteAllFilesInOne() { + let template = handlebars.compile(fs.readFileSync(path.join(getDirPath(), 'blueprint/template.apib'), 'utf-8'), { + noEscape: true + }); + + let replaceObject: any = {}; + + const groupsFiles = fs.readdirSync(path.join(getDirPath(), 'blueprint/groups')); + for (let file of groupsFiles) { + const data = fs.readFileSync(path.join(getDirPath(), 'blueprint/groups', file), 'utf-8'); + replaceObject[file.replace('.apib', '') + 'Endpoints'] = data; + } + + const typesFiles = fs.readdirSync(path.join(getDirPath(), 'blueprint/types')); + for (const file of typesFiles) { + const data = fs.readFileSync(path.join(getDirPath(), 'blueprint/types', file), 'utf-8'); + replaceObject[file.replace('.apib', '') + 'Types'] = data; + } + + return template(replaceObject); +} + +async function compileCommon() { + const data = pasteAllFilesInOne(); + let template = handlebars.compile(data, { noEscape: true }); + + let replaceObject: any = await getHashesAndSignatures(); + replaceObject['isResultNullable'] = '{{isResultNullable}}'; + + return template(replaceObject); +} + +async function setupWallet() { + const pathToConfig = path.join(process.env.ZKSYNC_HOME as 
string, `etc/test_config/constant/eth.json`); + const config = fs.readFileSync(pathToConfig, { + encoding: 'utf-8' + }); + const ethTestConfig = JSON.parse(config); + let web3Url = (process.env.ETH_CLIENT_WEB3_URL as string).split(',')[0]; + const ethProvider = new ethers.providers.JsonRpcProvider(web3Url); + ethProvider.pollingInterval = 100; + const syncProvider = await zksync.getDefaultRestProvider('localhost'); + const ethWallet = ethers.Wallet.fromMnemonic(ethTestConfig.test_mnemonic as string, "m/44'/60'/0'/0/0").connect( + ethProvider + ); + + const syncWallet = await zksync.Wallet.fromEthSigner(ethWallet, syncProvider); + + const depositHandle = await syncWallet.depositToSyncFromEthereum({ + depositTo: syncWallet.address(), + token: 'ETH', + amount: syncWallet.provider.tokenSet.parseToken('ETH', '1000') + }); + await depositHandle.awaitReceipt(); + + if (!(await syncWallet.isSigningKeySet())) { + const changePubkeyHandle = await syncWallet.setSigningKey({ + feeToken: 'ETH', + ethAuthType: 'ECDSA' + }); + await changePubkeyHandle.awaitReceipt(); + } + + return syncWallet; +} + +interface Parameters { + txHash: string; + txBatchHash: string; + address: string; + accountId: number; + pubKey: string; + l2Signature: string; + ethereumSignature: string; +} + +async function getHashesAndSignatures() { + let syncWallet = await setupWallet(); + + const handle = await syncWallet.syncTransfer({ to: syncWallet.address(), token: 'ETH', amount: 0 }); + await handle.awaitReceipt(); + const txHash = handle.txHash; + + const batch = await syncWallet + .batchBuilder() + .addTransfer({ to: syncWallet.address(), token: 'ETH', amount: 0 }) + .build('ETH'); + let txs = []; + for (const signedTx of batch.txs) { + txs.push(signedTx.tx); + } + + const submitBatchResponse = await (syncWallet.provider as zksync.RestProvider).submitTxsBatchNew( + txs, + batch.signature + ); + await syncWallet.provider.notifyTransaction(submitBatchResponse.transactionHashes[0], 'COMMIT'); + const 
txBatchHash = submitBatchResponse.batchHash; + + const signedTransfer = await syncWallet.signSyncTransfer({ + to: '0xD3c62D2F7b6d4A63577F2415E55A6Aa6E1DbB9CA', + token: 'ETH', + amount: '17500000000000000', + fee: '12000000000000000000', + nonce: 12123, + validFrom: 0, + validUntil: 1239213821 + }); + const address = syncWallet.address(); + const accountId = (await syncWallet.getAccountId())!; + const pubKey = signedTransfer.tx.signature!.pubKey; + const l2Signature = signedTransfer.tx.signature!.signature; + const ethereumSignature = signedTransfer.ethereumSignature!.signature; + + let result: Parameters = { + txHash, + txBatchHash, + address, + accountId, + pubKey, + l2Signature, + ethereumSignature + }; + return result; +} + +export async function compileApibForDocumentation() { + const before = await compileCommon(); + let template = handlebars.compile(before, { noEscape: true }); + + let replaceObject: any = {}; + replaceObject['isResultNullable'] = ', nullable'; + + const after = template(replaceObject); + + fs.writeFileSync(path.join(getDirPath(), 'blueprint/documentation.apib'), after); +} + +export async function compileApibForTest() { + const before = await compileCommon(); + let template = handlebars.compile(before, { noEscape: true }); + + let replaceObject: any = {}; + replaceObject['isResultNullable'] = ''; + + const after = template(replaceObject); + + fs.writeFileSync(path.join(getDirPath(), 'blueprint/test.apib'), after); +} diff --git a/infrastructure/api-docs/src/index.ts b/infrastructure/api-docs/src/index.ts new file mode 100644 index 0000000000..c5459dfade --- /dev/null +++ b/infrastructure/api-docs/src/index.ts @@ -0,0 +1,40 @@ +import { Command, program } from 'commander'; +import * as path from 'path'; +import { compileApibForTest, compileApibForDocumentation, getDirPath } from './compile'; +import { spawn } from './utils'; + +export const compile = new Command('compile') + .description('compile .apib files') + .option('--test', 'build 
test.apib') + .action(async (cmd: Command) => { + if (cmd.test) { + await compileApibForTest(); + } else { + await compileApibForDocumentation(); + } + }); + +export const generateDocs = new Command('generate-docs') + .description('generate docs .html file') + .action(async (_cmd: Command) => { + const pathToApib = path.join(getDirPath(), 'blueprint/documentation.apib'); + await spawn(`aglio -i ${pathToApib} -o index.html`); + }); + +export const test = new Command('test').description('test docs').action(async (_cmd: Command) => { + await spawn(`cd ${getDirPath()} && dredd`); +}); + +program.version('1.0.0').name('api-docs').description('api documentation tool'); +program.addCommand(compile); +program.addCommand(generateDocs); +program.addCommand(test); + +async function main() { + await program.parseAsync(process.argv); +} + +main().catch((err: Error) => { + console.error('Error:', err.message || err); + process.exitCode = 1; +}); diff --git a/infrastructure/api-docs/src/utils.ts b/infrastructure/api-docs/src/utils.ts new file mode 100644 index 0000000000..46d54ec594 --- /dev/null +++ b/infrastructure/api-docs/src/utils.ts @@ -0,0 +1,14 @@ +import { spawn as _spawn } from 'child_process'; + +// executes a command in a new shell +// but pipes data to parent's stdout/stderr +export function spawn(command: string) { + command = command.replace(/\n/g, ' '); + const child = _spawn(command, { stdio: 'inherit', shell: true }); + return new Promise((resolve, reject) => { + child.on('error', reject); + child.on('close', (code) => { + code == 0 ? 
resolve(code) : reject(`Child process exited with code ${code}`); + }); + }); +} diff --git a/infrastructure/api-docs/tsconfig.json b/infrastructure/api-docs/tsconfig.json new file mode 100644 index 0000000000..f96df8d60e --- /dev/null +++ b/infrastructure/api-docs/tsconfig.json @@ -0,0 +1,15 @@ +{ + "compilerOptions": { + "target": "es2019", + "module": "commonjs", + "outDir": "build", + "strict": true, + "esModuleInterop": true, + "noEmitOnError": true, + "skipLibCheck": true, + "declaration": true + }, + "files": [ + "src/index.ts" + ] +} diff --git a/infrastructure/zk/src/api-docs.ts b/infrastructure/zk/src/api-docs.ts new file mode 100644 index 0000000000..f4a1596360 --- /dev/null +++ b/infrastructure/zk/src/api-docs.ts @@ -0,0 +1,10 @@ +import { Command } from 'commander'; +import * as utils from './utils'; + +export async function buildDocs() { + await utils.spawn('api_docs'); + await utils.spawn('api_docs compile'); + await utils.spawn('api_docs generate-docs'); +} + +export const command = new Command('api-docs').description('build api v0.2 documentation').action(buildDocs); diff --git a/infrastructure/zk/src/index.ts b/infrastructure/zk/src/index.ts index 12513c7057..de12acda3f 100644 --- a/infrastructure/zk/src/index.ts +++ b/infrastructure/zk/src/index.ts @@ -18,6 +18,7 @@ import { command as fmt } from './fmt'; import { command as lint } from './lint'; import { command as completion } from './completion'; import { command as config } from './config'; +import { command as apiDocs } from './api-docs'; import * as env from './env'; const COMMANDS = [ @@ -36,6 +37,7 @@ const COMMANDS = [ lint, docker, config, + apiDocs, env.command, completion(program as Command) ]; diff --git a/infrastructure/zk/src/run/run.ts b/infrastructure/zk/src/run/run.ts index 976a85f72f..e2b0b0495c 100644 --- a/infrastructure/zk/src/run/run.ts +++ b/infrastructure/zk/src/run/run.ts @@ -22,6 +22,7 @@ export async function deployERC20(command: 'dev' | 'new', name?: string, symbol? 
]' > ./etc/tokens/localhost.json`); if (!process.env.CI) { await docker.restart('dev-liquidity-token-watcher'); + await docker.restart('dev-ticker'); } } else if (command == 'new') { await utils.spawn( diff --git a/infrastructure/zk/src/test/integration.ts b/infrastructure/zk/src/test/integration.ts index bb0ed631e5..e83f24e270 100644 --- a/infrastructure/zk/src/test/integration.ts +++ b/infrastructure/zk/src/test/integration.ts @@ -94,6 +94,7 @@ export async function inDocker(command: string, timeout: number) { export async function all() { await server(); await api(); + await apiDocs(); await withdrawalHelpers(); await zcli(); await rustSDK(); @@ -102,6 +103,16 @@ export async function all() { await run.dataRestore.checkExisting(); } +export async function apiDocs() { + await utils.spawn('api_docs'); + // Checks that documentation can be built successfully. + await utils.spawn('api_docs compile'); + await utils.spawn('api_docs generate-docs'); + // Checks that response structures of endpoints match structures defined in the documentation. + await utils.spawn('api_docs compile --test'); + await utils.spawn('api_docs test'); +} + export async function api() { await utils.spawn('yarn ts-tests api-test'); } @@ -237,6 +248,14 @@ command cmd.withServer ? await withServer(api, 240) : await api(); }); +command + .command('api-docs') + .description('run api-docs integration tests') + .option('--with-server') + .action(async (cmd: Command) => { + cmd.withServer ? 
await withServer(apiDocs, 240) : await apiDocs(); + }); + command .command('testkit [mode]') .description('run testkit tests') diff --git a/package.json b/package.json index d1eeb8f15d..12d2b40fe0 100644 --- a/package.json +++ b/package.json @@ -13,6 +13,7 @@ "infrastructure/zk", "infrastructure/reading-tool", "infrastructure/token-lists-manager", + "infrastructure/api-docs", "core/tests/ts-tests" ], "nohoist": [ @@ -32,7 +33,8 @@ "ts-tests": "yarn workspace ts-tests", "explorer": "yarn workspace sync-explorer", "zk": "yarn workspace zk", - "reading-tool": "yarn workspace reading-tool" + "reading-tool": "yarn workspace reading-tool", + "api-docs": "yarn workspace api-docs" }, "devDependencies": { "@typescript-eslint/eslint-plugin": "^4.10.0", diff --git a/sdk/zksync.js/src/batch-builder.ts b/sdk/zksync.js/src/batch-builder.ts index 591efe8f66..767fe542b6 100644 --- a/sdk/zksync.js/src/batch-builder.ts +++ b/sdk/zksync.js/src/batch-builder.ts @@ -20,7 +20,15 @@ import { Wallet } from './wallet'; interface InternalTx { type: 'Withdraw' | 'Transfer' | 'ChangePubKey' | 'ForcedExit' | 'MintNFT' | 'WithdrawNFT' | 'Swap'; tx: any; - feeType: 'Withdraw' | 'Transfer' | ChangePubKeyFee | 'MintNFT' | 'WithdrawNFT' | 'Swap'; + feeType: + | 'Withdraw' + | 'Transfer' + | 'FastWithdraw' + | 'ForcedExit' + | ChangePubKeyFee + | 'Swap' + | 'MintNFT' + | 'WithdrawNFT'; address: Address; token: TokenLike; // Whether or not the tx has been signed. 
@@ -292,7 +300,7 @@ export class BatchBuilder { this.txs.push({ type: 'ForcedExit', tx: _forcedExit, - feeType: 'Withdraw', + feeType: 'ForcedExit', address: _forcedExit.target, token: _forcedExit.token }); diff --git a/sdk/zksync.js/src/index.ts b/sdk/zksync.js/src/index.ts index fd2193fd77..2eb83f9daa 100644 --- a/sdk/zksync.js/src/index.ts +++ b/sdk/zksync.js/src/index.ts @@ -1,5 +1,7 @@ export { Wallet, Transaction, ETHOperation, submitSignedTransaction, submitSignedTransactionsBatch } from './wallet'; export { Provider, ETHProxy, getDefaultProvider } from './provider'; +export { RestProvider, getDefaultRestProvider } from './rest-provider'; +export { SyncProvider } from './provider-interface'; export { Signer, Create2WalletSigner } from './signer'; export { closestPackableTransactionAmount, closestPackableTransactionFee } from './utils'; export { EthMessageSigner } from './eth-message-signer'; diff --git a/sdk/zksync.js/src/provider-interface.ts b/sdk/zksync.js/src/provider-interface.ts new file mode 100644 index 0000000000..15eabbbf1a --- /dev/null +++ b/sdk/zksync.js/src/provider-interface.ts @@ -0,0 +1,53 @@ +import { + AccountState, + Address, + ContractAddress, + Fee, + IncomingTxFeeType, + PriorityOperationReceipt, + TokenLike, + Tokens, + TransactionReceipt, + TxEthSignature +} from './types'; +import { BigNumber } from 'ethers'; +import { TokenSet } from './utils'; + +export abstract class SyncProvider { + contractAddress: ContractAddress; + public tokenSet: TokenSet; + public providerType: 'RPC' | 'Rest'; + // For HTTP provider + public pollIntervalMilliSecs = 500; + + abstract submitTx(tx: any, signature?: TxEthSignature, fastProcessing?: boolean): Promise; + abstract submitTxsBatch( + transactions: { tx: any; signature?: TxEthSignature }[], + ethSignatures?: TxEthSignature | TxEthSignature[] + ): Promise; + abstract getContractAddress(): Promise; + abstract getTokens(): Promise; + abstract getState(address: Address): Promise; + abstract 
getTxReceipt(txHash: string): Promise; + abstract getPriorityOpStatus(hashOrSerialId: string | number): Promise; + abstract getConfirmationsForEthOpAmount(): Promise; + abstract notifyPriorityOp( + hashOrSerialId: string | number, + action: 'COMMIT' | 'VERIFY' + ): Promise; + abstract notifyTransaction(hash: string, action: 'COMMIT' | 'VERIFY'): Promise; + abstract getTransactionFee(txType: IncomingTxFeeType, address: Address, tokenLike: TokenLike): Promise; + abstract getTransactionsBatchFee( + txTypes: IncomingTxFeeType[], + addresses: Address[], + tokenLike: TokenLike + ): Promise; + abstract getTokenPrice(tokenLike: TokenLike): Promise; + abstract getEthTxForWithdrawal(withdrawal_hash: string): Promise; + + async updateTokenSet(): Promise { + const updatedTokenSet = new TokenSet(await this.getTokens()); + this.tokenSet = updatedTokenSet; + } + async disconnect() {} +} diff --git a/sdk/zksync.js/src/provider.ts b/sdk/zksync.js/src/provider.ts index aacc72dce6..6a90056053 100644 --- a/sdk/zksync.js/src/provider.ts +++ b/sdk/zksync.js/src/provider.ts @@ -3,10 +3,9 @@ import { BigNumber, ethers } from 'ethers'; import { AccountState, Address, - ChangePubKeyFee, + IncomingTxFeeType, ContractAddress, Fee, - LegacyChangePubKeyFee, Network, PriorityOperationReceipt, TokenAddress, @@ -17,7 +16,7 @@ import { TxEthSignatureVariant, NFTInfo } from './types'; -import { isTokenETH, sleep, TokenSet, isNFT } from './utils'; +import { isTokenETH, sleep, TokenSet, isNFT, SYNC_GOV_CONTRACT_INTERFACE } from './utils'; import { Governance, GovernanceFactory, @@ -27,6 +26,8 @@ import { ZkSyncNFTFactoryFactory } from './typechain'; +import { SyncProvider } from './provider-interface'; + export async function getDefaultProvider(network: Network, transport: 'WS' | 'HTTP' = 'HTTP'): Promise { if (transport === 'WS') { console.warn('Websocket support will be removed in future. 
Use HTTP transport instead.'); @@ -72,14 +73,11 @@ export async function getDefaultProvider(network: Network, transport: 'WS' | 'HT } } -export class Provider { - contractAddress: ContractAddress; - public tokenSet: TokenSet; - - // For HTTP provider - public pollIntervalMilliSecs = 500; - - private constructor(public transport: AbstractJSONRPCTransport) {} +export class Provider extends SyncProvider { + private constructor(public transport: AbstractJSONRPCTransport) { + super(); + this.providerType = 'RPC'; + } /** * @deprecated Websocket support will be removed in future. Use HTTP transport instead. @@ -154,11 +152,6 @@ export class Provider { return await this.transport.request('tokens', null); } - async updateTokenSet(): Promise { - const updatedTokenSet = new TokenSet(await this.getTokens()); - this.tokenSet = updatedTokenSet; - } - async getState(address: Address): Promise { return await this.transport.request('account_info', [address]); } @@ -256,20 +249,7 @@ export class Provider { } } - async getTransactionFee( - txType: - | 'Withdraw' - | 'Transfer' - | 'FastWithdraw' - | 'MintNFT' - | 'Swap' - | ChangePubKeyFee - | 'WithdrawNFT' - | 'FastWithdrawNFT' - | LegacyChangePubKeyFee, - address: Address, - tokenLike: TokenLike - ): Promise { + async getTransactionFee(txType: IncomingTxFeeType, address: Address, tokenLike: TokenLike): Promise { const transactionFee = await this.transport.request('get_tx_fee', [txType, address.toString(), tokenLike]); return { feeType: transactionFee.feeType, @@ -282,17 +262,7 @@ export class Provider { } async getTransactionsBatchFee( - txTypes: ( - | 'Withdraw' - | 'Transfer' - | 'FastWithdraw' - | 'MintNFT' - | 'WithdrawNFT' - | 'FastWithdrawNFT' - | ChangePubKeyFee - | LegacyChangePubKeyFee - | 'Swap' - )[], + txTypes: IncomingTxFeeType[], addresses: Address[], tokenLike: TokenLike ): Promise { diff --git a/sdk/zksync.js/src/rest-provider.ts b/sdk/zksync.js/src/rest-provider.ts new file mode 100644 index 0000000000..ea35b02721 
--- /dev/null +++ b/sdk/zksync.js/src/rest-provider.ts @@ -0,0 +1,613 @@ +import Axios from 'axios'; +import { BigNumber } from 'ethers'; +import { SyncProvider } from './provider-interface'; +import * as types from './types'; +import { sleep, TokenSet } from './utils'; + +export async function getDefaultRestProvider(network: types.Network): Promise { + if (network === 'localhost') { + return await RestProvider.newProvider('http://127.0.0.1:3001/api/v0.2'); + } else if (network === 'ropsten') { + return await RestProvider.newProvider('https://ropsten-api.zksync.io/api/v0.2'); + } else if (network === 'rinkeby') { + return await RestProvider.newProvider('https://rinkeby-api.zksync.io/api/v0.2'); + } else if (network === 'ropsten-beta') { + return await RestProvider.newProvider('https://ropsten-beta-api.zksync.io/api/v0.2'); + } else if (network === 'rinkeby-beta') { + return await RestProvider.newProvider('https://rinkeby-beta-api.zksync.io/api/v0.2'); + } else if (network === 'mainnet') { + return await RestProvider.newProvider('https://api.zksync.io/api/v0.2'); + } else { + throw new Error(`Ethereum network ${network} is not supported`); + } +} + +export interface Request { + network: types.Network; + apiVersion: 'v02'; + resource: string; + args: any; + timestamp: string; +} + +export interface Error { + errorType: string; + code: number; + message: string; +} + +export interface Response { + request: Request; + status: 'success' | 'error'; + error?: Error; + result?: T; +} + +export class RESTError extends Error { + constructor(message: string, public restError: Error) { + super(message); + } +} + +export class RestProvider extends SyncProvider { + public static readonly MAX_LIMIT = 100; + + private constructor(public address: string) { + super(); + this.providerType = 'Rest'; + } + + static async newProvider( + address: string = 'http://127.0.0.1:3001', + pollIntervalMilliSecs?: number + ): Promise { + const provider = new RestProvider(address); + if 
(pollIntervalMilliSecs) { + provider.pollIntervalMilliSecs = pollIntervalMilliSecs; + } + provider.contractAddress = await provider.getContractAddress(); + provider.tokenSet = new TokenSet(await provider.getTokens()); + return provider; + } + + parseResponse(response: Response): T { + if (response.status === 'success') { + return response.result; + } else { + throw new RESTError( + `zkSync API response error: errorType: ${response.error.errorType};` + + ` code ${response.error.code}; message: ${response.error.message}`, + response.error + ); + } + } + + async get(url: string): Promise> { + return await Axios.get(url).then((resp) => { + return resp.data; + }); + } + + async post(url: string, body: any): Promise> { + return await Axios.post(url, body).then((resp) => { + return resp.data; + }); + } + + async accountInfoDetailed( + idOrAddress: number | types.Address, + infoType: 'committed' | 'finalized' + ): Promise> { + return await this.get(`${this.address}/accounts/${idOrAddress}/${infoType}`); + } + + async accountInfo( + idOrAddress: number | types.Address, + infoType: 'committed' | 'finalized' + ): Promise { + return this.parseResponse(await this.accountInfoDetailed(idOrAddress, infoType)); + } + + async accountFullInfoDetailed(idOrAddress: number | types.Address): Promise> { + return await this.get(`${this.address}/accounts/${idOrAddress}`); + } + + async accountFullInfo(idOrAddress: number | types.Address): Promise { + return this.parseResponse(await this.accountFullInfoDetailed(idOrAddress)); + } + + async accountTxsDetailed( + idOrAddress: number | types.Address, + paginationQuery: types.PaginationQuery + ): Promise>> { + return await this.get( + `${this.address}/accounts/${idOrAddress}/transactions?from=${paginationQuery.from}` + + `&limit=${paginationQuery.limit}&direction=${paginationQuery.direction}` + ); + } + + async accountTxs( + idOrAddress: number | types.Address, + paginationQuery: types.PaginationQuery + ): Promise> { + return 
this.parseResponse(await this.accountTxsDetailed(idOrAddress, paginationQuery)); + } + + async accountPendingTxsDetailed( + idOrAddress: number | types.Address, + paginationQuery: types.PaginationQuery + ): Promise>> { + return await this.get( + `${this.address}/accounts/${idOrAddress}/transactions/pending?from=${paginationQuery.from}` + + `&limit=${paginationQuery.limit}&direction=${paginationQuery.direction}` + ); + } + + async accountPendingTxs( + idOrAddress: number | types.Address, + paginationQuery: types.PaginationQuery + ): Promise> { + return this.parseResponse(await this.accountPendingTxsDetailed(idOrAddress, paginationQuery)); + } + + async blockPaginationDetailed( + paginationQuery: types.PaginationQuery + ): Promise>> { + return await this.get( + `${this.address}/blocks?from=${paginationQuery.from}&limit=${paginationQuery.limit}` + + `&direction=${paginationQuery.direction}` + ); + } + + async blockPagination( + paginationQuery: types.PaginationQuery + ): Promise> { + return this.parseResponse(await this.blockPaginationDetailed(paginationQuery)); + } + + async blockByPositionDetailed(blockPosition: types.BlockPosition): Promise> { + return await this.get(`${this.address}/blocks/${blockPosition}`); + } + + async blockByPosition(blockPosition: types.BlockPosition): Promise { + return this.parseResponse(await this.blockByPositionDetailed(blockPosition)); + } + + async blockTransactionsDetailed( + blockPosition: types.BlockPosition, + paginationQuery: types.PaginationQuery + ): Promise>> { + return await this.get( + `${this.address}/blocks/${blockPosition}/transactions?from=${paginationQuery.from}` + + `&limit=${paginationQuery.limit}&direction=${paginationQuery.direction}` + ); + } + + async blockTransactions( + blockPosition: types.BlockPosition, + paginationQuery: types.PaginationQuery + ): Promise> { + return this.parseResponse(await this.blockTransactionsDetailed(blockPosition, paginationQuery)); + } + + async configDetailed(): Promise> { + return 
await this.get(`${this.address}/config`); + } + + async config(): Promise { + return this.parseResponse(await this.configDetailed()); + } + + async getTransactionFeeDetailed( + txType: types.IncomingTxFeeType, + address: types.Address, + tokenLike: types.TokenLike + ): Promise> { + const rawFee = await this.post<{ gasFee: string; zkpFee: string; totalFee: string }>(`${this.address}/fee`, { + txType, + address, + tokenLike + }); + let fee: Response; + if (rawFee.status === 'success') { + fee = { + request: rawFee.request, + status: rawFee.status, + error: null, + result: { + gasFee: BigNumber.from(rawFee.result.gasFee), + zkpFee: BigNumber.from(rawFee.result.zkpFee), + totalFee: BigNumber.from(rawFee.result.totalFee) + } + }; + } else { + fee = { + request: rawFee.request, + status: rawFee.status, + error: rawFee.error, + result: null + }; + } + return fee; + } + + async getTransactionFee( + txType: types.IncomingTxFeeType, + address: types.Address, + tokenLike: types.TokenLike + ): Promise { + return this.parseResponse(await this.getTransactionFeeDetailed(txType, address, tokenLike)); + } + + async getBatchFullFeeDetailed( + transactions: { + txType: types.IncomingTxFeeType; + address: types.Address; + }[], + tokenLike: types.TokenLike + ): Promise> { + const rawFee = await this.post<{ gasFee: string; zkpFee: string; totalFee: string }>( + `${this.address}/fee/batch`, + { transactions, tokenLike } + ); + let fee: Response; + if (rawFee.status === 'success') { + fee = { + request: rawFee.request, + status: rawFee.status, + error: null, + result: { + gasFee: BigNumber.from(rawFee.result.gasFee), + zkpFee: BigNumber.from(rawFee.result.zkpFee), + totalFee: BigNumber.from(rawFee.result.totalFee) + } + }; + } else { + fee = { + request: rawFee.request, + status: rawFee.status, + error: rawFee.error, + result: null + }; + } + return fee; + } + + async getBatchFullFee( + transactions: { + txType: types.IncomingTxFeeType; + address: types.Address; + }[], + tokenLike: 
types.TokenLike + ): Promise { + return this.parseResponse(await this.getBatchFullFeeDetailed(transactions, tokenLike)); + } + + async networkStatusDetailed(): Promise> { + return await this.get(`${this.address}/networkStatus`); + } + + async networkStatus(): Promise { + return this.parseResponse(await this.networkStatusDetailed()); + } + + async tokenPaginationDetailed( + paginationQuery: types.PaginationQuery + ): Promise>> { + return await this.get( + `${this.address}/tokens?from=${paginationQuery.from}&limit=${paginationQuery.limit}` + + `&direction=${paginationQuery.direction}` + ); + } + + async tokenPagination( + paginationQuery: types.PaginationQuery + ): Promise> { + return this.parseResponse(await this.tokenPaginationDetailed(paginationQuery)); + } + + async tokenByIdOrAddressDetailed(idOrAddress: number | types.TokenAddress): Promise> { + return await this.get(`${this.address}/tokens/${idOrAddress}`); + } + + async tokenByIdOrAddress(idOrAddress: number | types.TokenAddress): Promise { + return this.parseResponse(await this.tokenByIdOrAddressDetailed(idOrAddress)); + } + + async tokenPriceInfoDetailed( + idOrAddress: number | types.TokenAddress, + tokenIdOrUsd: number | 'usd' + ): Promise> { + return await this.get(`${this.address}/tokens/${idOrAddress}/priceIn/${tokenIdOrUsd}`); + } + + async tokenPriceInfo( + idOrAddress: number | types.TokenAddress, + tokenIdOrUsd: number | 'usd' + ): Promise { + return this.parseResponse(await this.tokenPriceInfoDetailed(idOrAddress, tokenIdOrUsd)); + } + + async submitTxNewDetailed(tx: types.L2Tx, signature?: types.TxEthSignature): Promise> { + return await this.post(`${this.address}/transactions`, { tx, signature }); + } + + async submitTxNew(tx: types.L2Tx, signature?: types.TxEthSignature): Promise { + return this.parseResponse(await this.submitTxNewDetailed(tx, signature)); + } + + /** + * @deprecated Use submitTxNew method instead + */ + async submitTx(tx: any, signature?: types.TxEthSignature, fastProcessing?: 
boolean): Promise { + if (fastProcessing) { + tx.fastProcessing = fastProcessing; + } + let txHash = await this.submitTxNew(tx, signature); + txHash.replace('0x', 'sync-tx:'); + return txHash; + } + + async txStatusDetailed(txHash: string): Promise> { + return await this.get(`${this.address}/transactions/${txHash}`); + } + + async txStatus(txHash: string): Promise { + return this.parseResponse(await this.txStatusDetailed(txHash)); + } + + async txDataDetailed(txHash: string): Promise> { + return await this.get(`${this.address}/transactions/${txHash}/data`); + } + + async txData(txHash: string): Promise { + return this.parseResponse(await this.txDataDetailed(txHash)); + } + + async submitTxsBatchNewDetailed( + txs: types.L2Tx[], + signature: types.TxEthSignature | types.TxEthSignature[] + ): Promise> { + return await this.post(`${this.address}/transactions/batches`, { txs, signature }); + } + + async submitTxsBatchNew( + txs: types.L2Tx[], + signature: types.TxEthSignature | types.TxEthSignature[] + ): Promise { + return this.parseResponse(await this.submitTxsBatchNewDetailed(txs, signature)); + } + + /** + * @deprecated Use submitTxsBatchNew method instead. 
+ */ + async submitTxsBatch( + transactions: { tx: any; signature?: types.TxEthSignature }[], + ethSignatures?: types.TxEthSignature | types.TxEthSignature[] + ): Promise { + let txs = []; + for (const signedTx of transactions) { + txs.push(signedTx.tx); + } + if (!ethSignatures) { + throw new Error('Batch signature should be provided in API v0.2'); + } + return (await this.submitTxsBatchNew(txs, ethSignatures)).transactionHashes; + } + + async getBatchDetailed(batchHash: string): Promise> { + return await this.get(`${this.address}/transactions/batches/${batchHash}`); + } + + async getBatch(batchHash: string): Promise { + return this.parseResponse(await this.getBatchDetailed(batchHash)); + } + + async notifyAnyTransaction(hash: string, action: 'COMMIT' | 'VERIFY'): Promise { + while (true) { + let transactionStatus = await this.txStatus(hash); + let notifyDone; + if (action === 'COMMIT') { + notifyDone = transactionStatus && transactionStatus.rollupBlock; + } else { + if (transactionStatus && transactionStatus.rollupBlock) { + if (transactionStatus.status === 'rejected') { + // If the transaction status is rejected + // it cannot be known if transaction is queued, committed or finalized. + // That is why there is separate `blockByPosition` query. + const blockStatus = await this.blockByPosition(transactionStatus.rollupBlock); + notifyDone = blockStatus && blockStatus.status === 'finalized'; + } else { + notifyDone = transactionStatus.status === 'finalized'; + } + } + } + if (notifyDone) { + // Transaction status needs to be recalculated because it can + // be updated between `txStatus` and `blockByPosition` calls. 
+ return await this.txStatus(hash); + } else { + await sleep(this.pollIntervalMilliSecs); + } + } + } + + async notifyTransaction(hash: string, action: 'COMMIT' | 'VERIFY'): Promise { + await this.notifyAnyTransaction(hash, action); + return await this.getTxReceipt(hash); + } + + async notifyPriorityOp(hash: string, action: 'COMMIT' | 'VERIFY'): Promise { + await this.notifyAnyTransaction(hash, action); + return await this.getPriorityOpStatus(hash); + } + + async getContractAddress(): Promise { + const config = await this.config(); + return { + mainContract: config.contract, + govContract: config.govContract + }; + } + + async getTokens(limit?: number): Promise { + let tokens = {}; + let tmpId = 0; + limit = limit ? limit : RestProvider.MAX_LIMIT; + let tokenPage: types.Paginated; + do { + tokenPage = await this.tokenPagination({ + from: tmpId, + limit, + direction: 'newer' + }); + for (let token of tokenPage.list) { + tokens[token.symbol] = { + address: token.address, + id: token.id, + symbol: token.symbol, + decimals: token.decimals + }; + } + tmpId += limit; + } while (tokenPage.list.length == limit); + + return tokens; + } + + async getState(address: types.Address): Promise { + const fullInfo = await this.accountFullInfo(address); + + if (fullInfo.finalized) { + return { + address, + id: fullInfo.committed.accountId, + accountType: fullInfo.committed.accountType, + committed: { + balances: fullInfo.committed.balances, + nonce: fullInfo.committed.nonce, + pubKeyHash: fullInfo.committed.pubKeyHash + }, + verified: { + balances: fullInfo.finalized.balances, + nonce: fullInfo.finalized.nonce, + pubKeyHash: fullInfo.finalized.pubKeyHash + } + }; + } else if (fullInfo.committed) { + return { + address, + id: fullInfo.committed.accountId, + accountType: fullInfo.committed.accountType, + committed: { + balances: fullInfo.committed.balances, + nonce: fullInfo.committed.nonce, + pubKeyHash: fullInfo.committed.pubKeyHash + }, + verified: { + balances: {}, + nonce: 0, + 
pubKeyHash: 'sync:0000000000000000000000000000000000000000' + } + }; + } else { + return { + address, + committed: { + balances: {}, + nonce: 0, + pubKeyHash: 'sync:0000000000000000000000000000000000000000' + }, + verified: { + balances: {}, + nonce: 0, + pubKeyHash: 'sync:0000000000000000000000000000000000000000' + } + }; + } + } + + async getConfirmationsForEthOpAmount(): Promise { + const config = await this.config(); + return config.depositConfirmations; + } + + async getTransactionsBatchFee( + txTypes: types.IncomingTxFeeType[], + addresses: types.Address[], + tokenLike: types.TokenLike + ): Promise { + let transactions = []; + for (let i = 0; i < txTypes.length; ++i) { + transactions.push({ txType: txTypes[i], address: addresses[i] }); + } + const fee = await this.getBatchFullFee(transactions, tokenLike); + return fee.totalFee; + } + + async getTokenPrice(tokenLike: types.TokenLike): Promise { + const price = await this.tokenPriceInfo(tokenLike, 'usd'); + return price.price.toNumber(); + } + + async getTxReceipt(txHash: string): Promise { + const receipt = await this.txStatus(txHash); + if (!receipt || !receipt.rollupBlock) { + return { + executed: false + }; + } else { + if (receipt.status === 'rejected') { + const blockFullInfo = await this.blockByPosition(receipt.rollupBlock); + const blockInfo = { + blockNumber: receipt.rollupBlock, + committed: blockFullInfo ? true : false, + verified: blockFullInfo && blockFullInfo.status === 'finalized' ? 
true : false + }; + return { + executed: true, + success: false, + failReason: receipt.failReason, + block: blockInfo + }; + } else { + return { + executed: true, + success: true, + block: { + blockNumber: receipt.rollupBlock, + committed: true, + verified: receipt.status === 'finalized' + } + }; + } + } + } + + async getPriorityOpStatus(hash: string): Promise { + const receipt = await this.txStatus(hash); + if (!receipt || !receipt.rollupBlock) { + return { + executed: false + }; + } else { + return { + executed: true, + block: { + blockNumber: receipt.rollupBlock, + committed: true, + verified: receipt.status === 'finalized' + } + }; + } + } + + async getEthTxForWithdrawal(withdrawalHash: string): Promise { + const txData = await this.txData(withdrawalHash); + if (txData.tx.op.type === 'Withdraw' || txData.tx.op.type === 'ForcedExit') { + return txData.tx.op.ethTxHash; + } else { + return null; + } + } +} diff --git a/sdk/zksync.js/src/signer.ts b/sdk/zksync.js/src/signer.ts index 8f22b29b34..d2909f544a 100644 --- a/sdk/zksync.js/src/signer.ts +++ b/sdk/zksync.js/src/signer.ts @@ -315,7 +315,9 @@ export class Signer { return new Signer(await privateKeyFromSeed(seed)); } - static async fromETHSignature(ethSigner: ethers.Signer): Promise<{ + static async fromETHSignature( + ethSigner: ethers.Signer + ): Promise<{ signer: Signer; ethSignatureType: EthSignerType; }> { diff --git a/sdk/zksync.js/src/types.ts b/sdk/zksync.js/src/types.ts index 4b8f3ea089..18ae2a7524 100644 --- a/sdk/zksync.js/src/types.ts +++ b/sdk/zksync.js/src/types.ts @@ -46,9 +46,36 @@ export interface NFTInfo { withdrawnFactory?: Address; } -export interface AccountState { +export type EthAccountType = 'Owned' | 'CREATE2'; + +export type AccountState = AccountStateRest | AccountStateRpc; + +export interface AccountStateRest { address: Address; id?: number; + accountType?: EthAccountType; + committed: { + balances: { + // Token are indexed by their symbol (e.g. 
"ETH") + [token: string]: BigNumberish; + }; + nonce: number; + pubKeyHash: PubKeyHash; + }; + verified: { + balances: { + // Token are indexed by their symbol (e.g. "ETH") + [token: string]: BigNumberish; + }; + nonce: number; + pubKeyHash: PubKeyHash; + }; +} + +export interface AccountStateRpc { + address: Address; + id?: number; + accountType?: EthAccountType; depositing: { balances: { // Token are indexed by their symbol (e.g. "ETH") @@ -320,7 +347,9 @@ export interface LegacyChangePubKeyFee { }; } -export interface Fee { +export type Fee = FeeRpc | FeeRest; + +export interface FeeRpc { // Operation type (amount of chunks in operation differs and impacts the total fee). feeType: | 'Withdraw' @@ -343,7 +372,208 @@ export interface Fee { totalFee: BigNumber; } -export interface BatchFee { +export type BatchFee = BatchFeeRpc | FeeRest; + +export interface BatchFeeRpc { // Total fee amount (in wei) totalFee: BigNumber; } + +export type IncomingTxFeeType = + | 'Withdraw' + | 'Transfer' + | 'FastWithdraw' + | 'ForcedExit' + | 'MintNFT' + | 'WithdrawNFT' + | 'Swap' + | ChangePubKeyFee + | LegacyChangePubKeyFee; + +export interface PaginationQuery { + from: F | 'latest'; + limit: number; + direction: 'newer' | 'older'; +} + +export interface Paginated { + list: T[]; + pagination: { + from: F; + limit: number; + direction: 'newer' | 'older'; + count: number; + }; +} + +export interface ApiBlockInfo { + blockNumber: number; + newStateRoot: string; + blockSize: number; + commitTxHash?: string; + verifyTxHash?: string; + committedAt: string; + finalizedAt?: string; + status: 'committed' | 'finalized'; +} + +export type BlockPosition = number | 'lastCommitted' | 'lastFinalized'; + +export interface ApiAccountInfo { + accountId: number; + address: Address; + nonce: number; + pubKeyHash: PubKeyHash; + lastUpdateInBlock: number; + balances: { + [token: string]: BigNumber; + }; + accountType?: EthAccountType; +} + +export interface ApiAccountFullInfo { + committed: 
ApiAccountInfo; + finalized: ApiAccountInfo; +} + +export interface ApiConfig { + network: Network; + contract: Address; + govContract: Address; + depositConfirmations: number; + zksyncVersion: 'contractV4'; + // TODO: server_version (ZKS-627) +} + +export interface FeeRest { + gasFee: BigNumber; + zkpFee: BigNumber; + totalFee: BigNumber; +} + +export interface NetworkStatus { + lastCommitted: number; + finalized: number; + totalTransactions: number; + mempoolSize: number; +} + +export interface TokenInfo { + id: number; + address: Address; + symbol: string; + decimals: number; + enabledForFees: boolean; +} + +export interface TokenPriceInfo { + tokenId: number; + tokenSymbol: string; + priceIn: string; + decimals: number; + price: BigNumber; +} + +export interface SubmitBatchResponse { + transactionHashes: string[]; + batchHash: string; +} + +export interface ApiL1TxReceipt { + status: 'queued' | 'committed' | 'finalized'; + ethBlock: number; + rollupBlock?: number; + id: number; +} + +export type L2TxStatus = 'queued' | 'committed' | 'finalized' | 'rejected'; + +export interface ApiL2TxReceipt { + txHash: string; + rollupBlock?: number; + status: L2TxStatus; + failReason?: string; +} + +export type ApiTxReceipt = ApiL1TxReceipt | ApiL2TxReceipt; + +export interface WithdrawAndEthHash { + type: 'Withdraw'; + accountId: number; + from: Address; + to: Address; + token: number; + amount: BigNumberish; + fee: BigNumberish; + nonce: number; + signature?: Signature; + validFrom: number; + validUntil: number; + ethTxHash?: string; +} + +export interface ForcedExitAndEthHash { + type: 'ForcedExit'; + initiatorAccountId: number; + target: Address; + token: number; + fee: BigNumberish; + nonce: number; + signature?: Signature; + validFrom: number; + validUntil: number; + ethTxHash?: string; +} + +export interface ApiDeposit { + type: 'Deposit'; + from: Address; + tokenId: number; + amount: BigNumber; + to: Address; + accountId?: number; + ethHash: string; + id: number; + 
txHash: string; +} + +export interface ApiFullExit { + type: 'FullExit'; + accountId: number; + tokenId: number; + ethHash: string; + id: number; + txHash: string; +} + +export type L2Tx = Transfer | Withdraw | ChangePubKey | ForcedExit | CloseAccount; + +export type L2TxData = Transfer | WithdrawAndEthHash | ChangePubKey | ForcedExitAndEthHash | CloseAccount; + +export type TransactionData = L2TxData | ApiDeposit | ApiFullExit; + +export interface ApiTransaction { + txHash: string; + blockNumber?: number; + op: TransactionData; + status: L2TxStatus; + failReason?: string; + createdAt?: string; +} + +export interface ApiSignedTx { + tx: ApiTransaction; + ethSignature?: string; +} + +export interface ApiBatchStatus { + updatedAt: string; + lastState: L2TxStatus; +} + +export interface ApiBatchData { + batchHash: string; + transactionHashes: string[]; + createdAt: string; + batchStatus: ApiBatchStatus; +} diff --git a/sdk/zksync.js/src/utils.ts b/sdk/zksync.js/src/utils.ts index 2751f58fd1..7327fe392e 100644 --- a/sdk/zksync.js/src/utils.ts +++ b/sdk/zksync.js/src/utils.ts @@ -1,5 +1,5 @@ import { utils, constants, ethers, BigNumber, BigNumberish, Contract } from 'ethers'; -import { Provider } from '.'; +import { SyncProvider } from './provider-interface'; import { PubKeyHash, TokenAddress, @@ -880,7 +880,7 @@ export function getCREATE2AddressAndSalt( export async function getEthereumBalance( ethProvider: ethers.providers.Provider, - syncProvider: Provider, + syncProvider: SyncProvider, address: Address, token: TokenLike ): Promise { @@ -901,7 +901,7 @@ export async function getEthereumBalance( export async function getPendingBalance( ethProvider: ethers.providers.Provider, - syncProvider: Provider, + syncProvider: SyncProvider, address: Address, token: TokenLike ): Promise { diff --git a/sdk/zksync.js/src/wallet.ts b/sdk/zksync.js/src/wallet.ts index 7f3b48882e..a9239cb9f2 100644 --- a/sdk/zksync.js/src/wallet.ts +++ b/sdk/zksync.js/src/wallet.ts @@ -1,7 +1,7 @@ 
import { BigNumber, BigNumberish, Contract, ContractTransaction, ethers } from 'ethers'; import { ErrorCode } from '@ethersproject/logger'; import { EthMessageSigner } from './eth-message-signer'; -import { Provider } from './provider'; +import { SyncProvider } from './provider-interface'; import { Create2WalletSigner, Signer } from './signer'; import { BatchBuilder } from './batch-builder'; import { @@ -60,7 +60,7 @@ export class ZKSyncTxError extends Error { } export class Wallet { - public provider: Provider; + public provider: SyncProvider; private constructor( public ethSigner: ethers.Signer, @@ -71,14 +71,14 @@ export class Wallet { public ethSignerType?: EthSignerType ) {} - connect(provider: Provider) { + connect(provider: SyncProvider) { this.provider = provider; return this; } static async fromEthSigner( ethWallet: ethers.Signer, - provider: Provider, + provider: SyncProvider, signer?: Signer, accountId?: number, ethSignerType?: EthSignerType @@ -107,7 +107,7 @@ export class Wallet { static async fromCreate2Data( syncSigner: Signer, - provider: Provider, + provider: SyncProvider, create2Data: Create2Data, accountId?: number ): Promise { @@ -120,7 +120,7 @@ export class Wallet { static async fromEthSignerNoKeys( ethWallet: ethers.Signer, - provider: Provider, + provider: SyncProvider, accountId?: number, ethSignerType?: EthSignerType ): Promise { @@ -225,7 +225,9 @@ export class Wallet { }; } - async signRegisterFactory(factoryAddress: Address): Promise<{ + async signRegisterFactory( + factoryAddress: Address + ): Promise<{ signature: TxEthSignature; accountId: number; accountAddress: Address; @@ -311,8 +313,7 @@ export class Wallet { }): Promise { forcedExit.nonce = forcedExit.nonce != null ? await this.getNonce(forcedExit.nonce) : await this.getNonce(); if (forcedExit.fee == null) { - // Fee for forced exit is defined by `Withdraw` transaction type (as it's essentially just a forced withdraw). 
- const fullFee = await this.provider.getTransactionFee('Withdraw', forcedExit.target, forcedExit.token); + const fullFee = await this.provider.getTransactionFee('ForcedExit', forcedExit.target, forcedExit.token); forcedExit.fee = fullFee.totalFee; } @@ -1451,7 +1452,7 @@ export class ETHOperation { error?: ZKSyncTxError; priorityOpId?: BigNumber; - constructor(public ethTx: ContractTransaction, public zkSyncProvider: Provider) { + constructor(public ethTx: ContractTransaction, public zkSyncProvider: SyncProvider) { this.state = 'Sent'; } @@ -1480,7 +1481,14 @@ export class ETHOperation { await this.awaitEthereumTxCommit(); if (this.state !== 'Mined') return; - const receipt = await this.zkSyncProvider.notifyPriorityOp(this.priorityOpId.toNumber(), 'COMMIT'); + + let query: number | string; + if (this.zkSyncProvider.providerType === 'RPC') { + query = this.priorityOpId.toNumber(); + } else { + query = this.ethTx.hash; + } + const receipt = await this.zkSyncProvider.notifyPriorityOp(query, 'COMMIT'); if (!receipt.executed) { this.setErrorState(new ZKSyncTxError('Priority operation failed', receipt)); @@ -1495,7 +1503,13 @@ export class ETHOperation { await this.awaitReceipt(); if (this.state !== 'Committed') return; - const receipt = await this.zkSyncProvider.notifyPriorityOp(this.priorityOpId.toNumber(), 'VERIFY'); + let query: number | string; + if (this.zkSyncProvider.providerType === 'RPC') { + query = this.priorityOpId.toNumber(); + } else { + query = this.ethTx.hash; + } + const receipt = await this.zkSyncProvider.notifyPriorityOp(query, 'VERIFY'); this.state = 'Verified'; @@ -1516,7 +1530,7 @@ export class Transaction { state: 'Sent' | 'Committed' | 'Verified' | 'Failed'; error?: ZKSyncTxError; - constructor(public txData, public txHash: string, public sidechainProvider: Provider) { + constructor(public txData, public txHash: string, public sidechainProvider: SyncProvider) { this.state = 'Sent'; } @@ -1556,7 +1570,7 @@ export class Transaction { export async 
function submitSignedTransaction( signedTx: SignedTransaction, - provider: Provider, + provider: SyncProvider, fastProcessing?: boolean ): Promise { const transactionHash = await provider.submitTx(signedTx.tx, signedTx.ethereumSignature, fastProcessing); @@ -1564,7 +1578,7 @@ export async function submitSignedTransaction( } export async function submitSignedTransactionsBatch( - provider: Provider, + provider: SyncProvider, signedTxs: SignedTransaction[], ethSignatures?: TxEthSignature[] ): Promise { diff --git a/yarn.lock b/yarn.lock index 534397d387..aff6599e72 100644 --- a/yarn.lock +++ b/yarn.lock @@ -16,38 +16,38 @@ dependencies: "@babel/highlight" "^7.12.13" -"@babel/compat-data@^7.13.11", "@babel/compat-data@^7.13.15", "@babel/compat-data@^7.14.0": - version "7.14.0" - resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.14.0.tgz#a901128bce2ad02565df95e6ecbf195cf9465919" - integrity sha512-vu9V3uMM/1o5Hl5OekMUowo3FqXLJSw+s+66nt0fSWVWTtmosdzn45JHOB3cPtZoe6CTBDzvSw0RdOY85Q37+Q== +"@babel/compat-data@^7.12.13": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.12.13.tgz#27e19e0ed3726ccf54067ced4109501765e7e2e8" + integrity sha512-U/hshG5R+SIoW7HVWIdmy1cB7s3ki+r3FpyEZiCgpi4tFgPnX/vynY80ZGSASOIrUM6O7VxOgCZgdt7h97bUGg== "@babel/core@^7.11.0": - version "7.14.3" - resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.14.3.tgz#5395e30405f0776067fbd9cf0884f15bfb770a38" - integrity sha512-jB5AmTKOCSJIZ72sd78ECEhuPiDMKlQdDI/4QRI6lzYATx5SSogS1oQA2AoPecRCknm30gHi2l+QVvNUu3wZAg== + version "7.12.16" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.12.16.tgz#8c6ba456b23b680a6493ddcfcd9d3c3ad51cab7c" + integrity sha512-t/hHIB504wWceOeaOoONOhu+gX+hpjfeN6YRBT209X/4sibZQfSF1I0HFRRlBe97UZZosGx5XwUg1ZgNbelmNw== dependencies: "@babel/code-frame" "^7.12.13" - "@babel/generator" "^7.14.3" - "@babel/helper-compilation-targets" "^7.13.16" - "@babel/helper-module-transforms" "^7.14.2" - 
"@babel/helpers" "^7.14.0" - "@babel/parser" "^7.14.3" + "@babel/generator" "^7.12.15" + "@babel/helper-module-transforms" "^7.12.13" + "@babel/helpers" "^7.12.13" + "@babel/parser" "^7.12.16" "@babel/template" "^7.12.13" - "@babel/traverse" "^7.14.2" - "@babel/types" "^7.14.2" + "@babel/traverse" "^7.12.13" + "@babel/types" "^7.12.13" convert-source-map "^1.7.0" debug "^4.1.0" - gensync "^1.0.0-beta.2" + gensync "^1.0.0-beta.1" json5 "^2.1.2" - semver "^6.3.0" + lodash "^4.17.19" + semver "^5.4.1" source-map "^0.5.0" -"@babel/generator@^7.14.2", "@babel/generator@^7.14.3": - version "7.14.3" - resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.14.3.tgz#0c2652d91f7bddab7cccc6ba8157e4f40dcedb91" - integrity sha512-bn0S6flG/j0xtQdz3hsjJ624h3W0r3llttBMfyHX3YrZ/KtLYr15bjA0FXkgW7FpvrDuTuElXeVjiKlYRpnOFA== +"@babel/generator@^7.12.13", "@babel/generator@^7.12.15": + version "7.12.15" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.12.15.tgz#4617b5d0b25cc572474cc1aafee1edeaf9b5368f" + integrity sha512-6F2xHxBiFXWNSGb7vyCUTBF8RCLY66rS0zEPcP8t/nQyXjha5EuK4z7H5o7fWG8B4M7y6mqVWq1J+1PuwRhecQ== dependencies: - "@babel/types" "^7.14.2" + "@babel/types" "^7.12.13" jsesc "^2.5.1" source-map "^0.5.0" @@ -66,65 +66,50 @@ "@babel/helper-explode-assignable-expression" "^7.12.13" "@babel/types" "^7.12.13" -"@babel/helper-compilation-targets@^7.13.0", "@babel/helper-compilation-targets@^7.13.16", "@babel/helper-compilation-targets@^7.9.6": - version "7.13.16" - resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.13.16.tgz#6e91dccf15e3f43e5556dffe32d860109887563c" - integrity sha512-3gmkYIrpqsLlieFwjkGgLaSHmhnvlAYzZLlYVjlW+QwI+1zE17kGxuJGmIqDQdYp56XdmGeD+Bswx0UTyG18xA== +"@babel/helper-compilation-targets@^7.12.16", "@babel/helper-compilation-targets@^7.9.6": + version "7.12.16" + resolved 
"https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.12.16.tgz#6905238b4a5e02ba2d032c1a49dd1820fe8ce61b" + integrity sha512-dBHNEEaZx7F3KoUYqagIhRIeqyyuI65xMndMZ3WwGwEBI609I4TleYQHcrS627vbKyNTXqShoN+fvYD9HuQxAg== dependencies: - "@babel/compat-data" "^7.13.15" - "@babel/helper-validator-option" "^7.12.17" + "@babel/compat-data" "^7.12.13" + "@babel/helper-validator-option" "^7.12.16" browserslist "^4.14.5" - semver "^6.3.0" + semver "^5.5.0" -"@babel/helper-create-class-features-plugin@^7.13.0", "@babel/helper-create-class-features-plugin@^7.14.0", "@babel/helper-create-class-features-plugin@^7.14.2", "@babel/helper-create-class-features-plugin@^7.14.3": - version "7.14.3" - resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.14.3.tgz#832111bcf4f57ca57a4c5b1a000fc125abc6554a" - integrity sha512-BnEfi5+6J2Lte9LeiL6TxLWdIlEv9Woacc1qXzXBgbikcOzMRM2Oya5XGg/f/ngotv1ej2A/b+3iJH8wbS1+lQ== +"@babel/helper-create-class-features-plugin@^7.12.13": + version "7.12.16" + resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.12.16.tgz#955d5099fd093e5afb05542190f8022105082c61" + integrity sha512-KbSEj8l9zYkMVHpQqM3wJNxS1d9h3U9vm/uE5tpjMbaj3lTp+0noe3KPsV5dSD9jxKnf9jO9Ip9FX5PKNZCKow== dependencies: - "@babel/helper-annotate-as-pure" "^7.12.13" - "@babel/helper-function-name" "^7.14.2" - "@babel/helper-member-expression-to-functions" "^7.13.12" + "@babel/helper-function-name" "^7.12.13" + "@babel/helper-member-expression-to-functions" "^7.12.16" "@babel/helper-optimise-call-expression" "^7.12.13" - "@babel/helper-replace-supers" "^7.14.3" + "@babel/helper-replace-supers" "^7.12.13" "@babel/helper-split-export-declaration" "^7.12.13" "@babel/helper-create-regexp-features-plugin@^7.12.13": - version "7.14.3" - resolved 
"https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.14.3.tgz#149aa6d78c016e318c43e2409a0ae9c136a86688" - integrity sha512-JIB2+XJrb7v3zceV2XzDhGIB902CmKGSpSl4q2C6agU9SNLG/2V1RtFRGPG1Ajh9STj3+q6zJMOC+N/pp2P9DA== + version "7.12.16" + resolved "https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.12.16.tgz#3b31d13f39f930fad975e151163b7df7d4ffe9d3" + integrity sha512-jAcQ1biDYZBdaAxB4yg46/XirgX7jBDiMHDbwYQOgtViLBXGxJpZQ24jutmBqAIB/q+AwB6j+NbBXjKxEY8vqg== dependencies: "@babel/helper-annotate-as-pure" "^7.12.13" regexpu-core "^4.7.1" -"@babel/helper-define-polyfill-provider@^0.2.0": - version "0.2.0" - resolved "https://registry.yarnpkg.com/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.2.0.tgz#a640051772045fedaaecc6f0c6c69f02bdd34bf1" - integrity sha512-JT8tHuFjKBo8NnaUbblz7mIu1nnvUDiHVjXXkulZULyidvo/7P6TY7+YqpV37IfF+KUFxmlK04elKtGKXaiVgw== - dependencies: - "@babel/helper-compilation-targets" "^7.13.0" - "@babel/helper-module-imports" "^7.12.13" - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/traverse" "^7.13.0" - debug "^4.1.1" - lodash.debounce "^4.0.8" - resolve "^1.14.2" - semver "^6.1.2" - "@babel/helper-explode-assignable-expression@^7.12.13": - version "7.13.0" - resolved "https://registry.yarnpkg.com/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.13.0.tgz#17b5c59ff473d9f956f40ef570cf3a76ca12657f" - integrity sha512-qS0peLTDP8kOisG1blKbaoBg/o9OSa1qoumMjTK5pM+KDTtpxpsiubnCGP34vK8BXGcb2M9eigwgvoJryrzwWA== + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.12.13.tgz#0e46990da9e271502f77507efa4c9918d3d8634a" + integrity sha512-5loeRNvMo9mx1dA/d6yNi+YiKziJZFylZnCo1nmFF4qPU4yJ14abhWESuSMQSlQxWdxdOFzxXjk/PpfudTtYyw== dependencies: - "@babel/types" "^7.13.0" + "@babel/types" "^7.12.13" 
-"@babel/helper-function-name@^7.12.13", "@babel/helper-function-name@^7.14.2": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.14.2.tgz#397688b590760b6ef7725b5f0860c82427ebaac2" - integrity sha512-NYZlkZRydxw+YT56IlhIcS8PAhb+FEUiOzuhFTfqDyPmzAhRge6ua0dQYT/Uh0t/EDHq05/i+e5M2d4XvjgarQ== +"@babel/helper-function-name@^7.12.13": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.12.13.tgz#93ad656db3c3c2232559fd7b2c3dbdcbe0eb377a" + integrity sha512-TZvmPn0UOqmvi5G4vvw0qZTpVptGkB1GL61R6lKvrSdIxGm5Pky7Q3fpKiIkQCAtRCBUwB0PaThlx9vebCDSwA== dependencies: "@babel/helper-get-function-arity" "^7.12.13" "@babel/template" "^7.12.13" - "@babel/types" "^7.14.2" + "@babel/types" "^7.12.13" "@babel/helper-get-function-arity@^7.12.13": version "7.12.13" @@ -133,41 +118,41 @@ dependencies: "@babel/types" "^7.12.13" -"@babel/helper-hoist-variables@^7.13.0": - version "7.13.16" - resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.13.16.tgz#1b1651249e94b51f8f0d33439843e33e39775b30" - integrity sha512-1eMtTrXtrwscjcAeO4BVK+vvkxaLJSPFz1w1KLawz6HLNi9bPFGBNwwDyVfiu1Tv/vRRFYfoGaKhmAQPGPn5Wg== +"@babel/helper-hoist-variables@^7.12.13": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.12.13.tgz#13aba58b7480b502362316ea02f52cca0e9796cd" + integrity sha512-KSC5XSj5HreRhYQtZ3cnSnQwDzgnbdUDEFsxkN0m6Q3WrCRt72xrnZ8+h+pX7YxM7hr87zIO3a/v5p/H3TrnVw== dependencies: - "@babel/traverse" "^7.13.15" - "@babel/types" "^7.13.16" + "@babel/types" "^7.12.13" -"@babel/helper-member-expression-to-functions@^7.13.12": - version "7.13.12" - resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.13.12.tgz#dfe368f26d426a07299d8d6513821768216e6d72" - integrity 
sha512-48ql1CLL59aKbU94Y88Xgb2VFy7a95ykGRbJJaaVv+LX5U8wFpLfiGXJJGUozsmA1oEh/o5Bp60Voq7ACyA/Sw== +"@babel/helper-member-expression-to-functions@^7.12.13", "@babel/helper-member-expression-to-functions@^7.12.16": + version "7.12.16" + resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.12.16.tgz#41e0916b99f8d5f43da4f05d85f4930fa3d62b22" + integrity sha512-zYoZC1uvebBFmj1wFAlXwt35JLEgecefATtKp20xalwEK8vHAixLBXTGxNrVGEmTT+gzOThUgr8UEdgtalc1BQ== dependencies: - "@babel/types" "^7.13.12" + "@babel/types" "^7.12.13" -"@babel/helper-module-imports@^7.0.0", "@babel/helper-module-imports@^7.12.13", "@babel/helper-module-imports@^7.13.12", "@babel/helper-module-imports@^7.8.3": - version "7.13.12" - resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.13.12.tgz#c6a369a6f3621cb25da014078684da9196b61977" - integrity sha512-4cVvR2/1B693IuOvSI20xqqa/+bl7lqAMR59R4iu39R9aOX8/JoYY1sFaNvUMyMBGnHdwvJgUrzNLoUZxXypxA== +"@babel/helper-module-imports@^7.0.0", "@babel/helper-module-imports@^7.12.13", "@babel/helper-module-imports@^7.8.3": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.12.13.tgz#ec67e4404f41750463e455cc3203f6a32e93fcb0" + integrity sha512-NGmfvRp9Rqxy0uHSSVP+SRIW1q31a7Ji10cLBcqSDUngGentY4FRiHOFZFE1CLU5eiL0oE8reH7Tg1y99TDM/g== dependencies: - "@babel/types" "^7.13.12" + "@babel/types" "^7.12.13" -"@babel/helper-module-transforms@^7.13.0", "@babel/helper-module-transforms@^7.14.0", "@babel/helper-module-transforms@^7.14.2": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.14.2.tgz#ac1cc30ee47b945e3e0c4db12fa0c5389509dfe5" - integrity sha512-OznJUda/soKXv0XhpvzGWDnml4Qnwp16GN+D/kZIdLsWoHj05kyu8Rm5kXmMef+rVJZ0+4pSGLkeixdqNUATDA== +"@babel/helper-module-transforms@^7.12.13": + version "7.12.13" + resolved 
"https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.12.13.tgz#01afb052dcad2044289b7b20beb3fa8bd0265bea" + integrity sha512-acKF7EjqOR67ASIlDTupwkKM1eUisNAjaSduo5Cz+793ikfnpe7p4Q7B7EWU2PCoSTPWsQkR7hRUWEIZPiVLGA== dependencies: - "@babel/helper-module-imports" "^7.13.12" - "@babel/helper-replace-supers" "^7.13.12" - "@babel/helper-simple-access" "^7.13.12" + "@babel/helper-module-imports" "^7.12.13" + "@babel/helper-replace-supers" "^7.12.13" + "@babel/helper-simple-access" "^7.12.13" "@babel/helper-split-export-declaration" "^7.12.13" - "@babel/helper-validator-identifier" "^7.14.0" + "@babel/helper-validator-identifier" "^7.12.11" "@babel/template" "^7.12.13" - "@babel/traverse" "^7.14.2" - "@babel/types" "^7.14.2" + "@babel/traverse" "^7.12.13" + "@babel/types" "^7.12.13" + lodash "^4.17.19" "@babel/helper-optimise-call-expression@^7.12.13": version "7.12.13" @@ -176,36 +161,36 @@ dependencies: "@babel/types" "^7.12.13" -"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.13.0", "@babel/helper-plugin-utils@^7.8.0", "@babel/helper-plugin-utils@^7.8.3": - version "7.13.0" - resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.13.0.tgz#806526ce125aed03373bc416a828321e3a6a33af" - integrity sha512-ZPafIPSwzUlAoWT8DKs1W2VyF2gOWthGd5NGFMsBcMMol+ZhK+EQY/e6V96poa6PA/Bh+C9plWN0hXO1uB8AfQ== +"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.8.0", "@babel/helper-plugin-utils@^7.8.3": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.12.13.tgz#174254d0f2424d8aefb4dd48057511247b0a9eeb" + integrity sha512-C+10MXCXJLiR6IeG9+Wiejt9jmtFpxUc3MQqCmPY8hfCjyUGl9kT+B2okzEZrtykiwrc4dbCPdDoz0A/HQbDaA== -"@babel/helper-remap-async-to-generator@^7.13.0": - version 
"7.13.0" - resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.13.0.tgz#376a760d9f7b4b2077a9dd05aa9c3927cadb2209" - integrity sha512-pUQpFBE9JvC9lrQbpX0TmeNIy5s7GnZjna2lhhcHC7DzgBs6fWn722Y5cfwgrtrqc7NAJwMvOa0mKhq6XaE4jg== +"@babel/helper-remap-async-to-generator@^7.12.13": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.12.13.tgz#170365f4140e2d20e5c88f8ba23c24468c296878" + integrity sha512-Qa6PU9vNcj1NZacZZI1Mvwt+gXDH6CTfgAkSjeRMLE8HxtDK76+YDId6NQR+z7Rgd5arhD2cIbS74r0SxD6PDA== dependencies: "@babel/helper-annotate-as-pure" "^7.12.13" - "@babel/helper-wrap-function" "^7.13.0" - "@babel/types" "^7.13.0" + "@babel/helper-wrap-function" "^7.12.13" + "@babel/types" "^7.12.13" -"@babel/helper-replace-supers@^7.12.13", "@babel/helper-replace-supers@^7.13.12", "@babel/helper-replace-supers@^7.14.3": - version "7.14.3" - resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.14.3.tgz#ca17b318b859d107f0e9b722d58cf12d94436600" - integrity sha512-Rlh8qEWZSTfdz+tgNV/N4gz1a0TMNwCUcENhMjHTHKp3LseYH5Jha0NSlyTQWMnjbYcwFt+bqAMqSLHVXkQ6UA== +"@babel/helper-replace-supers@^7.12.13": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.12.13.tgz#00ec4fb6862546bd3d0aff9aac56074277173121" + integrity sha512-pctAOIAMVStI2TMLhozPKbf5yTEXc0OJa0eENheb4w09SrgOWEs+P4nTOZYJQCqs8JlErGLDPDJTiGIp3ygbLg== dependencies: - "@babel/helper-member-expression-to-functions" "^7.13.12" + "@babel/helper-member-expression-to-functions" "^7.12.13" "@babel/helper-optimise-call-expression" "^7.12.13" - "@babel/traverse" "^7.14.2" - "@babel/types" "^7.14.2" + "@babel/traverse" "^7.12.13" + "@babel/types" "^7.12.13" -"@babel/helper-simple-access@^7.13.12": - version "7.13.12" - resolved 
"https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.13.12.tgz#dd6c538afb61819d205a012c31792a39c7a5eaf6" - integrity sha512-7FEjbrx5SL9cWvXioDbnlYTppcZGuCY6ow3/D5vMggb2Ywgu4dMrpTJX0JdQAIcRRUElOIxF3yEooa9gUb9ZbA== +"@babel/helper-simple-access@^7.12.13": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.12.13.tgz#8478bcc5cacf6aa1672b251c1d2dde5ccd61a6c4" + integrity sha512-0ski5dyYIHEfwpWGx5GPWhH35j342JaflmCeQmsPWcrOQDtCN6C1zKAVRFVbK53lPW2c9TsuLLSUDf0tIGJ5hA== dependencies: - "@babel/types" "^7.13.12" + "@babel/types" "^7.12.13" "@babel/helper-skip-transparent-expression-wrappers@^7.12.1": version "7.12.1" @@ -221,186 +206,156 @@ dependencies: "@babel/types" "^7.12.13" -"@babel/helper-validator-identifier@^7.12.11", "@babel/helper-validator-identifier@^7.14.0": - version "7.14.0" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.14.0.tgz#d26cad8a47c65286b15df1547319a5d0bcf27288" - integrity sha512-V3ts7zMSu5lfiwWDVWzRDGIN+lnCEUdaXgtVHJgLb1rGaA6jMrtB9EmE7L18foXJIE8Un/A/h6NJfGQp/e1J4A== +"@babel/helper-validator-identifier@^7.12.11": + version "7.12.11" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.12.11.tgz#c9a1f021917dcb5ccf0d4e453e399022981fc9ed" + integrity sha512-np/lG3uARFybkoHokJUmf1QfEvRVCPbmQeUQpKow5cQ3xWrV9i3rUHodKDJPQfTVX61qKi+UdYk8kik84n7XOw== -"@babel/helper-validator-option@^7.12.17": - version "7.12.17" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.12.17.tgz#d1fbf012e1a79b7eebbfdc6d270baaf8d9eb9831" - integrity sha512-TopkMDmLzq8ngChwRlyjR6raKD6gMSae4JdYDB8bByKreQgG0RBTuKe9LRxW3wFtUnjxOPRKBDwEH6Mg5KeDfw== +"@babel/helper-validator-option@^7.12.16": + version "7.12.16" + resolved 
"https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.12.16.tgz#f73cbd3bbba51915216c5dea908e9b206bb10051" + integrity sha512-uCgsDBPUQDvzr11ePPo4TVEocxj8RXjUVSC/Y8N1YpVAI/XDdUwGJu78xmlGhTxj2ntaWM7n9LQdRtyhOzT2YQ== -"@babel/helper-wrap-function@^7.13.0": - version "7.13.0" - resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.13.0.tgz#bdb5c66fda8526ec235ab894ad53a1235c79fcc4" - integrity sha512-1UX9F7K3BS42fI6qd2A4BjKzgGjToscyZTdp1DjknHLCIvpgne6918io+aL5LXFcER/8QWiwpoY902pVEqgTXA== +"@babel/helper-wrap-function@^7.12.13": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.12.13.tgz#e3ea8cb3ee0a16911f9c1b50d9e99fe8fe30f9ff" + integrity sha512-t0aZFEmBJ1LojdtJnhOaQEVejnzYhyjWHSsNSNo8vOYRbAJNh6r6GQF7pd36SqG7OKGbn+AewVQ/0IfYfIuGdw== dependencies: "@babel/helper-function-name" "^7.12.13" "@babel/template" "^7.12.13" - "@babel/traverse" "^7.13.0" - "@babel/types" "^7.13.0" + "@babel/traverse" "^7.12.13" + "@babel/types" "^7.12.13" -"@babel/helpers@^7.14.0": - version "7.14.0" - resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.14.0.tgz#ea9b6be9478a13d6f961dbb5f36bf75e2f3b8f62" - integrity sha512-+ufuXprtQ1D1iZTO/K9+EBRn+qPWMJjZSw/S0KlFrxCw4tkrzv9grgpDHkY9MeQTjTY8i2sp7Jep8DfU6tN9Mg== +"@babel/helpers@^7.12.13": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.12.13.tgz#3c75e993632e4dadc0274eae219c73eb7645ba47" + integrity sha512-oohVzLRZ3GQEk4Cjhfs9YkJA4TdIDTObdBEZGrd6F/T0GPSnuV6l22eMcxlvcvzVIPH3VTtxbseudM1zIE+rPQ== dependencies: "@babel/template" "^7.12.13" - "@babel/traverse" "^7.14.0" - "@babel/types" "^7.14.0" + "@babel/traverse" "^7.12.13" + "@babel/types" "^7.12.13" "@babel/highlight@^7.10.4", "@babel/highlight@^7.12.13": - version "7.14.0" - resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.14.0.tgz#3197e375711ef6bf834e67d0daec88e4f46113cf" - integrity 
sha512-YSCOwxvTYEIMSGaBQb5kDDsCopDdiUGsqpatp3fOlI4+2HQSkTmEVWnVuySdAC5EWCqSWWTv0ib63RjR7dTBdg== + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.12.13.tgz#8ab538393e00370b26271b01fa08f7f27f2e795c" + integrity sha512-kocDQvIbgMKlWxXe9fof3TQ+gkIPOUSEYhJjqUjvKMez3krV7vbzYCDq39Oj11UAVK7JqPVGQPlgE85dPNlQww== dependencies: - "@babel/helper-validator-identifier" "^7.14.0" + "@babel/helper-validator-identifier" "^7.12.11" chalk "^2.0.0" js-tokens "^4.0.0" -"@babel/parser@^7.12.13", "@babel/parser@^7.14.2", "@babel/parser@^7.14.3", "@babel/parser@^7.7.0": - version "7.14.3" - resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.14.3.tgz#9b530eecb071fd0c93519df25c5ff9f14759f298" - integrity sha512-7MpZDIfI7sUC5zWo2+foJ50CSI5lcqDehZ0lVgIhSi4bFEk94fLAKlF3Q0nzSQQ+ca0lm+O6G9ztKVBeu8PMRQ== +"@babel/parser@^7.12.13", "@babel/parser@^7.12.16", "@babel/parser@^7.7.0": + version "7.12.16" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.12.16.tgz#cc31257419d2c3189d394081635703f549fc1ed4" + integrity sha512-c/+u9cqV6F0+4Hpq01jnJO+GLp2DdT63ppz9Xa+6cHaajM9VFzK/iDXiKK65YtpeVwu+ctfS6iqlMqRgQRzeCw== -"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@^7.13.12": - version "7.13.12" - resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.13.12.tgz#a3484d84d0b549f3fc916b99ee4783f26fabad2a" - integrity sha512-d0u3zWKcoZf379fOeJdr1a5WPDny4aOFZ6hlfKivgK0LY7ZxNfoaHL2fWwdGtHyVvra38FC+HVYkO+byfSA8AQ== - dependencies: - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/helper-skip-transparent-expression-wrappers" "^7.12.1" - "@babel/plugin-proposal-optional-chaining" "^7.13.12" - -"@babel/plugin-proposal-async-generator-functions@^7.14.2": - version "7.14.2" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.14.2.tgz#3a2085abbf5d5f962d480dbc81347385ed62eb1e" - integrity sha512-b1AM4F6fwck4N8ItZ/AtC4FP/cqZqmKRQ4FaTDutwSYyjuhtvsGEMLK4N/ztV/ImP40BjIDyMgBQAeAMsQYVFQ== - dependencies: - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/helper-remap-async-to-generator" "^7.13.0" - "@babel/plugin-syntax-async-generators" "^7.8.4" - -"@babel/plugin-proposal-class-properties@^7.13.0", "@babel/plugin-proposal-class-properties@^7.8.3": - version "7.13.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.13.0.tgz#146376000b94efd001e57a40a88a525afaab9f37" - integrity sha512-KnTDjFNC1g+45ka0myZNvSBFLhNCLN+GeGYLDEA8Oq7MZ6yMgfLoIRh86GRT0FjtJhZw8JyUskP9uvj5pHM9Zg== +"@babel/plugin-proposal-async-generator-functions@^7.12.13": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.12.13.tgz#d1c6d841802ffb88c64a2413e311f7345b9e66b5" + integrity sha512-1KH46Hx4WqP77f978+5Ye/VUbuwQld2hph70yaw2hXS2v7ER2f3nlpNMu909HO2rbvP0NKLlMVDPh9KXklVMhA== dependencies: - "@babel/helper-create-class-features-plugin" "^7.13.0" - "@babel/helper-plugin-utils" "^7.13.0" + "@babel/helper-plugin-utils" "^7.12.13" + "@babel/helper-remap-async-to-generator" "^7.12.13" + "@babel/plugin-syntax-async-generators" "^7.8.0" -"@babel/plugin-proposal-class-static-block@^7.13.11": - version "7.14.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-class-static-block/-/plugin-proposal-class-static-block-7.14.3.tgz#5a527e2cae4a4753119c3a3e7f64ecae8ccf1360" - integrity sha512-HEjzp5q+lWSjAgJtSluFDrGGosmwTgKwCXdDQZvhKsRlwv3YdkUEqxNrrjesJd+B9E9zvr1PVPVBvhYZ9msjvQ== +"@babel/plugin-proposal-class-properties@^7.12.13", "@babel/plugin-proposal-class-properties@^7.8.3": + version "7.12.13" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.12.13.tgz#3d2ce350367058033c93c098e348161d6dc0d8c8" + integrity sha512-8SCJ0Ddrpwv4T7Gwb33EmW1V9PY5lggTO+A8WjyIwxrSHDUyBw4MtF96ifn1n8H806YlxbVCoKXbbmzD6RD+cA== dependencies: - "@babel/helper-create-class-features-plugin" "^7.14.3" - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/plugin-syntax-class-static-block" "^7.12.13" + "@babel/helper-create-class-features-plugin" "^7.12.13" + "@babel/helper-plugin-utils" "^7.12.13" "@babel/plugin-proposal-decorators@^7.8.3": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-decorators/-/plugin-proposal-decorators-7.14.2.tgz#e68c3c5e4a6a08834456568256fc3e71b93590cf" - integrity sha512-LauAqDd/VjQDtae58QgBcEOE42NNP+jB2OE+XeC3KBI/E+BhhRjtr5viCIrj1hmu1YvrguLipIPRJZmS5yUcFw== + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-decorators/-/plugin-proposal-decorators-7.12.13.tgz#d4c89b40c2b7a526b0d394de4f4def36191e413e" + integrity sha512-x2aOr5w4ARJoYHFKoG2iEUL/Xe99JAJXjAasHijXp3/KgaetJXGE62SmHgsW3Tia/XUT5AxF2YC0F+JyhPY/0Q== dependencies: - "@babel/helper-create-class-features-plugin" "^7.14.2" - "@babel/helper-plugin-utils" "^7.13.0" + "@babel/helper-create-class-features-plugin" "^7.12.13" + "@babel/helper-plugin-utils" "^7.12.13" "@babel/plugin-syntax-decorators" "^7.12.13" -"@babel/plugin-proposal-dynamic-import@^7.14.2": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.14.2.tgz#01ebabd7c381cff231fa43e302939a9de5be9d9f" - integrity sha512-oxVQZIWFh91vuNEMKltqNsKLFWkOIyJc95k2Gv9lWVyDfPUQGSSlbDEgWuJUU1afGE9WwlzpucMZ3yDRHIItkA== +"@babel/plugin-proposal-dynamic-import@^7.12.16": + version "7.12.16" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.12.16.tgz#b9f33b252e3406d492a15a799c9d45a9a9613473" + integrity 
sha512-yiDkYFapVxNOCcBfLnsb/qdsliroM+vc3LHiZwS4gh7pFjo5Xq3BDhYBNn3H3ao+hWPvqeeTdU+s+FIvokov+w== dependencies: - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/plugin-syntax-dynamic-import" "^7.8.3" + "@babel/helper-plugin-utils" "^7.12.13" + "@babel/plugin-syntax-dynamic-import" "^7.8.0" -"@babel/plugin-proposal-export-namespace-from@^7.14.2": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.14.2.tgz#62542f94aa9ce8f6dba79eec698af22112253791" - integrity sha512-sRxW3z3Zp3pFfLAgVEvzTFutTXax837oOatUIvSG9o5gRj9mKwm3br1Se5f4QalTQs9x4AzlA/HrCWbQIHASUQ== +"@babel/plugin-proposal-export-namespace-from@^7.12.13": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.12.13.tgz#393be47a4acd03fa2af6e3cde9b06e33de1b446d" + integrity sha512-INAgtFo4OnLN3Y/j0VwAgw3HDXcDtX+C/erMvWzuV9v71r7urb6iyMXu7eM9IgLr1ElLlOkaHjJ0SbCmdOQ3Iw== dependencies: - "@babel/helper-plugin-utils" "^7.13.0" + "@babel/helper-plugin-utils" "^7.12.13" "@babel/plugin-syntax-export-namespace-from" "^7.8.3" -"@babel/plugin-proposal-json-strings@^7.14.2": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.14.2.tgz#830b4e2426a782e8b2878fbfe2cba85b70cbf98c" - integrity sha512-w2DtsfXBBJddJacXMBhElGEYqCZQqN99Se1qeYn8DVLB33owlrlLftIbMzn5nz1OITfDVknXF433tBrLEAOEjA== +"@babel/plugin-proposal-json-strings@^7.12.13": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.12.13.tgz#ced7888a2db92a3d520a2e35eb421fdb7fcc9b5d" + integrity sha512-v9eEi4GiORDg8x+Dmi5r8ibOe0VXoKDeNPYcTTxdGN4eOWikrJfDJCJrr1l5gKGvsNyGJbrfMftC2dTL6oz7pg== dependencies: - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/plugin-syntax-json-strings" "^7.8.3" + "@babel/helper-plugin-utils" "^7.12.13" + 
"@babel/plugin-syntax-json-strings" "^7.8.0" -"@babel/plugin-proposal-logical-assignment-operators@^7.14.2": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-logical-assignment-operators/-/plugin-proposal-logical-assignment-operators-7.14.2.tgz#222348c080a1678e0e74ea63fe76f275882d1fd7" - integrity sha512-1JAZtUrqYyGsS7IDmFeaem+/LJqujfLZ2weLR9ugB0ufUPjzf8cguyVT1g5im7f7RXxuLq1xUxEzvm68uYRtGg== +"@babel/plugin-proposal-logical-assignment-operators@^7.12.13": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-logical-assignment-operators/-/plugin-proposal-logical-assignment-operators-7.12.13.tgz#575b5d9a08d8299eeb4db6430da6e16e5cf14350" + integrity sha512-fqmiD3Lz7jVdK6kabeSr1PZlWSUVqSitmHEe3Z00dtGTKieWnX9beafvavc32kjORa5Bai4QNHgFDwWJP+WtSQ== dependencies: - "@babel/helper-plugin-utils" "^7.13.0" + "@babel/helper-plugin-utils" "^7.12.13" "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" -"@babel/plugin-proposal-nullish-coalescing-operator@^7.14.2": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.14.2.tgz#425b11dc62fc26939a2ab42cbba680bdf5734546" - integrity sha512-ebR0zU9OvI2N4qiAC38KIAK75KItpIPTpAtd2r4OZmMFeKbKJpUFLYP2EuDut82+BmYi8sz42B+TfTptJ9iG5Q== +"@babel/plugin-proposal-nullish-coalescing-operator@^7.12.13": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.12.13.tgz#24867307285cee4e1031170efd8a7ac807deefde" + integrity sha512-Qoxpy+OxhDBI5kRqliJFAl4uWXk3Bn24WeFstPH0iLymFehSAUR8MHpqU7njyXv/qbo7oN6yTy5bfCmXdKpo1Q== dependencies: - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + "@babel/helper-plugin-utils" "^7.12.13" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.0" 
-"@babel/plugin-proposal-numeric-separator@^7.14.2": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.14.2.tgz#82b4cc06571143faf50626104b335dd71baa4f9e" - integrity sha512-DcTQY9syxu9BpU3Uo94fjCB3LN9/hgPS8oUL7KrSW3bA2ePrKZZPJcc5y0hoJAM9dft3pGfErtEUvxXQcfLxUg== +"@babel/plugin-proposal-numeric-separator@^7.12.13": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.12.13.tgz#bd9da3188e787b5120b4f9d465a8261ce67ed1db" + integrity sha512-O1jFia9R8BUCl3ZGB7eitaAPu62TXJRHn7rh+ojNERCFyqRwJMTmhz+tJ+k0CwI6CLjX/ee4qW74FSqlq9I35w== dependencies: - "@babel/helper-plugin-utils" "^7.13.0" + "@babel/helper-plugin-utils" "^7.12.13" "@babel/plugin-syntax-numeric-separator" "^7.10.4" -"@babel/plugin-proposal-object-rest-spread@^7.14.2": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.14.2.tgz#e17d418f81cc103fedd4ce037e181c8056225abc" - integrity sha512-hBIQFxwZi8GIp934+nj5uV31mqclC1aYDhctDu5khTi9PCCUOczyy0b34W0oE9U/eJXiqQaKyVsmjeagOaSlbw== +"@babel/plugin-proposal-object-rest-spread@^7.12.13": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.12.13.tgz#f93f3116381ff94bc676fdcb29d71045cd1ec011" + integrity sha512-WvA1okB/0OS/N3Ldb3sziSrXg6sRphsBgqiccfcQq7woEn5wQLNX82Oc4PlaFcdwcWHuQXAtb8ftbS8Fbsg/sg== dependencies: - "@babel/compat-data" "^7.14.0" - "@babel/helper-compilation-targets" "^7.13.16" - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/plugin-syntax-object-rest-spread" "^7.8.3" - "@babel/plugin-transform-parameters" "^7.14.2" + "@babel/helper-plugin-utils" "^7.12.13" + "@babel/plugin-syntax-object-rest-spread" "^7.8.0" + "@babel/plugin-transform-parameters" "^7.12.13" 
-"@babel/plugin-proposal-optional-catch-binding@^7.14.2": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.14.2.tgz#150d4e58e525b16a9a1431bd5326c4eed870d717" - integrity sha512-XtkJsmJtBaUbOxZsNk0Fvrv8eiqgneug0A6aqLFZ4TSkar2L5dSXWcnUKHgmjJt49pyB/6ZHvkr3dPgl9MOWRQ== +"@babel/plugin-proposal-optional-catch-binding@^7.12.13": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.12.13.tgz#4640520afe57728af14b4d1574ba844f263bcae5" + integrity sha512-9+MIm6msl9sHWg58NvqpNpLtuFbmpFYk37x8kgnGzAHvX35E1FyAwSUt5hIkSoWJFSAH+iwU8bJ4fcD1zKXOzg== dependencies: - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + "@babel/helper-plugin-utils" "^7.12.13" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.0" -"@babel/plugin-proposal-optional-chaining@^7.13.12", "@babel/plugin-proposal-optional-chaining@^7.14.2": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.14.2.tgz#df8171a8b9c43ebf4c1dabe6311b432d83e1b34e" - integrity sha512-qQByMRPwMZJainfig10BoaDldx/+VDtNcrA7qdNaEOAj6VXud+gfrkA8j4CRAU5HjnWREXqIpSpH30qZX1xivA== +"@babel/plugin-proposal-optional-chaining@^7.12.16": + version "7.12.16" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.12.16.tgz#600c7531f754186b0f2096e495a92da7d88aa139" + integrity sha512-O3ohPwOhkwji5Mckb7F/PJpJVJY3DpPsrt/F0Bk40+QMk9QpAIqeGusHWqu/mYqsM8oBa6TziL/2mbERWsUZjg== dependencies: - "@babel/helper-plugin-utils" "^7.13.0" + "@babel/helper-plugin-utils" "^7.12.13" "@babel/helper-skip-transparent-expression-wrappers" "^7.12.1" - "@babel/plugin-syntax-optional-chaining" "^7.8.3" + "@babel/plugin-syntax-optional-chaining" "^7.8.0" 
-"@babel/plugin-proposal-private-methods@^7.13.0": - version "7.13.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.13.0.tgz#04bd4c6d40f6e6bbfa2f57e2d8094bad900ef787" - integrity sha512-MXyyKQd9inhx1kDYPkFRVOBXQ20ES8Pto3T7UZ92xj2mY0EVD8oAVzeyYuVfy/mxAdTSIayOvg+aVzcHV2bn6Q== - dependencies: - "@babel/helper-create-class-features-plugin" "^7.13.0" - "@babel/helper-plugin-utils" "^7.13.0" - -"@babel/plugin-proposal-private-property-in-object@^7.14.0": - version "7.14.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.14.0.tgz#b1a1f2030586b9d3489cc26179d2eb5883277636" - integrity sha512-59ANdmEwwRUkLjB7CRtwJxxwtjESw+X2IePItA+RGQh+oy5RmpCh/EvVVvh5XQc3yxsm5gtv0+i9oBZhaDNVTg== +"@babel/plugin-proposal-private-methods@^7.12.13": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.12.13.tgz#ea78a12554d784ecf7fc55950b752d469d9c4a71" + integrity sha512-sV0V57uUwpauixvR7s2o75LmwJI6JECwm5oPUY5beZB1nBl2i37hc7CJGqB5G+58fur5Y6ugvl3LRONk5x34rg== dependencies: - "@babel/helper-annotate-as-pure" "^7.12.13" - "@babel/helper-create-class-features-plugin" "^7.14.0" - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/plugin-syntax-private-property-in-object" "^7.14.0" + "@babel/helper-create-class-features-plugin" "^7.12.13" + "@babel/helper-plugin-utils" "^7.12.13" "@babel/plugin-proposal-unicode-property-regex@^7.12.13", "@babel/plugin-proposal-unicode-property-regex@^7.4.4": version "7.12.13" @@ -410,7 +365,7 @@ "@babel/helper-create-regexp-features-plugin" "^7.12.13" "@babel/helper-plugin-utils" "^7.12.13" -"@babel/plugin-syntax-async-generators@^7.8.4": +"@babel/plugin-syntax-async-generators@^7.8.0": version "7.8.4" resolved 
"https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz#a983fb1aeb2ec3f6ed042a210f640e90e786fe0d" integrity sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw== @@ -424,13 +379,6 @@ dependencies: "@babel/helper-plugin-utils" "^7.12.13" -"@babel/plugin-syntax-class-static-block@^7.12.13": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.12.13.tgz#8e3d674b0613e67975ceac2776c97b60cafc5c9c" - integrity sha512-ZmKQ0ZXR0nYpHZIIuj9zE7oIqCx2hw9TKi+lIo73NNrMPAZGHfS92/VRV0ZmPj6H2ffBgyFHXvJ5NYsNeEaP2A== - dependencies: - "@babel/helper-plugin-utils" "^7.12.13" - "@babel/plugin-syntax-decorators@^7.12.13": version "7.12.13" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-decorators/-/plugin-syntax-decorators-7.12.13.tgz#fac829bf3c7ef4a1bc916257b403e58c6bdaf648" @@ -438,7 +386,7 @@ dependencies: "@babel/helper-plugin-utils" "^7.12.13" -"@babel/plugin-syntax-dynamic-import@^7.8.3": +"@babel/plugin-syntax-dynamic-import@^7.8.0", "@babel/plugin-syntax-dynamic-import@^7.8.3": version "7.8.3" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz#62bf98b2da3cd21d626154fc96ee5b3cb68eacb3" integrity sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ== @@ -452,7 +400,7 @@ dependencies: "@babel/helper-plugin-utils" "^7.8.3" -"@babel/plugin-syntax-json-strings@^7.8.3": +"@babel/plugin-syntax-json-strings@^7.8.0": version "7.8.3" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz#01ca21b668cd8218c9e640cb6dd88c5412b2c96a" integrity sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA== @@ -473,7 +421,7 @@ dependencies: "@babel/helper-plugin-utils" "^7.10.4" 
-"@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3": +"@babel/plugin-syntax-nullish-coalescing-operator@^7.8.0": version "7.8.3" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz#167ed70368886081f74b5c36c65a88c03b66d1a9" integrity sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ== @@ -487,34 +435,27 @@ dependencies: "@babel/helper-plugin-utils" "^7.10.4" -"@babel/plugin-syntax-object-rest-spread@^7.8.3": +"@babel/plugin-syntax-object-rest-spread@^7.8.0": version "7.8.3" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871" integrity sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA== dependencies: "@babel/helper-plugin-utils" "^7.8.0" -"@babel/plugin-syntax-optional-catch-binding@^7.8.3": +"@babel/plugin-syntax-optional-catch-binding@^7.8.0": version "7.8.3" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz#6111a265bcfb020eb9efd0fdfd7d26402b9ed6c1" integrity sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q== dependencies: "@babel/helper-plugin-utils" "^7.8.0" -"@babel/plugin-syntax-optional-chaining@^7.8.3": +"@babel/plugin-syntax-optional-chaining@^7.8.0": version "7.8.3" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz#4f69c2ab95167e0180cd5336613f8c5788f7d48a" integrity sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg== dependencies: "@babel/helper-plugin-utils" "^7.8.0" -"@babel/plugin-syntax-private-property-in-object@^7.14.0": - version "7.14.0" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.0.tgz#762a4babec61176fec6c88480dec40372b140c0b" - integrity sha512-bda3xF8wGl5/5btF794utNOL0Jw+9jE5C1sLZcoK7c4uonE/y3iQiyG+KbkF3WBV/paX58VCpjhxLPkdj5Fe4w== - dependencies: - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/plugin-syntax-top-level-await@^7.12.13": version "7.12.13" resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.12.13.tgz#c5f0fa6e249f5b739727f923540cf7a806130178" @@ -522,21 +463,21 @@ dependencies: "@babel/helper-plugin-utils" "^7.12.13" -"@babel/plugin-transform-arrow-functions@^7.13.0": - version "7.13.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.13.0.tgz#10a59bebad52d637a027afa692e8d5ceff5e3dae" - integrity sha512-96lgJagobeVmazXFaDrbmCLQxBysKu7U6Do3mLsx27gf5Dk85ezysrs2BZUpXD703U/Su1xTBDxxar2oa4jAGg== +"@babel/plugin-transform-arrow-functions@^7.12.13": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.12.13.tgz#eda5670b282952100c229f8a3bd49e0f6a72e9fe" + integrity sha512-tBtuN6qtCTd+iHzVZVOMNp+L04iIJBpqkdY42tWbmjIT5wvR2kx7gxMBsyhQtFzHwBbyGi9h8J8r9HgnOpQHxg== dependencies: - "@babel/helper-plugin-utils" "^7.13.0" + "@babel/helper-plugin-utils" "^7.12.13" -"@babel/plugin-transform-async-to-generator@^7.13.0": - version "7.13.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.13.0.tgz#8e112bf6771b82bf1e974e5e26806c5c99aa516f" - integrity sha512-3j6E004Dx0K3eGmhxVJxwwI89CTJrce7lg3UrtFuDAVQ/2+SJ/h/aSFOeE6/n0WB1GsOffsJp6MnPQNQ8nmwhg== +"@babel/plugin-transform-async-to-generator@^7.12.13": + version "7.12.13" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.12.13.tgz#fed8c69eebf187a535bfa4ee97a614009b24f7ae" + integrity sha512-psM9QHcHaDr+HZpRuJcE1PXESuGWSCcbiGFFhhwfzdbTxaGDVzuVtdNYliAwcRo3GFg0Bc8MmI+AvIGYIJG04A== dependencies: "@babel/helper-module-imports" "^7.12.13" - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/helper-remap-async-to-generator" "^7.13.0" + "@babel/helper-plugin-utils" "^7.12.13" + "@babel/helper-remap-async-to-generator" "^7.12.13" "@babel/plugin-transform-block-scoped-functions@^7.12.13": version "7.12.13" @@ -545,39 +486,39 @@ dependencies: "@babel/helper-plugin-utils" "^7.12.13" -"@babel/plugin-transform-block-scoping@^7.14.2": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.14.2.tgz#761cb12ab5a88d640ad4af4aa81f820e6b5fdf5c" - integrity sha512-neZZcP19NugZZqNwMTH+KoBjx5WyvESPSIOQb4JHpfd+zPfqcH65RMu5xJju5+6q/Y2VzYrleQTr+b6METyyxg== +"@babel/plugin-transform-block-scoping@^7.12.13": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.12.13.tgz#f36e55076d06f41dfd78557ea039c1b581642e61" + integrity sha512-Pxwe0iqWJX4fOOM2kEZeUuAxHMWb9nK+9oh5d11bsLoB0xMg+mkDpt0eYuDZB7ETrY9bbcVlKUGTOGWy7BHsMQ== dependencies: - "@babel/helper-plugin-utils" "^7.13.0" + "@babel/helper-plugin-utils" "^7.12.13" -"@babel/plugin-transform-classes@^7.14.2": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.14.2.tgz#3f1196c5709f064c252ad056207d87b7aeb2d03d" - integrity sha512-7oafAVcucHquA/VZCsXv/gmuiHeYd64UJyyTYU+MPfNu0KeNlxw06IeENBO8bJjXVbolu+j1MM5aKQtH1OMCNg== +"@babel/plugin-transform-classes@^7.12.13": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.12.13.tgz#9728edc1838b5d62fc93ad830bd523b1fcb0e1f6" 
+ integrity sha512-cqZlMlhCC1rVnxE5ZGMtIb896ijL90xppMiuWXcwcOAuFczynpd3KYemb91XFFPi3wJSe/OcrX9lXoowatkkxA== dependencies: "@babel/helper-annotate-as-pure" "^7.12.13" - "@babel/helper-function-name" "^7.14.2" + "@babel/helper-function-name" "^7.12.13" "@babel/helper-optimise-call-expression" "^7.12.13" - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/helper-replace-supers" "^7.13.12" + "@babel/helper-plugin-utils" "^7.12.13" + "@babel/helper-replace-supers" "^7.12.13" "@babel/helper-split-export-declaration" "^7.12.13" globals "^11.1.0" -"@babel/plugin-transform-computed-properties@^7.13.0": - version "7.13.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.13.0.tgz#845c6e8b9bb55376b1fa0b92ef0bdc8ea06644ed" - integrity sha512-RRqTYTeZkZAz8WbieLTvKUEUxZlUTdmL5KGMyZj7FnMfLNKV4+r5549aORG/mgojRmFlQMJDUupwAMiF2Q7OUg== +"@babel/plugin-transform-computed-properties@^7.12.13": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.12.13.tgz#6a210647a3d67f21f699cfd2a01333803b27339d" + integrity sha512-dDfuROUPGK1mTtLKyDPUavmj2b6kFu82SmgpztBFEO974KMjJT+Ytj3/oWsTUMBmgPcp9J5Pc1SlcAYRpJ2hRA== dependencies: - "@babel/helper-plugin-utils" "^7.13.0" + "@babel/helper-plugin-utils" "^7.12.13" -"@babel/plugin-transform-destructuring@^7.13.17": - version "7.13.17" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.13.17.tgz#678d96576638c19d5b36b332504d3fd6e06dea27" - integrity sha512-UAUqiLv+uRLO+xuBKKMEpC+t7YRNVRqBsWWq1yKXbBZBje/t3IXCiSinZhjn/DC3qzBfICeYd2EFGEbHsh5RLA== +"@babel/plugin-transform-destructuring@^7.12.13": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.12.13.tgz#fc56c5176940c5b41735c677124d1d20cecc9aeb" + integrity 
sha512-Dn83KykIFzjhA3FDPA1z4N+yfF3btDGhjnJwxIj0T43tP0flCujnU8fKgEkf0C1biIpSv9NZegPBQ1J6jYkwvQ== dependencies: - "@babel/helper-plugin-utils" "^7.13.0" + "@babel/helper-plugin-utils" "^7.12.13" "@babel/plugin-transform-dotall-regex@^7.12.13", "@babel/plugin-transform-dotall-regex@^7.4.4": version "7.12.13" @@ -602,12 +543,12 @@ "@babel/helper-builder-binary-assignment-operator-visitor" "^7.12.13" "@babel/helper-plugin-utils" "^7.12.13" -"@babel/plugin-transform-for-of@^7.13.0": - version "7.13.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.13.0.tgz#c799f881a8091ac26b54867a845c3e97d2696062" - integrity sha512-IHKT00mwUVYE0zzbkDgNRP6SRzvfGCYsOxIRz8KsiaaHCcT9BWIkO+H9QRJseHBLOGBZkHUdHiqj6r0POsdytg== +"@babel/plugin-transform-for-of@^7.12.13": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.12.13.tgz#561ff6d74d9e1c8879cb12dbaf4a14cd29d15cf6" + integrity sha512-xCbdgSzXYmHGyVX3+BsQjcd4hv4vA/FDy7Kc8eOpzKmBBPEOTurt0w5fCRQaGl+GSBORKgJdstQ1rHl4jbNseQ== dependencies: - "@babel/helper-plugin-utils" "^7.13.0" + "@babel/helper-plugin-utils" "^7.12.13" "@babel/plugin-transform-function-name@^7.12.13": version "7.12.13" @@ -631,43 +572,43 @@ dependencies: "@babel/helper-plugin-utils" "^7.12.13" -"@babel/plugin-transform-modules-amd@^7.14.2": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.14.2.tgz#6622806fe1a7c07a1388444222ef9535f2ca17b0" - integrity sha512-hPC6XBswt8P3G2D1tSV2HzdKvkqOpmbyoy+g73JG0qlF/qx2y3KaMmXb1fLrpmWGLZYA0ojCvaHdzFWjlmV+Pw== +"@babel/plugin-transform-modules-amd@^7.12.13": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.12.13.tgz#43db16249b274ee2e551e2422090aa1c47692d56" + integrity sha512-JHLOU0o81m5UqG0Ulz/fPC68/v+UTuGTWaZBUwpEk1fYQ1D9LfKV6MPn4ttJKqRo5Lm460fkzjLTL4EHvCprvA== 
dependencies: - "@babel/helper-module-transforms" "^7.14.2" - "@babel/helper-plugin-utils" "^7.13.0" + "@babel/helper-module-transforms" "^7.12.13" + "@babel/helper-plugin-utils" "^7.12.13" babel-plugin-dynamic-import-node "^2.3.3" -"@babel/plugin-transform-modules-commonjs@^7.14.0": - version "7.14.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.14.0.tgz#52bc199cb581e0992edba0f0f80356467587f161" - integrity sha512-EX4QePlsTaRZQmw9BsoPeyh5OCtRGIhwfLquhxGp5e32w+dyL8htOcDwamlitmNFK6xBZYlygjdye9dbd9rUlQ== +"@babel/plugin-transform-modules-commonjs@^7.12.13": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.12.13.tgz#5043b870a784a8421fa1fd9136a24f294da13e50" + integrity sha512-OGQoeVXVi1259HjuoDnsQMlMkT9UkZT9TpXAsqWplS/M0N1g3TJAn/ByOCeQu7mfjc5WpSsRU+jV1Hd89ts0kQ== dependencies: - "@babel/helper-module-transforms" "^7.14.0" - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/helper-simple-access" "^7.13.12" + "@babel/helper-module-transforms" "^7.12.13" + "@babel/helper-plugin-utils" "^7.12.13" + "@babel/helper-simple-access" "^7.12.13" babel-plugin-dynamic-import-node "^2.3.3" -"@babel/plugin-transform-modules-systemjs@^7.13.8": - version "7.13.8" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.13.8.tgz#6d066ee2bff3c7b3d60bf28dec169ad993831ae3" - integrity sha512-hwqctPYjhM6cWvVIlOIe27jCIBgHCsdH2xCJVAYQm7V5yTMoilbVMi9f6wKg0rpQAOn6ZG4AOyvCqFF/hUh6+A== +"@babel/plugin-transform-modules-systemjs@^7.12.13": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.12.13.tgz#351937f392c7f07493fc79b2118201d50404a3c5" + integrity sha512-aHfVjhZ8QekaNF/5aNdStCGzwTbU7SI5hUybBKlMzqIMC7w7Ho8hx5a4R/DkTHfRfLwHGGxSpFt9BfxKCoXKoA== dependencies: - 
"@babel/helper-hoist-variables" "^7.13.0" - "@babel/helper-module-transforms" "^7.13.0" - "@babel/helper-plugin-utils" "^7.13.0" + "@babel/helper-hoist-variables" "^7.12.13" + "@babel/helper-module-transforms" "^7.12.13" + "@babel/helper-plugin-utils" "^7.12.13" "@babel/helper-validator-identifier" "^7.12.11" babel-plugin-dynamic-import-node "^2.3.3" -"@babel/plugin-transform-modules-umd@^7.14.0": - version "7.14.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.14.0.tgz#2f8179d1bbc9263665ce4a65f305526b2ea8ac34" - integrity sha512-nPZdnWtXXeY7I87UZr9VlsWme3Y0cfFFE41Wbxz4bbaexAjNMInXPFUpRRUJ8NoMm0Cw+zxbqjdPmLhcjfazMw== +"@babel/plugin-transform-modules-umd@^7.12.13": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.12.13.tgz#26c66f161d3456674e344b4b1255de4d530cfb37" + integrity sha512-BgZndyABRML4z6ibpi7Z98m4EVLFI9tVsZDADC14AElFaNHHBcJIovflJ6wtCqFxwy2YJ1tJhGRsr0yLPKoN+w== dependencies: - "@babel/helper-module-transforms" "^7.14.0" - "@babel/helper-plugin-utils" "^7.13.0" + "@babel/helper-module-transforms" "^7.12.13" + "@babel/helper-plugin-utils" "^7.12.13" "@babel/plugin-transform-named-capturing-groups-regex@^7.12.13": version "7.12.13" @@ -691,12 +632,12 @@ "@babel/helper-plugin-utils" "^7.12.13" "@babel/helper-replace-supers" "^7.12.13" -"@babel/plugin-transform-parameters@^7.14.2": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.14.2.tgz#e4290f72e0e9e831000d066427c4667098decc31" - integrity sha512-NxoVmA3APNCC1JdMXkdYXuQS+EMdqy0vIwyDHeKHiJKRxmp1qGSdb0JLEIoPRhkx6H/8Qi3RJ3uqOCYw8giy9A== +"@babel/plugin-transform-parameters@^7.12.13": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.12.13.tgz#461e76dfb63c2dfd327b8a008a9e802818ce9853" + integrity 
sha512-e7QqwZalNiBRHCpJg/P8s/VJeSRYgmtWySs1JwvfwPqhBbiWfOcHDKdeAi6oAyIimoKWBlwc8oTgbZHdhCoVZA== dependencies: - "@babel/helper-plugin-utils" "^7.13.0" + "@babel/helper-plugin-utils" "^7.12.13" "@babel/plugin-transform-property-literals@^7.12.13": version "7.12.13" @@ -705,10 +646,10 @@ dependencies: "@babel/helper-plugin-utils" "^7.12.13" -"@babel/plugin-transform-regenerator@^7.13.15": - version "7.13.15" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.13.15.tgz#e5eb28945bf8b6563e7f818945f966a8d2997f39" - integrity sha512-Bk9cOLSz8DiurcMETZ8E2YtIVJbFCPGW28DJWUakmyVWtQSm6Wsf0p4B4BfEr/eL2Nkhe/CICiUiMOCi1TPhuQ== +"@babel/plugin-transform-regenerator@^7.12.13": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.12.13.tgz#b628bcc9c85260ac1aeb05b45bde25210194a2f5" + integrity sha512-lxb2ZAvSLyJ2PEe47hoGWPmW22v7CtSl9jW8mingV4H2sEX/JOcrAj2nPuGWi56ERUm2bUpjKzONAuT6HCn2EA== dependencies: regenerator-transform "^0.14.2" @@ -720,16 +661,13 @@ "@babel/helper-plugin-utils" "^7.12.13" "@babel/plugin-transform-runtime@^7.11.0": - version "7.14.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.14.3.tgz#1fd885a2d0de1d3c223795a4e9be72c2db4515cf" - integrity sha512-t960xbi8wpTFE623ef7sd+UpEC5T6EEguQlTBJDEO05+XwnIWVfuqLw/vdLWY6IdFmtZE+65CZAfByT39zRpkg== - dependencies: - "@babel/helper-module-imports" "^7.13.12" - "@babel/helper-plugin-utils" "^7.13.0" - babel-plugin-polyfill-corejs2 "^0.2.0" - babel-plugin-polyfill-corejs3 "^0.2.0" - babel-plugin-polyfill-regenerator "^0.2.0" - semver "^6.3.0" + version "7.12.15" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.12.15.tgz#4337b2507288007c2b197059301aa0af8d90c085" + integrity sha512-OwptMSRnRWJo+tJ9v9wgAf72ydXWfYSXWhnQjZing8nGZSDFqU1MBleKM3+DriKkcbv7RagA8gVeB0A1PNlNow== + dependencies: + 
"@babel/helper-module-imports" "^7.12.13" + "@babel/helper-plugin-utils" "^7.12.13" + semver "^5.5.1" "@babel/plugin-transform-shorthand-properties@^7.12.13": version "7.12.13" @@ -738,12 +676,12 @@ dependencies: "@babel/helper-plugin-utils" "^7.12.13" -"@babel/plugin-transform-spread@^7.13.0": - version "7.13.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.13.0.tgz#84887710e273c1815ace7ae459f6f42a5d31d5fd" - integrity sha512-V6vkiXijjzYeFmQTr3dBxPtZYLPcUfY34DebOU27jIl2M/Y8Egm52Hw82CSjjPqd54GTlJs5x+CR7HeNr24ckg== +"@babel/plugin-transform-spread@^7.12.13": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.12.13.tgz#ca0d5645abbd560719c354451b849f14df4a7949" + integrity sha512-dUCrqPIowjqk5pXsx1zPftSq4sT0aCeZVAxhdgs3AMgyaDmoUT0G+5h3Dzja27t76aUEIJWlFgPJqJ/d4dbTtg== dependencies: - "@babel/helper-plugin-utils" "^7.13.0" + "@babel/helper-plugin-utils" "^7.12.13" "@babel/helper-skip-transparent-expression-wrappers" "^7.12.1" "@babel/plugin-transform-sticky-regex@^7.12.13": @@ -753,12 +691,12 @@ dependencies: "@babel/helper-plugin-utils" "^7.12.13" -"@babel/plugin-transform-template-literals@^7.13.0": - version "7.13.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.13.0.tgz#a36049127977ad94438dee7443598d1cefdf409d" - integrity sha512-d67umW6nlfmr1iehCcBv69eSUSySk1EsIS8aTDX4Xo9qajAh6mYtcl4kJrBkGXuxZPEgVr7RVfAvNW6YQkd4Mw== +"@babel/plugin-transform-template-literals@^7.12.13": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.12.13.tgz#655037b07ebbddaf3b7752f55d15c2fd6f5aa865" + integrity sha512-arIKlWYUgmNsF28EyfmiQHJLJFlAJNYkuQO10jL46ggjBpeb2re1P9K9YGxNJB45BqTbaslVysXDYm/g3sN/Qg== dependencies: - "@babel/helper-plugin-utils" "^7.13.0" + "@babel/helper-plugin-utils" "^7.12.13" 
"@babel/plugin-transform-typeof-symbol@^7.12.13": version "7.12.13" @@ -783,85 +721,78 @@ "@babel/helper-plugin-utils" "^7.12.13" "@babel/preset-env@^7.11.0": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.14.2.tgz#e80612965da73579c84ad2f963c2359c71524ed5" - integrity sha512-7dD7lVT8GMrE73v4lvDEb85cgcQhdES91BSD7jS/xjC6QY8PnRhux35ac+GCpbiRhp8crexBvZZqnaL6VrY8TQ== - dependencies: - "@babel/compat-data" "^7.14.0" - "@babel/helper-compilation-targets" "^7.13.16" - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/helper-validator-option" "^7.12.17" - "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining" "^7.13.12" - "@babel/plugin-proposal-async-generator-functions" "^7.14.2" - "@babel/plugin-proposal-class-properties" "^7.13.0" - "@babel/plugin-proposal-class-static-block" "^7.13.11" - "@babel/plugin-proposal-dynamic-import" "^7.14.2" - "@babel/plugin-proposal-export-namespace-from" "^7.14.2" - "@babel/plugin-proposal-json-strings" "^7.14.2" - "@babel/plugin-proposal-logical-assignment-operators" "^7.14.2" - "@babel/plugin-proposal-nullish-coalescing-operator" "^7.14.2" - "@babel/plugin-proposal-numeric-separator" "^7.14.2" - "@babel/plugin-proposal-object-rest-spread" "^7.14.2" - "@babel/plugin-proposal-optional-catch-binding" "^7.14.2" - "@babel/plugin-proposal-optional-chaining" "^7.14.2" - "@babel/plugin-proposal-private-methods" "^7.13.0" - "@babel/plugin-proposal-private-property-in-object" "^7.14.0" + version "7.12.16" + resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.12.16.tgz#16710e3490e37764b2f41886de0a33bc4ae91082" + integrity sha512-BXCAXy8RE/TzX416pD2hsVdkWo0G+tYd16pwnRV4Sc0fRwTLRS/Ssv8G5RLXUGQv7g4FG7TXkdDJxCjQ5I+Zjg== + dependencies: + "@babel/compat-data" "^7.12.13" + "@babel/helper-compilation-targets" "^7.12.16" + "@babel/helper-module-imports" "^7.12.13" + "@babel/helper-plugin-utils" "^7.12.13" + "@babel/helper-validator-option" "^7.12.16" + 
"@babel/plugin-proposal-async-generator-functions" "^7.12.13" + "@babel/plugin-proposal-class-properties" "^7.12.13" + "@babel/plugin-proposal-dynamic-import" "^7.12.16" + "@babel/plugin-proposal-export-namespace-from" "^7.12.13" + "@babel/plugin-proposal-json-strings" "^7.12.13" + "@babel/plugin-proposal-logical-assignment-operators" "^7.12.13" + "@babel/plugin-proposal-nullish-coalescing-operator" "^7.12.13" + "@babel/plugin-proposal-numeric-separator" "^7.12.13" + "@babel/plugin-proposal-object-rest-spread" "^7.12.13" + "@babel/plugin-proposal-optional-catch-binding" "^7.12.13" + "@babel/plugin-proposal-optional-chaining" "^7.12.16" + "@babel/plugin-proposal-private-methods" "^7.12.13" "@babel/plugin-proposal-unicode-property-regex" "^7.12.13" - "@babel/plugin-syntax-async-generators" "^7.8.4" + "@babel/plugin-syntax-async-generators" "^7.8.0" "@babel/plugin-syntax-class-properties" "^7.12.13" - "@babel/plugin-syntax-class-static-block" "^7.12.13" - "@babel/plugin-syntax-dynamic-import" "^7.8.3" + "@babel/plugin-syntax-dynamic-import" "^7.8.0" "@babel/plugin-syntax-export-namespace-from" "^7.8.3" - "@babel/plugin-syntax-json-strings" "^7.8.3" + "@babel/plugin-syntax-json-strings" "^7.8.0" "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" - "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.0" "@babel/plugin-syntax-numeric-separator" "^7.10.4" - "@babel/plugin-syntax-object-rest-spread" "^7.8.3" - "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" - "@babel/plugin-syntax-optional-chaining" "^7.8.3" - "@babel/plugin-syntax-private-property-in-object" "^7.14.0" + "@babel/plugin-syntax-object-rest-spread" "^7.8.0" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.0" + "@babel/plugin-syntax-optional-chaining" "^7.8.0" "@babel/plugin-syntax-top-level-await" "^7.12.13" - "@babel/plugin-transform-arrow-functions" "^7.13.0" - "@babel/plugin-transform-async-to-generator" "^7.13.0" + 
"@babel/plugin-transform-arrow-functions" "^7.12.13" + "@babel/plugin-transform-async-to-generator" "^7.12.13" "@babel/plugin-transform-block-scoped-functions" "^7.12.13" - "@babel/plugin-transform-block-scoping" "^7.14.2" - "@babel/plugin-transform-classes" "^7.14.2" - "@babel/plugin-transform-computed-properties" "^7.13.0" - "@babel/plugin-transform-destructuring" "^7.13.17" + "@babel/plugin-transform-block-scoping" "^7.12.13" + "@babel/plugin-transform-classes" "^7.12.13" + "@babel/plugin-transform-computed-properties" "^7.12.13" + "@babel/plugin-transform-destructuring" "^7.12.13" "@babel/plugin-transform-dotall-regex" "^7.12.13" "@babel/plugin-transform-duplicate-keys" "^7.12.13" "@babel/plugin-transform-exponentiation-operator" "^7.12.13" - "@babel/plugin-transform-for-of" "^7.13.0" + "@babel/plugin-transform-for-of" "^7.12.13" "@babel/plugin-transform-function-name" "^7.12.13" "@babel/plugin-transform-literals" "^7.12.13" "@babel/plugin-transform-member-expression-literals" "^7.12.13" - "@babel/plugin-transform-modules-amd" "^7.14.2" - "@babel/plugin-transform-modules-commonjs" "^7.14.0" - "@babel/plugin-transform-modules-systemjs" "^7.13.8" - "@babel/plugin-transform-modules-umd" "^7.14.0" + "@babel/plugin-transform-modules-amd" "^7.12.13" + "@babel/plugin-transform-modules-commonjs" "^7.12.13" + "@babel/plugin-transform-modules-systemjs" "^7.12.13" + "@babel/plugin-transform-modules-umd" "^7.12.13" "@babel/plugin-transform-named-capturing-groups-regex" "^7.12.13" "@babel/plugin-transform-new-target" "^7.12.13" "@babel/plugin-transform-object-super" "^7.12.13" - "@babel/plugin-transform-parameters" "^7.14.2" + "@babel/plugin-transform-parameters" "^7.12.13" "@babel/plugin-transform-property-literals" "^7.12.13" - "@babel/plugin-transform-regenerator" "^7.13.15" + "@babel/plugin-transform-regenerator" "^7.12.13" "@babel/plugin-transform-reserved-words" "^7.12.13" "@babel/plugin-transform-shorthand-properties" "^7.12.13" - "@babel/plugin-transform-spread" 
"^7.13.0" + "@babel/plugin-transform-spread" "^7.12.13" "@babel/plugin-transform-sticky-regex" "^7.12.13" - "@babel/plugin-transform-template-literals" "^7.13.0" + "@babel/plugin-transform-template-literals" "^7.12.13" "@babel/plugin-transform-typeof-symbol" "^7.12.13" "@babel/plugin-transform-unicode-escapes" "^7.12.13" "@babel/plugin-transform-unicode-regex" "^7.12.13" - "@babel/preset-modules" "^0.1.4" - "@babel/types" "^7.14.2" - babel-plugin-polyfill-corejs2 "^0.2.0" - babel-plugin-polyfill-corejs3 "^0.2.0" - babel-plugin-polyfill-regenerator "^0.2.0" - core-js-compat "^3.9.0" - semver "^6.3.0" + "@babel/preset-modules" "^0.1.3" + "@babel/types" "^7.12.13" + core-js-compat "^3.8.0" + semver "^5.5.0" -"@babel/preset-modules@^0.1.4": +"@babel/preset-modules@^0.1.3": version "0.1.4" resolved "https://registry.yarnpkg.com/@babel/preset-modules/-/preset-modules-0.1.4.tgz#362f2b68c662842970fdb5e254ffc8fc1c2e415e" integrity sha512-J36NhwnfdzpmH41M1DrnkkgAqhZaqr/NBdPfQ677mLzlaXo+oDiv1deyCDtgAhz8p328otdob0Du7+xgHGZbKg== @@ -873,9 +804,9 @@ esutils "^2.0.2" "@babel/runtime@^7.11.0", "@babel/runtime@^7.8.4": - version "7.14.0" - resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.14.0.tgz#46794bc20b612c5f75e62dd071e24dfd95f1cbe6" - integrity sha512-JELkvo/DlpNdJ7dlyw/eY7E0suy5i5GQH+Vlxaq1nsNJ+H7f4Vtv3jMeCEgRhZZQFXTjldYfQgv2qmM6M1v5wA== + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.12.13.tgz#0a21452352b02542db0ffb928ac2d3ca7cb6d66d" + integrity sha512-8+3UMPBrjFa/6TtKi/7sehPKqfAm4g6K+YQjyyFOLUTxzOngcRZTlAVY8sc2CORJYqdHQY8gRPHmn+qo15rCBw== dependencies: regenerator-runtime "^0.13.4" @@ -888,26 +819,28 @@ "@babel/parser" "^7.12.13" "@babel/types" "^7.12.13" -"@babel/traverse@^7.0.0", "@babel/traverse@^7.13.0", "@babel/traverse@^7.13.15", "@babel/traverse@^7.14.0", "@babel/traverse@^7.14.2", "@babel/traverse@^7.7.0": - version "7.14.2" - resolved 
"https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.14.2.tgz#9201a8d912723a831c2679c7ebbf2fe1416d765b" - integrity sha512-TsdRgvBFHMyHOOzcP9S6QU0QQtjxlRpEYOy3mcCO5RgmC305ki42aSAmfZEMSSYBla2oZ9BMqYlncBaKmD/7iA== +"@babel/traverse@^7.0.0", "@babel/traverse@^7.12.13", "@babel/traverse@^7.7.0": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.12.13.tgz#689f0e4b4c08587ad26622832632735fb8c4e0c0" + integrity sha512-3Zb4w7eE/OslI0fTp8c7b286/cQps3+vdLW3UcwC8VSJC6GbKn55aeVVu2QJNuCDoeKyptLOFrPq8WqZZBodyA== dependencies: "@babel/code-frame" "^7.12.13" - "@babel/generator" "^7.14.2" - "@babel/helper-function-name" "^7.14.2" + "@babel/generator" "^7.12.13" + "@babel/helper-function-name" "^7.12.13" "@babel/helper-split-export-declaration" "^7.12.13" - "@babel/parser" "^7.14.2" - "@babel/types" "^7.14.2" + "@babel/parser" "^7.12.13" + "@babel/types" "^7.12.13" debug "^4.1.0" globals "^11.1.0" + lodash "^4.17.19" -"@babel/types@^7.0.0", "@babel/types@^7.12.1", "@babel/types@^7.12.13", "@babel/types@^7.13.0", "@babel/types@^7.13.12", "@babel/types@^7.13.16", "@babel/types@^7.14.0", "@babel/types@^7.14.2", "@babel/types@^7.4.4", "@babel/types@^7.7.0": - version "7.14.2" - resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.14.2.tgz#4208ae003107ef8a057ea8333e56eb64d2f6a2c3" - integrity sha512-SdjAG/3DikRHpUOjxZgnkbR11xUlyDMUFJdvnIgZEE16mqmY0BINMmc4//JMJglEmn6i7sq6p+mGrFWyZ98EEw== +"@babel/types@^7.0.0", "@babel/types@^7.12.1", "@babel/types@^7.12.13", "@babel/types@^7.4.4", "@babel/types@^7.7.0": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.12.13.tgz#8be1aa8f2c876da11a9cf650c0ecf656913ad611" + integrity sha512-oKrdZTld2im1z8bDwTOQvUbxKwE+854zc16qWZQlcTqMN00pWxHQ4ZeOq0yDMnisOpRykH2/5Qqcrk/OlbAjiQ== dependencies: - "@babel/helper-validator-identifier" "^7.14.0" + "@babel/helper-validator-identifier" "^7.12.11" + lodash "^4.17.19" to-fast-properties "^2.0.0" "@ensdomains/ens@^0.4.4": @@ 
-926,10 +859,10 @@ resolved "https://registry.yarnpkg.com/@ensdomains/resolver/-/resolver-0.2.4.tgz#c10fe28bf5efbf49bff4666d909aed0265efbc89" integrity sha512-bvaTH34PMCbv6anRa9I/0zjLJgY4EuznbEMgbV77JBCQ9KNC46rzi0avuxpOfu+xDjPEtSFGqVEOr5GlUSGudA== -"@eslint/eslintrc@^0.4.1": - version "0.4.1" - resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-0.4.1.tgz#442763b88cecbe3ee0ec7ca6d6dd6168550cbf14" - integrity sha512-5v7TDE9plVhvxQeWLXDTvFvJBdH6pEsdnl2g/dAptmuFEPedQ4Erq5rsDsX+mvAM610IhNaO2W5V1dOOnDKxkQ== +"@eslint/eslintrc@^0.3.0": + version "0.3.0" + resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-0.3.0.tgz#d736d6963d7003b6514e6324bec9c602ac340318" + integrity sha512-1JTKgrOKAHVivSvOYw+sJOunkBjUOvjqWk1DPja7ZFhIS2mX/4EgTT8M7eTK9jrKhL/FvXXEbQwIs3pg1xp3dg== dependencies: ajv "^6.12.4" debug "^4.1.1" @@ -938,38 +871,36 @@ ignore "^4.0.6" import-fresh "^3.2.1" js-yaml "^3.13.1" + lodash "^4.17.20" minimatch "^3.0.4" strip-json-comments "^3.1.1" -"@ethereum-waffle/chai@^3.3.0": - version "3.3.1" - resolved "https://registry.yarnpkg.com/@ethereum-waffle/chai/-/chai-3.3.1.tgz#3f20b810d0fa516f19af93c50c3be1091333fa8e" - integrity sha512-+vepCjttfOzCSnmiVEmd1bR8ctA2wYVrtWa8bDLhnTpj91BIIHotNDTwpeq7fyjrOCIBTN3Ai8ACfjNoatc4OA== +"@ethereum-waffle/chai@^3.2.2": + version "3.2.2" + resolved "https://registry.yarnpkg.com/@ethereum-waffle/chai/-/chai-3.2.2.tgz#33a349688386c9a8fdc4da5baea329036b9fe75e" + integrity sha512-S2jKmCsCrrS35fw1C6rUwH9CRboytge37PDYBDqlGpIvQQws9v+IvBjv8tLRT2BWCZSS9dvwbvBYTJL31y5ytw== dependencies: - "@ethereum-waffle/provider" "^3.3.1" + "@ethereum-waffle/provider" "^3.2.2" ethers "^5.0.0" -"@ethereum-waffle/compiler@^3.3.0": - version "3.3.1" - resolved "https://registry.yarnpkg.com/@ethereum-waffle/compiler/-/compiler-3.3.1.tgz#946128fd565aa4347075fd716dbd0f3f38189280" - integrity sha512-X/TeQugt94AQwXEdCjIQxcXYGawNulVBYEBE7nloj4wE/RBxNolXwjoVNjcS4kuiMMbKkdO0JkL5sn6ixx8bDg== +"@ethereum-waffle/compiler@^3.2.2": + version 
"3.2.2" + resolved "https://registry.yarnpkg.com/@ethereum-waffle/compiler/-/compiler-3.2.2.tgz#73d5bce44bcdc880d8630b9064591470c0123f57" + integrity sha512-6Y0TLIq26psgeoUSXCZIffeQHVqs6TOaJjHlQieJBx19defQIq5cYt8dRo1AZZGf+Eyjc2PZJERME/CfXiJgiQ== dependencies: "@resolver-engine/imports" "^0.3.3" "@resolver-engine/imports-fs" "^0.3.3" - "@typechain/ethers-v5" "^2.0.0" "@types/mkdirp" "^0.5.2" "@types/node-fetch" "^2.5.5" ethers "^5.0.1" mkdirp "^0.5.1" node-fetch "^2.6.0" solc "^0.6.3" - ts-generator "^0.1.1" - typechain "^3.0.0" -"@ethereum-waffle/ens@^3.2.4": - version "3.2.4" - resolved "https://registry.yarnpkg.com/@ethereum-waffle/ens/-/ens-3.2.4.tgz#c486be4879ea7107e1ff01b24851a5e44f5946ce" - integrity sha512-lkRVPCEkk7KOwH9MqFMB+gL0X8cZNsm+MnKpP9CNbAyhFos2sCDGcY8t6BA12KBK6pdMuuRXPxYL9WfPl9bqSQ== +"@ethereum-waffle/ens@^3.2.2": + version "3.2.2" + resolved "https://registry.yarnpkg.com/@ethereum-waffle/ens/-/ens-3.2.2.tgz#2a1ea3270b8d64498324a80cd659db843b1ba4b3" + integrity sha512-bvoi/52dWEpLpvOBOm4fCkGEv7T88M7QI4StFAh7tRlCbp2oIZ0VcItQrIrz7Hek5BPMS/AJF2QtYoec4CtxBg== dependencies: "@ensdomains/ens" "^0.4.4" "@ensdomains/resolver" "^0.2.4" @@ -983,87 +914,17 @@ "@ethersproject/abi" "^5.0.1" ethers "^5.0.1" -"@ethereum-waffle/provider@^3.3.0", "@ethereum-waffle/provider@^3.3.1": - version "3.3.2" - resolved "https://registry.yarnpkg.com/@ethereum-waffle/provider/-/provider-3.3.2.tgz#33677baf6af5cbb087c3072d84f38c152968ebb1" - integrity sha512-ilz6cXK0ylSKCmZktTMpY4gjo0CN6rb86JfN7+RZYk6tKtZA6sXoOe95skWEQkGf1fZk7G817fTzLb0CmFDp1g== +"@ethereum-waffle/provider@^3.2.2": + version "3.2.2" + resolved "https://registry.yarnpkg.com/@ethereum-waffle/provider/-/provider-3.2.2.tgz#6ab422015641f340ba71739d6ab85896277281e5" + integrity sha512-2UCNHsgr1fiI6JA7kmpSqt9AdOajGRK4Wyh24DeoAkCcZuaOdUY80fEmkSzhq8w3jIIvWRUQajBJPieEKw5NIw== dependencies: - "@ethereum-waffle/ens" "^3.2.4" + "@ethereum-waffle/ens" "^3.2.2" ethers "^5.0.1" - ganache-core "^2.13.2" + ganache-core 
"^2.10.2" patch-package "^6.2.2" postinstall-postinstall "^2.1.0" -"@ethereumjs/block@^3.2.0", "@ethereumjs/block@^3.2.1": - version "3.2.1" - resolved "https://registry.yarnpkg.com/@ethereumjs/block/-/block-3.2.1.tgz#c24c345e6dd6299efa4bed40979280b7dda96d3a" - integrity sha512-FCxo5KwwULne2A2Yuae4iaGGqSsRjwzXOlDhGalOFiBbLfP3hE04RHaHGw4c8vh1PfOrLauwi0dQNUBkOG3zIA== - dependencies: - "@ethereumjs/common" "^2.2.0" - "@ethereumjs/tx" "^3.1.3" - ethereumjs-util "^7.0.10" - merkle-patricia-tree "^4.1.0" - -"@ethereumjs/blockchain@^5.2.1": - version "5.2.1" - resolved "https://registry.yarnpkg.com/@ethereumjs/blockchain/-/blockchain-5.2.1.tgz#83ed83647667265f1666f111caf065ef9d1e82b5" - integrity sha512-+hshP2qSOOFsiYvZCbaDQFG7jYTWafE8sfBi+pAsdhAHfP7BN7VLyob7qoQISgwS1s7NTR4c4+2t/woU9ahItw== - dependencies: - "@ethereumjs/block" "^3.2.0" - "@ethereumjs/common" "^2.2.0" - "@ethereumjs/ethash" "^1.0.0" - debug "^2.2.0" - ethereumjs-util "^7.0.9" - level-mem "^5.0.1" - lru-cache "^5.1.1" - rlp "^2.2.4" - semaphore-async-await "^1.5.1" - -"@ethereumjs/common@^2.2.0": - version "2.2.0" - resolved "https://registry.yarnpkg.com/@ethereumjs/common/-/common-2.2.0.tgz#850a3e3e594ee707ad8d44a11e8152fb62450535" - integrity sha512-PyQiTG00MJtBRkJmv46ChZL8u2XWxNBeAthznAUIUiefxPAXjbkuiCZOuncgJS34/XkMbNc9zMt/PlgKRBElig== - dependencies: - crc-32 "^1.2.0" - ethereumjs-util "^7.0.9" - -"@ethereumjs/ethash@^1.0.0": - version "1.0.0" - resolved "https://registry.yarnpkg.com/@ethereumjs/ethash/-/ethash-1.0.0.tgz#4e77f85b37be1ade5393e8719bdabac3e796ddaa" - integrity sha512-iIqnGG6NMKesyOxv2YctB2guOVX18qMAWlj3QlZyrc+GqfzLqoihti+cVNQnyNxr7eYuPdqwLQOFuPe6g/uKjw== - dependencies: - "@types/levelup" "^4.3.0" - buffer-xor "^2.0.1" - ethereumjs-util "^7.0.7" - miller-rabin "^4.0.0" - -"@ethereumjs/tx@^3.1.3": - version "3.1.4" - resolved "https://registry.yarnpkg.com/@ethereumjs/tx/-/tx-3.1.4.tgz#04cf9e9406da5f04a1a26c458744641f4b4b8dd0" - integrity 
sha512-6cJpmmjCpG5ZVN9NJYtWvmrEQcevw9DIR8hj2ca2PszD2fxbIFXky3Z37gpf8S6u0Npv09kG8It+G4xjydZVLg== - dependencies: - "@ethereumjs/common" "^2.2.0" - ethereumjs-util "^7.0.10" - -"@ethereumjs/vm@^5.3.2": - version "5.3.2" - resolved "https://registry.yarnpkg.com/@ethereumjs/vm/-/vm-5.3.2.tgz#b4d83a3d50a7ad22d6d412cc21bbde221b3e2871" - integrity sha512-QmCUQrW6xbhgEbQh9njue4kAJdM056C+ytBFUTF/kDYa3kNDm4Qxp9HUyTlt1OCSXvDhws0qqlh8+q+pmXpN7g== - dependencies: - "@ethereumjs/block" "^3.2.1" - "@ethereumjs/blockchain" "^5.2.1" - "@ethereumjs/common" "^2.2.0" - "@ethereumjs/tx" "^3.1.3" - async-eventemitter "^0.2.4" - core-js-pure "^3.0.1" - debug "^2.2.0" - ethereumjs-util "^7.0.10" - functional-red-black-tree "^1.0.1" - mcl-wasm "^0.7.1" - merkle-patricia-tree "^4.1.0" - rustbn.js "~0.2.0" - util.promisify "^1.0.1" - "@ethersproject/abi@5.0.0-beta.153": version "5.0.0-beta.153" resolved "https://registry.yarnpkg.com/@ethersproject/abi/-/abi-5.0.0-beta.153.tgz#43a37172b33794e4562999f6e2d555b7599a8eee" @@ -1079,343 +940,341 @@ "@ethersproject/properties" ">=5.0.0-beta.131" "@ethersproject/strings" ">=5.0.0-beta.130" -"@ethersproject/abi@5.1.2", "@ethersproject/abi@^5.0.1", "@ethersproject/abi@^5.0.2", "@ethersproject/abi@^5.1.0": - version "5.1.2" - resolved "https://registry.yarnpkg.com/@ethersproject/abi/-/abi-5.1.2.tgz#a8e75cd0455e6dc9e4861c3d1c22bbe436c1d775" - integrity sha512-uMhoQVPX0UtfzTpekYQSEUcJGDgsJ25ifz+SV6PDETWaUFhcR8RNgb1QPTASP13inW8r6iy0/Xdq9D5hK2pNvA== - dependencies: - "@ethersproject/address" "^5.1.0" - "@ethersproject/bignumber" "^5.1.0" - "@ethersproject/bytes" "^5.1.0" - "@ethersproject/constants" "^5.1.0" - "@ethersproject/hash" "^5.1.0" - "@ethersproject/keccak256" "^5.1.0" - "@ethersproject/logger" "^5.1.0" - "@ethersproject/properties" "^5.1.0" - "@ethersproject/strings" "^5.1.0" - -"@ethersproject/abstract-provider@5.1.0", "@ethersproject/abstract-provider@^5.1.0": - version "5.1.0" - resolved 
"https://registry.yarnpkg.com/@ethersproject/abstract-provider/-/abstract-provider-5.1.0.tgz#1f24c56cda5524ef4ed3cfc562a01d6b6f8eeb0b" - integrity sha512-8dJUnT8VNvPwWhYIau4dwp7qe1g+KgdRm4XTWvjkI9gAT2zZa90WF5ApdZ3vl1r6NDmnn6vUVvyphClRZRteTQ== - dependencies: - "@ethersproject/bignumber" "^5.1.0" - "@ethersproject/bytes" "^5.1.0" - "@ethersproject/logger" "^5.1.0" - "@ethersproject/networks" "^5.1.0" - "@ethersproject/properties" "^5.1.0" - "@ethersproject/transactions" "^5.1.0" - "@ethersproject/web" "^5.1.0" - -"@ethersproject/abstract-signer@5.1.0", "@ethersproject/abstract-signer@^5.1.0": - version "5.1.0" - resolved "https://registry.yarnpkg.com/@ethersproject/abstract-signer/-/abstract-signer-5.1.0.tgz#744c7a2d0ebe3cc0bc38294d0f53d5ca3f4e49e3" - integrity sha512-qQDMkjGZSSJSKl6AnfTgmz9FSnzq3iEoEbHTYwjDlEAv+LNP7zd4ixCcVWlWyk+2siud856M5CRhAmPdupeN9w== - dependencies: - "@ethersproject/abstract-provider" "^5.1.0" - "@ethersproject/bignumber" "^5.1.0" - "@ethersproject/bytes" "^5.1.0" - "@ethersproject/logger" "^5.1.0" - "@ethersproject/properties" "^5.1.0" - -"@ethersproject/address@5.1.0", "@ethersproject/address@>=5.0.0-beta.128", "@ethersproject/address@^5.0.2", "@ethersproject/address@^5.1.0": - version "5.1.0" - resolved "https://registry.yarnpkg.com/@ethersproject/address/-/address-5.1.0.tgz#3854fd7ebcb6af7597de66f847c3345dae735b58" - integrity sha512-rfWQR12eHn2cpstCFS4RF7oGjfbkZb0oqep+BfrT+gWEGWG2IowJvIsacPOvzyS1jhNF4MQ4BS59B04Mbovteg== - dependencies: - "@ethersproject/bignumber" "^5.1.0" - "@ethersproject/bytes" "^5.1.0" - "@ethersproject/keccak256" "^5.1.0" - "@ethersproject/logger" "^5.1.0" - "@ethersproject/rlp" "^5.1.0" - -"@ethersproject/base64@5.1.0", "@ethersproject/base64@^5.1.0": - version "5.1.0" - resolved "https://registry.yarnpkg.com/@ethersproject/base64/-/base64-5.1.0.tgz#27240c174d0a4e13f6eae87416fd876caf7f42b6" - integrity sha512-npD1bLvK4Bcxz+m4EMkx+F8Rd7CnqS9DYnhNu0/GlQBXhWjvfoAZzk5HJ0f1qeyp8d+A86PTuzLOGOXf4/CN8g== 
+"@ethersproject/abi@5.0.12", "@ethersproject/abi@^5.0.1", "@ethersproject/abi@^5.0.10", "@ethersproject/abi@^5.0.2": + version "5.0.12" + resolved "https://registry.yarnpkg.com/@ethersproject/abi/-/abi-5.0.12.tgz#9aebe6aedc05ce45bb6c41b06d80bd195b7de77c" + integrity sha512-Ujr/3bwyYYjXLDQfebeiiTuvOw9XtUKM8av6YkoBeMXyGQM9GkjrQlwJMNwGTmqjATH/ZNbRgCh98GjOLiIB1Q== + dependencies: + "@ethersproject/address" "^5.0.9" + "@ethersproject/bignumber" "^5.0.13" + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/constants" "^5.0.8" + "@ethersproject/hash" "^5.0.10" + "@ethersproject/keccak256" "^5.0.7" + "@ethersproject/logger" "^5.0.8" + "@ethersproject/properties" "^5.0.7" + "@ethersproject/strings" "^5.0.8" + +"@ethersproject/abstract-provider@5.0.9", "@ethersproject/abstract-provider@^5.0.8": + version "5.0.9" + resolved "https://registry.yarnpkg.com/@ethersproject/abstract-provider/-/abstract-provider-5.0.9.tgz#a55410b73e3994842884eb82b1f43e3a9f653eea" + integrity sha512-X9fMkqpeu9ayC3JyBkeeZhn35P4xQkpGX/l+FrxDtEW9tybf/UWXSMi8bGThpPtfJ6q6U2LDetXSpSwK4TfYQQ== + dependencies: + "@ethersproject/bignumber" "^5.0.13" + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/logger" "^5.0.8" + "@ethersproject/networks" "^5.0.7" + "@ethersproject/properties" "^5.0.7" + "@ethersproject/transactions" "^5.0.9" + "@ethersproject/web" "^5.0.12" + +"@ethersproject/abstract-signer@5.0.13", "@ethersproject/abstract-signer@^5.0.10": + version "5.0.13" + resolved "https://registry.yarnpkg.com/@ethersproject/abstract-signer/-/abstract-signer-5.0.13.tgz#59b4d0367d6327ec53bc269c6730c44a4a3b043c" + integrity sha512-VBIZEI5OK0TURoCYyw0t3w+TEO4kdwnI9wvt4kqUwyxSn3YCRpXYVl0Xoe7XBR/e5+nYOi2MyFGJ3tsFwONecQ== + dependencies: + "@ethersproject/abstract-provider" "^5.0.8" + "@ethersproject/bignumber" "^5.0.13" + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/logger" "^5.0.8" + "@ethersproject/properties" "^5.0.7" + +"@ethersproject/address@5.0.10", "@ethersproject/address@>=5.0.0-beta.128", 
"@ethersproject/address@^5.0.2", "@ethersproject/address@^5.0.9": + version "5.0.10" + resolved "https://registry.yarnpkg.com/@ethersproject/address/-/address-5.0.10.tgz#2bc69fdff4408e0570471cd19dee577ab06a10d0" + integrity sha512-70vqESmW5Srua1kMDIN6uVfdneZMaMyRYH4qPvkAXGkbicrCOsA9m01vIloA4wYiiF+HLEfL1ENKdn5jb9xiAw== + dependencies: + "@ethersproject/bignumber" "^5.0.13" + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/keccak256" "^5.0.7" + "@ethersproject/logger" "^5.0.8" + "@ethersproject/rlp" "^5.0.7" + +"@ethersproject/base64@5.0.8", "@ethersproject/base64@^5.0.7": + version "5.0.8" + resolved "https://registry.yarnpkg.com/@ethersproject/base64/-/base64-5.0.8.tgz#1bc4b4b8c59c1debf972c7164b96c0b8964a20a1" + integrity sha512-PNbpHOMgZpZ1skvQl119pV2YkCPXmZTxw+T92qX0z7zaMFPypXWTZBzim+hUceb//zx4DFjeGT4aSjZRTOYThg== dependencies: - "@ethersproject/bytes" "^5.1.0" + "@ethersproject/bytes" "^5.0.9" -"@ethersproject/basex@5.1.0", "@ethersproject/basex@^5.1.0": - version "5.1.0" - resolved "https://registry.yarnpkg.com/@ethersproject/basex/-/basex-5.1.0.tgz#80da2e86f9da0cb5ccd446b337364d791f6a131c" - integrity sha512-vBKr39bum7DDbOvkr1Sj19bRMEPA4FnST6Utt6xhDzI7o7L6QNkDn2yrCfP+hnvJGhZFKtLygWwqlTBZoBXYLg== +"@ethersproject/basex@5.0.8", "@ethersproject/basex@^5.0.7": + version "5.0.8" + resolved "https://registry.yarnpkg.com/@ethersproject/basex/-/basex-5.0.8.tgz#6867fad20047aa29fbd4b880f27894ed04cc7bb8" + integrity sha512-PCVKZIShBQUqAXjJSvaCidThPvL0jaaQZcewJc0sf8Xx05BizaOS8r3jdPdpNdY+/qZtRDqwHTSKjvR/xssyLQ== dependencies: - "@ethersproject/bytes" "^5.1.0" - "@ethersproject/properties" "^5.1.0" + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/properties" "^5.0.7" -"@ethersproject/bignumber@5.1.1", "@ethersproject/bignumber@>=5.0.0-beta.130", "@ethersproject/bignumber@^5.1.0": - version "5.1.1" - resolved "https://registry.yarnpkg.com/@ethersproject/bignumber/-/bignumber-5.1.1.tgz#84812695253ccbc639117f7ac49ee1529b68e637" - integrity 
sha512-AVz5iqz7+70RIqoQTznsdJ6DOVBYciNlvO+AlQmPTB6ofCvoihI9bQdr6wljsX+d5W7Yc4nyvQvP4JMzg0Agig== +"@ethersproject/bignumber@5.0.14", "@ethersproject/bignumber@>=5.0.0-beta.130", "@ethersproject/bignumber@^5.0.13": + version "5.0.14" + resolved "https://registry.yarnpkg.com/@ethersproject/bignumber/-/bignumber-5.0.14.tgz#605bc61dcbd4a8c6df8b5a7a77c0210273f3de8a" + integrity sha512-Q4TjMq9Gg3Xzj0aeJWqJgI3tdEiPiET7Y5OtNtjTAODZ2kp4y9jMNg97zVcvPedFvGROdpGDyCI77JDFodUzOw== dependencies: - "@ethersproject/bytes" "^5.1.0" - "@ethersproject/logger" "^5.1.0" + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/logger" "^5.0.8" bn.js "^4.4.0" -"@ethersproject/bytes@5.1.0", "@ethersproject/bytes@>=5.0.0-beta.129", "@ethersproject/bytes@^5.1.0": - version "5.1.0" - resolved "https://registry.yarnpkg.com/@ethersproject/bytes/-/bytes-5.1.0.tgz#55dfa9c4c21df1b1b538be3accb50fb76d5facfd" - integrity sha512-sGTxb+LVjFxJcJeUswAIK6ncgOrh3D8c192iEJd7mLr95V6du119rRfYT/b87WPkZ5I3gRBUYIYXtdgCWACe8g== - dependencies: - "@ethersproject/logger" "^5.1.0" - -"@ethersproject/constants@5.1.0", "@ethersproject/constants@>=5.0.0-beta.128", "@ethersproject/constants@^5.1.0": - version "5.1.0" - resolved "https://registry.yarnpkg.com/@ethersproject/constants/-/constants-5.1.0.tgz#4e7da6367ea0e9be87585d8b09f3fccf384b1452" - integrity sha512-0/SuHrxc8R8k+JiLmJymxHJbojUDWBQqO+b+XFdwaP0jGzqC09YDy/CAlSZB6qHsBifY8X3I89HcK/oMqxRdBw== - dependencies: - "@ethersproject/bignumber" "^5.1.0" - -"@ethersproject/contracts@5.1.1": - version "5.1.1" - resolved "https://registry.yarnpkg.com/@ethersproject/contracts/-/contracts-5.1.1.tgz#c66cb6d618fcbd73e20a6b808e8f768b2b781d0b" - integrity sha512-6WwktLJ0DFWU8pDkgH4IGttQHhQN4SnwKFu9h+QYVe48VGWtbDu4W8/q/7QA1u/HWlWMrKxqawPiZUJj0UMvOw== - dependencies: - "@ethersproject/abi" "^5.1.0" - "@ethersproject/abstract-provider" "^5.1.0" - "@ethersproject/abstract-signer" "^5.1.0" - "@ethersproject/address" "^5.1.0" - "@ethersproject/bignumber" "^5.1.0" - "@ethersproject/bytes" 
"^5.1.0" - "@ethersproject/constants" "^5.1.0" - "@ethersproject/logger" "^5.1.0" - "@ethersproject/properties" "^5.1.0" - "@ethersproject/transactions" "^5.1.0" - -"@ethersproject/hash@5.1.0", "@ethersproject/hash@>=5.0.0-beta.128", "@ethersproject/hash@^5.1.0": - version "5.1.0" - resolved "https://registry.yarnpkg.com/@ethersproject/hash/-/hash-5.1.0.tgz#40961d64837d57f580b7b055e0d74174876d891e" - integrity sha512-fNwry20yLLPpnRRwm3fBL+2ksgO+KMadxM44WJmRIoTKzy4269+rbq9KFoe2LTqq2CXJM2CE70beGaNrpuqflQ== - dependencies: - "@ethersproject/abstract-signer" "^5.1.0" - "@ethersproject/address" "^5.1.0" - "@ethersproject/bignumber" "^5.1.0" - "@ethersproject/bytes" "^5.1.0" - "@ethersproject/keccak256" "^5.1.0" - "@ethersproject/logger" "^5.1.0" - "@ethersproject/properties" "^5.1.0" - "@ethersproject/strings" "^5.1.0" - -"@ethersproject/hdnode@5.1.0", "@ethersproject/hdnode@^5.1.0": - version "5.1.0" - resolved "https://registry.yarnpkg.com/@ethersproject/hdnode/-/hdnode-5.1.0.tgz#2bf5c4048935136ce83e9242e1bd570afcc0bc83" - integrity sha512-obIWdlujloExPHWJGmhJO/sETOOo7SEb6qemV4f8kyFoXg+cJK+Ta9SvBrj7hsUK85n3LZeZJZRjjM7oez3Clg== - dependencies: - "@ethersproject/abstract-signer" "^5.1.0" - "@ethersproject/basex" "^5.1.0" - "@ethersproject/bignumber" "^5.1.0" - "@ethersproject/bytes" "^5.1.0" - "@ethersproject/logger" "^5.1.0" - "@ethersproject/pbkdf2" "^5.1.0" - "@ethersproject/properties" "^5.1.0" - "@ethersproject/sha2" "^5.1.0" - "@ethersproject/signing-key" "^5.1.0" - "@ethersproject/strings" "^5.1.0" - "@ethersproject/transactions" "^5.1.0" - "@ethersproject/wordlists" "^5.1.0" - -"@ethersproject/json-wallets@5.1.0", "@ethersproject/json-wallets@^5.1.0": - version "5.1.0" - resolved "https://registry.yarnpkg.com/@ethersproject/json-wallets/-/json-wallets-5.1.0.tgz#bba7af2e520e8aea4d3829d80520db5d2e4fb8d2" - integrity sha512-00n2iBy27w8zrGZSiU762UOVuzCQZxUZxopsZC47++js6xUFuI74DHcJ5K/2pddlF1YBskvmMuboEu1geK8mnA== - dependencies: - "@ethersproject/abstract-signer" 
"^5.1.0" - "@ethersproject/address" "^5.1.0" - "@ethersproject/bytes" "^5.1.0" - "@ethersproject/hdnode" "^5.1.0" - "@ethersproject/keccak256" "^5.1.0" - "@ethersproject/logger" "^5.1.0" - "@ethersproject/pbkdf2" "^5.1.0" - "@ethersproject/properties" "^5.1.0" - "@ethersproject/random" "^5.1.0" - "@ethersproject/strings" "^5.1.0" - "@ethersproject/transactions" "^5.1.0" +"@ethersproject/bytes@5.0.10", "@ethersproject/bytes@>=5.0.0-beta.129", "@ethersproject/bytes@^5.0.9": + version "5.0.10" + resolved "https://registry.yarnpkg.com/@ethersproject/bytes/-/bytes-5.0.10.tgz#aa49afe7491ba24ff76fa33d98677351263f9ba4" + integrity sha512-vpu0v1LZ1j1s9kERQIMnVU69MyHEzUff7nqK9XuCU4vx+AM8n9lU2gj7jtJIvGSt9HzatK/6I6bWusI5nyuaTA== + dependencies: + "@ethersproject/logger" "^5.0.8" + +"@ethersproject/constants@5.0.9", "@ethersproject/constants@>=5.0.0-beta.128", "@ethersproject/constants@^5.0.8": + version "5.0.9" + resolved "https://registry.yarnpkg.com/@ethersproject/constants/-/constants-5.0.9.tgz#81ac44c3bf612de63eb1c490b314ea1b932cda9f" + integrity sha512-2uAKH89UcaJP/Sc+54u92BtJtZ4cPgcS1p0YbB1L3tlkavwNvth+kNCUplIB1Becqs7BOZr0B/3dMNjhJDy4Dg== + dependencies: + "@ethersproject/bignumber" "^5.0.13" + +"@ethersproject/contracts@5.0.11": + version "5.0.11" + resolved "https://registry.yarnpkg.com/@ethersproject/contracts/-/contracts-5.0.11.tgz#e6cc57698a05be2329cb2ca3d7e87686f95e438a" + integrity sha512-FTUUd/6x00dYL2VufE2VowZ7h3mAyBfCQMGwI3tKDIWka+C0CunllFiKrlYCdiHFuVeMotR65dIcnzbLn72MCw== + dependencies: + "@ethersproject/abi" "^5.0.10" + "@ethersproject/abstract-provider" "^5.0.8" + "@ethersproject/abstract-signer" "^5.0.10" + "@ethersproject/address" "^5.0.9" + "@ethersproject/bignumber" "^5.0.13" + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/constants" "^5.0.8" + "@ethersproject/logger" "^5.0.8" + "@ethersproject/properties" "^5.0.7" + +"@ethersproject/hash@5.0.11", "@ethersproject/hash@>=5.0.0-beta.128", "@ethersproject/hash@^5.0.10": + version "5.0.11" + resolved 
"https://registry.yarnpkg.com/@ethersproject/hash/-/hash-5.0.11.tgz#da89517438bbbf8a39df56fff09f0a71669ae7a7" + integrity sha512-H3KJ9fk33XWJ2djAW03IL7fg3DsDMYjO1XijiUb1hJ85vYfhvxu0OmsU7d3tg2Uv1H1kFSo8ghr3WFQ8c+NL3g== + dependencies: + "@ethersproject/abstract-signer" "^5.0.10" + "@ethersproject/address" "^5.0.9" + "@ethersproject/bignumber" "^5.0.13" + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/keccak256" "^5.0.7" + "@ethersproject/logger" "^5.0.8" + "@ethersproject/properties" "^5.0.7" + "@ethersproject/strings" "^5.0.8" + +"@ethersproject/hdnode@5.0.9", "@ethersproject/hdnode@^5.0.8": + version "5.0.9" + resolved "https://registry.yarnpkg.com/@ethersproject/hdnode/-/hdnode-5.0.9.tgz#ce65b430d3d3f0cd3c8f9dfaaf376b55881d9dba" + integrity sha512-S5UMmIC6XfFtqhUK4uTjD8GPNzSbE+sZ/0VMqFnA3zAJ+cEFZuEyhZDYnl2ItGJzjT4jsy+uEy1SIl3baYK1PQ== + dependencies: + "@ethersproject/abstract-signer" "^5.0.10" + "@ethersproject/basex" "^5.0.7" + "@ethersproject/bignumber" "^5.0.13" + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/logger" "^5.0.8" + "@ethersproject/pbkdf2" "^5.0.7" + "@ethersproject/properties" "^5.0.7" + "@ethersproject/sha2" "^5.0.7" + "@ethersproject/signing-key" "^5.0.8" + "@ethersproject/strings" "^5.0.8" + "@ethersproject/transactions" "^5.0.9" + "@ethersproject/wordlists" "^5.0.8" + +"@ethersproject/json-wallets@5.0.11", "@ethersproject/json-wallets@^5.0.10": + version "5.0.11" + resolved "https://registry.yarnpkg.com/@ethersproject/json-wallets/-/json-wallets-5.0.11.tgz#86fdc41b7762acb443d6a896f6c61231ab2aee5d" + integrity sha512-0GhWScWUlXXb4qJNp0wmkU95QS3YdN9UMOfMSEl76CRANWWrmyzxcBVSXSBu5iQ0/W8wO+xGlJJ3tpA6v3mbIw== + dependencies: + "@ethersproject/abstract-signer" "^5.0.10" + "@ethersproject/address" "^5.0.9" + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/hdnode" "^5.0.8" + "@ethersproject/keccak256" "^5.0.7" + "@ethersproject/logger" "^5.0.8" + "@ethersproject/pbkdf2" "^5.0.7" + "@ethersproject/properties" "^5.0.7" + "@ethersproject/random" 
"^5.0.7" + "@ethersproject/strings" "^5.0.8" + "@ethersproject/transactions" "^5.0.9" aes-js "3.0.0" scrypt-js "3.0.1" -"@ethersproject/keccak256@5.1.0", "@ethersproject/keccak256@>=5.0.0-beta.127", "@ethersproject/keccak256@^5.1.0": - version "5.1.0" - resolved "https://registry.yarnpkg.com/@ethersproject/keccak256/-/keccak256-5.1.0.tgz#fdcd88fb13bfef4271b225cdd8dec4d315c8e60e" - integrity sha512-vrTB1W6AEYoadww5c9UyVJ2YcSiyIUTNDRccZIgwTmFFoSHwBtcvG1hqy9RzJ1T0bMdATbM9Hfx2mJ6H0i7Hig== +"@ethersproject/keccak256@5.0.8", "@ethersproject/keccak256@>=5.0.0-beta.127", "@ethersproject/keccak256@^5.0.7": + version "5.0.8" + resolved "https://registry.yarnpkg.com/@ethersproject/keccak256/-/keccak256-5.0.8.tgz#13aaf69e1c8bd15fc59a2ebd055c0878f2a059c8" + integrity sha512-zoGbwXcWWs9MX4NOAZ7N0hhgIRl4Q/IO/u9c/RHRY4WqDy3Ywm0OLamEV53QDwhjwn3YiiVwU1Ve5j7yJ0a/KQ== dependencies: - "@ethersproject/bytes" "^5.1.0" + "@ethersproject/bytes" "^5.0.9" js-sha3 "0.5.7" -"@ethersproject/logger@5.1.0", "@ethersproject/logger@>=5.0.0-beta.129", "@ethersproject/logger@^5.1.0": - version "5.1.0" - resolved "https://registry.yarnpkg.com/@ethersproject/logger/-/logger-5.1.0.tgz#4cdeeefac029373349d5818f39c31b82cc6d9bbf" - integrity sha512-wtUaD1lBX10HBXjjKV9VHCBnTdUaKQnQ2XSET1ezglqLdPdllNOIlLfhyCRqXm5xwcjExVI5ETokOYfjPtaAlw== - -"@ethersproject/networks@5.1.0", "@ethersproject/networks@^5.1.0": - version "5.1.0" - resolved "https://registry.yarnpkg.com/@ethersproject/networks/-/networks-5.1.0.tgz#f537290cb05aa6dc5e81e910926c04cfd5814bca" - integrity sha512-A/NIrIED/G/IgU1XUukOA3WcFRxn2I4O5GxsYGA5nFlIi+UZWdGojs85I1VXkR1gX9eFnDXzjE6OtbgZHjFhIA== - dependencies: - "@ethersproject/logger" "^5.1.0" +"@ethersproject/logger@5.0.9", "@ethersproject/logger@>=5.0.0-beta.129", "@ethersproject/logger@^5.0.8": + version "5.0.9" + resolved "https://registry.yarnpkg.com/@ethersproject/logger/-/logger-5.0.9.tgz#0e6a0b3ecc938713016954daf4ac7967467aa763" + integrity 
sha512-kV3Uamv3XOH99Xf3kpIG3ZkS7mBNYcLDM00JSDtNgNB4BihuyxpQzIZPRIDmRi+95Z/R1Bb0X2kUNHa/kJoVrw== -"@ethersproject/pbkdf2@5.1.0", "@ethersproject/pbkdf2@^5.1.0": - version "5.1.0" - resolved "https://registry.yarnpkg.com/@ethersproject/pbkdf2/-/pbkdf2-5.1.0.tgz#6b740a85dc780e879338af74856ca2c0d3b24d19" - integrity sha512-B8cUbHHTgs8OtgJIafrRcz/YPDobVd5Ru8gTnShOiM9EBuFpYHQpq3+8iQJ6pyczDu6HP/oc/njAsIBhwFZYew== +"@ethersproject/networks@5.0.8", "@ethersproject/networks@^5.0.7": + version "5.0.8" + resolved "https://registry.yarnpkg.com/@ethersproject/networks/-/networks-5.0.8.tgz#37e6f8c058f2d540373ea5939056cd3de069132e" + integrity sha512-PYpptlO2Tu5f/JEBI5hdlMds5k1DY1QwVbh3LKPb3un9dQA2bC51vd2/gRWAgSBpF3kkmZOj4FhD7ATLX4H+DA== dependencies: - "@ethersproject/bytes" "^5.1.0" - "@ethersproject/sha2" "^5.1.0" + "@ethersproject/logger" "^5.0.8" -"@ethersproject/properties@5.1.0", "@ethersproject/properties@>=5.0.0-beta.131", "@ethersproject/properties@^5.1.0": - version "5.1.0" - resolved "https://registry.yarnpkg.com/@ethersproject/properties/-/properties-5.1.0.tgz#9484bd6def16595fc6e4bdc26f29dff4d3f6ac42" - integrity sha512-519KKTwgmH42AQL3+GFV3SX6khYEfHsvI6v8HYejlkigSDuqttdgVygFTDsGlofNFchhDwuclrxQnD5B0YLNMg== +"@ethersproject/pbkdf2@5.0.8", "@ethersproject/pbkdf2@^5.0.7": + version "5.0.8" + resolved "https://registry.yarnpkg.com/@ethersproject/pbkdf2/-/pbkdf2-5.0.8.tgz#06a086b1ac04c75e6846afd6cf6170a49a634411" + integrity sha512-UlmAMGbIPaS2xXsI38FbePVTfJMuU9jnwcqVn3p88HxPF4kD897ha+l3TNsBqJqf32UbQL5GImnf1oJkSKq4vQ== dependencies: - "@ethersproject/logger" "^5.1.0" + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/sha2" "^5.0.7" -"@ethersproject/providers@5.1.2": - version "5.1.2" - resolved "https://registry.yarnpkg.com/@ethersproject/providers/-/providers-5.1.2.tgz#4e4459698903f911402fe91aa7544eb07f3921ed" - integrity sha512-GqsS8rd+eyd4eNkcNgzZ4l9IRULBPUZa7JPnv22k4MHflMobUseyhfbVnmoN5bVNNkOxjV1IPTw9i0sV1hwdpg== - dependencies: - "@ethersproject/abstract-provider" 
"^5.1.0" - "@ethersproject/abstract-signer" "^5.1.0" - "@ethersproject/address" "^5.1.0" - "@ethersproject/basex" "^5.1.0" - "@ethersproject/bignumber" "^5.1.0" - "@ethersproject/bytes" "^5.1.0" - "@ethersproject/constants" "^5.1.0" - "@ethersproject/hash" "^5.1.0" - "@ethersproject/logger" "^5.1.0" - "@ethersproject/networks" "^5.1.0" - "@ethersproject/properties" "^5.1.0" - "@ethersproject/random" "^5.1.0" - "@ethersproject/rlp" "^5.1.0" - "@ethersproject/sha2" "^5.1.0" - "@ethersproject/strings" "^5.1.0" - "@ethersproject/transactions" "^5.1.0" - "@ethersproject/web" "^5.1.0" +"@ethersproject/properties@5.0.8", "@ethersproject/properties@>=5.0.0-beta.131", "@ethersproject/properties@^5.0.7": + version "5.0.8" + resolved "https://registry.yarnpkg.com/@ethersproject/properties/-/properties-5.0.8.tgz#e45d28d25402c73394873dbf058f856c966cae01" + integrity sha512-zEnLMze2Eu2VDPj/05QwCwMKHh506gpT9PP9KPVd4dDB+5d6AcROUYVLoIIQgBYK7X/Gw0UJmG3oVtnxOQafAw== + dependencies: + "@ethersproject/logger" "^5.0.8" + +"@ethersproject/providers@5.0.23": + version "5.0.23" + resolved "https://registry.yarnpkg.com/@ethersproject/providers/-/providers-5.0.23.tgz#1e26512303d60bbd557242532fdb5fa3c5d5fb73" + integrity sha512-eJ94z2tgPaUgUmxwd3BVkIzkgkbNIkY6wRPVas04LVaBTycObQbgj794aaUu2bfk7+Bn2B/gjUZtJW1ybxh9/A== + dependencies: + "@ethersproject/abstract-provider" "^5.0.8" + "@ethersproject/abstract-signer" "^5.0.10" + "@ethersproject/address" "^5.0.9" + "@ethersproject/basex" "^5.0.7" + "@ethersproject/bignumber" "^5.0.13" + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/constants" "^5.0.8" + "@ethersproject/hash" "^5.0.10" + "@ethersproject/logger" "^5.0.8" + "@ethersproject/networks" "^5.0.7" + "@ethersproject/properties" "^5.0.7" + "@ethersproject/random" "^5.0.7" + "@ethersproject/rlp" "^5.0.7" + "@ethersproject/sha2" "^5.0.7" + "@ethersproject/strings" "^5.0.8" + "@ethersproject/transactions" "^5.0.9" + "@ethersproject/web" "^5.0.12" bech32 "1.1.4" ws "7.2.3" 
-"@ethersproject/random@5.1.0", "@ethersproject/random@^5.1.0": - version "5.1.0" - resolved "https://registry.yarnpkg.com/@ethersproject/random/-/random-5.1.0.tgz#0bdff2554df03ebc5f75689614f2d58ea0d9a71f" - integrity sha512-+uuczLQZ4+no9cP6TCoCktXx0u2YbNaRT7lRkSt12d8263e702f0u+4JnnRO8Qmv5nylWJebnqCHzyxP+6mLqw== +"@ethersproject/random@5.0.8", "@ethersproject/random@^5.0.7": + version "5.0.8" + resolved "https://registry.yarnpkg.com/@ethersproject/random/-/random-5.0.8.tgz#8d3726be48e95467abce9b23c93adbb1de009dda" + integrity sha512-4rHtotmd9NjklW0eDvByicEkL+qareIyFSbG1ShC8tPJJSAC0g55oQWzw+3nfdRCgBHRuEE7S8EcPcTVPvZ9cA== dependencies: - "@ethersproject/bytes" "^5.1.0" - "@ethersproject/logger" "^5.1.0" + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/logger" "^5.0.8" -"@ethersproject/rlp@5.1.0", "@ethersproject/rlp@^5.1.0": - version "5.1.0" - resolved "https://registry.yarnpkg.com/@ethersproject/rlp/-/rlp-5.1.0.tgz#700f4f071c27fa298d3c1d637485fefe919dd084" - integrity sha512-vDTyHIwNPrecy55gKGZ47eJZhBm8LLBxihzi5ou+zrSvYTpkSTWRcKUlXFDFQVwfWB+P5PGyERAdiDEI76clxw== +"@ethersproject/rlp@5.0.8", "@ethersproject/rlp@^5.0.7": + version "5.0.8" + resolved "https://registry.yarnpkg.com/@ethersproject/rlp/-/rlp-5.0.8.tgz#ff54e206d0ae28640dd054f2bcc7070f06f9dfbe" + integrity sha512-E4wdFs8xRNJfzNHmnkC8w5fPeT4Wd1U2cust3YeT16/46iSkLT8nn8ilidC6KhR7hfuSZE4UqSPzyk76p7cdZg== dependencies: - "@ethersproject/bytes" "^5.1.0" - "@ethersproject/logger" "^5.1.0" + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/logger" "^5.0.8" -"@ethersproject/sha2@5.1.0", "@ethersproject/sha2@^5.1.0": - version "5.1.0" - resolved "https://registry.yarnpkg.com/@ethersproject/sha2/-/sha2-5.1.0.tgz#6ca42d1a26884b3e32ffa943fe6494af7211506c" - integrity sha512-+fNSeZRstOpdRJpdGUkRONFCaiAqWkc91zXgg76Nlp5ndBQE25Kk5yK8gCPG1aGnCrbariiPr5j9DmrYH78JCA== +"@ethersproject/sha2@5.0.8", "@ethersproject/sha2@^5.0.7": + version "5.0.8" + resolved 
"https://registry.yarnpkg.com/@ethersproject/sha2/-/sha2-5.0.8.tgz#9903c67e562739d8b312820b0a265b9c9bf35fc3" + integrity sha512-ILP1ZgyvDj4rrdE+AXrTv9V88m7x87uga2VZ/FeULKPumOEw/4bGnJz/oQ8zDnDvVYRCJ+48VaQBS2CFLbk1ww== dependencies: - "@ethersproject/bytes" "^5.1.0" - "@ethersproject/logger" "^5.1.0" + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/logger" "^5.0.8" hash.js "1.1.3" -"@ethersproject/signing-key@5.1.0", "@ethersproject/signing-key@^5.1.0": - version "5.1.0" - resolved "https://registry.yarnpkg.com/@ethersproject/signing-key/-/signing-key-5.1.0.tgz#6eddfbddb6826b597b9650e01acf817bf8991b9c" - integrity sha512-tE5LFlbmdObG8bY04NpuwPWSRPgEswfxweAI1sH7TbP0ml1elNfqcq7ii/3AvIN05i5U0Pkm3Tf8bramt8MmLw== +"@ethersproject/signing-key@5.0.10", "@ethersproject/signing-key@^5.0.8": + version "5.0.10" + resolved "https://registry.yarnpkg.com/@ethersproject/signing-key/-/signing-key-5.0.10.tgz#05e26e04f0aa5360dc78674d7331bacea8fea5c1" + integrity sha512-w5it3GbFOvN6e0mTd5gDNj+bwSe6L9jqqYjU+uaYS8/hAEp4qYLk5p8ZjbJJkNn7u1p0iwocp8X9oH/OdK8apA== dependencies: - "@ethersproject/bytes" "^5.1.0" - "@ethersproject/logger" "^5.1.0" - "@ethersproject/properties" "^5.1.0" - bn.js "^4.4.0" + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/logger" "^5.0.8" + "@ethersproject/properties" "^5.0.7" elliptic "6.5.4" -"@ethersproject/solidity@5.1.0": - version "5.1.0" - resolved "https://registry.yarnpkg.com/@ethersproject/solidity/-/solidity-5.1.0.tgz#095a9c75244edccb26c452c155736d363399b954" - integrity sha512-kPodsGyo9zg1g9XSXp1lGhFaezBAUUsAUB1Vf6OkppE5Wksg4Et+x3kG4m7J/uShDMP2upkJtHNsIBK2XkVpKQ== - dependencies: - "@ethersproject/bignumber" "^5.1.0" - "@ethersproject/bytes" "^5.1.0" - "@ethersproject/keccak256" "^5.1.0" - "@ethersproject/sha2" "^5.1.0" - "@ethersproject/strings" "^5.1.0" - -"@ethersproject/strings@5.1.0", "@ethersproject/strings@>=5.0.0-beta.130", "@ethersproject/strings@^5.1.0": - version "5.1.0" - resolved 
"https://registry.yarnpkg.com/@ethersproject/strings/-/strings-5.1.0.tgz#0f95a56c3c8c9d5510a06c241d818779750e2da5" - integrity sha512-perBZy0RrmmL0ejiFGUOlBVjMsUceqLut3OBP3zP96LhiJWWbS8u1NqQVgN4/Gyrbziuda66DxiQocXhsvx+Sw== - dependencies: - "@ethersproject/bytes" "^5.1.0" - "@ethersproject/constants" "^5.1.0" - "@ethersproject/logger" "^5.1.0" - -"@ethersproject/transactions@5.1.1", "@ethersproject/transactions@^5.0.0-beta.135", "@ethersproject/transactions@^5.1.0": - version "5.1.1" - resolved "https://registry.yarnpkg.com/@ethersproject/transactions/-/transactions-5.1.1.tgz#5a6bbb25fb062c3cc75eb0db12faefcdd3870813" - integrity sha512-Nwgbp09ttIVN0OoUBatCXaHxR7grWPHbozJN8v7AXDLrl6nnOIBEMDh+yJTnosSQlFhcyjfTGGN+Mx6R8HdvMw== - dependencies: - "@ethersproject/address" "^5.1.0" - "@ethersproject/bignumber" "^5.1.0" - "@ethersproject/bytes" "^5.1.0" - "@ethersproject/constants" "^5.1.0" - "@ethersproject/keccak256" "^5.1.0" - "@ethersproject/logger" "^5.1.0" - "@ethersproject/properties" "^5.1.0" - "@ethersproject/rlp" "^5.1.0" - "@ethersproject/signing-key" "^5.1.0" - -"@ethersproject/units@5.1.0": - version "5.1.0" - resolved "https://registry.yarnpkg.com/@ethersproject/units/-/units-5.1.0.tgz#b6ab3430ebc22adc3cb4839516496f167bee3ad5" - integrity sha512-isvJrx6qG0nKWfxsGORNjmOq/nh175fStfvRTA2xEKrGqx8JNJY83fswu4GkILowfriEM/eYpretfJnfzi7YhA== - dependencies: - "@ethersproject/bignumber" "^5.1.0" - "@ethersproject/constants" "^5.1.0" - "@ethersproject/logger" "^5.1.0" - -"@ethersproject/wallet@5.1.0": - version "5.1.0" - resolved "https://registry.yarnpkg.com/@ethersproject/wallet/-/wallet-5.1.0.tgz#134c5816eaeaa586beae9f9ff67891104a2c9a15" - integrity sha512-ULmUtiYQLTUS+y3DgkLzRhFEK10zMwmjOthnjiZxee3Q/MVwr3rnmuAnXIUZrPjna6hvUPnyRIdW5XuF0Ld0YQ== - dependencies: - "@ethersproject/abstract-provider" "^5.1.0" - "@ethersproject/abstract-signer" "^5.1.0" - "@ethersproject/address" "^5.1.0" - "@ethersproject/bignumber" "^5.1.0" - "@ethersproject/bytes" "^5.1.0" - 
"@ethersproject/hash" "^5.1.0" - "@ethersproject/hdnode" "^5.1.0" - "@ethersproject/json-wallets" "^5.1.0" - "@ethersproject/keccak256" "^5.1.0" - "@ethersproject/logger" "^5.1.0" - "@ethersproject/properties" "^5.1.0" - "@ethersproject/random" "^5.1.0" - "@ethersproject/signing-key" "^5.1.0" - "@ethersproject/transactions" "^5.1.0" - "@ethersproject/wordlists" "^5.1.0" - -"@ethersproject/web@5.1.0", "@ethersproject/web@^5.1.0": - version "5.1.0" - resolved "https://registry.yarnpkg.com/@ethersproject/web/-/web-5.1.0.tgz#ed56bbe4e3d9a8ffe3b2ed882da5c62d3551381b" - integrity sha512-LTeluWgTq04+RNqAkVhpydPcRZK/kKxD2Vy7PYGrAD27ABO9kTqTBKwiOuzTyAHKUQHfnvZbXmxBXJAGViSDcA== - dependencies: - "@ethersproject/base64" "^5.1.0" - "@ethersproject/bytes" "^5.1.0" - "@ethersproject/logger" "^5.1.0" - "@ethersproject/properties" "^5.1.0" - "@ethersproject/strings" "^5.1.0" - -"@ethersproject/wordlists@5.1.0", "@ethersproject/wordlists@^5.1.0": - version "5.1.0" - resolved "https://registry.yarnpkg.com/@ethersproject/wordlists/-/wordlists-5.1.0.tgz#54eb9ef3a00babbff90ffe124e19c89e07e6aace" - integrity sha512-NsUCi/TpBb+oTFvMSccUkJGtp5o/84eOyqp5q5aBeiNBSLkYyw21znRn9mAmxZgySpxgruVgKbaapnYPgvctPQ== - dependencies: - "@ethersproject/bytes" "^5.1.0" - "@ethersproject/hash" "^5.1.0" - "@ethersproject/logger" "^5.1.0" - "@ethersproject/properties" "^5.1.0" - "@ethersproject/strings" "^5.1.0" +"@ethersproject/solidity@5.0.9": + version "5.0.9" + resolved "https://registry.yarnpkg.com/@ethersproject/solidity/-/solidity-5.0.9.tgz#49100fbe9f364ac56f7ff7c726f4f3d151901134" + integrity sha512-LIxSAYEQgLRXE3mRPCq39ou61kqP8fDrGqEeNcaNJS3aLbmAOS8MZp56uK++WsdI9hj8sNsFh78hrAa6zR9Jag== + dependencies: + "@ethersproject/bignumber" "^5.0.13" + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/keccak256" "^5.0.7" + "@ethersproject/sha2" "^5.0.7" + "@ethersproject/strings" "^5.0.8" + +"@ethersproject/strings@5.0.9", "@ethersproject/strings@>=5.0.0-beta.130", "@ethersproject/strings@^5.0.8": + version 
"5.0.9" + resolved "https://registry.yarnpkg.com/@ethersproject/strings/-/strings-5.0.9.tgz#8e2eb2918b140231e1d1b883d77e43213a8ac280" + integrity sha512-ogxBpcUpdO524CYs841MoJHgHxEPUy0bJFDS4Ezg8My+WYVMfVAOlZSLss0Rurbeeam8CpUVDzM4zUn09SU66Q== + dependencies: + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/constants" "^5.0.8" + "@ethersproject/logger" "^5.0.8" + +"@ethersproject/transactions@5.0.10", "@ethersproject/transactions@^5.0.0-beta.135", "@ethersproject/transactions@^5.0.9": + version "5.0.10" + resolved "https://registry.yarnpkg.com/@ethersproject/transactions/-/transactions-5.0.10.tgz#d50cafd80d27206336f80114bc0f18bc18687331" + integrity sha512-Tqpp+vKYQyQdJQQk4M73tDzO7ODf2D42/sJOcKlDAAbdSni13v6a+31hUdo02qYXhVYwIs+ZjHnO4zKv5BNk8w== + dependencies: + "@ethersproject/address" "^5.0.9" + "@ethersproject/bignumber" "^5.0.13" + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/constants" "^5.0.8" + "@ethersproject/keccak256" "^5.0.7" + "@ethersproject/logger" "^5.0.8" + "@ethersproject/properties" "^5.0.7" + "@ethersproject/rlp" "^5.0.7" + "@ethersproject/signing-key" "^5.0.8" + +"@ethersproject/units@5.0.10": + version "5.0.10" + resolved "https://registry.yarnpkg.com/@ethersproject/units/-/units-5.0.10.tgz#9cca3b65cd0c92fab1bd33f2abd233546dd61987" + integrity sha512-eaiHi9ham5lbC7qpqxpae7OY/nHJUnRUnFFuEwi2VB5Nwe3Np468OAV+e+HR+jAK4fHXQE6PFBTxWGtnZuO37g== + dependencies: + "@ethersproject/bignumber" "^5.0.13" + "@ethersproject/constants" "^5.0.8" + "@ethersproject/logger" "^5.0.8" + +"@ethersproject/wallet@5.0.11": + version "5.0.11" + resolved "https://registry.yarnpkg.com/@ethersproject/wallet/-/wallet-5.0.11.tgz#9891936089d1b91e22ed59f850bc344b1544bf26" + integrity sha512-2Fg/DOvUltR7aZTOyWWlQhru+SKvq2UE3uEhXSyCFgMqDQNuc2nHXh1SHJtN65jsEbjVIppOe1Q7EQMvhmeeRw== + dependencies: + "@ethersproject/abstract-provider" "^5.0.8" + "@ethersproject/abstract-signer" "^5.0.10" + "@ethersproject/address" "^5.0.9" + "@ethersproject/bignumber" "^5.0.13" + 
"@ethersproject/bytes" "^5.0.9" + "@ethersproject/hash" "^5.0.10" + "@ethersproject/hdnode" "^5.0.8" + "@ethersproject/json-wallets" "^5.0.10" + "@ethersproject/keccak256" "^5.0.7" + "@ethersproject/logger" "^5.0.8" + "@ethersproject/properties" "^5.0.7" + "@ethersproject/random" "^5.0.7" + "@ethersproject/signing-key" "^5.0.8" + "@ethersproject/transactions" "^5.0.9" + "@ethersproject/wordlists" "^5.0.8" + +"@ethersproject/web@5.0.13", "@ethersproject/web@^5.0.12": + version "5.0.13" + resolved "https://registry.yarnpkg.com/@ethersproject/web/-/web-5.0.13.tgz#5a92ac6d835d2ebce95b6b645a86668736e2f532" + integrity sha512-G3x/Ns7pQm21ALnWLbdBI5XkW/jrsbXXffI9hKNPHqf59mTxHYtlNiSwxdoTSwCef3Hn7uvGZpaSgTyxs7IufQ== + dependencies: + "@ethersproject/base64" "^5.0.7" + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/logger" "^5.0.8" + "@ethersproject/properties" "^5.0.7" + "@ethersproject/strings" "^5.0.8" + +"@ethersproject/wordlists@5.0.9", "@ethersproject/wordlists@^5.0.8": + version "5.0.9" + resolved "https://registry.yarnpkg.com/@ethersproject/wordlists/-/wordlists-5.0.9.tgz#f16cc0b317637c3ae9c689ebd7bc2cbbffadd013" + integrity sha512-Sn6MTjZkfbriod6GG6+p43W09HOXT4gwcDVNj0YoPYlo4Zq2Fk6b1CU9KUX3c6aI17PrgYb4qwZm5BMuORyqyQ== + dependencies: + "@ethersproject/bytes" "^5.0.9" + "@ethersproject/hash" "^5.0.10" + "@ethersproject/logger" "^5.0.8" + "@ethersproject/properties" "^5.0.7" + "@ethersproject/strings" "^5.0.8" "@hapi/address@2.x.x": version "2.1.4" @@ -1497,15 +1356,36 @@ "@nodelib/fs.scandir" "2.1.4" fastq "^1.6.0" +"@nomiclabs/ethereumjs-vm@4.2.2": + version "4.2.2" + resolved "https://registry.yarnpkg.com/@nomiclabs/ethereumjs-vm/-/ethereumjs-vm-4.2.2.tgz#2f8817113ca0fb6c44c1b870d0a809f0e026a6cc" + integrity sha512-8WmX94mMcJaZ7/m7yBbyuS6B+wuOul+eF+RY9fBpGhNaUpyMR/vFIcDojqcWQ4Yafe1tMKY5LDu2yfT4NZgV4Q== + dependencies: + async "^2.1.2" + async-eventemitter "^0.2.2" + core-js-pure "^3.0.1" + ethereumjs-account "^3.0.0" + ethereumjs-block "^2.2.2" + 
ethereumjs-blockchain "^4.0.3" + ethereumjs-common "^1.5.0" + ethereumjs-tx "^2.1.2" + ethereumjs-util "^6.2.0" + fake-merkle-patricia-tree "^1.0.1" + functional-red-black-tree "^1.0.1" + merkle-patricia-tree "3.0.0" + rustbn.js "~0.2.0" + safe-buffer "^5.1.1" + util.promisify "^1.0.0" + "@nomiclabs/hardhat-ethers@^2.0.0": - version "2.0.2" - resolved "https://registry.yarnpkg.com/@nomiclabs/hardhat-ethers/-/hardhat-ethers-2.0.2.tgz#c472abcba0c5185aaa4ad4070146e95213c68511" - integrity sha512-6quxWe8wwS4X5v3Au8q1jOvXYEPkS1Fh+cME5u6AwNdnI4uERvPlVjlgRWzpnb+Rrt1l/cEqiNRH9GlsBMSDQg== + version "2.0.1" + resolved "https://registry.yarnpkg.com/@nomiclabs/hardhat-ethers/-/hardhat-ethers-2.0.1.tgz#f86a6fa210dbe6270adffccc75e93ed60a856904" + integrity sha512-uTFHDhhvJ+UjfvvMeQxD3ZALuzuI3FXzTYT1Z5N3ebyZL5z4Ogwt55GB0R9tdKY0p5HhDhBjU/gsCjUEwIVoaw== "@nomiclabs/hardhat-etherscan@^2.1.0": - version "2.1.2" - resolved "https://registry.yarnpkg.com/@nomiclabs/hardhat-etherscan/-/hardhat-etherscan-2.1.2.tgz#333b70a6116e922d16de2ef833dcb7191319afdd" - integrity sha512-SExzaBuHlnmHw0HKkElHITzdvhUQmlIRc2tlaywzgvPbh7WoI24nYqZ4N0CO+JXSDgRpFycvQNA8zRaCqjuqUg== + version "2.1.1" + resolved "https://registry.yarnpkg.com/@nomiclabs/hardhat-etherscan/-/hardhat-etherscan-2.1.1.tgz#186f3fa652a0ca20fb77aa857cfad2da845d5cbf" + integrity sha512-8TNUFsO5DpAfwNlXMDhcEtFAMOYsVNaQL2vq5vuCD45kUKBgL8H21++zOk231ha9D7LQWBMCIg7A7iPxw6Jwmg== dependencies: "@ethersproject/abi" "^5.0.2" "@ethersproject/address" "^5.0.2" @@ -1696,20 +1576,6 @@ resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-0.14.0.tgz#9fb3a3cf3132328151f353de4632e01e52102bea" integrity sha512-9NET910DNaIPngYnLLPeg+Ogzqsi9uM4mSboU5y6p8S5DzMTVEsJZrawi+BoDNUVBa2DhJqQYUFvMDfgU062LQ== -"@sinonjs/commons@^1.7.0": - version "1.8.3" - resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-1.8.3.tgz#3802ddd21a50a949b6721ddd72da36e67e7f1b2d" - integrity 
sha512-xkNcLAn/wZaX14RPlwizcKicDk9G3F8m2nU3L7Ukm5zBgTwiT0wsoFAHx9Jq56fJA1z/7uKGtCRu16sOUCLIHQ== - dependencies: - type-detect "4.0.8" - -"@sinonjs/fake-timers@^7.0.4": - version "7.0.5" - resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-7.0.5.tgz#558a7f8145a01366c44b3dcbdd7172c05c461564" - integrity sha512-fUt6b15bjV/VW93UP5opNXJxdwZSbK1EdiwnhN7XrQrcpaOhMJpZ/CjwFpM3THpxwA+YviBUJKSuEqKlCK5alw== - dependencies: - "@sinonjs/commons" "^1.7.0" - "@soda/friendly-errors-webpack-plugin@^1.7.1": version "1.8.0" resolved "https://registry.yarnpkg.com/@soda/friendly-errors-webpack-plugin/-/friendly-errors-webpack-plugin-1.8.0.tgz#84751d82a93019d5c92c0cf0e45ac59087cd2240" @@ -1730,10 +1596,10 @@ resolved "https://registry.yarnpkg.com/@solidity-parser/parser/-/parser-0.11.1.tgz#fa840af64840c930f24a9c82c08d4a092a068add" integrity sha512-H8BSBoKE8EubJa0ONqecA2TviT3TnHeC4NpgnAHSUiuhZoQBfPB4L2P9bs8R6AoTW10Endvh3vc+fomVMIDIYQ== -"@solidity-parser/parser@^0.12.0", "@solidity-parser/parser@^0.12.1": - version "0.12.2" - resolved "https://registry.yarnpkg.com/@solidity-parser/parser/-/parser-0.12.2.tgz#1afad367cb29a2ed8cdd4a3a62701c2821fb578f" - integrity sha512-d7VS7PxgMosm5NyaiyDJRNID5pK4AWj1l64Dbz0147hJgy5k2C0/ZiKK/9u5c5K+HRUVHmp+RMvGEjGh84oA5Q== +"@solidity-parser/parser@^0.8.2": + version "0.8.2" + resolved "https://registry.yarnpkg.com/@solidity-parser/parser/-/parser-0.8.2.tgz#a6a5e93ac8dca6884a99a532f133beba59b87b69" + integrity sha512-8LySx3qrNXPgB5JiULfG10O3V7QTxI/TLzSw5hFQhXWSkVxZBAv4rZQ0sYgLEbc8g3L2lmnujj1hKul38Eu5NQ== "@szmarczak/http-timer@^1.1.2": version "1.1.2" @@ -1749,17 +1615,17 @@ dependencies: ethers "^5.0.2" -"@types/abstract-leveldown@*": - version "5.0.1" - resolved "https://registry.yarnpkg.com/@types/abstract-leveldown/-/abstract-leveldown-5.0.1.tgz#3c7750d0186b954c7f2d2f6acc8c3c7ba0c3412e" - integrity sha512-wYxU3kp5zItbxKmeRYCEplS2MW7DzyBnxPGj+GJVHZEUZiK/nn5Ei1sUFgURDh+X051+zsGe28iud3oHjrYWQQ== +"@types/anymatch@*": + version 
"1.3.1" + resolved "https://registry.yarnpkg.com/@types/anymatch/-/anymatch-1.3.1.tgz#336badc1beecb9dacc38bea2cf32adf627a8421a" + integrity sha512-/+CRPXpBDpo2RK9C68N3b2cOvO0Cf5B9aPijHsoDQTHivnGSObdOF2BRQOYjojWTDy6nQvMjmqRXIxH55VjxxA== "@types/argparse@^1.0.36": version "1.0.38" resolved "https://registry.yarnpkg.com/@types/argparse/-/argparse-1.0.38.tgz#a81fd8606d481f873a3800c6ebae4f1d768a56a9" integrity sha512-ebDJ9b0e702Yr7pWgB0jzm+CX4Srzz8RcXtLJDJB+BSccqMa36uyH/zUsSYao5+BD1ytv3k3rPYCq4mAE1hsXA== -"@types/bn.js@*", "@types/bn.js@^5.1.0": +"@types/bn.js@*": version "5.1.0" resolved "https://registry.yarnpkg.com/@types/bn.js/-/bn.js-5.1.0.tgz#32c5d271503a12653c62cf4d2b45e6eab8cebc68" integrity sha512-QSSVYj7pYFN49kW77o2s9xTCwZ8F2xLbjLLSEVh8D2F4JUhZtPAGOFLTD+ffqksBx/u4cE/KImFjyhqCjn/LIA== @@ -1782,21 +1648,21 @@ "@types/node" "*" "@types/chai-as-promised@^7.1.3": - version "7.1.4" - resolved "https://registry.yarnpkg.com/@types/chai-as-promised/-/chai-as-promised-7.1.4.tgz#caf64e76fb056b8c8ced4b761ed499272b737601" - integrity sha512-1y3L1cHePcIm5vXkh1DSGf/zQq5n5xDKG1fpCvf18+uOkpce0Z1ozNFPkyWsVswK7ntN1sZBw3oU6gmN+pDUcA== + version "7.1.3" + resolved "https://registry.yarnpkg.com/@types/chai-as-promised/-/chai-as-promised-7.1.3.tgz#779166b90fda611963a3adbfd00b339d03b747bd" + integrity sha512-FQnh1ohPXJELpKhzjuDkPLR2BZCAqed+a6xV4MI/T3XzHfd2FlarfUGUdZYgqYe8oxkYn0fchHEeHfHqdZ96sg== dependencies: "@types/chai" "*" "@types/chai@*", "@types/chai@^4.2.14": - version "4.2.18" - resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.2.18.tgz#0c8e298dbff8205e2266606c1ea5fbdba29b46e4" - integrity sha512-rS27+EkB/RE1Iz3u0XtVL5q36MGDWbgYe7zWiodyKNUnthxY0rukK5V36eiUCtCisB7NN8zKYH6DO2M37qxFEQ== + version "4.2.15" + resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.2.15.tgz#b7a6d263c2cecf44b6de9a051cf496249b154553" + integrity sha512-rYff6FI+ZTKAPkJUoyz7Udq3GaoDZnxYDEvdEdFZASiA7PoErltHezDishqQiSDWrGxvxmplH304jyzQmjp0AQ== "@types/connect-history-api-fallback@*": - 
version "1.3.4" - resolved "https://registry.yarnpkg.com/@types/connect-history-api-fallback/-/connect-history-api-fallback-1.3.4.tgz#8c0f0e6e5d8252b699f5a662f51bdf82fd9d8bb8" - integrity sha512-Kf8v0wljR5GSCOCF/VQWdV3ZhKOVA73drXtY3geMTQgHy9dgqQ0dLrf31M0hcuWkhFzK5sP0kkS3mJzcKVtZbw== + version "1.3.3" + resolved "https://registry.yarnpkg.com/@types/connect-history-api-fallback/-/connect-history-api-fallback-1.3.3.tgz#4772b79b8b53185f0f4c9deab09236baf76ee3b4" + integrity sha512-7SxFCd+FLlxCfwVwbyPxbR4khL9aNikJhrorw8nUIOqeuooc9gifBuDQOJw5kzN7i6i3vLn9G8Wde/4QDihpYw== dependencies: "@types/express-serve-static-core" "*" "@types/node" "*" @@ -1814,9 +1680,9 @@ integrity sha512-0stqrMZB7vxsRTe//XEpr6A9+fThL2y/g6qzDG5ZgMJwuOceqOBDfh+g99tuN/XS58V52aPNTfWVS49Xvh842w== "@types/estree@*": - version "0.0.47" - resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.47.tgz#d7a51db20f0650efec24cd04994f523d93172ed4" - integrity sha512-c5ciR06jK8u9BstrmJyO97m+klJrrhCf9u3rLu3DEAJBirxRqSCvDQoYKmxuYwQI5SZChAWu+tq9oVlGRuzPAg== + version "0.0.46" + resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.46.tgz#0fb6bfbbeabd7a30880504993369c4bf1deab1fe" + integrity sha512-laIjwTQaD+5DukBZaygQ79K1Z0jb1bPEMRrkXSLjtCcZm+abyp5YbrqpSLzD42FwWW6gK/aS4NYpJ804nG2brg== "@types/estree@0.0.39": version "0.0.39" @@ -1824,9 +1690,9 @@ integrity sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw== "@types/express-serve-static-core@*", "@types/express-serve-static-core@^4.17.18": - version "4.17.19" - resolved "https://registry.yarnpkg.com/@types/express-serve-static-core/-/express-serve-static-core-4.17.19.tgz#00acfc1632e729acac4f1530e9e16f6dd1508a1d" - integrity sha512-DJOSHzX7pCiSElWaGR8kCprwibCB/3yW6vcT8VG3P0SJjnv19gnWG/AZMfM60Xj/YJIp/YCaDHyvzsFVeniARA== + version "4.17.18" + resolved "https://registry.yarnpkg.com/@types/express-serve-static-core/-/express-serve-static-core-4.17.18.tgz#8371e260f40e0e1ca0c116a9afcd9426fa094c40" + integrity 
sha512-m4JTwx5RUBNZvky/JJ8swEJPKFd8si08pPF2PfizYjGZOKr/svUWPcoUmLow6MmPzhasphB7gSTINY67xn3JNA== dependencies: "@types/node" "*" "@types/qs" "*" @@ -1857,10 +1723,19 @@ "@types/minimatch" "*" "@types/node" "*" -"@types/http-proxy@^1.17.5": - version "1.17.6" - resolved "https://registry.yarnpkg.com/@types/http-proxy/-/http-proxy-1.17.6.tgz#62dc3fade227d6ac2862c8f19ee0da9da9fd8616" - integrity sha512-+qsjqR75S/ib0ig0R9WN+CDoZeOBU6F2XLewgC4KVgdXiNHiKKHFEMRHOrs5PbYE97D5vataw5wPj4KLYfUkuQ== +"@types/http-proxy-middleware@*": + version "0.19.3" + resolved "https://registry.yarnpkg.com/@types/http-proxy-middleware/-/http-proxy-middleware-0.19.3.tgz#b2eb96fbc0f9ac7250b5d9c4c53aade049497d03" + integrity sha512-lnBTx6HCOUeIJMLbI/LaL5EmdKLhczJY5oeXZpX/cXE4rRqb3RmV7VcMpiEfYkmTjipv3h7IAyIINe4plEv7cA== + dependencies: + "@types/connect" "*" + "@types/http-proxy" "*" + "@types/node" "*" + +"@types/http-proxy@*": + version "1.17.5" + resolved "https://registry.yarnpkg.com/@types/http-proxy/-/http-proxy-1.17.5.tgz#c203c5e6e9dc6820d27a40eb1e511c70a220423d" + integrity sha512-GNkDE7bTv6Sf8JbV2GksknKOsk7OznNYHSdrtvPJXO0qJ9odZig6IZKUi5RFGi6d1bf6dgIAe4uXi3DBc7069Q== dependencies: "@types/node" "*" @@ -1874,14 +1749,6 @@ resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.7.tgz#98a993516c859eb0d5c4c8f098317a9ea68db9ad" integrity sha512-cxWFQVseBm6O9Gbw1IWb8r6OS4OhSt3hPZLkFApLjM8TEXROBuQGLAH2i2gZpcXdLBIrpXuTDhH7Vbm1iXmNGA== -"@types/levelup@^4.3.0": - version "4.3.1" - resolved "https://registry.yarnpkg.com/@types/levelup/-/levelup-4.3.1.tgz#7a53b9fd510716e11b2065332790fdf5f9b950b9" - integrity sha512-n//PeTpbHLjMLTIgW5B/g06W/6iuTBHuvUka2nFL9APMSVMNe2r4enADfu3CIE9IyV9E+uquf9OEQQqrDeg24A== - dependencies: - "@types/abstract-leveldown" "*" - "@types/node" "*" - "@types/lru-cache@^5.1.0": version "5.1.0" resolved "https://registry.yarnpkg.com/@types/lru-cache/-/lru-cache-5.1.0.tgz#57f228f2b80c046b4a1bd5cac031f81f207f4f03" @@ -1893,9 +1760,9 @@ integrity 
sha512-YATxVxgRqNH6nHEIsvg6k2Boc1JHI9ZbH5iWFFv/MTkchz3b1ieGDa5T0a9RznNdI0KhVbdbWSN+KWWrQZRxTw== "@types/minimatch@*": - version "3.0.4" - resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-3.0.4.tgz#f0ec25dbf2f0e4b18647313ac031134ca5b24b21" - integrity sha512-1z8k4wzFnNjVK/tlxvrWuK5WMt6mydWWP7+zvH5eFep4oj+UkrfiJTRtjCeBXNpwaA/FYqqtb4/QS4ianFpIRA== + version "3.0.3" + resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-3.0.3.tgz#3dca0e3f33b200fc7d1139c0cd96c1268cadfd9d" + integrity sha512-tHq6qdbT9U1IRSGf14CL0pUlULksvY9OZ+5eEgl1N7t+OA3tGvNpxJCzuKQlsNgCVwbAs670L1vcVQi8j9HjnA== "@types/minimist@^1.2.0": version "1.2.1" @@ -1917,9 +1784,9 @@ "@types/mocha" "*" "@types/mocha@*", "@types/mocha@^8.0.3": - version "8.2.2" - resolved "https://registry.yarnpkg.com/@types/mocha/-/mocha-8.2.2.tgz#91daa226eb8c2ff261e6a8cbf8c7304641e095e0" - integrity sha512-Lwh0lzzqT5Pqh6z61P3c3P5nm6fzQK/MMHl9UKeneAeInVflBSz1O2EkX6gM6xfJd7FBXBY5purtLx7fUiZ7Hw== + version "8.2.0" + resolved "https://registry.yarnpkg.com/@types/mocha/-/mocha-8.2.0.tgz#3eb56d13a1de1d347ecb1957c6860c911704bc44" + integrity sha512-/Sge3BymXo4lKc31C8OINJgXLaw+7vL1/L1pGiBNpGrBiT8FQiaFpSYV0uhTaG4y78vcMBTMFsWaHDvuD+xGzQ== "@types/mock-fs@^4.13.0": version "4.13.0" @@ -1929,27 +1796,22 @@ "@types/node" "*" "@types/node-fetch@^2.5.5", "@types/node-fetch@^2.5.7": - version "2.5.10" - resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.5.10.tgz#9b4d4a0425562f9fcea70b12cb3fcdd946ca8132" - integrity sha512-IpkX0AasN44hgEad0gEF/V6EgR5n69VEqPEgnmoM8GsIGro3PowbWs4tR6IhxUTyPLpOn+fiGG6nrQhcmoCuIQ== + version "2.5.8" + resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.5.8.tgz#e199c835d234c7eb0846f6618012e558544ee2fb" + integrity sha512-fbjI6ja0N5ZA8TV53RUqzsKNkl9fv8Oj3T7zxW7FGv1GSH7gwJaNF8dzCjrqKaxKeUpTz4yT1DaJFq/omNpGfw== dependencies: "@types/node" "*" form-data "^3.0.0" -"@types/node@*": - version "15.3.1" - resolved 
"https://registry.yarnpkg.com/@types/node/-/node-15.3.1.tgz#23a06b87eedb524016616e886b116b8fdcb180af" - integrity sha512-weaeiP4UF4XgF++3rpQhpIJWsCTS4QJw5gvBhQu6cFIxTwyxWIe3xbnrY/o2lTCQ0lsdb8YIUDUvLR4Vuz5rbw== +"@types/node@*", "@types/node@^14.14.5", "@types/node@^14.6.1": + version "14.14.28" + resolved "https://registry.yarnpkg.com/@types/node/-/node-14.14.28.tgz#cade4b64f8438f588951a6b35843ce536853f25b" + integrity sha512-lg55ArB+ZiHHbBBttLpzD07akz0QPrZgUODNakeC09i62dnrywr9mFErHuaPlB6I7z+sEbK+IYmplahvplCj2g== "@types/node@^12.12.6": - version "12.20.13" - resolved "https://registry.yarnpkg.com/@types/node/-/node-12.20.13.tgz#e743bae112bd779ac9650f907197dd2caa7f0364" - integrity sha512-1x8W5OpxPq+T85OUsHRP6BqXeosKmeXRtjoF39STcdf/UWLqUsoehstZKOi0CunhVqHG17AyZgpj20eRVooK6A== - -"@types/node@^14.14.5", "@types/node@^14.6.1": - version "14.17.0" - resolved "https://registry.yarnpkg.com/@types/node/-/node-14.17.0.tgz#3ba770047723b3eeb8dc9fca02cce8a7fb6378da" - integrity sha512-w8VZUN/f7SSbvVReb9SWp6cJFevxb4/nkG65yLAya//98WgocKm5PLDAtSs5CtJJJM+kHmJjO/6mmYW4MHShZA== + version "12.20.1" + resolved "https://registry.yarnpkg.com/@types/node/-/node-12.20.1.tgz#63d36c10e162666f0107f247cdca76542c3c7472" + integrity sha512-tCkE96/ZTO+cWbln2xfyvd6ngHLanvVlJ3e5BeirJ3BYI5GbAyubIrmV4JjjugDly5D9fHjOL5MNsqsCnqwW6g== "@types/normalize-package-data@^2.4.0": version "2.4.0" @@ -1964,9 +1826,9 @@ "@types/node" "*" "@types/prettier@^2.1.1": - version "2.2.3" - resolved "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.2.3.tgz#ef65165aea2924c9359205bf748865b8881753c0" - integrity sha512-PijRCG/K3s3w1We6ynUKdxEc5AcuuH3NBmMDP8uvKVp6X43UY7NQlTzczakXP3DJR0F4dfNQIGjU2cUeRYs2AA== + version "2.2.1" + resolved "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.2.1.tgz#374e31645d58cb18a07b3ecd8e9dede4deb2cccd" + integrity sha512-DxZZbyMAM9GWEzXL+BMZROWz9oo6A9EilwwOMET2UVu2uZTqMWS5S69KVtuVKaRjCUpcrOXRalet86/OpG4kqw== "@types/q@^1.5.1": version "1.5.4" @@ -1974,9 +1836,9 @@ 
integrity sha512-1HcDas8SEj4z1Wc696tH56G8OlRaH/sqZOynNNB+HF0WOeXPaxTtbYzJY2oEfiUxjSKjhCKr+MvR7dCHcEelug== "@types/qs@*": - version "6.9.6" - resolved "https://registry.yarnpkg.com/@types/qs/-/qs-6.9.6.tgz#df9c3c8b31a247ec315e6996566be3171df4b3b1" - integrity sha512-0/HnwIfW4ki2D8L8c9GVcG5I72s9jP5GSLVF0VIXDW00kmIpA6O33G7a8n59Tmh7Nz0WUC3rSb7PTY/sdW2JzA== + version "6.9.5" + resolved "https://registry.yarnpkg.com/@types/qs/-/qs-6.9.5.tgz#434711bdd49eb5ee69d90c1d67c354a9a8ecb18b" + integrity sha512-/JHkVHtx/REVG0VVToGRGH2+23hsYLHdyG+GrvoUGlGAd0ErauXDyvHtRI/7H7mzLm+tBCKA7pfcpkQ1lf58iQ== "@types/range-parser@*": version "1.2.3" @@ -1998,9 +1860,9 @@ "@types/node" "*" "@types/secp256k1@^4.0.1": - version "4.0.2" - resolved "https://registry.yarnpkg.com/@types/secp256k1/-/secp256k1-4.0.2.tgz#20c29a87149d980f64464e56539bf4810fdb5d1d" - integrity sha512-QMg+9v0bbNJ2peLuHRWxzmy0HRJIG6gFZNhaRSp7S3ggSbCCxiqQB2/ybvhXyhHOCequpNkrx7OavNhrWOsW0A== + version "4.0.1" + resolved "https://registry.yarnpkg.com/@types/secp256k1/-/secp256k1-4.0.1.tgz#fb3aa61a1848ad97d7425ff9dcba784549fca5a4" + integrity sha512-+ZjSA8ELlOp8SlKi0YLB2tz9d5iPNEmOBd+8Rz21wTMdaXQIa9b6TEnD6l5qKOCypE7FSyPyck12qZJxSDNoog== dependencies: "@types/node" "*" @@ -2021,11 +1883,16 @@ "@types/sinon" "*" "@types/sinon@*": - version "10.0.0" - resolved "https://registry.yarnpkg.com/@types/sinon/-/sinon-10.0.0.tgz#eecc3847af03d45ffe53d55aaaaf6ecb28b5e584" - integrity sha512-jDZ55oCKxqlDmoTBBbBBEx+N8ZraUVhggMZ9T5t+6/Dh8/4NiOjSUfpLrPiEwxQDlAe3wpAkoXhWvE6LibtsMQ== + version "9.0.10" + resolved "https://registry.yarnpkg.com/@types/sinon/-/sinon-9.0.10.tgz#7fb9bcb6794262482859cab66d59132fca18fcf7" + integrity sha512-/faDC0erR06wMdybwI/uR8wEKV/E83T0k4sepIpB7gXuy2gzx2xiOjmztq6a2Y6rIGJ04D+6UU0VBmWy+4HEMA== dependencies: - "@sinonjs/fake-timers" "^7.0.4" + "@types/sinonjs__fake-timers" "*" + +"@types/sinonjs__fake-timers@*": + version "6.0.2" + resolved 
"https://registry.yarnpkg.com/@types/sinonjs__fake-timers/-/sinonjs__fake-timers-6.0.2.tgz#3a84cf5ec3249439015e14049bd3161419bf9eae" + integrity sha512-dIPoZ3g5gcx9zZEszaxLSVTvMReD3xxyyDnQUjA6IYDG9Ba2AV0otMPs+77sG9ojB4Qr2N2Vk5RnKeuA0X/0bg== "@types/source-list-map@*": version "0.1.2" @@ -2039,22 +1906,22 @@ dependencies: "@types/node" "*" -"@types/tapable@^1": - version "1.0.7" - resolved "https://registry.yarnpkg.com/@types/tapable/-/tapable-1.0.7.tgz#545158342f949e8fd3bfd813224971ecddc3fac4" - integrity sha512-0VBprVqfgFD7Ehb2vd8Lh9TG3jP98gvr8rgehQqzztZNI7o8zS8Ad4jyZneKELphpuE212D8J70LnSNQSyO6bQ== +"@types/tapable@*": + version "1.0.6" + resolved "https://registry.yarnpkg.com/@types/tapable/-/tapable-1.0.6.tgz#a9ca4b70a18b270ccb2bc0aaafefd1d486b7ea74" + integrity sha512-W+bw9ds02rAQaMvaLYxAbJ6cvguW/iJXNT6lTssS1ps6QdrMKttqEAMEG/b5CR8TZl3/L7/lH0ZV5nNR1LXikA== "@types/uglify-js@*": - version "3.13.0" - resolved "https://registry.yarnpkg.com/@types/uglify-js/-/uglify-js-3.13.0.tgz#1cad8df1fb0b143c5aba08de5712ea9d1ff71124" - integrity sha512-EGkrJD5Uy+Pg0NUR8uA4bJ5WMfljyad0G+784vLCNUkD+QwOJXUbBYExXfVGf7YtyzdQp3L/XMYcliB987kL5Q== + version "3.12.0" + resolved "https://registry.yarnpkg.com/@types/uglify-js/-/uglify-js-3.12.0.tgz#2bb061c269441620d46b946350c8f16d52ef37c5" + integrity sha512-sYAF+CF9XZ5cvEBkI7RtrG9g2GtMBkviTnBxYYyq+8BWvO4QtXfwwR6a2LFwCi4evMKZfpv6U43ViYvv17Wz3Q== dependencies: source-map "^0.6.1" "@types/underscore@*": - version "1.11.2" - resolved "https://registry.yarnpkg.com/@types/underscore/-/underscore-1.11.2.tgz#9441e0f6402bbcb72dbee771582fa57c5a1dedd3" - integrity sha512-Ls2ylbo7++ITrWk2Yc3G/jijwSq5V3GT0tlgVXEl2kKYXY3ImrtmTCoE2uyTWFRI5owMBriloZFWbE1SXOsE7w== + version "1.10.24" + resolved "https://registry.yarnpkg.com/@types/underscore/-/underscore-1.10.24.tgz#dede004deed3b3f99c4db0bdb9ee21cae25befdd" + integrity sha512-T3NQD8hXNW2sRsSbLNjF/aBo18MyJlbw0lSpQHB/eZZtScPdexN4HSa8cByYwTw9Wy7KuOFr81mlDQcQQaZ79w== "@types/web3@1.0.19": version "1.0.19" @@ 
-2065,15 +1932,15 @@ "@types/underscore" "*" "@types/webpack-dev-server@^3.11.0": - version "3.11.4" - resolved "https://registry.yarnpkg.com/@types/webpack-dev-server/-/webpack-dev-server-3.11.4.tgz#90d47dd660b696d409431ab8c1e9fa3615103a07" - integrity sha512-DCKORHjqNNVuMIDWFrlljftvc9CL0+09p3l7lBpb8dRqgN5SmvkWCY4MPKxoI6wJgdRqohmoNbptkxqSKAzLRg== + version "3.11.1" + resolved "https://registry.yarnpkg.com/@types/webpack-dev-server/-/webpack-dev-server-3.11.1.tgz#f8f4dac1da226d530bd15a1d5dc34b23ba766ccb" + integrity sha512-rIb+LtUkKnh7+oIJm3WiMJONd71Q0lZuqGLcSqhZ5qjN9gV/CNmZe7Bai+brnBPZ/KVYOsr+4bFLiNZwjBicLw== dependencies: "@types/connect-history-api-fallback" "*" "@types/express" "*" + "@types/http-proxy-middleware" "*" "@types/serve-static" "*" - "@types/webpack" "^4" - http-proxy-middleware "^1.0.0" + "@types/webpack" "*" "@types/webpack-sources@*": version "2.1.0" @@ -2084,25 +1951,25 @@ "@types/source-list-map" "*" source-map "^0.7.3" -"@types/webpack@^4", "@types/webpack@^4.0.0": - version "4.41.29" - resolved "https://registry.yarnpkg.com/@types/webpack/-/webpack-4.41.29.tgz#2e66c1de8223c440366469415c50a47d97625773" - integrity sha512-6pLaORaVNZxiB3FSHbyBiWM7QdazAWda1zvAq4SbZObZqHSDbWLi62iFdblVea6SK9eyBIVp5yHhKt/yNQdR7Q== +"@types/webpack@*", "@types/webpack@^4.0.0": + version "4.41.26" + resolved "https://registry.yarnpkg.com/@types/webpack/-/webpack-4.41.26.tgz#27a30d7d531e16489f9c7607c747be6bc1a459ef" + integrity sha512-7ZyTfxjCRwexh+EJFwRUM+CDB2XvgHl4vfuqf1ZKrgGvcS5BrNvPQqJh3tsZ0P6h6Aa1qClVHaJZszLPzpqHeA== dependencies: + "@types/anymatch" "*" "@types/node" "*" - "@types/tapable" "^1" + "@types/tapable" "*" "@types/uglify-js" "*" "@types/webpack-sources" "*" - anymatch "^3.0.0" source-map "^0.6.0" "@typescript-eslint/eslint-plugin@^4.10.0": - version "4.24.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-4.24.0.tgz#03801ffc25b2af9d08f3dc9bccfc0b7ce3780d0f" - integrity 
sha512-qbCgkPM7DWTsYQGjx9RTuQGswi+bEt0isqDBeo+CKV0953zqI0Tp7CZ7Fi9ipgFA6mcQqF4NOVNwS/f2r6xShw== + version "4.15.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-4.15.1.tgz#835f64aa0a403e5e9e64c10ceaf8d05c3f015180" + integrity sha512-yW2epMYZSpNJXZy22Biu+fLdTG8Mn6b22kR3TqblVk50HGNV8Zya15WAXuQCr8tKw4Qf1BL4QtI6kv6PCkLoJw== dependencies: - "@typescript-eslint/experimental-utils" "4.24.0" - "@typescript-eslint/scope-manager" "4.24.0" + "@typescript-eslint/experimental-utils" "4.15.1" + "@typescript-eslint/scope-manager" "4.15.1" debug "^4.1.1" functional-red-black-tree "^1.0.1" lodash "^4.17.15" @@ -2110,60 +1977,60 @@ semver "^7.3.2" tsutils "^3.17.1" -"@typescript-eslint/experimental-utils@4.24.0": - version "4.24.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/experimental-utils/-/experimental-utils-4.24.0.tgz#c23ead9de44b99c3a5fd925c33a106b00165e172" - integrity sha512-IwTT2VNDKH1h8RZseMH4CcYBz6lTvRoOLDuuqNZZoThvfHEhOiZPQCow+5El3PtyxJ1iDr6UXZwYtE3yZQjhcw== +"@typescript-eslint/experimental-utils@4.15.1": + version "4.15.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/experimental-utils/-/experimental-utils-4.15.1.tgz#d744d1ac40570a84b447f7aa1b526368afd17eec" + integrity sha512-9LQRmOzBRI1iOdJorr4jEnQhadxK4c9R2aEAsm7WE/7dq8wkKD1suaV0S/JucTL8QlYUPU1y2yjqg+aGC0IQBQ== dependencies: "@types/json-schema" "^7.0.3" - "@typescript-eslint/scope-manager" "4.24.0" - "@typescript-eslint/types" "4.24.0" - "@typescript-eslint/typescript-estree" "4.24.0" + "@typescript-eslint/scope-manager" "4.15.1" + "@typescript-eslint/types" "4.15.1" + "@typescript-eslint/typescript-estree" "4.15.1" eslint-scope "^5.0.0" eslint-utils "^2.0.0" "@typescript-eslint/parser@^4.10.0": - version "4.24.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-4.24.0.tgz#2e5f1cc78ffefe43bfac7e5659309a92b09a51bd" - integrity 
sha512-dj1ZIh/4QKeECLb2f/QjRwMmDArcwc2WorWPRlB8UNTZlY1KpTVsbX7e3ZZdphfRw29aTFUSNuGB8w9X5sS97w== + version "4.15.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-4.15.1.tgz#4c91a0602733db63507e1dbf13187d6c71a153c4" + integrity sha512-V8eXYxNJ9QmXi5ETDguB7O9diAXlIyS+e3xzLoP/oVE4WCAjssxLIa0mqCLsCGXulYJUfT+GV70Jv1vHsdKwtA== dependencies: - "@typescript-eslint/scope-manager" "4.24.0" - "@typescript-eslint/types" "4.24.0" - "@typescript-eslint/typescript-estree" "4.24.0" + "@typescript-eslint/scope-manager" "4.15.1" + "@typescript-eslint/types" "4.15.1" + "@typescript-eslint/typescript-estree" "4.15.1" debug "^4.1.1" -"@typescript-eslint/scope-manager@4.24.0": - version "4.24.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-4.24.0.tgz#38088216f0eaf235fa30ed8cabf6948ec734f359" - integrity sha512-9+WYJGDnuC9VtYLqBhcSuM7du75fyCS/ypC8c5g7Sdw7pGL4NDTbeH38eJPfzIydCHZDoOgjloxSAA3+4l/zsA== +"@typescript-eslint/scope-manager@4.15.1": + version "4.15.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-4.15.1.tgz#f6511eb38def2a8a6be600c530c243bbb56ac135" + integrity sha512-ibQrTFcAm7yG4C1iwpIYK7vDnFg+fKaZVfvyOm3sNsGAerKfwPVFtYft5EbjzByDJ4dj1WD8/34REJfw/9wdVA== dependencies: - "@typescript-eslint/types" "4.24.0" - "@typescript-eslint/visitor-keys" "4.24.0" + "@typescript-eslint/types" "4.15.1" + "@typescript-eslint/visitor-keys" "4.15.1" -"@typescript-eslint/types@4.24.0": - version "4.24.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-4.24.0.tgz#6d0cca2048cbda4e265e0c4db9c2a62aaad8228c" - integrity sha512-tkZUBgDQKdvfs8L47LaqxojKDE+mIUmOzdz7r+u+U54l3GDkTpEbQ1Jp3cNqqAU9vMUCBA1fitsIhm7yN0vx9Q== +"@typescript-eslint/types@4.15.1": + version "4.15.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-4.15.1.tgz#da702f544ef1afae4bc98da699eaecd49cf31c8c" + integrity 
sha512-iGsaUyWFyLz0mHfXhX4zO6P7O3sExQpBJ2dgXB0G5g/8PRVfBBsmQIc3r83ranEQTALLR3Vko/fnCIVqmH+mPw== -"@typescript-eslint/typescript-estree@4.24.0": - version "4.24.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-4.24.0.tgz#b49249679a98014d8b03e8d4b70864b950e3c90f" - integrity sha512-kBDitL/by/HK7g8CYLT7aKpAwlR8doshfWz8d71j97n5kUa5caHWvY0RvEUEanL/EqBJoANev8Xc/mQ6LLwXGA== +"@typescript-eslint/typescript-estree@4.15.1": + version "4.15.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-4.15.1.tgz#fa9a9ff88b4a04d901ddbe5b248bc0a00cd610be" + integrity sha512-z8MN3CicTEumrWAEB2e2CcoZa3KP9+SMYLIA2aM49XW3cWIaiVSOAGq30ffR5XHxRirqE90fgLw3e6WmNx5uNw== dependencies: - "@typescript-eslint/types" "4.24.0" - "@typescript-eslint/visitor-keys" "4.24.0" + "@typescript-eslint/types" "4.15.1" + "@typescript-eslint/visitor-keys" "4.15.1" debug "^4.1.1" globby "^11.0.1" is-glob "^4.0.1" semver "^7.3.2" tsutils "^3.17.1" -"@typescript-eslint/visitor-keys@4.24.0": - version "4.24.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-4.24.0.tgz#a8fafdc76cad4e04a681a945fbbac4e35e98e297" - integrity sha512-4ox1sjmGHIxjEDBnMCtWFFhErXtKA1Ec0sBpuz0fqf3P+g3JFGyTxxbF06byw0FRsPnnbq44cKivH7Ks1/0s6g== +"@typescript-eslint/visitor-keys@4.15.1": + version "4.15.1" + resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-4.15.1.tgz#c76abbf2a3be8a70ed760f0e5756bf62de5865dd" + integrity sha512-tYzaTP9plooRJY8eNlpAewTOqtWW/4ff/5wBjNVaJ0S0wC4Gpq/zDVRTJa5bq2v1pCNQ08xxMCndcvR+h7lMww== dependencies: - "@typescript-eslint/types" "4.24.0" + "@typescript-eslint/types" "4.15.1" eslint-visitor-keys "^2.0.0" "@ungap/promise-all-settled@1.1.2": @@ -2186,10 +2053,10 @@ resolved "https://registry.yarnpkg.com/@vue/babel-helper-vue-transform-on/-/babel-helper-vue-transform-on-1.0.2.tgz#9b9c691cd06fc855221a2475c3cc831d774bc7dc" integrity 
sha512-hz4R8tS5jMn8lDq6iD+yWL6XNB699pGIVLk7WSJnn1dbpjaazsjZQkieJoRX6gW5zpYSCFqQ7jUquPNY65tQYA== -"@vue/babel-plugin-jsx@^1.0.3": - version "1.0.6" - resolved "https://registry.yarnpkg.com/@vue/babel-plugin-jsx/-/babel-plugin-jsx-1.0.6.tgz#184bf3541ab6efdbe5079ab8b20c19e2af100bfb" - integrity sha512-RzYsvBhzKUmY2YG6LoV+W5PnlnkInq0thh1AzCmewwctAgGN6e9UFon6ZrQQV1CO5G5PeME7MqpB+/vvGg0h4g== +"@vue/babel-plugin-jsx@^1.0.0-0": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@vue/babel-plugin-jsx/-/babel-plugin-jsx-1.0.3.tgz#ad5ee86ebc9fc40900add9914534e223c719eace" + integrity sha512-+52ZQFmrM0yh61dQlgwQlfHZXmYbswbQEL25SOSt9QkjegAdfIGu87oELw0l8H6cuJYazZCiNjPR9eU++ZIbxg== dependencies: "@babel/helper-module-imports" "^7.0.0" "@babel/plugin-syntax-jsx" "^7.0.0" @@ -2213,10 +2080,10 @@ lodash.kebabcase "^4.1.1" svg-tags "^1.0.0" -"@vue/babel-preset-app@^4.5.13": - version "4.5.13" - resolved "https://registry.yarnpkg.com/@vue/babel-preset-app/-/babel-preset-app-4.5.13.tgz#cb475321e4c73f7f110dac29a48c2a9cb80afeb6" - integrity sha512-pM7CR3yXB6L8Gfn6EmX7FLNE3+V/15I3o33GkSNsWvgsMp6HVGXKkXgojrcfUUauyL1LZOdvTmu4enU2RePGHw== +"@vue/babel-preset-app@^4.5.11": + version "4.5.11" + resolved "https://registry.yarnpkg.com/@vue/babel-preset-app/-/babel-preset-app-4.5.11.tgz#f677bc10472e418f71f61f10dde5a79976a215b8" + integrity sha512-9VoFlm/9vhynKNGM+HA7qBsoQSUEnuG5i5kcFI9vTLLrh8A0fxrwUyVLLppO6T1sAZ6vrKdQFnEkjL+RkRAwWQ== dependencies: "@babel/core" "^7.11.0" "@babel/helper-compilation-targets" "^7.9.6" @@ -2228,14 +2095,14 @@ "@babel/plugin-transform-runtime" "^7.11.0" "@babel/preset-env" "^7.11.0" "@babel/runtime" "^7.11.0" - "@vue/babel-plugin-jsx" "^1.0.3" - "@vue/babel-preset-jsx" "^1.2.4" + "@vue/babel-plugin-jsx" "^1.0.0-0" + "@vue/babel-preset-jsx" "^1.1.2" babel-plugin-dynamic-import-node "^2.3.3" core-js "^3.6.5" core-js-compat "^3.6.5" semver "^6.1.0" -"@vue/babel-preset-jsx@^1.2.4": +"@vue/babel-preset-jsx@^1.1.2": version "1.2.4" resolved 
"https://registry.yarnpkg.com/@vue/babel-preset-jsx/-/babel-preset-jsx-1.2.4.tgz#92fea79db6f13b01e80d3a0099e2924bdcbe4e87" integrity sha512-oRVnmN2a77bYDJzeGSt92AuHXbkIxbf/XXSE3klINnh9AXBmVS1DGa1f0d+dDYpLfsAKElMnqKTQfKn7obcL4w== @@ -2298,40 +2165,40 @@ "@vue/babel-plugin-transform-vue-jsx" "^1.2.1" camelcase "^5.0.0" -"@vue/cli-overlay@^4.5.13": - version "4.5.13" - resolved "https://registry.yarnpkg.com/@vue/cli-overlay/-/cli-overlay-4.5.13.tgz#4f1fd2161be8f69d6cba8079f3f0d7dc4dee47a7" - integrity sha512-jhUIg3klgi5Cxhs8dnat5hi/W2tQJvsqCxR0u6hgfSob0ORODgUBlN+F/uwq7cKIe/pzedVUk1y07F13GQvPqg== +"@vue/cli-overlay@^4.5.11": + version "4.5.11" + resolved "https://registry.yarnpkg.com/@vue/cli-overlay/-/cli-overlay-4.5.11.tgz#ea99493131182285f7ac2762290354d6e5b188e8" + integrity sha512-aDQNw+oGk5+KR0vL9TocjfzyYHTJxR2lS8iPbcL4lRglCs2dudOE7QWXypj5dM4rQus0jJ5fxJTS55o9uy9fcQ== "@vue/cli-plugin-babel@^4.0.0": - version "4.5.13" - resolved "https://registry.yarnpkg.com/@vue/cli-plugin-babel/-/cli-plugin-babel-4.5.13.tgz#a89c482edcc4ea1d135645cec502a7f5fd4c30e7" - integrity sha512-ykvEAfD8PgGs+dGMGqr7l/nRmIS39NRzWLhMluPLTvDV1L+IxcoB73HNLGA/aENDpl8CuWrTE+1VgydcOhp+wg== + version "4.5.11" + resolved "https://registry.yarnpkg.com/@vue/cli-plugin-babel/-/cli-plugin-babel-4.5.11.tgz#7c1db4ca2f911e2156e7d1cf774fe2ad0f7428eb" + integrity sha512-ogUMeO2waDtghIWwmuAzMJAnnPdmqRdJlwJDca9u6BK9jX1bxNThBSFS/MN2VmlYzulOnqH4zAC87jTWNg/czg== dependencies: "@babel/core" "^7.11.0" - "@vue/babel-preset-app" "^4.5.13" - "@vue/cli-shared-utils" "^4.5.13" + "@vue/babel-preset-app" "^4.5.11" + "@vue/cli-shared-utils" "^4.5.11" babel-loader "^8.1.0" cache-loader "^4.1.0" thread-loader "^2.1.3" webpack "^4.0.0" -"@vue/cli-plugin-router@^4.5.13": - version "4.5.13" - resolved "https://registry.yarnpkg.com/@vue/cli-plugin-router/-/cli-plugin-router-4.5.13.tgz#0b67c8898a2bf132941919a2a2e5f3aacbd9ffbe" - integrity 
sha512-tgtMDjchB/M1z8BcfV4jSOY9fZSMDTPgF9lsJIiqBWMxvBIsk9uIZHxp62DibYME4CCKb/nNK61XHaikFp+83w== +"@vue/cli-plugin-router@^4.5.11": + version "4.5.11" + resolved "https://registry.yarnpkg.com/@vue/cli-plugin-router/-/cli-plugin-router-4.5.11.tgz#3b6df738c5a1a5f50376822bf661d9a3b0c3fa62" + integrity sha512-09tzw3faOs48IUPwLutYaNC7eoyyL140fKruTwdFdXuBLDdSQVida57Brx0zj2UKXc5qF8hk4GoGrOshN0KfNg== dependencies: - "@vue/cli-shared-utils" "^4.5.13" + "@vue/cli-shared-utils" "^4.5.11" -"@vue/cli-plugin-vuex@^4.5.13": - version "4.5.13" - resolved "https://registry.yarnpkg.com/@vue/cli-plugin-vuex/-/cli-plugin-vuex-4.5.13.tgz#98646d8bc1e69cf6c6a6cba2fed3eace0356c360" - integrity sha512-I1S9wZC7iI0Wn8kw8Zh+A2Qkf6s1M6vTGBkx8boXjuzfwEEyEHRxadsVCecZc8Mkpydo0nykj+MyYF96TKFuVA== +"@vue/cli-plugin-vuex@^4.5.11": + version "4.5.11" + resolved "https://registry.yarnpkg.com/@vue/cli-plugin-vuex/-/cli-plugin-vuex-4.5.11.tgz#f6f619bcfb66c86cc45340d73152844635e548bd" + integrity sha512-JBPeZLubiSHbRkEKDj0tnLiU43AJ3vt6JULn4IKWH1XWZ6MFC8vElaP5/AA4O3Zko5caamDDBq3TRyxdA2ncUQ== "@vue/cli-service@^4.0.0": - version "4.5.13" - resolved "https://registry.yarnpkg.com/@vue/cli-service/-/cli-service-4.5.13.tgz#a09e684a801684b6e24e5414ad30650970eec9ed" - integrity sha512-CKAZN4iokMMsaUyJRU22oUAz3oS/X9sVBSKAF2/shFBV5xh3jqAlKl8OXZYz4cXGFLA6djNuYrniuLAo7Ku97A== + version "4.5.11" + resolved "https://registry.yarnpkg.com/@vue/cli-service/-/cli-service-4.5.11.tgz#b157e2eee2351889cbbd4ccb4a4a9d8575409175" + integrity sha512-FXeJh2o6B8q/njv2Ebhe9EsLXt9sPMXGDY5zVvcV5jgj9wkoej9yLfnmwWCau5kegNClP6bcM+BEHuMYxJ+ubQ== dependencies: "@intervolga/optimize-cssnano-plugin" "^1.0.5" "@soda/friendly-errors-webpack-plugin" "^1.7.1" @@ -2339,10 +2206,10 @@ "@types/minimist" "^1.2.0" "@types/webpack" "^4.0.0" "@types/webpack-dev-server" "^3.11.0" - "@vue/cli-overlay" "^4.5.13" - "@vue/cli-plugin-router" "^4.5.13" - "@vue/cli-plugin-vuex" "^4.5.13" - "@vue/cli-shared-utils" "^4.5.13" + "@vue/cli-overlay" "^4.5.11" + 
"@vue/cli-plugin-router" "^4.5.11" + "@vue/cli-plugin-vuex" "^4.5.11" + "@vue/cli-shared-utils" "^4.5.11" "@vue/component-compiler-utils" "^3.1.2" "@vue/preload-webpack-plugin" "^1.1.0" "@vue/web-component-wrapper" "^1.2.0" @@ -2377,8 +2244,8 @@ pnp-webpack-plugin "^1.6.4" portfinder "^1.0.26" postcss-loader "^3.0.0" - ssri "^8.0.1" - terser-webpack-plugin "^1.4.4" + ssri "^7.1.0" + terser-webpack-plugin "^2.3.6" thread-loader "^2.1.3" url-loader "^2.2.0" vue-loader "^15.9.2" @@ -2391,10 +2258,10 @@ optionalDependencies: vue-loader-v16 "npm:vue-loader@^16.1.0" -"@vue/cli-shared-utils@^4.5.13": - version "4.5.13" - resolved "https://registry.yarnpkg.com/@vue/cli-shared-utils/-/cli-shared-utils-4.5.13.tgz#acd40f31b4790f1634292bdaa5fca95dc1e0ff50" - integrity sha512-HpnOrkLg42RFUsQGMJv26oTG3J3FmKtO2WSRhKIIL+1ok3w9OjGCtA3nMMXN27f9eX14TqO64M36DaiSZ1fSiw== +"@vue/cli-shared-utils@^4.5.11": + version "4.5.11" + resolved "https://registry.yarnpkg.com/@vue/cli-shared-utils/-/cli-shared-utils-4.5.11.tgz#fff71673ee9128f998c691515b9d327071b4f41e" + integrity sha512-+aaQ+ThQG3+WMexfSWNl0y6f43edqVqRNbguE53F3TIH81I7saS5S750ayqXhZs2r6STJJyqorQnKtAWfHo29A== dependencies: "@hapi/joi" "^15.0.1" chalk "^2.4.2" @@ -2628,17 +2495,6 @@ abstract-leveldown@^5.0.0, abstract-leveldown@~5.0.0: dependencies: xtend "~4.0.0" -abstract-leveldown@^6.2.1: - version "6.3.0" - resolved "https://registry.yarnpkg.com/abstract-leveldown/-/abstract-leveldown-6.3.0.tgz#d25221d1e6612f820c35963ba4bd739928f6026a" - integrity sha512-TU5nlYgta8YrBMNpc9FwQzRbiXsj49gsALsXadbGHt9CROPzX5fB0rWDR5mtdpOOKa5XqRFpbj1QroPAoPzVjQ== - dependencies: - buffer "^5.5.0" - immediate "^3.2.3" - level-concat-iterator "~2.0.0" - level-supports "~1.0.0" - xtend "~4.0.0" - abstract-leveldown@~2.6.0: version "2.6.3" resolved "https://registry.yarnpkg.com/abstract-leveldown/-/abstract-leveldown-2.6.3.tgz#1c5e8c6a5ef965ae8c35dfb3a8770c476b82c4b8" @@ -2646,17 +2502,6 @@ abstract-leveldown@~2.6.0: dependencies: xtend "~4.0.0" 
-abstract-leveldown@~6.2.1: - version "6.2.3" - resolved "https://registry.yarnpkg.com/abstract-leveldown/-/abstract-leveldown-6.2.3.tgz#036543d87e3710f2528e47040bc3261b77a9a8eb" - integrity sha512-BsLm5vFMRUrrLeCcRc+G0t2qOaTzpoJQLOubq2XM72eNpjF5UdU5o/5NvlNhx95XHcAvcl8OMXr4mlg/fRgUXQ== - dependencies: - buffer "^5.5.0" - immediate "^3.2.3" - level-concat-iterator "~2.0.0" - level-supports "~1.0.0" - xtend "~4.0.0" - accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.7: version "1.3.7" resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.7.tgz#531bc726517a3b2b41f850021c6cc15eaab507cd" @@ -2712,6 +2557,14 @@ agent-base@6: dependencies: debug "4" +aggregate-error@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/aggregate-error/-/aggregate-error-3.1.0.tgz#92670ff50f5359bdb7a3e0d40d0ec30c5737687a" + integrity sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA== + dependencies: + clean-stack "^2.0.0" + indent-string "^4.0.0" + ajv-errors@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/ajv-errors/-/ajv-errors-1.0.1.tgz#f35986aceb91afadec4102fbd85014950cefa64d" @@ -2732,10 +2585,10 @@ ajv@^6.1.0, ajv@^6.10.0, ajv@^6.10.2, ajv@^6.12.3, ajv@^6.12.4, ajv@^6.6.1, ajv@ json-schema-traverse "^0.4.1" uri-js "^4.2.2" -ajv@^8.0.1: - version "8.5.0" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-8.5.0.tgz#695528274bcb5afc865446aa275484049a18ae4b" - integrity sha512-Y2l399Tt1AguU3BPRP9Fn4eN+Or+StUGWCUpbnFyXSo8NZ9S4uj+AG2pjs5apK+ZMOwYOz1+a+VKvKH7CudXgQ== +ajv@^7.0.2: + version "7.1.0" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-7.1.0.tgz#f982ea7933dc7f1012eae9eec5a86687d805421b" + integrity sha512-svS9uILze/cXbH0z2myCK2Brqprx/+JJYK5pHicT/GQiBfzzhUVAIT6MwqJg8y4xV/zoGsUeuPuwtoiKSGE15g== dependencies: fast-deep-equal "^3.1.1" json-schema-traverse "^1.0.0" @@ -2773,11 +2626,11 @@ ansi-escapes@^3.2.0: integrity 
sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ== ansi-escapes@^4.3.0: - version "4.3.2" - resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" - integrity sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ== + version "4.3.1" + resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.1.tgz#a5c47cc43181f1f38ffd7076837700d395522a61" + integrity sha512-JWF7ocqNrp8u9oqpgV+wH5ftbt+cfvv+PTjOvKLT3AdYly/LmORARfEVT1iyjwN+4MqE5UmVKoAdIBqeoCHgLA== dependencies: - type-fest "^0.21.3" + type-fest "^0.11.0" ansi-html@0.0.7: version "0.0.7" @@ -2829,9 +2682,9 @@ antlr4@4.7.1: integrity sha512-haHyTW7Y9joE5MVs37P2lNYfU2RWBLfcRDD8OWldcdZm5TiCE91B5Xl1oWSwiDUSd4rlExpt2pu1fksYQjRBYQ== antlr4@^4.7.1: - version "4.9.2" - resolved "https://registry.yarnpkg.com/antlr4/-/antlr4-4.9.2.tgz#abbc53d275954b1b6f4d8b3468b4a2cb258121fc" - integrity sha512-UjMSlenUORL+a+6g4RNZxRh5LcFWybRi2g0ASDBpgXBY6nlavg0BRVAVEQF0dz8jH6SyX3lV7uP5y/krJzc+Hw== + version "4.9.1" + resolved "https://registry.yarnpkg.com/antlr4/-/antlr4-4.9.1.tgz#cd9cc8d96c9d8bc5b758fe9e02f4066f46fc287e" + integrity sha512-sBI/Pf2aF5leFoAqx3S0if6IGX4vKDsWPHkVt0PfvZb8BnxrrVkNP4Tczc5cp/ZduhfNXvdQTOKRy7ihbql76g== any-promise@^1.0.0: version "1.3.0" @@ -2846,10 +2699,10 @@ anymatch@^2.0.0: micromatch "^3.1.4" normalize-path "^2.1.1" -anymatch@^3.0.0, anymatch@~3.1.1: - version "3.1.2" - resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.2.tgz#c0557c096af32f106198f4f4e2a383537e378716" - integrity sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg== +anymatch@~3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.1.tgz#c55ecf02185e2469259399310c173ce31233b142" + integrity sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg== dependencies: 
normalize-path "^3.0.0" picomatch "^2.0.4" @@ -3015,7 +2868,7 @@ async-each@^1.0.1: resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.3.tgz#b727dbf87d7651602f06f4d4ac387f47d91b0cbf" integrity sha512-z/WhQ5FPySLdvREByI2vZiTWwCnF0moMJ1hK9YQwDTHKh6I7/uSckMetoRGb5UBZPC1z0jlw+n/XCgjeH7y1AQ== -async-eventemitter@^0.2.2, async-eventemitter@^0.2.4: +async-eventemitter@^0.2.2: version "0.2.4" resolved "https://registry.yarnpkg.com/async-eventemitter/-/async-eventemitter-0.2.4.tgz#f5e7c8ca7d3e46aab9ec40a292baf686a0bafaca" integrity sha512-pd20BwL7Yt1zwDFy+8MX8F1+WCT8aQeKj0kQnTrH9WaeRETlRamVhD0JtRPmrV4GfOJ2F9CvdQkZeZhnh2TuHw== @@ -3303,30 +3156,6 @@ babel-plugin-dynamic-import-node@^2.3.3: dependencies: object.assign "^4.1.0" -babel-plugin-polyfill-corejs2@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.2.0.tgz#686775bf9a5aa757e10520903675e3889caeedc4" - integrity sha512-9bNwiR0dS881c5SHnzCmmGlMkJLl0OUZvxrxHo9w/iNoRuqaPjqlvBf4HrovXtQs/au5yKkpcdgfT1cC5PAZwg== - dependencies: - "@babel/compat-data" "^7.13.11" - "@babel/helper-define-polyfill-provider" "^0.2.0" - semver "^6.1.1" - -babel-plugin-polyfill-corejs3@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.2.0.tgz#f4b4bb7b19329827df36ff56f6e6d367026cb7a2" - integrity sha512-zZyi7p3BCUyzNxLx8KV61zTINkkV65zVkDAFNZmrTCRVhjo1jAS+YLvDJ9Jgd/w2tsAviCwFHReYfxO3Iql8Yg== - dependencies: - "@babel/helper-define-polyfill-provider" "^0.2.0" - core-js-compat "^3.9.1" - -babel-plugin-polyfill-regenerator@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.2.0.tgz#853f5f5716f4691d98c84f8069c7636ea8da7ab8" - integrity sha512-J7vKbCuD2Xi/eEHxquHN14bXAW9CXtecwuLrOIDJtcZzTaPzV1VdEfoUf9AzcRBMolKUQKM9/GVojeh0hFiqMg== - dependencies: - "@babel/helper-define-polyfill-provider" "^0.2.0" 
- babel-plugin-syntax-async-functions@^6.8.0: version "6.13.0" resolved "https://registry.yarnpkg.com/babel-plugin-syntax-async-functions/-/babel-plugin-syntax-async-functions-6.13.0.tgz#cad9cad1191b5ad634bf30ae0872391e0647be95" @@ -3679,9 +3508,9 @@ backoff@^2.5.0: precond "0.2" balanced-match@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" - integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== + version "1.0.0" + resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" + integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c= base-x@^3.0.2, base-x@^3.0.8: version "3.0.8" @@ -3809,14 +3638,14 @@ bn.js@4.11.6: integrity sha1-UzRK2xRhehP26N0s4okF0cC6MhU= bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.10.0, bn.js@^4.11.0, bn.js@^4.11.1, bn.js@^4.11.6, bn.js@^4.11.8, bn.js@^4.11.9, bn.js@^4.4.0, bn.js@^4.8.0: - version "4.12.0" - resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.12.0.tgz#775b3f278efbb9718eec7361f483fb36fbbfea88" - integrity sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA== + version "4.11.9" + resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.11.9.tgz#26d556829458f9d1e81fc48952493d0ba3507828" + integrity sha512-E6QoYqCKZfgatHTdHzs1RRKP7ip4vvm+EyRUeE2RF0NblwVvb0p6jSVeNTOFxPn26QXN2o6SMfNxKp6kU8zQaw== bn.js@^5.0.0, bn.js@^5.1.1, bn.js@^5.1.2: - version "5.2.0" - resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-5.2.0.tgz#358860674396c6997771a9d051fcc1b57d4ae002" - integrity sha512-D7iWRBvnZE8ecXiLj/9wbxH7Tk79fAh8IHaTNq1RWRixsS02W+5qS+iE9yq6RYl0asXx5tw0bLhmT5pIfbSquw== + version "5.1.3" + resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-5.1.3.tgz#beca005408f642ebebea80b042b4d18d2ac0ee6b" + integrity sha512-GkTiFpjFtUzU9CbMeJ5iazkCzGL3jrhzerzZIuqLABjbwRaFt33I9tUdSNryIptM+RxDet6OKm2WnLXzW51KsQ== 
body-parser@1.19.0, body-parser@^1.16.0: version "1.19.0" @@ -3977,16 +3806,16 @@ browserslist@^3.2.6: caniuse-lite "^1.0.30000844" electron-to-chromium "^1.3.47" -browserslist@^4.0.0, browserslist@^4.12.0, browserslist@^4.14.5, browserslist@^4.16.6: - version "4.16.6" - resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.16.6.tgz#d7901277a5a88e554ed305b183ec9b0c08f66fa2" - integrity sha512-Wspk/PqO+4W9qp5iUTJsa1B/QrYn1keNCcEP5OvP7WBwT4KaDly0uONYmC6Xa3Z5IqnUgS0KcgLYu1l74x0ZXQ== +browserslist@^4.0.0, browserslist@^4.12.0, browserslist@^4.14.5, browserslist@^4.16.1: + version "4.16.3" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.16.3.tgz#340aa46940d7db878748567c5dea24a48ddf3717" + integrity sha512-vIyhWmIkULaq04Gt93txdh+j02yX/JzlyhLYbV3YQCn/zvES3JnY7TifHHvvr1w5hTDluNKMkV05cs4vy8Q7sw== dependencies: - caniuse-lite "^1.0.30001219" - colorette "^1.2.2" - electron-to-chromium "^1.3.723" + caniuse-lite "^1.0.30001181" + colorette "^1.2.1" + electron-to-chromium "^1.3.649" escalade "^3.1.1" - node-releases "^1.1.71" + node-releases "^1.1.70" bs58@^4.0.0: version "4.0.1" @@ -4126,6 +3955,30 @@ cacache@^12.0.2, cacache@^12.0.3: unique-filename "^1.1.1" y18n "^4.0.0" +cacache@^13.0.1: + version "13.0.1" + resolved "https://registry.yarnpkg.com/cacache/-/cacache-13.0.1.tgz#a8000c21697089082f85287a1aec6e382024a71c" + integrity sha512-5ZvAxd05HDDU+y9BVvcqYu2LLXmPnQ0hW62h32g4xBTgL/MppR4/04NHfj/ycM2y6lmTnbw6HVi+1eN0Psba6w== + dependencies: + chownr "^1.1.2" + figgy-pudding "^3.5.1" + fs-minipass "^2.0.0" + glob "^7.1.4" + graceful-fs "^4.2.2" + infer-owner "^1.0.4" + lru-cache "^5.1.1" + minipass "^3.0.0" + minipass-collect "^1.0.2" + minipass-flush "^1.0.5" + minipass-pipeline "^1.2.2" + mkdirp "^0.5.1" + move-concurrently "^1.0.1" + p-map "^3.0.0" + promise-inflight "^1.0.1" + rimraf "^2.7.1" + ssri "^7.0.0" + unique-filename "^1.1.1" + cache-base@^1.0.1: version "1.0.1" resolved 
"https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2" @@ -4257,15 +4110,15 @@ caniuse-api@^3.0.0: lodash.memoize "^4.1.2" lodash.uniq "^4.5.0" -caniuse-lite@^1.0.0, caniuse-lite@^1.0.30000844, caniuse-lite@^1.0.30001109, caniuse-lite@^1.0.30001219: - version "1.0.30001228" - resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001228.tgz#bfdc5942cd3326fa51ee0b42fbef4da9d492a7fa" - integrity sha512-QQmLOGJ3DEgokHbMSA8cj2a+geXqmnpyOFT0lhQV6P3/YOJvGDEwoedcwxEQ30gJIwIIunHIicunJ2rzK5gB2A== +caniuse-lite@^1.0.0, caniuse-lite@^1.0.30000844, caniuse-lite@^1.0.30001109, caniuse-lite@^1.0.30001181: + version "1.0.30001187" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001187.tgz#5706942631f83baa5a0218b7dfa6ced29f845438" + integrity sha512-w7/EP1JRZ9552CyrThUnay2RkZ1DXxKe/Q2swTC4+LElLh9RRYrL1Z+27LlakB8kzY0fSmHw9mc7XYDUKAKWMA== case-sensitive-paths-webpack-plugin@^2.3.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/case-sensitive-paths-webpack-plugin/-/case-sensitive-paths-webpack-plugin-2.4.0.tgz#db64066c6422eed2e08cc14b986ca43796dbc6d4" - integrity sha512-roIFONhcxog0JSSWbvVAh3OocukmSgpqOH6YpMkCvav/ySIV3JKg4Dc8vYtQjYi/UxpNE36r/9v+VqTQqgkYmw== + version "2.3.0" + resolved "https://registry.yarnpkg.com/case-sensitive-paths-webpack-plugin/-/case-sensitive-paths-webpack-plugin-2.3.0.tgz#23ac613cc9a856e4f88ff8bb73bbb5e989825cf7" + integrity sha512-/4YgnZS8y1UXXmC02xD5rRrBEu6T5ub+mQHLNRj0fzTRbgdBYhsNo2V5EqwgqrExjxsjtF/OpAKAMkKsxbD5XQ== caseless@~0.12.0: version "0.12.0" @@ -4288,15 +4141,15 @@ chai-as-promised@^7.1.1: check-error "^1.0.2" chai@^4.2.0: - version "4.3.4" - resolved "https://registry.yarnpkg.com/chai/-/chai-4.3.4.tgz#b55e655b31e1eac7099be4c08c21964fce2e6c49" - integrity sha512-yS5H68VYOCtN1cjfwumDSuzn/9c+yza4f3reKXlE5rUg7SFcCEy90gJvydNgOYtblyf4Zi6jIWRnXOgErta0KA== + version "4.3.0" + resolved 
"https://registry.yarnpkg.com/chai/-/chai-4.3.0.tgz#5523a5faf7f819c8a92480d70a8cccbadacfc25f" + integrity sha512-/BFd2J30EcOwmdOgXvVsmM48l0Br0nmZPlO0uOW4XKh6kpsUumRXBgPV+IlaqFaqr9cYbeoZAM1Npx0i4A+aiA== dependencies: assertion-error "^1.1.0" check-error "^1.0.2" deep-eql "^3.0.1" get-func-name "^2.0.0" - pathval "^1.1.1" + pathval "^1.1.0" type-detect "^4.0.5" chalk@^1.1.1, chalk@^1.1.3: @@ -4320,9 +4173,9 @@ chalk@^2.0.0, chalk@^2.0.1, chalk@^2.1.0, chalk@^2.3.0, chalk@^2.4.1, chalk@^2.4 supports-color "^5.3.0" chalk@^4.0.0, chalk@^4.1.0: - version "4.1.1" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.1.tgz#c80b3fab28bf6371e6863325eee67e618b77e6ad" - integrity sha512-diHzdDKxcU+bAsUboHLPEDQiw0qEe0qd7SYUn3HgcFlWgbDcfLGswOHYeGrHKzG9z6UYf01d9VFMfZxPM1xZSg== + version "4.1.0" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.0.tgz#4e14870a618d9e2edd97dd8345fd9d9dc315646a" + integrity sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A== dependencies: ansi-styles "^4.1.0" supports-color "^7.1.0" @@ -4403,15 +4256,17 @@ chokidar@^2.1.8: optionalDependencies: fsevents "^1.2.7" -chownr@^1.1.1: +chownr@^1.1.1, chownr@^1.1.2: version "1.1.4" resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.4.tgz#6fc9d7b42d32a583596337666e7d08084da2cc6b" integrity sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg== chrome-trace-event@^1.0.2: - version "1.0.3" - resolved "https://registry.yarnpkg.com/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz#1015eced4741e15d06664a957dbbf50d041e26ac" - integrity sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg== + version "1.0.2" + resolved "https://registry.yarnpkg.com/chrome-trace-event/-/chrome-trace-event-1.0.2.tgz#234090ee97c7d4ad1a2c4beae27505deffc608a4" + integrity sha512-9e/zx1jw7B4CO+c/RXoCsfg/x1AfUBioy4owYH0bJprEYAx5hRFLRhWBqHAG57D0ZM4H7vxbP7bPe0VwhQRYDQ== + dependencies: + tslib 
"^1.9.0" ci-info@^2.0.0: version "2.0.0" @@ -4459,6 +4314,11 @@ clean-css@4.2.x: dependencies: source-map "~0.6.0" +clean-stack@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.2.0.tgz#ee8472dbb129e727b31e8a10a427dee9dfe4008b" + integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A== + cli-cursor@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-2.1.0.tgz#b35dac376479facc3e94747d41d0d0f5238ffcb5" @@ -4467,21 +4327,21 @@ cli-cursor@^2.1.0: restore-cursor "^2.0.0" cli-highlight@^2.1.4: - version "2.1.11" - resolved "https://registry.yarnpkg.com/cli-highlight/-/cli-highlight-2.1.11.tgz#49736fa452f0aaf4fae580e30acb26828d2dc1bf" - integrity sha512-9KDcoEVwyUXrjcJNvHD0NFc/hiwe/WPVYIleQh2O1N2Zro5gWJZ/K+3DGn8w8P/F6FxOgzyC5bxDyHIgCSPhGg== + version "2.1.10" + resolved "https://registry.yarnpkg.com/cli-highlight/-/cli-highlight-2.1.10.tgz#26a087da9209dce4fcb8cf5427dc97cd96ac173a" + integrity sha512-CcPFD3JwdQ2oSzy+AMG6j3LRTkNjM82kzcSKzoVw6cLanDCJNlsLjeqVTOTfOfucnWv5F0rmBemVf1m9JiIasw== dependencies: chalk "^4.0.0" - highlight.js "^10.7.1" + highlight.js "^10.0.0" mz "^2.4.0" parse5 "^5.1.1" parse5-htmlparser2-tree-adapter "^6.0.0" yargs "^16.0.0" cli-spinners@^2.0.0: - version "2.6.0" - resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-2.6.0.tgz#36c7dc98fb6a9a76bd6238ec3f77e2425627e939" - integrity sha512-t+4/y50K/+4xcCRosKkA7W4gTr1MySvLV0q+PxmG7FJ5g+66ChKurYjxBCjHggHH3HA5Hh9cy+lcUGWDqVH+4Q== + version "2.5.0" + resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-2.5.0.tgz#12763e47251bf951cb75c201dfa58ff1bcb2d047" + integrity sha512-PC+AmIuK04E6aeSs/pUccSujsTzBhu4HzC2dL+CfJB/Jcc2qTRbEwZQDfIUpt2Xl8BodYBEq8w4fc0kU2I9DjQ== cli-table3@^0.6.0: version "0.6.0" @@ -4616,9 +4476,9 @@ color-name@^1.0.0, color-name@~1.1.4: integrity 
sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== color-string@^1.5.4: - version "1.5.5" - resolved "https://registry.yarnpkg.com/color-string/-/color-string-1.5.5.tgz#65474a8f0e7439625f3d27a6a19d89fc45223014" - integrity sha512-jgIoum0OfQfq9Whcfc2z/VhCNcmQjWbey6qBX0vqt7YICflUmBCh9E9CiQD5GSJ+Uehixm3NUwHVhqUAWRivZg== + version "1.5.4" + resolved "https://registry.yarnpkg.com/color-string/-/color-string-1.5.4.tgz#dd51cd25cfee953d138fe4002372cc3d0e504cb6" + integrity sha512-57yF5yt8Xa3czSEW1jfQDE79Idk0+AkN/4KWad6tbdxUmAs3MvjxlWSWD4deYytcRfoZ9nhKyFl1kj5tBvidbw== dependencies: color-name "^1.0.0" simple-swizzle "^0.2.2" @@ -4631,10 +4491,10 @@ color@^3.0.0: color-convert "^1.9.1" color-string "^1.5.4" -colorette@^1.1.0, colorette@^1.2.1, colorette@^1.2.2: - version "1.2.2" - resolved "https://registry.yarnpkg.com/colorette/-/colorette-1.2.2.tgz#cbcc79d5e99caea2dbf10eb3a26fd8b3e6acfa94" - integrity sha512-MKGMzyfeuutC/ZJ1cba9NqcNpfeqMUcYmyF1ZFY6/Cn7CNSAKx6a+s48sqLqyAiZuaP2TcqMhoo+dlwFnVxT9w== +colorette@^1.1.0, colorette@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/colorette/-/colorette-1.2.1.tgz#4d0b921325c14faf92633086a536db6e89564b1b" + integrity sha512-puCDz0CzydiSYOrnXpz/PKd69zRrribezjtE9yd4zvytoRc8+RY/KJPvtPFKZS3E3wP6neGyMe0vOTlHO5L3Pw== colors@^1.1.2, colors@^1.4.0: version "1.4.0" @@ -4859,18 +4719,18 @@ copy-webpack-plugin@^5.1.1: serialize-javascript "^4.0.0" webpack-log "^2.0.0" -core-js-compat@^3.6.5, core-js-compat@^3.9.0, core-js-compat@^3.9.1: - version "3.12.1" - resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.12.1.tgz#2c302c4708505fa7072b0adb5156d26f7801a18b" - integrity sha512-i6h5qODpw6EsHAoIdQhKoZdWn+dGBF3dSS8m5tif36RlWvW3A6+yu2S16QHUo3CrkzrnEskMAt9f8FxmY9fhWQ== +core-js-compat@^3.6.5, core-js-compat@^3.8.0: + version "3.8.3" + resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.8.3.tgz#9123fb6b9cad30f0651332dc77deba48ef9b0b3f" + integrity 
sha512-1sCb0wBXnBIL16pfFG1Gkvei6UzvKyTNYpiC41yrdjEv0UoJoq9E/abTMzyYJ6JpTkAj15dLjbqifIzEBDVvog== dependencies: - browserslist "^4.16.6" + browserslist "^4.16.1" semver "7.0.0" core-js-pure@^3.0.1: - version "3.12.1" - resolved "https://registry.yarnpkg.com/core-js-pure/-/core-js-pure-3.12.1.tgz#934da8b9b7221e2a2443dc71dfa5bd77a7ea00b8" - integrity sha512-1cch+qads4JnDSWsvc7d6nzlKAippwjUlf6vykkTLW53VSV+NkE6muGBToAjEA8pG90cSfcud3JgVmW2ds5TaQ== + version "3.8.3" + resolved "https://registry.yarnpkg.com/core-js-pure/-/core-js-pure-3.8.3.tgz#10e9e3b2592ecaede4283e8f3ad7020811587c02" + integrity sha512-V5qQZVAr9K0xu7jXg1M7qTEwuxUgqr7dUOezGaNa7i+Xn9oXAU/d1fzqD9ObuwpVQOaorO5s70ckyi1woP9lVA== core-js@^2.4.0, core-js@^2.5.0: version "2.6.12" @@ -4878,9 +4738,9 @@ core-js@^2.4.0, core-js@^2.5.0: integrity sha512-Kb2wC0fvsWfQrgk8HU5lW6U/Lcs8+9aaYcy4ZFc6DDlo4nZ7n70dEgE5rtR0oG6ufKDUnrwfWL1mXR5ljDatrQ== core-js@^3.3.2, core-js@^3.6.5: - version "3.12.1" - resolved "https://registry.yarnpkg.com/core-js/-/core-js-3.12.1.tgz#6b5af4ff55616c08a44d386f1f510917ff204112" - integrity sha512-Ne9DKPHTObRuB09Dru5AjwKjY4cJHVGu+y5f7coGn1E9Grkc3p2iBwE9AI/nJzsE29mQF7oq+mhYYRqOMFN1Bw== + version "3.8.3" + resolved "https://registry.yarnpkg.com/core-js/-/core-js-3.8.3.tgz#c21906e1f14f3689f93abcc6e26883550dd92dd0" + integrity sha512-KPYXeVZYemC2TkNEkX/01I+7yd+nX3KddKwZ1Ww7SKWdI2wQprSgLmrTddT8nw92AjEklTsPBoSdQBhbI1bQ6Q== core-util-is@1.0.2, core-util-is@~1.0.0: version "1.0.2" @@ -4905,14 +4765,6 @@ cosmiconfig@^5.0.0, cosmiconfig@^5.0.7: js-yaml "^3.13.1" parse-json "^4.0.0" -crc-32@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/crc-32/-/crc-32-1.2.0.tgz#cb2db6e29b88508e32d9dd0ec1693e7b41a18208" - integrity sha512-1uBwHxF+Y/4yF5G48fwnKq6QsIXheor3ZLPT80yGBV1oEUwpPojlEhQbWKVw1VwcTQyMGHK1/XMmTjmlsmTTGA== - dependencies: - exit-on-epipe "~1.0.1" - printj "~1.1.0" - create-ecdh@^4.0.0: version "4.0.4" resolved 
"https://registry.yarnpkg.com/create-ecdh/-/create-ecdh-4.0.4.tgz#d6e7f4bffa66736085a0762fd3a632684dabcc4e" @@ -5058,9 +4910,9 @@ css-tree@1.0.0-alpha.37: source-map "^0.6.1" css-tree@^1.1.2: - version "1.1.3" - resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-1.1.3.tgz#eb4870fb6fd7707327ec95c2ff2ab09b5e8db91d" - integrity sha512-tRpdppF7TRazZrjJ6v3stzv93qxRcSsFmW6cX0Zm2NVKpxE1WV1HblnghVv9TreireHkqI/VDEsfolRF1p6y7Q== + version "1.1.2" + resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-1.1.2.tgz#9ae393b5dafd7dae8a622475caec78d3d8fbd7b5" + integrity sha512-wCoWush5Aeo48GLhfHPbmvZs59Z+M7k5+B1xDnXbdWNcEF423DoFdqSWE0PM5aNk5nI5cp1q7ms36zGApY/sKQ== dependencies: mdn-data "2.0.14" source-map "^0.6.1" @@ -5075,10 +4927,10 @@ cssesc@^3.0.0: resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee" integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== -cssnano-preset-default@^4.0.0, cssnano-preset-default@^4.0.8: - version "4.0.8" - resolved "https://registry.yarnpkg.com/cssnano-preset-default/-/cssnano-preset-default-4.0.8.tgz#920622b1fc1e95a34e8838203f1397a504f2d3ff" - integrity sha512-LdAyHuq+VRyeVREFmuxUZR1TXjQm8QQU/ktoo/x7bz+SdOge1YKc5eMN6pRW7YWBmyq59CqYba1dJ5cUukEjLQ== +cssnano-preset-default@^4.0.0, cssnano-preset-default@^4.0.7: + version "4.0.7" + resolved "https://registry.yarnpkg.com/cssnano-preset-default/-/cssnano-preset-default-4.0.7.tgz#51ec662ccfca0f88b396dcd9679cdb931be17f76" + integrity sha512-x0YHHx2h6p0fCl1zY9L9roD7rnlltugGu7zXSKQx6k2rYw0Hi3IqxcoAGF7u9Q5w1nt7vK0ulxV8Lo+EvllGsA== dependencies: css-declaration-sorter "^4.0.1" cssnano-util-raw-cache "^4.0.1" @@ -5108,7 +4960,7 @@ cssnano-preset-default@^4.0.0, cssnano-preset-default@^4.0.8: postcss-ordered-values "^4.1.2" postcss-reduce-initial "^4.0.3" postcss-reduce-transforms "^4.0.2" - postcss-svgo "^4.0.3" + postcss-svgo "^4.0.2" postcss-unique-selectors "^4.0.1" 
cssnano-util-get-arguments@^4.0.0: @@ -5134,12 +4986,12 @@ cssnano-util-same-parent@^4.0.0: integrity sha512-WcKx5OY+KoSIAxBW6UBBRay1U6vkYheCdjyVNDm85zt5K9mHoGOfsOsqIszfAqrQQFIIKgjh2+FDgIj/zsl21Q== cssnano@^4.0.0, cssnano@^4.1.10: - version "4.1.11" - resolved "https://registry.yarnpkg.com/cssnano/-/cssnano-4.1.11.tgz#c7b5f5b81da269cb1fd982cb960c1200910c9a99" - integrity sha512-6gZm2htn7xIPJOHY824ERgj8cNPgPxyCSnkXc4v7YvNW+TdVfzgngHcEhy/8D11kUWRUMbke+tC+AUcUsnMz2g== + version "4.1.10" + resolved "https://registry.yarnpkg.com/cssnano/-/cssnano-4.1.10.tgz#0ac41f0b13d13d465487e111b778d42da631b8b2" + integrity sha512-5wny+F6H4/8RgNlaqab4ktc3e0/blKutmq8yNlBFXA//nSFFAqAngjNVRzUvCgYROULmZZUoosL/KSoZo5aUaQ== dependencies: cosmiconfig "^5.0.0" - cssnano-preset-default "^4.0.8" + cssnano-preset-default "^4.0.7" is-resolvable "^1.0.0" postcss "^7.0.0" @@ -5330,14 +5182,6 @@ deferred-leveldown@~4.0.0: abstract-leveldown "~5.0.0" inherits "^2.0.3" -deferred-leveldown@~5.3.0: - version "5.3.0" - resolved "https://registry.yarnpkg.com/deferred-leveldown/-/deferred-leveldown-5.3.0.tgz#27a997ad95408b61161aa69bd489b86c71b78058" - integrity sha512-a59VOT+oDy7vtAbLRCZwWgxu2BaCfd5Hk7wxJd48ei7I+nsg8Orlb9CLG0PMZienk9BSUKgeAqkO2+Lw+1+Ukw== - dependencies: - abstract-leveldown "~6.2.1" - inherits "^2.0.3" - define-properties@^1.1.2, define-properties@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.3.tgz#cf88da6cbee26fe6db7094f61d870cbd84cee9f1" @@ -5421,9 +5265,9 @@ detect-indent@^4.0.0: repeating "^2.0.0" detect-node@^2.0.4: - version "2.1.0" - resolved "https://registry.yarnpkg.com/detect-node/-/detect-node-2.1.0.tgz#c9c70775a49c3d03bc2c06d9a73be550f978f8b1" - integrity sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g== + version "2.0.4" + resolved "https://registry.yarnpkg.com/detect-node/-/detect-node-2.0.4.tgz#014ee8f8f669c5c58023da64b8179c083a28c46c" + integrity 
sha512-ZIzRpLJrOj7jjP2miAtgqIfmzbxa4ZOr5jJc601zklsfEx9oTzmmj2nVpIPRpNlRTIh8lc1kyViIY7BWSGNmKw== diff@3.5.0: version "3.5.0" @@ -5526,9 +5370,9 @@ domelementtype@1, domelementtype@^1.3.1: integrity sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w== domelementtype@^2.0.1: - version "2.2.0" - resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-2.2.0.tgz#9a0b6c2782ed6a1c7323d42267183df9bd8b1d57" - integrity sha512-DtBMo82pv1dFtUmHyr48beiuq792Sxohr+8Hm9zoxklYPfa6n0Z3Byjj2IV7bmr2IyqClnqEQhfgHJJ5QF0R5A== + version "2.1.0" + resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-2.1.0.tgz#a851c080a6d1c3d94344aed151d99f669edf585e" + integrity sha512-LsTgx/L5VpD+Q8lmsXSHW2WpA+eBlZ9HPf3erD1IoPF00/3JKHZ3BknUVA2QGDNu69ZNmyFmCWBSO45XjYKC5w== domhandler@^2.3.0: version "2.4.2" @@ -5558,9 +5402,9 @@ dotenv-expand@^5.1.0: integrity sha512-YXQl1DSa4/PQyRfgrv6aoNjhasp/p4qs9FjJ4q4cQk+8m4r6k4ZSiEyytKG8f8W9gi8WsQtIObNmKd+tMzNTmA== dotenv@^8.2.0: - version "8.6.0" - resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-8.6.0.tgz#061af664d19f7f4d8fc6e4ff9b584ce237adcb8b" - integrity sha512-IrPdXQsk2BbzvCBGBOTmmSH5SodmqZNt4ERAZDmW4CT+tL8VtvinqywuANaFu4bOMWki16nqf0e4oC0QIaDr/g== + version "8.2.0" + resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-8.2.0.tgz#97e619259ada750eea3e4ea3e26bceea5424b16a" + integrity sha512-8sJ78ElpbDJBHNeBzUbUVLsqKdccaa/BXF1uPTw3GrvQTBgrQrtObr2mUrE38vzYd8cEv+m/JBfDLioYcfXoaw== dotignore@~0.1.2: version "0.1.2" @@ -5619,10 +5463,10 @@ ejs@^2.6.1: resolved "https://registry.yarnpkg.com/ejs/-/ejs-2.7.4.tgz#48661287573dcc53e366c7a1ae52c3a120eec9ba" integrity sha512-7vmuyh5+kuUyJKePhQfRQBhXV5Ce+RnaeeQArKu1EAMpL3WbgMt5WG6uQZpEVvYSSsxMXRKOewtDk9RaTKXRlA== -electron-to-chromium@^1.3.47, electron-to-chromium@^1.3.723: - version "1.3.734" - resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.734.tgz#c8d318a4eb27509190cf3a08870dbcbf06c74dcb" - integrity 
sha512-iQF2mjPZ6zNNq45kbJ6MYZYCBNdv2JpGiJC/lVx4tGJWi9MNg73KkL9sWGN4X4I/CP2SBLWsT8nPADZZpAHIyw== +electron-to-chromium@^1.3.47, electron-to-chromium@^1.3.649: + version "1.3.665" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.665.tgz#6d0937376f6a919c0f289202c4be77790a6175e5" + integrity sha512-LIjx1JheOz7LM8DMEQ2tPnbBzJ4nVG1MKutsbEMLnJfwfVdPIsyagqfLp56bOWhdBrYGXWHaTayYkllIU2TauA== elliptic@6.5.4, elliptic@^6.4.0, elliptic@^6.5.2, elliptic@^6.5.3: version "6.5.4" @@ -5647,10 +5491,10 @@ emoji-regex@^8.0.0: resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== -emoji-regex@^9.2.2: - version "9.2.2" - resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-9.2.2.tgz#840c8803b0d8047f4ff0cf963176b32d4ef3ed72" - integrity sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg== +emoji-regex@^9.2.1: + version "9.2.1" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-9.2.1.tgz#c9b25604256bb3428964bead3ab63069d736f7ee" + integrity sha512-117l1H6U4X3Krn+MrzYrL57d5H7siRHWraBs7s+LjRuFK7Fe7hJqnJ0skWlinqsycVLU5YAo6L8CsEYQ0V5prg== emojis-list@^2.0.0: version "2.1.0" @@ -5678,16 +5522,6 @@ encoding-down@5.0.4, encoding-down@~5.0.0: level-errors "^2.0.0" xtend "^4.0.1" -encoding-down@^6.3.0: - version "6.3.0" - resolved "https://registry.yarnpkg.com/encoding-down/-/encoding-down-6.3.0.tgz#b1c4eb0e1728c146ecaef8e32963c549e76d082b" - integrity sha512-QKrV0iKR6MZVJV08QY0wp1e7vF6QbhnbQhb07bwpEyuz4uZiZgPlEGdkCROuFkUwdxlFaiPIhjyarH1ee/3vhw== - dependencies: - abstract-leveldown "^6.2.1" - inherits "^2.0.3" - level-codec "^9.0.0" - level-errors "^2.0.0" - encoding@^0.1.11: version "0.1.13" resolved "https://registry.yarnpkg.com/encoding/-/encoding-0.1.13.tgz#56574afdd791f54a8e9b2785c0582a2d26210fa9" @@ -5734,9 +5568,9 
@@ entities@~2.0.0: integrity sha512-MyoZ0jgnLvB2X3Lg5HqpFmn1kybDiIfEQmKzTb5apr51Rb+T3KdmMiqa70T+bhGnyv7bQ6WMj2QMHpGMmlrUYQ== env-paths@^2.2.0: - version "2.2.1" - resolved "https://registry.yarnpkg.com/env-paths/-/env-paths-2.2.1.tgz#420399d416ce1fbe9bc0a07c62fa68d67fd0f8f2" - integrity sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A== + version "2.2.0" + resolved "https://registry.yarnpkg.com/env-paths/-/env-paths-2.2.0.tgz#cdca557dc009152917d6166e2febe1f039685e43" + integrity sha512-6u0VYSCo/OW6IoD5WCLLy9JUGARbamfSavcNXry/eu8aHVFei6CD3Sw+VGX5alea1i9pgPHW0mbu6Xj0uBh7gA== errno@^0.1.3, errno@~0.1.1, errno@~0.1.7: version "0.1.8" @@ -5759,27 +5593,42 @@ error-stack-parser@^2.0.2: dependencies: stackframe "^1.1.1" -es-abstract@^1.17.0-next.0, es-abstract@^1.17.2, es-abstract@^1.18.0-next.2: - version "1.18.0" - resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.18.0.tgz#ab80b359eecb7ede4c298000390bc5ac3ec7b5a4" - integrity sha512-LJzK7MrQa8TS0ja2w3YNLzUgJCGPdPOV1yVvezjNnS89D+VR08+Szt2mz3YB2Dck/+w5tfIq/RoUAFqJJGM2yw== +es-abstract@^1.17.0-next.0, es-abstract@^1.17.2: + version "1.17.7" + resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.17.7.tgz#a4de61b2f66989fc7421676c1cb9787573ace54c" + integrity sha512-VBl/gnfcJ7OercKA9MVaegWsBHFjV492syMudcnQZvt/Dw8ezpcOHYZXa/J96O8vx+g4x65YKhxOwDUh63aS5g== + dependencies: + es-to-primitive "^1.2.1" + function-bind "^1.1.1" + has "^1.0.3" + has-symbols "^1.0.1" + is-callable "^1.2.2" + is-regex "^1.1.1" + object-inspect "^1.8.0" + object-keys "^1.1.1" + object.assign "^4.1.1" + string.prototype.trimend "^1.0.1" + string.prototype.trimstart "^1.0.1" + +es-abstract@^1.18.0-next.1: + version "1.18.0-next.2" + resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.18.0-next.2.tgz#088101a55f0541f595e7e057199e27ddc8f3a5c2" + integrity sha512-Ih4ZMFHEtZupnUh6497zEL4y2+w8+1ljnCyaTa+adcoafI1GOvMwFlDjBLfWR7y9VLfrjRJe9ocuHY1PSR9jjw== dependencies: 
call-bind "^1.0.2" es-to-primitive "^1.2.1" function-bind "^1.1.1" - get-intrinsic "^1.1.1" + get-intrinsic "^1.0.2" has "^1.0.3" - has-symbols "^1.0.2" - is-callable "^1.2.3" + has-symbols "^1.0.1" + is-callable "^1.2.2" is-negative-zero "^2.0.1" - is-regex "^1.1.2" - is-string "^1.0.5" + is-regex "^1.1.1" object-inspect "^1.9.0" object-keys "^1.1.1" object.assign "^4.1.2" - string.prototype.trimend "^1.0.4" - string.prototype.trimstart "^1.0.4" - unbox-primitive "^1.0.0" + string.prototype.trimend "^1.0.3" + string.prototype.trimstart "^1.0.3" es-to-primitive@^1.2.1: version "1.2.1" @@ -5882,9 +5731,9 @@ eslint-visitor-keys@^1.0.0, eslint-visitor-keys@^1.1.0, eslint-visitor-keys@^1.3 integrity sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ== eslint-visitor-keys@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz#f65328259305927392c938ed44eb0a5c9b2bd303" - integrity sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw== + version "2.0.0" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-2.0.0.tgz#21fdc8fbcd9c795cc0321f0563702095751511a8" + integrity sha512-QudtT6av5WXels9WjIM7qz1XD1cWGvX4gGXvp/zBn9nXG02D0utdU3Em2m/QjTnrsk6bBjmCygl3rmj118msQQ== eslint@^5.6.0: version "5.16.0" @@ -5929,12 +5778,12 @@ eslint@^5.6.0: text-table "^0.2.0" eslint@^7.16.0: - version "7.26.0" - resolved "https://registry.yarnpkg.com/eslint/-/eslint-7.26.0.tgz#d416fdcdcb3236cd8f282065312813f8c13982f6" - integrity sha512-4R1ieRf52/izcZE7AlLy56uIHHDLT74Yzz2Iv2l6kDaYvEu9x+wMB5dZArVL8SYGXSYV2YAg70FcW5Y5nGGNIg== + version "7.20.0" + resolved "https://registry.yarnpkg.com/eslint/-/eslint-7.20.0.tgz#db07c4ca4eda2e2316e7aa57ac7fc91ec550bdc7" + integrity sha512-qGi0CTcOGP2OtCQBgWZlQjcTuP0XkIpYFj25XtRTQSHC+umNnp7UMshr2G8SLsRFYDdAPFeHOsiteadmMH02Yw== dependencies: "@babel/code-frame" "7.12.11" - "@eslint/eslintrc" 
"^0.4.1" + "@eslint/eslintrc" "^0.3.0" ajv "^6.10.0" chalk "^4.0.0" cross-spawn "^7.0.2" @@ -5947,10 +5796,10 @@ eslint@^7.16.0: espree "^7.3.1" esquery "^1.4.0" esutils "^2.0.2" - file-entry-cache "^6.0.1" + file-entry-cache "^6.0.0" functional-red-black-tree "^1.0.1" glob-parent "^5.0.0" - globals "^13.6.0" + globals "^12.1.0" ignore "^4.0.6" import-fresh "^3.0.0" imurmurhash "^0.1.4" @@ -5958,7 +5807,7 @@ eslint@^7.16.0: js-yaml "^3.13.1" json-stable-stringify-without-jsonify "^1.0.1" levn "^0.4.1" - lodash "^4.17.21" + lodash "^4.17.20" minimatch "^3.0.4" natural-compare "^1.4.0" optionator "^0.9.1" @@ -6212,14 +6061,14 @@ ethereum-cryptography@^0.1.2, ethereum-cryptography@^0.1.3: setimmediate "^1.0.5" ethereum-waffle@^3.0.0: - version "3.3.0" - resolved "https://registry.yarnpkg.com/ethereum-waffle/-/ethereum-waffle-3.3.0.tgz#166a0cc1d3b2925f117b20ef0951b3fe72e38e79" - integrity sha512-4xm3RWAPCu5LlaVxYEg0tG3L7g5ovBw1GY/UebrzZ+OTx22vcPjI+bvelFlGBpkdnO5yOIFXjH2eK59tNAe9IA== + version "3.2.2" + resolved "https://registry.yarnpkg.com/ethereum-waffle/-/ethereum-waffle-3.2.2.tgz#dbcdb96ebfa35d4deb6b749906ff7e12f593284f" + integrity sha512-Q8XrcFmQGDKKH0Lr867WA9Rl0oWQGMZcFrFPMV2KBIOkdeQnRlGEJq8RGFxj4MMWWxkoXIoxWgxg7U3qdgddEw== dependencies: - "@ethereum-waffle/chai" "^3.3.0" - "@ethereum-waffle/compiler" "^3.3.0" + "@ethereum-waffle/chai" "^3.2.2" + "@ethereum-waffle/compiler" "^3.2.2" "@ethereum-waffle/mock-contract" "^3.2.2" - "@ethereum-waffle/provider" "^3.3.0" + "@ethereum-waffle/provider" "^3.2.2" ethers "^5.0.1" ethereumjs-abi@0.6.5: @@ -6364,12 +6213,12 @@ ethereumjs-util@^5.0.0, ethereumjs-util@^5.0.1, ethereumjs-util@^5.1.1, ethereum rlp "^2.0.0" safe-buffer "^5.1.1" -ethereumjs-util@^7.0.10, ethereumjs-util@^7.0.2, ethereumjs-util@^7.0.7, ethereumjs-util@^7.0.8, ethereumjs-util@^7.0.9: - version "7.0.10" - resolved "https://registry.yarnpkg.com/ethereumjs-util/-/ethereumjs-util-7.0.10.tgz#5fb7b69fa1fda0acc59634cf39d6b0291180fc1f" - integrity 
sha512-c/xThw6A+EAnej5Xk5kOzFzyoSnw0WX0tSlZ6pAsfGVvQj3TItaDg9b1+Fz1RJXA+y2YksKwQnuzgt1eY6LKzw== +ethereumjs-util@^7.0.2: + version "7.0.8" + resolved "https://registry.yarnpkg.com/ethereumjs-util/-/ethereumjs-util-7.0.8.tgz#5258762b7b17e3d828e41834948363ff0a703ffd" + integrity sha512-JJt7tDpCAmDPw/sGoFYeq0guOVqT3pTE9xlEbBmc/nlCij3JRCoS2c96SQ6kXVHOT3xWUNLDm5QCJLQaUnVAtQ== dependencies: - "@types/bn.js" "^5.1.0" + "@types/bn.js" "^4.11.3" bn.js "^5.1.2" create-hash "^1.1.2" ethereum-cryptography "^0.1.3" @@ -6430,40 +6279,40 @@ ethereumjs-wallet@0.6.5: uuid "^3.3.2" ethers@^5.0.0, ethers@^5.0.1, ethers@^5.0.18, ethers@^5.0.19, ethers@^5.0.2, ethers@^5.0.26: - version "5.1.4" - resolved "https://registry.yarnpkg.com/ethers/-/ethers-5.1.4.tgz#8ae973705ed962f8f41dc59693704002a38dd18b" - integrity sha512-EAPQ/fgGRu0PoR/VNFnHTMOtG/IZ0AItdW55C9T8ffmVu0rnyllZL404eBF66elJehOLz2kxnUrhXpE7TCpW7g== - dependencies: - "@ethersproject/abi" "5.1.2" - "@ethersproject/abstract-provider" "5.1.0" - "@ethersproject/abstract-signer" "5.1.0" - "@ethersproject/address" "5.1.0" - "@ethersproject/base64" "5.1.0" - "@ethersproject/basex" "5.1.0" - "@ethersproject/bignumber" "5.1.1" - "@ethersproject/bytes" "5.1.0" - "@ethersproject/constants" "5.1.0" - "@ethersproject/contracts" "5.1.1" - "@ethersproject/hash" "5.1.0" - "@ethersproject/hdnode" "5.1.0" - "@ethersproject/json-wallets" "5.1.0" - "@ethersproject/keccak256" "5.1.0" - "@ethersproject/logger" "5.1.0" - "@ethersproject/networks" "5.1.0" - "@ethersproject/pbkdf2" "5.1.0" - "@ethersproject/properties" "5.1.0" - "@ethersproject/providers" "5.1.2" - "@ethersproject/random" "5.1.0" - "@ethersproject/rlp" "5.1.0" - "@ethersproject/sha2" "5.1.0" - "@ethersproject/signing-key" "5.1.0" - "@ethersproject/solidity" "5.1.0" - "@ethersproject/strings" "5.1.0" - "@ethersproject/transactions" "5.1.1" - "@ethersproject/units" "5.1.0" - "@ethersproject/wallet" "5.1.0" - "@ethersproject/web" "5.1.0" - "@ethersproject/wordlists" "5.1.0" + version 
"5.0.31" + resolved "https://registry.yarnpkg.com/ethers/-/ethers-5.0.31.tgz#60e3b1425864fe5d2babc147ede01be8382a7d2a" + integrity sha512-zpq0YbNFLFn+t+ibS8UkVWFeK5w6rVMSvbSHrHAQslfazovLnQ/mc2gdN5+6P45/k8fPgHrfHrYvJ4XvyK/S1A== + dependencies: + "@ethersproject/abi" "5.0.12" + "@ethersproject/abstract-provider" "5.0.9" + "@ethersproject/abstract-signer" "5.0.13" + "@ethersproject/address" "5.0.10" + "@ethersproject/base64" "5.0.8" + "@ethersproject/basex" "5.0.8" + "@ethersproject/bignumber" "5.0.14" + "@ethersproject/bytes" "5.0.10" + "@ethersproject/constants" "5.0.9" + "@ethersproject/contracts" "5.0.11" + "@ethersproject/hash" "5.0.11" + "@ethersproject/hdnode" "5.0.9" + "@ethersproject/json-wallets" "5.0.11" + "@ethersproject/keccak256" "5.0.8" + "@ethersproject/logger" "5.0.9" + "@ethersproject/networks" "5.0.8" + "@ethersproject/pbkdf2" "5.0.8" + "@ethersproject/properties" "5.0.8" + "@ethersproject/providers" "5.0.23" + "@ethersproject/random" "5.0.8" + "@ethersproject/rlp" "5.0.8" + "@ethersproject/sha2" "5.0.8" + "@ethersproject/signing-key" "5.0.10" + "@ethersproject/solidity" "5.0.9" + "@ethersproject/strings" "5.0.9" + "@ethersproject/transactions" "5.0.10" + "@ethersproject/units" "5.0.10" + "@ethersproject/wallet" "5.0.11" + "@ethersproject/web" "5.0.13" + "@ethersproject/wordlists" "5.0.9" ethjs-abi@0.2.0: version "0.2.0" @@ -6601,14 +6450,14 @@ eventemitter3@^4.0.0: integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== events@^3.0.0: - version "3.3.0" - resolved "https://registry.yarnpkg.com/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400" - integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q== + version "3.2.0" + resolved "https://registry.yarnpkg.com/events/-/events-3.2.0.tgz#93b87c18f8efcd4202a461aec4dfc0556b639379" + integrity sha512-/46HWwbfCX2xTawVfkKLGxMifJYQBWMwY1mjywRtb4c9x8l5NP3KoJtnIOiL1hfdRkIuYhETxQlo62IF8tcnlg== 
eventsource@^1.0.7: - version "1.1.0" - resolved "https://registry.yarnpkg.com/eventsource/-/eventsource-1.1.0.tgz#00e8ca7c92109e94b0ddf32dac677d841028cfaf" - integrity sha512-VSJjT5oCNrFvCS6igjzPAt5hBzQ2qPBFIbJ03zLI9SE0mxwZpMw6BfJrbFHm1a141AavMEB8JHmBhWAd66PfCg== + version "1.0.7" + resolved "https://registry.yarnpkg.com/eventsource/-/eventsource-1.0.7.tgz#8fbc72c93fcd34088090bc0a4e64f4b5cee6d8d0" + integrity sha512-4Ln17+vVT0k8aWq+t/bF5arcS3EpT9gYtW66EPacdj/mAFevznsnyoHLPy2BA8gbIQeIHoPsvwmfBftfcG//BQ== dependencies: original "^1.0.0" @@ -6649,11 +6498,6 @@ execa@^3.3.0: signal-exit "^3.0.2" strip-final-newline "^2.0.0" -exit-on-epipe@~1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/exit-on-epipe/-/exit-on-epipe-1.0.1.tgz#0bdd92e87d5285d267daa8171d0eb06159689692" - integrity sha512-h2z5mrROTxce56S+pnvAV890uu7ls7f1kEvVGJbw1OlFH3/mlJ5bkXu0KRyW94v37zzHPiUd55iLn3DA7TjWpw== - expand-brackets@^2.1.4: version "2.1.4" resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622" @@ -6815,9 +6659,9 @@ fast-levenshtein@^2.0.6, fast-levenshtein@~2.0.6: integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc= fastq@^1.6.0: - version "1.11.0" - resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.11.0.tgz#bb9fb955a07130a918eb63c1f5161cc32a5d0858" - integrity sha512-7Eczs8gIPDrVzT+EksYBcupqMyxSHXXrHOLRRxU2/DicV8789MRBRR8+Hc2uWzUupOs4YS4JzBmBxjjCVBxD/g== + version "1.10.1" + resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.10.1.tgz#8b8f2ac8bf3632d67afcd65dac248d5fdc45385e" + integrity sha512-AWuv6Ery3pM+dY7LYS8YIaCiQvUaos9OB1RyNgaOWnaX+Tik7Onvcsf8x8c+YtDeT0maYLniBip2hox5KtEXXA== dependencies: reusify "^1.0.4" @@ -6854,10 +6698,10 @@ file-entry-cache@^5.0.1: dependencies: flat-cache "^2.0.1" -file-entry-cache@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" - integrity 
sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg== +file-entry-cache@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.0.tgz#7921a89c391c6d93efec2169ac6bf300c527ea0a" + integrity sha512-fqoO76jZ3ZnYrXLDRxBR1YvOvc0k844kcOg40bgsPrE25LAb/PDqTY+ho64Xh2c8ZXgIKldchCFHczG2UVRcWA== dependencies: flat-cache "^3.0.4" @@ -6981,13 +6825,6 @@ find-yarn-workspace-root@^1.2.1: fs-extra "^4.0.3" micromatch "^3.1.4" -find-yarn-workspace-root@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/find-yarn-workspace-root/-/find-yarn-workspace-root-2.0.0.tgz#f47fb8d239c900eb78179aa81b66673eac88f7bd" - integrity sha512-1IMnbjt4KzsQfnhnzNd8wUEgXZ44IzZaZmnLYx7D5FZlaHt2gW20Cri8Q+E/t5tIj4+epTBub+2Zxu/vNILzqQ== - dependencies: - micromatch "^4.0.2" - flat-cache@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-2.0.1.tgz#5d296d6f04bda44a4630a301413bdbc2ec085ec0" @@ -7048,9 +6885,9 @@ follow-redirects@1.5.10: debug "=3.1.0" follow-redirects@^1.0.0, follow-redirects@^1.10.0, follow-redirects@^1.12.1: - version "1.14.1" - resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.14.1.tgz#d9114ded0a1cfdd334e164e6662ad02bfd91ff43" - integrity sha512-HWqDgT7ZEkqRzBvc2s64vSZ/hfOceEol3ac/7tKwzuvEyWx3/4UegXh5oBOIotkGsObyk3xznnSRVADBgWSQVg== + version "1.13.2" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.13.2.tgz#dd73c8effc12728ba5cf4259d760ea5fb83e3147" + integrity sha512-6mPTgLxYm3r6Bkkg0vNM0HTjfGrOEtsfbhagQvbxDEsEkpNhw582upBaoRZylzen6krEmxXJgt9Ju6HiI4O7BA== for-each@^0.3.3, for-each@~0.3.3: version "0.3.3" @@ -7167,6 +7004,13 @@ fs-minipass@^1.2.5: dependencies: minipass "^2.6.0" +fs-minipass@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-2.1.0.tgz#7f5036fdbf12c63c169190cbe4199c852271f9fb" + integrity 
sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg== + dependencies: + minipass "^3.0.0" + fs-write-stream-atomic@^1.0.8: version "1.0.10" resolved "https://registry.yarnpkg.com/fs-write-stream-atomic/-/fs-write-stream-atomic-1.0.10.tgz#b47df53493ef911df75731e70a9ded0189db40c9" @@ -7225,7 +7069,7 @@ functional-red-black-tree@^1.0.1, functional-red-black-tree@~1.0.1: resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327" integrity sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc= -ganache-core@^2.13.2: +ganache-core@^2.10.2: version "2.13.2" resolved "https://registry.yarnpkg.com/ganache-core/-/ganache-core-2.13.2.tgz#27e6fc5417c10e6e76e2e646671869d7665814a3" integrity sha512-tIF5cR+ANQz0+3pHWxHjIwHqFXcVo0Mb+kcsNhglNFALcYo49aQpnS9dqHartqPfMFjiHh/qFoD3mYK0d/qGgw== @@ -7283,7 +7127,7 @@ gaze@^1.0.0: dependencies: globule "^1.0.0" -gensync@^1.0.0-beta.2: +gensync@^1.0.0-beta.1: version "1.0.0-beta.2" resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== @@ -7303,7 +7147,7 @@ get-func-name@^2.0.0: resolved "https://registry.yarnpkg.com/get-func-name/-/get-func-name-2.0.0.tgz#ead774abee72e20409433a066366023dd6887a41" integrity sha1-6td0q+5y4gQJQzoGY2YCPdaIekE= -get-intrinsic@^1.0.2, get-intrinsic@^1.1.1: +get-intrinsic@^1.0.2: version "1.1.1" resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.1.1.tgz#15f59f376f855c446963948f0d24cd3637b4abc6" integrity sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q== @@ -7362,9 +7206,9 @@ glob-parent@^3.1.0: path-dirname "^1.0.0" glob-parent@^5.0.0, glob-parent@^5.1.0, glob-parent@~5.1.0: - version "5.1.2" - resolved 
"https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" - integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== + version "5.1.1" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.1.tgz#b6c1ef417c4e5663ea498f1c45afac6916bbc229" + integrity sha512-FnI+VGOpnlGHWZxthPGR+QhR78fuiK0sNLkHQv+bL9fQi57lNNdquIbna/WrfROrolq8GK5Ek6BiMwqL/voRYQ== dependencies: is-glob "^4.0.1" @@ -7385,7 +7229,7 @@ glob@7.1.3: once "^1.3.0" path-is-absolute "^1.0.0" -glob@7.1.6: +glob@7.1.6, glob@^7.0.0, glob@^7.0.3, glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6, glob@~7.1.1, glob@~7.1.2, glob@~7.1.6: version "7.1.6" resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6" integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA== @@ -7397,18 +7241,6 @@ glob@7.1.6: once "^1.3.0" path-is-absolute "^1.0.0" -glob@^7.0.0, glob@^7.0.3, glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6, glob@~7.1.1, glob@~7.1.2, glob@~7.1.6: - version "7.1.7" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.7.tgz#3b193e9233f01d42d0b3f78294bbeeb418f94a90" - integrity sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ== - dependencies: - fs.realpath "^1.0.0" - inflight "^1.0.4" - inherits "2" - minimatch "^3.0.4" - once "^1.3.0" - path-is-absolute "^1.0.0" - global@~4.4.0: version "4.4.0" resolved "https://registry.yarnpkg.com/global/-/global-4.4.0.tgz#3e7b105179006a323ed71aafca3e9c57a5cc6406" @@ -7429,13 +7261,6 @@ globals@^12.1.0: dependencies: type-fest "^0.8.1" -globals@^13.6.0: - version "13.8.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-13.8.0.tgz#3e20f504810ce87a8d72e55aecf8435b50f4c1b3" - integrity sha512-rHtdA6+PDBIjeEvA91rpqzEvk/k3/i7EeNQiryiWuJH0Hw9cpyJMAt2jtbAwUaRdhD+573X4vWw6IcjKPasi9Q== - 
dependencies: - type-fest "^0.20.2" - globals@^9.18.0: version "9.18.0" resolved "https://registry.yarnpkg.com/globals/-/globals-9.18.0.tgz#aa3896b3e69b487f17e31ed2143d69a8e30c2d8a" @@ -7456,9 +7281,9 @@ globby@10.0.1: slash "^3.0.0" globby@^11.0.1: - version "11.0.3" - resolved "https://registry.yarnpkg.com/globby/-/globby-11.0.3.tgz#9b1f0cb523e171dd1ad8c7b2a9fb4b644b9593cb" - integrity sha512-ffdmosjA807y7+lA1NM0jELARVmYul/715xiILEjo3hBLPTcirgQNnXECn5g3mtR8TOLCVbkfua1Hpen25/Xcg== + version "11.0.2" + resolved "https://registry.yarnpkg.com/globby/-/globby-11.0.2.tgz#1af538b766a3b540ebfb58a32b2e2d5897321d83" + integrity sha512-2ZThXDvvV8fYFRVIxnrMQBipZQDr7MxKAmQK1vujaj9/7eF0efG7BPUKJ7jP7G5SLF37xKDXvO4S/KKLj/Z0og== dependencies: array-union "^2.1.0" dir-glob "^3.0.1" @@ -7550,7 +7375,7 @@ got@^7.1.0: url-parse-lax "^1.0.0" url-to-options "^1.0.1" -graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.1.9, graceful-fs@^4.2.0: +graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.1.9, graceful-fs@^4.2.0, graceful-fs@^4.2.2: version "4.2.6" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.6.tgz#ff040b2b0853b23c3d31027523706f1885d76bee" integrity sha512-nTnJ528pbqxYanhpDYsi4Rd8MAeaBA67+RZ10CM1m3bTAVFEDcd5AuA4a6W5YkGZ1iNXHzZz8T6TBKLeBuNriQ== @@ -7578,6 +7403,18 @@ handle-thing@^2.0.0: resolved "https://registry.yarnpkg.com/handle-thing/-/handle-thing-2.0.1.tgz#857f79ce359580c340d43081cc648970d0bb234e" integrity sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg== +handlebars@4.7.6: + version "4.7.6" + resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.7.6.tgz#d4c05c1baf90e9945f77aa68a7a219aa4a7df74e" + integrity sha512-1f2BACcBfiwAfStCKZNrUCgqNZkGsAT7UM3kkYtXuLo0KnaVfjKOyf7PRzB6++aK9STyT1Pd2ZCPe3EGOXleXA== + dependencies: + minimist "^1.2.5" + neo-async "^2.6.0" + source-map "^0.6.1" + wordwrap "^1.0.0" + 
optionalDependencies: + uglify-js "^3.1.4" + handlebars@^4.7.6: version "4.7.7" resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.7.7.tgz#9ce33416aad02dbd6c8fafa8240d5d98004945a1" @@ -7604,31 +7441,27 @@ har-validator@~5.1.3: har-schema "^2.0.0" hardhat-contract-sizer@^2.0.2: - version "2.0.3" - resolved "https://registry.yarnpkg.com/hardhat-contract-sizer/-/hardhat-contract-sizer-2.0.3.tgz#604455fd803865f81c29f60364e863eaa19395a7" - integrity sha512-iaixOzWxwOSIIE76cl2uk4m9VXI1hKU3bFt+gl7jDhyb2/JB2xOp5wECkfWqAoc4V5lD4JtjldZlpSTbzX+nPQ== + version "2.0.2" + resolved "https://registry.yarnpkg.com/hardhat-contract-sizer/-/hardhat-contract-sizer-2.0.2.tgz#735e00c4776188683886226b9b37dfe75cbd16fa" + integrity sha512-6vDj3OoqPvzuEnURY2lN6veFXH8uIBRgnaIJoN7cpn7ENuciIR3qotNgLtbC6BNp4y0Cn/8FeMfKS6MdAvIdgQ== dependencies: cli-table3 "^0.6.0" colors "^1.4.0" hardhat-typechain@^0.3.3: - version "0.3.5" - resolved "https://registry.yarnpkg.com/hardhat-typechain/-/hardhat-typechain-0.3.5.tgz#8e50616a9da348b33bd001168c8fda9c66b7b4af" - integrity sha512-w9lm8sxqTJACY+V7vijiH+NkPExnmtiQEjsV9JKD1KgMdVk2q8y+RhvU/c4B7+7b1+HylRUCxpOIvFuB3rE4+w== + version "0.3.4" + resolved "https://registry.yarnpkg.com/hardhat-typechain/-/hardhat-typechain-0.3.4.tgz#6d7d86184152b5a3c83bde0fbbb26224b6fbfd70" + integrity sha512-oI9YSutDfZnRlAV1bYTpLkBHw4rkhz9JDWAEXz2PR34ylpgLWxPFbPiHT5QXadGTlBqJUAT6JSTxCK1kADMkjA== hardhat@^2.0.8: - version "2.3.0" - resolved "https://registry.yarnpkg.com/hardhat/-/hardhat-2.3.0.tgz#5c29f8b4d08155c3dc8c908af9713fd5079522d5" - integrity sha512-nc4ro2bM4wPaA6/0Y22o5F5QrifQk2KCyPUUKLPUeFFZoGNGYB8vmeW/k9gV9DdMukdWTzfYlKc2Jn4bfb6tDQ== - dependencies: - "@ethereumjs/block" "^3.2.1" - "@ethereumjs/blockchain" "^5.2.1" - "@ethereumjs/common" "^2.2.0" - "@ethereumjs/tx" "^3.1.3" - "@ethereumjs/vm" "^5.3.2" + version "2.0.10" + resolved "https://registry.yarnpkg.com/hardhat/-/hardhat-2.0.10.tgz#9b50da13b6915bb9b61b7f38f8f2b9b352447462" + integrity 
sha512-ZAcC+9Nb1AEb22/2hWj/zLPyIRLD9y1O3LW2KhbONpxn1bf0qWLW8QegB9J3KP9Bvt8LbW9pWuSyRQJU0vUWqA== + dependencies: + "@nomiclabs/ethereumjs-vm" "4.2.2" "@sentry/node" "^5.18.1" "@solidity-parser/parser" "^0.11.0" - "@types/bn.js" "^5.1.0" + "@types/bn.js" "^4.11.5" "@types/lru-cache" "^5.1.0" abort-controller "^3.0.0" adm-zip "^0.4.16" @@ -7642,7 +7475,11 @@ hardhat@^2.0.8: eth-sig-util "^2.5.2" ethereum-cryptography "^0.1.2" ethereumjs-abi "^0.6.8" - ethereumjs-util "^7.0.10" + ethereumjs-account "^3.0.0" + ethereumjs-block "^2.2.2" + ethereumjs-common "^1.5.0" + ethereumjs-tx "^2.1.2" + ethereumjs-util "^6.2.0" find-up "^2.1.0" fp-ts "1.19.3" fs-extra "^7.0.1" @@ -7650,8 +7487,7 @@ hardhat@^2.0.8: immutable "^4.0.0-rc.12" io-ts "1.10.4" lodash "^4.17.11" - merkle-patricia-tree "^4.1.0" - mnemonist "^0.38.0" + merkle-patricia-tree "3.0.0" mocha "^7.1.2" node-fetch "^2.6.0" qs "^6.7.0" @@ -7674,11 +7510,6 @@ has-ansi@^2.0.0: dependencies: ansi-regex "^2.0.0" -has-bigints@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.1.tgz#64fe6acb020673e3b78db035a5af69aa9d07b113" - integrity sha512-LSBS2LjbNBTf6287JEbEzvJgftkF5qFkmCo9hDRpAzKhUOlJ+hx8dd4USs00SgsUNwc4617J9ki5YtEClM2ffA== - has-flag@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" @@ -7694,10 +7525,10 @@ has-symbol-support-x@^1.4.1: resolved "https://registry.yarnpkg.com/has-symbol-support-x/-/has-symbol-support-x-1.4.2.tgz#1409f98bc00247da45da67cee0a36f282ff26455" integrity sha512-3ToOva++HaW+eCpgqZrCfN51IPB+7bJNVT6CUATzueB5Heb8o6Nam0V3HG5dlDvZU1Gn5QLcbahiKw/XVk5JJw== -has-symbols@^1.0.0, has-symbols@^1.0.1, has-symbols@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.2.tgz#165d3070c00309752a1236a479331e3ac56f1423" - integrity sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw== 
+has-symbols@^1.0.0, has-symbols@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.1.tgz#9f5214758a44196c406d9bd76cebf81ec2dd31e8" + integrity sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg== has-to-string-tag-x@^1.2.0: version "1.4.1" @@ -7799,10 +7630,10 @@ hex-color-regex@^1.1.0: resolved "https://registry.yarnpkg.com/hex-color-regex/-/hex-color-regex-1.1.0.tgz#4c06fccb4602fe2602b3c93df82d7e7dbf1a8a8e" integrity sha512-l9sfDFsuqtOqKDsQdqrMRk0U85RZc0RtOR9yPI7mRVOa4FsR/BVnZ0shmQRM96Ji99kYZP/7hn1cedc1+ApsTQ== -highlight.js@^10.7.1: - version "10.7.2" - resolved "https://registry.yarnpkg.com/highlight.js/-/highlight.js-10.7.2.tgz#89319b861edc66c48854ed1e6da21ea89f847360" - integrity sha512-oFLl873u4usRM9K63j4ME9u3etNF0PLiJhSQ8rdfuL51Wn3zkD6drf9ZW0dOzjnZI22YYG24z30JcmfCZjMgYg== +highlight.js@^10.0.0: + version "10.6.0" + resolved "https://registry.yarnpkg.com/highlight.js/-/highlight.js-10.6.0.tgz#0073aa71d566906965ba6e1b7be7b2682f5e18b6" + integrity sha512-8mlRcn5vk/r4+QcqerapwBYTe+iPL5ih6xrNylxrnBdHQiijDETfXX7VIxC3UiCRiINBJfANBAsPzAvRQj8RpQ== hmac-drbg@^1.0.1: version "1.0.1" @@ -7827,9 +7658,9 @@ hoopy@^0.1.4: integrity sha512-HRcs+2mr52W0K+x8RzcLzuPPmVIKMSv97RGHy0Ea9y/mpcaK+xTrjICA04KAHi4GRzxliNqNJEFYWHghy3rSfQ== hosted-git-info@^2.1.4, hosted-git-info@^2.6.0: - version "2.8.9" - resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.9.tgz#dffc0bf9a21c02209090f2aa69429e1414daf3f9" - integrity sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw== + version "2.8.8" + resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.8.tgz#7539bd4bc1e0e0a895815a2e0262420b12858488" + integrity sha512-f/wzC2QaWBs7t9IYqB4T3sR1xviIViXJRJTWBlx2Gf3g0Xi5vI7Yy4koXQ1c9OYDGHN9sBy1DQ2AB8fqZBWhUg== hpack.js@^2.1.6: version "2.1.6" @@ -7851,6 +7682,11 @@ hsla-regex@^1.0.0: resolved 
"https://registry.yarnpkg.com/hsla-regex/-/hsla-regex-1.0.0.tgz#c1ce7a3168c8c6614033a4b5f7877f3b225f9c38" integrity sha1-wc56MWjIxmFAM6S194d/OyJfnDg= +html-comment-regex@^1.1.0: + version "1.1.2" + resolved "https://registry.yarnpkg.com/html-comment-regex/-/html-comment-regex-1.1.2.tgz#97d4688aeb5c81886a364faa0cad1dda14d433a7" + integrity sha512-P+M65QY2JQ5Y0G9KKdlDpo0zK+/OHptU5AaBwUfAIDJZk1MYf32Frm84EcOytfJE0t5JvkAnKlmjsXDnWzCJmQ== + html-entities@^1.3.1: version "1.4.0" resolved "https://registry.yarnpkg.com/html-entities/-/html-entities-1.4.0.tgz#cfbd1b01d2afaf9adca1b10ae7dffab98c71d2dc" @@ -7966,18 +7802,7 @@ http-proxy-middleware@0.19.1: lodash "^4.17.11" micromatch "^3.1.10" -http-proxy-middleware@^1.0.0: - version "1.3.1" - resolved "https://registry.yarnpkg.com/http-proxy-middleware/-/http-proxy-middleware-1.3.1.tgz#43700d6d9eecb7419bf086a128d0f7205d9eb665" - integrity sha512-13eVVDYS4z79w7f1+NPllJtOQFx/FdUW4btIvVRMaRlUY9VGstAbo5MOhLEuUgZFRHn3x50ufn25zkj/boZnEg== - dependencies: - "@types/http-proxy" "^1.17.5" - http-proxy "^1.18.1" - is-glob "^4.0.1" - is-plain-obj "^3.0.0" - micromatch "^4.0.2" - -http-proxy@^1.17.0, http-proxy@^1.18.1: +http-proxy@^1.17.0: version "1.18.1" resolved "https://registry.yarnpkg.com/http-proxy/-/http-proxy-1.18.1.tgz#401541f0534884bbf95260334e72f88ee3976549" integrity sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ== @@ -8136,12 +7961,17 @@ indent-string@^2.1.0: dependencies: repeating "^2.0.0" +indent-string@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251" + integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg== + indexes-of@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/indexes-of/-/indexes-of-1.0.1.tgz#f30f716c8e2bd346c7b67d3df3915566a7c05607" integrity sha1-8w9xbI4r00bHtn0985FVZqfAVgc= -infer-owner@^1.0.3: 
+infer-owner@^1.0.3, infer-owner@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/infer-owner/-/infer-owner-1.0.4.tgz#c4cefcaa8e51051c2a40ba2ce8a3d27295af9467" integrity sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A== @@ -8276,11 +8106,6 @@ is-arrayish@^0.3.1: resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.3.2.tgz#4574a2ae56f7ab206896fb431eaeed066fdf8f03" integrity sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ== -is-bigint@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-bigint/-/is-bigint-1.0.2.tgz#ffb381442503235ad245ea89e45b3dbff040ee5a" - integrity sha512-0JV5+SOCQkIdzjBK9buARcV804Ddu7A0Qet6sHi3FimE9ne6m4BGQZfRn+NZiXbBk4F4XmHfDZIipLj9pX8dSA== - is-binary-path@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-1.0.1.tgz#75f16642b480f187a711c814161fd3a4a7655898" @@ -8295,13 +8120,6 @@ is-binary-path@~2.1.0: dependencies: binary-extensions "^2.0.0" -is-boolean-object@^1.1.0: - version "1.1.1" - resolved "https://registry.yarnpkg.com/is-boolean-object/-/is-boolean-object-1.1.1.tgz#3c0878f035cb821228d350d2e1e36719716a3de8" - integrity sha512-bXdQWkECBUIAcCkeH1unwJLIpZYaa5VvuygSyS/c2lf719mTKZDU5UdDRlpd01UjADgmW8RfqaP+mRaVPdr/Ng== - dependencies: - call-bind "^1.0.2" - is-buffer@^1.1.5: version "1.1.6" resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" @@ -8312,7 +8130,7 @@ is-buffer@^2.0.2, is-buffer@~2.0.3: resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-2.0.5.tgz#ebc252e400d22ff8d77fa09888821a24a658c191" integrity sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ== -is-callable@^1.1.3, is-callable@^1.1.4, is-callable@^1.2.3: +is-callable@^1.1.3, is-callable@^1.1.4, is-callable@^1.2.2: version "1.2.3" resolved 
"https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.3.tgz#8b1e0500b73a1d76c70487636f368e519de8db8e" integrity sha512-J1DcMe8UYTBSrKezuIUTUwjXsho29693unXM2YhJUTR2txK/eG47bvNa/wipPFmZFgr/N6f1GA66dv0mEyTIyQ== @@ -8337,9 +8155,9 @@ is-color-stop@^1.0.0: rgba-regex "^1.0.0" is-core-module@^2.2.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.4.0.tgz#8e9fc8e15027b011418026e98f0e6f4d86305cc1" - integrity sha512-6A2fkfq1rfeQZjxrZJGerpLCTHRNEBiSgnu0+obeJpEPZRUooHgsizvzv0ZjJwOz3iWIHdJtVWJ/tmPr3D21/A== + version "2.2.0" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.2.0.tgz#97037ef3d52224d85163f5597b2b63d9afed981a" + integrity sha512-XRAfAdyyY5F5cOXn7hYQDqh2Xmii+DEfIcQGxK/uNwMHhIkPWO0g8msXcbzLe+MpGoR951MlqM/2iIlU4vKDdQ== dependencies: has "^1.0.3" @@ -8358,9 +8176,9 @@ is-data-descriptor@^1.0.0: kind-of "^6.0.0" is-date-object@^1.0.1: - version "1.0.4" - resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.4.tgz#550cfcc03afada05eea3dd30981c7b09551f73e5" - integrity sha512-/b4ZVsG7Z5XVtIxs/h9W8nvfLgSAyKYdtGWQLbqy6jA1icmgjf8WCoTKgeS4wy5tYaPePouzFMANbnj94c2Z+A== + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.2.tgz#bda736f2cd8fd06d32844e7743bfa7494c3bfd7e" + integrity sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g== is-descriptor@^0.1.0: version "0.1.6" @@ -8386,9 +8204,9 @@ is-directory@^0.3.1: integrity sha1-YTObbyR1/Hcv2cnYP1yFddwVSuE= is-docker@^2.0.0: - version "2.2.1" - resolved "https://registry.yarnpkg.com/is-docker/-/is-docker-2.2.1.tgz#33eeabe23cfe86f14bde4408a02c0cfb853acdaa" - integrity sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ== + version "2.1.1" + resolved "https://registry.yarnpkg.com/is-docker/-/is-docker-2.1.1.tgz#4125a88e44e450d384e09047ede71adc2d144156" + integrity 
sha512-ZOoqiXfEwtGknTiuDEy8pN2CfE3TxMHprvNer1mXiqwkOT77Rw3YVrUQ52EqAOU3QAWDQ+bQdx7HJzrv7LS2Hw== is-extendable@^0.1.0, is-extendable@^0.1.1: version "0.1.1" @@ -8468,11 +8286,6 @@ is-negative-zero@^2.0.1: resolved "https://registry.yarnpkg.com/is-negative-zero/-/is-negative-zero-2.0.1.tgz#3de746c18dda2319241a53675908d8f766f11c24" integrity sha512-2z6JzQvZRa9A2Y7xC6dQQm4FSTSTNWjKIYYTt4246eMTJmIo0Q+ZyOsU66X8lxK1AbB92dFeglPLrhwpeRKO6w== -is-number-object@^1.0.4: - version "1.0.5" - resolved "https://registry.yarnpkg.com/is-number-object/-/is-number-object-1.0.5.tgz#6edfaeed7950cff19afedce9fbfca9ee6dd289eb" - integrity sha512-RU0lI/n95pMoUKu9v1BZP5MBcZuNSVJkMkAG2dJqC4z2GlkGUNeH68SuHuBKBD/XFe+LHZ+f9BKkLET60Niedw== - is-number@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195" @@ -8524,11 +8337,6 @@ is-plain-obj@^2.1.0: resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-2.1.0.tgz#45e42e37fccf1f40da8e5f76ee21515840c09287" integrity sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA== -is-plain-obj@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-3.0.0.tgz#af6f2ea14ac5a646183a5bbdb5baabbc156ad9d7" - integrity sha512-gwsOE28k+23GP1B6vFl1oVh/WOzmawBrKwo5Ev6wMKzPkaXaCDIQKzLnvsA42DRlbVTWorkgTKIviAKCWkfUwA== - is-plain-object@^2.0.3, is-plain-object@^2.0.4: version "2.0.4" resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" @@ -8548,13 +8356,13 @@ is-reference@^1.2.1: dependencies: "@types/estree" "*" -is-regex@^1.0.4, is-regex@^1.1.2: - version "1.1.3" - resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.3.tgz#d029f9aff6448b93ebbe3f33dac71511fdcbef9f" - integrity sha512-qSVXFz28HM7y+IWX6vLCsexdlvzT1PJNFSBuaQLQ5o0IEw8UDYW6/2+eCMVyIsbM8CNLX2a/QWmSpyxYEHY7CQ== +is-regex@^1.0.4, is-regex@^1.1.1: + version 
"1.1.2" + resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.2.tgz#81c8ebde4db142f2cf1c53fc86d6a45788266251" + integrity sha512-axvdhb5pdhEVThqJzYXwMlVuZwC+FF2DpcOhTS+y/8jVq4trxyPgfcwIxIKiyeuLlSQYKkmUaPQJ8ZE4yNKXDg== dependencies: call-bind "^1.0.2" - has-symbols "^1.0.2" + has-symbols "^1.0.1" is-regex@~1.0.5: version "1.0.5" @@ -8583,17 +8391,19 @@ is-stream@^2.0.0: resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.0.tgz#bde9c32680d6fae04129d6ac9d921ce7815f78e3" integrity sha512-XCoy+WlUr7d1+Z8GgSuXmpuUFC9fOhRXglJMx+dwLKTkL44Cjd4W1Z5P+BQZpr+cR93aGP4S/s7Ftw6Nd/kiEw== -is-string@^1.0.5: - version "1.0.6" - resolved "https://registry.yarnpkg.com/is-string/-/is-string-1.0.6.tgz#3fe5d5992fb0d93404f32584d4b0179a71b54a5f" - integrity sha512-2gdzbKUuqtQ3lYNrUTQYoClPhm7oQu4UdpSZMp1/DGgkHBT8E2Z1l0yMdb6D4zNAxwDiMv8MdulKROJGNl0Q0w== +is-svg@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/is-svg/-/is-svg-3.0.0.tgz#9321dbd29c212e5ca99c4fa9794c714bcafa2f75" + integrity sha512-gi4iHK53LR2ujhLVVj+37Ykh9GLqYHX6JOVXbLAucaG/Cqw9xwdFOjDM2qeifLs1sF1npXXFvDu0r5HNgCMrzQ== + dependencies: + html-comment-regex "^1.1.0" -is-symbol@^1.0.2, is-symbol@^1.0.3: - version "1.0.4" - resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.4.tgz#a6dac93b635b063ca6872236de88910a57af139c" - integrity sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg== +is-symbol@^1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.3.tgz#38e1014b9e6329be0de9d24a414fd7441ec61937" + integrity sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ== dependencies: - has-symbols "^1.0.2" + has-symbols "^1.0.1" is-typedarray@^1.0.0, is-typedarray@~1.0.0: version "1.0.0" @@ -8676,9 +8486,17 @@ isurl@^1.0.0-alpha5: is-object "^1.0.1" javascript-stringify@^2.0.1: - version "2.1.0" - resolved 
"https://registry.yarnpkg.com/javascript-stringify/-/javascript-stringify-2.1.0.tgz#27c76539be14d8bd128219a2d731b09337904e79" - integrity sha512-JVAfqNPTvNq3sB/VHQJAFxN/sPgKnsKrCwyRt15zwNCdrMMJDdcEOdubuy+DuJYYdm0ox1J4uzEuYKkN+9yhVg== + version "2.0.1" + resolved "https://registry.yarnpkg.com/javascript-stringify/-/javascript-stringify-2.0.1.tgz#6ef358035310e35d667c675ed63d3eb7c1aa19e5" + integrity sha512-yV+gqbd5vaOYjqlbk16EG89xB5udgjqQF3C5FAORDg4f/IS1Yc5ERCv5e/57yBcfJYw05V5JyIXabhwb75Xxow== + +jest-worker@^25.4.0: + version "25.5.0" + resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-25.5.0.tgz#2611d071b79cea0f43ee57a3d118593ac1547db1" + integrity sha512-/dsSmUkIy5EBGfv/IjjqmFxrNAUpBERfGs1oHROyD7yxjG/w+t0GOJDX8O1k32ySmd7+a5IhnJU2qQFcJ4n1vw== + dependencies: + merge-stream "^2.0.0" + supports-color "^7.0.0" jest-worker@^26.2.1: version "26.6.2" @@ -9028,11 +8846,6 @@ level-codec@~7.0.0: resolved "https://registry.yarnpkg.com/level-codec/-/level-codec-7.0.1.tgz#341f22f907ce0f16763f24bddd681e395a0fb8a7" integrity sha512-Ua/R9B9r3RasXdRmOtd+t9TCOEIIlts+TN/7XTT2unhDaL6sJn83S3rUyljbr6lVtw49N3/yA0HHjpV6Kzb2aQ== -level-concat-iterator@~2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/level-concat-iterator/-/level-concat-iterator-2.0.1.tgz#1d1009cf108340252cb38c51f9727311193e6263" - integrity sha512-OTKKOqeav2QWcERMJR7IS9CUo1sHnke2C0gkSmcR7QuEtFNLLzHQAvnMw8ykvEcv0Qtkg0p7FOwP1v9e5Smdcw== - level-errors@^1.0.3: version "1.1.2" resolved "https://registry.yarnpkg.com/level-errors/-/level-errors-1.1.2.tgz#4399c2f3d3ab87d0625f7e3676e2d807deff404d" @@ -9082,15 +8895,6 @@ level-iterator-stream@~3.0.0: readable-stream "^2.3.6" xtend "^4.0.0" -level-iterator-stream@~4.0.0: - version "4.0.2" - resolved "https://registry.yarnpkg.com/level-iterator-stream/-/level-iterator-stream-4.0.2.tgz#7ceba69b713b0d7e22fcc0d1f128ccdc8a24f79c" - integrity sha512-ZSthfEqzGSOMWoUGhTXdX9jv26d32XJuHz/5YnuHZzH6wldfWMOVwI9TBtKcya4BKTyTt3XVA0A3cF3q5CY30Q== - dependencies: - 
inherits "^2.0.4" - readable-stream "^3.4.0" - xtend "^4.0.2" - level-mem@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/level-mem/-/level-mem-3.0.1.tgz#7ce8cf256eac40f716eb6489654726247f5a89e5" @@ -9099,22 +8903,6 @@ level-mem@^3.0.1: level-packager "~4.0.0" memdown "~3.0.0" -level-mem@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/level-mem/-/level-mem-5.0.1.tgz#c345126b74f5b8aa376dc77d36813a177ef8251d" - integrity sha512-qd+qUJHXsGSFoHTziptAKXoLX87QjR7v2KMbqncDXPxQuCdsQlzmyX+gwrEHhlzn08vkf8TyipYyMmiC6Gobzg== - dependencies: - level-packager "^5.0.3" - memdown "^5.0.0" - -level-packager@^5.0.3: - version "5.1.1" - resolved "https://registry.yarnpkg.com/level-packager/-/level-packager-5.1.1.tgz#323ec842d6babe7336f70299c14df2e329c18939" - integrity sha512-HMwMaQPlTC1IlcwT3+swhqf/NUO+ZhXVz6TY1zZIIZlIR0YSn8GtAAWmIvKjNY16ZkEg/JcpAuQskxsXqC0yOQ== - dependencies: - encoding-down "^6.3.0" - levelup "^4.3.2" - level-packager@~4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/level-packager/-/level-packager-4.0.1.tgz#7e7d3016af005be0869bc5fa8de93d2a7f56ffe6" @@ -9146,13 +8934,6 @@ level-sublevel@6.6.4: typewiselite "~1.0.0" xtend "~4.0.0" -level-supports@~1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/level-supports/-/level-supports-1.0.1.tgz#2f530a596834c7301622521988e2c36bb77d122d" - integrity sha512-rXM7GYnW8gsl1vedTJIbzOrRv85c/2uCMpiiCzO2fndd06U/kUXEEU9evYn4zFggBOg36IsBW8LzqIpETwwQzg== - dependencies: - xtend "^4.0.2" - level-ws@0.0.0: version "0.0.0" resolved "https://registry.yarnpkg.com/level-ws/-/level-ws-0.0.0.tgz#372e512177924a00424b0b43aef2bb42496d228b" @@ -9170,15 +8951,6 @@ level-ws@^1.0.0: readable-stream "^2.2.8" xtend "^4.0.1" -level-ws@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/level-ws/-/level-ws-2.0.0.tgz#207a07bcd0164a0ec5d62c304b4615c54436d339" - integrity sha512-1iv7VXx0G9ec1isqQZ7y5LmoZo/ewAsyDHNA8EFDW5hqH2Kqovm33nSFkSdnLLAK+I5FlT+lo5Cw9itGe+CpQA== - 
dependencies: - inherits "^2.0.3" - readable-stream "^3.1.0" - xtend "^4.0.1" - levelup@3.1.1, levelup@^3.0.0: version "3.1.1" resolved "https://registry.yarnpkg.com/levelup/-/levelup-3.1.1.tgz#c2c0b3be2b4dc316647c53b42e2f559e232d2189" @@ -9202,17 +8974,6 @@ levelup@^1.2.1: semver "~5.4.1" xtend "~4.0.0" -levelup@^4.3.2: - version "4.4.0" - resolved "https://registry.yarnpkg.com/levelup/-/levelup-4.4.0.tgz#f89da3a228c38deb49c48f88a70fb71f01cafed6" - integrity sha512-94++VFO3qN95cM/d6eBXvd894oJE0w3cInq9USsyQzzoJxmiYzPAocNcuGCPGGjoXqDVJcr3C1jzt1TSjyaiLQ== - dependencies: - deferred-leveldown "~5.3.0" - level-errors "~2.0.0" - level-iterator-stream "~4.0.0" - level-supports "~1.0.0" - xtend "~4.0.0" - levn@^0.3.0, levn@~0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" @@ -9320,16 +9081,6 @@ lodash.assign@^4.0.3, lodash.assign@^4.0.6: resolved "https://registry.yarnpkg.com/lodash.assign/-/lodash.assign-4.2.0.tgz#0d99f3ccd7a6d261d19bdaeb9245005d285808e7" integrity sha1-DZnzzNem0mHRm9rrkkUAXShYCOc= -lodash.clonedeep@^4.5.0: - version "4.5.0" - resolved "https://registry.yarnpkg.com/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz#e23f3f9c4f8fbdde872529c1071857a086e5ccef" - integrity sha1-4j8/nE+Pvd6HJSnBBxhXoIblzO8= - -lodash.debounce@^4.0.8: - version "4.0.8" - resolved "https://registry.yarnpkg.com/lodash.debounce/-/lodash.debounce-4.0.8.tgz#82d79bff30a67c4005ffd5e2515300ad9ca4d7af" - integrity sha1-gteb/zCmfEAF/9XiUVMArZyk168= - lodash.defaultsdeep@^4.6.1: version "4.6.1" resolved "https://registry.yarnpkg.com/lodash.defaultsdeep/-/lodash.defaultsdeep-4.6.1.tgz#512e9bd721d272d94e3d3a63653fa17516741ca6" @@ -9400,26 +9151,16 @@ lodash.transform@^4.6.0: resolved "https://registry.yarnpkg.com/lodash.transform/-/lodash.transform-4.6.0.tgz#12306422f63324aed8483d3f38332b5f670547a0" integrity sha1-EjBkIvYzJK7YSD0/ODMrX2cFR6A= -lodash.truncate@^4.4.2: - version "4.4.2" - resolved 
"https://registry.yarnpkg.com/lodash.truncate/-/lodash.truncate-4.4.2.tgz#5a350da0b1113b837ecfffd5812cbe58d6eae193" - integrity sha1-WjUNoLERO4N+z//VgSy+WNbq4ZM= - lodash.uniq@^4.5.0: version "4.5.0" resolved "https://registry.yarnpkg.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773" integrity sha1-0CJTc662Uq3BvILklFM5qEJ1R3M= -lodash@4.17.20: +lodash@4.17.20, lodash@^4.0.0, lodash@^4.17.11, lodash@^4.17.12, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.19, lodash@^4.17.20, lodash@^4.17.3, lodash@^4.17.4, lodash@~4.17.10: version "4.17.20" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.20.tgz#b44a9b6297bcb698f1c51a3545a2b3b368d59c52" integrity sha512-PlhdFcillOINfeV7Ni6oF1TAEayyZBoZ8bcshTHqOYJYlrqzRK5hagpagky5o4HfCzzd1TRkXPMFq6cKk9rGmA== -lodash@^4.0.0, lodash@^4.17.11, lodash@^4.17.12, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.19, lodash@^4.17.20, lodash@^4.17.21, lodash@^4.17.3, lodash@^4.17.4, lodash@~4.17.10: - version "4.17.21" - resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" - integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== - log-symbols@3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-3.0.0.tgz#f3a08516a5dea893336a7dee14d18a1cfdab77c4" @@ -9617,11 +9358,6 @@ markdownlint@~0.21.0: dependencies: markdown-it "11.0.0" -mcl-wasm@^0.7.1: - version "0.7.7" - resolved "https://registry.yarnpkg.com/mcl-wasm/-/mcl-wasm-0.7.7.tgz#fd463dd1641a37f9f55b6ca8e5a38e95be2bc58f" - integrity sha512-jDGiCQA++5hX37gdH6RDZ3ZsA0raet7xyY/R5itj5cbcdf4Gvw+YyxWX/ZZ0Z2UPxJiw1ktRsCJZzpnqlQILdw== - md5.js@^1.3.4: version "1.3.5" resolved "https://registry.yarnpkg.com/md5.js/-/md5.js-1.3.5.tgz#b5d07b8e3216e3e27cd728d72f70d1e6a342005f" @@ -9663,18 +9399,6 @@ memdown@^1.0.0: ltgt "~2.2.0" safe-buffer "~5.1.1" -memdown@^5.0.0: - version "5.1.0" - resolved 
"https://registry.yarnpkg.com/memdown/-/memdown-5.1.0.tgz#608e91a9f10f37f5b5fe767667a8674129a833cb" - integrity sha512-B3J+UizMRAlEArDjWHTMmadet+UKwHd3UjMgGBkZcKAxAYVPS9o0Yeiha4qvz7iGiL2Sb3igUft6p7nbFWctpw== - dependencies: - abstract-leveldown "~6.2.1" - functional-red-black-tree "~1.0.1" - immediate "~3.2.3" - inherits "~2.0.1" - ltgt "~2.2.0" - safe-buffer "~5.2.0" - memdown@~3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/memdown/-/memdown-3.0.0.tgz#93aca055d743b20efc37492e9e399784f2958309" @@ -9773,19 +9497,6 @@ merkle-patricia-tree@^2.1.2, merkle-patricia-tree@^2.3.2: rlp "^2.0.0" semaphore ">=1.0.1" -merkle-patricia-tree@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/merkle-patricia-tree/-/merkle-patricia-tree-4.1.0.tgz#010636c4cfd68682df33a2e3186b7d0be7b98b9d" - integrity sha512-vmP1J7FwIpprFMVjjSMM1JAwFce85Q+tp0TYIedYv8qaMh2oLUZ3ETXn9wbgi9S6elySzKzGa+Ai6VNKGEwSlg== - dependencies: - "@types/levelup" "^4.3.0" - ethereumjs-util "^7.0.8" - level-mem "^5.0.1" - level-ws "^2.0.0" - readable-stream "^3.6.0" - rlp "^2.2.3" - semaphore-async-await "^1.5.1" - methods@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" @@ -9811,12 +9522,12 @@ micromatch@^3.1.10, micromatch@^3.1.4: to-regex "^3.0.2" micromatch@^4.0.2: - version "4.0.4" - resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.4.tgz#896d519dfe9db25fce94ceb7a500919bf881ebf9" - integrity sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg== + version "4.0.2" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.2.tgz#4fcb0999bf9fbc2fcbdd212f6d629b9a56c39259" + integrity sha512-y7FpHSbMUMoyPbYUSzO6PaZ6FyRnQOpHuKwbo1G+Knck95XVU4QAiKdGEnj5wwoS7PlOgthX/09u5iFJ+aYf5Q== dependencies: braces "^3.0.1" - picomatch "^2.2.3" + picomatch "^2.0.5" miller-rabin@^4.0.0: version "4.0.1" @@ -9826,17 +9537,22 @@ miller-rabin@^4.0.0: bn.js 
"^4.0.0" brorand "^1.0.1" -mime-db@1.47.0, "mime-db@>= 1.43.0 < 2": - version "1.47.0" - resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.47.0.tgz#8cb313e59965d3c05cfbf898915a267af46a335c" - integrity sha512-QBmA/G2y+IfeS4oktet3qRZ+P5kPhCKRXxXnQEudYqUaEioAU1/Lq2us3D/t1Jfo4hE9REQPrbB7K5sOczJVIw== +mime-db@1.45.0: + version "1.45.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.45.0.tgz#cceeda21ccd7c3a745eba2decd55d4b73e7879ea" + integrity sha512-CkqLUxUk15hofLoLyljJSrukZi8mAtgd+yE5uO4tqRZsdsAJKv0O+rFMhVDRJgozy+yG6md5KwuXhD4ocIoP+w== + +"mime-db@>= 1.43.0 < 2": + version "1.46.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.46.0.tgz#6267748a7f799594de3cbc8cde91def349661cee" + integrity sha512-svXaP8UQRZ5K7or+ZmfNhg2xX3yKDMUzqadsSqi4NCH/KomcH75MAMYAGVlvXn4+b/xOPhS3I2uHKRUzvjY7BQ== mime-types@^2.1.12, mime-types@^2.1.16, mime-types@~2.1.17, mime-types@~2.1.19, mime-types@~2.1.24: - version "2.1.30" - resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.30.tgz#6e7be8b4c479825f85ed6326695db73f9305d62d" - integrity sha512-crmjA4bLtR8m9qLpHvgxSChT+XoSlZi8J4n/aIdn3z92e/U47Z0V/yl+Wh9W046GgFVAmoNR/fmdbZYcSSIUeg== + version "2.1.28" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.28.tgz#1160c4757eab2c5363888e005273ecf79d2a0ecd" + integrity sha512-0TO2yJ5YHYr7M2zzT7gDU1tbwHxEUWBCLt0lscSNpcdAfFyJOVEpRYNS7EXVcTLNj/25QO8gulHC5JtTzSE2UQ== dependencies: - mime-db "1.47.0" + mime-db "1.45.0" mime@1.6.0: version "1.6.0" @@ -9844,9 +9560,9 @@ mime@1.6.0: integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== mime@^2.4.4: - version "2.5.2" - resolved "https://registry.yarnpkg.com/mime/-/mime-2.5.2.tgz#6e3dc6cc2b9510643830e5f19d5cb753da5eeabe" - integrity sha512-tqkh47FzKeCPD2PUiPB6pkbMzsCasjxAfC62/Wap5qrUWcb+sFasXUC5I3gYM5iBM8v/Qpn4UK0x+j0iHyFPDg== + version "2.5.0" + resolved 
"https://registry.yarnpkg.com/mime/-/mime-2.5.0.tgz#2b4af934401779806ee98026bb42e8c1ae1876b1" + integrity sha512-ft3WayFSFUVBuJj7BMLKAQcSlItKtfjsKDDsii3rqFDAZ7t11zRe8ASw/GlmivGwVUYtwkQrxiGGpL6gFvB0ag== mimic-fn@^1.0.0: version "1.2.0" @@ -9902,6 +9618,27 @@ minimist@^1.1.3, minimist@^1.2.0, minimist@^1.2.5, minimist@~1.2.5: resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602" integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw== +minipass-collect@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/minipass-collect/-/minipass-collect-1.0.2.tgz#22b813bf745dc6edba2576b940022ad6edc8c617" + integrity sha512-6T6lH0H8OG9kITm/Jm6tdooIbogG9e0tLgpY6mphXSm/A9u8Nq1ryBG+Qspiub9LjWlBPsPS3tWQ/Botq4FdxA== + dependencies: + minipass "^3.0.0" + +minipass-flush@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/minipass-flush/-/minipass-flush-1.0.5.tgz#82e7135d7e89a50ffe64610a787953c4c4cbb373" + integrity sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw== + dependencies: + minipass "^3.0.0" + +minipass-pipeline@^1.2.2: + version "1.2.4" + resolved "https://registry.yarnpkg.com/minipass-pipeline/-/minipass-pipeline-1.2.4.tgz#68472f79711c084657c067c5c6ad93cddea8214c" + integrity sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A== + dependencies: + minipass "^3.0.0" + minipass@^2.6.0, minipass@^2.8.6, minipass@^2.9.0: version "2.9.0" resolved "https://registry.yarnpkg.com/minipass/-/minipass-2.9.0.tgz#e713762e7d3e32fed803115cf93e04bca9fcc9a6" @@ -9910,7 +9647,7 @@ minipass@^2.6.0, minipass@^2.8.6, minipass@^2.9.0: safe-buffer "^5.1.2" yallist "^3.0.0" -minipass@^3.1.1: +minipass@^3.0.0, minipass@^3.1.1: version "3.1.3" resolved "https://registry.yarnpkg.com/minipass/-/minipass-3.1.3.tgz#7d42ff1f39635482e15f9cdb53184deebd5815fd" integrity 
sha512-Mgd2GdMVzY+x3IJ+oHnVM+KG3lA5c8tnabyJKmHSaG2kAGpudxuOf8ToDkhumF7UzME7DecbQE9uOZhNm7PuJg== @@ -9967,13 +9704,6 @@ mkdirp@0.5.5, "mkdirp@>=0.5 0", mkdirp@^0.5.0, mkdirp@^0.5.1, mkdirp@^0.5.3, mkd dependencies: minimist "^1.2.5" -mnemonist@^0.38.0: - version "0.38.3" - resolved "https://registry.yarnpkg.com/mnemonist/-/mnemonist-0.38.3.tgz#35ec79c1c1f4357cfda2fe264659c2775ccd7d9d" - integrity sha512-2K9QYubXx/NAjv4VLq1d1Ly8pWNC5L3BrixtdkyTegXWJIqY+zLNDhhX/A+ZwWt70tB1S8H4BE8FLYEFyNoOBw== - dependencies: - obliterator "^1.6.1" - mocha-steps@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/mocha-steps/-/mocha-steps-1.3.0.tgz#2449231ec45ec56810f65502cb22e2571862957f" @@ -10010,9 +9740,9 @@ mocha@^7.1.2: yargs-unparser "1.6.0" mocha@^8.2.0: - version "8.4.0" - resolved "https://registry.yarnpkg.com/mocha/-/mocha-8.4.0.tgz#677be88bf15980a3cae03a73e10a0fc3997f0cff" - integrity sha512-hJaO0mwDXmZS4ghXsvPVriOhsxQ7ofcpQdm8dE+jISUOKopitvnXFQmpRR7jd2K6VBG6E26gU3IAbXXGIbu4sQ== + version "8.3.0" + resolved "https://registry.yarnpkg.com/mocha/-/mocha-8.3.0.tgz#a83a7432d382ae1ca29686062d7fdc2c36f63fe5" + integrity sha512-TQqyC89V1J/Vxx0DhJIXlq9gbbL9XFNdeLQ1+JsnZsVaSOV1z3tWfw0qZmQJGQRIfkvZcs7snQnZnOCKoldq1Q== dependencies: "@ungap/promise-all-settled" "1.1.2" ansi-colors "4.1.1" @@ -10041,9 +9771,9 @@ mocha@^8.2.0: yargs-unparser "2.0.0" mock-fs@^4.1.0, mock-fs@^4.13.0: - version "4.14.0" - resolved "https://registry.yarnpkg.com/mock-fs/-/mock-fs-4.14.0.tgz#ce5124d2c601421255985e6e94da80a7357b1b18" - integrity sha512-qYvlv/exQ4+svI3UOvPUpLDF0OMX5euvUH0Ny4N5QyRyhNdgAgUrVH3iUINSzEPLvx0kbo/Bp28GJKIqvE7URw== + version "4.13.0" + resolved "https://registry.yarnpkg.com/mock-fs/-/mock-fs-4.13.0.tgz#31c02263673ec3789f90eb7b6963676aa407a598" + integrity sha512-DD0vOdofJdoaRNtnWcrXe6RQbpHkPPmtqGq14uRX0F8ZKJ5nv89CVTYl/BZdppDxBDaV0hl75htg3abpEWlPZA== move-concurrently@^1.0.1: version "1.0.1" @@ -10268,9 +9998,9 @@ node-gyp@^3.8.0: which "1" node-ipc@^9.1.1: - version "9.1.4" - 
resolved "https://registry.yarnpkg.com/node-ipc/-/node-ipc-9.1.4.tgz#2acf962681afdac2602876d98fe6434d54d9bd3c" - integrity sha512-A+f0mn2KxUt1uRTSd5ktxQUsn2OEhj5evo7NUi/powBzMSZ0vocdzDjlq9QN2v3LH6CJi3e5xAenpZ1QwU5A8g== + version "9.1.3" + resolved "https://registry.yarnpkg.com/node-ipc/-/node-ipc-9.1.3.tgz#1df3f069d103184ae9127fa885dbdaea56a4436f" + integrity sha512-8RS4RZyS/KMKKYG8mrje+cLxwATe9dBCuOiqKFSWND4oOuKytfuKCiR9yinvhoXF/nGdX/WnbywaUee+9U87zA== dependencies: event-pubsub "4.3.0" js-message "1.0.7" @@ -10305,10 +10035,10 @@ node-libs-browser@^2.2.1: util "^0.11.0" vm-browserify "^1.0.1" -node-releases@^1.1.71: - version "1.1.72" - resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.72.tgz#14802ab6b1039a79a0c7d662b610a5bbd76eacbe" - integrity sha512-LLUo+PpH3dU6XizX3iVoubUNheF/owjXCZZ5yACDxNnPtgFuludV1ZL3ayK1kVep42Rmm0+R9/Y60NQbZ2bifw== +node-releases@^1.1.70: + version "1.1.70" + resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.70.tgz#66e0ed0273aa65666d7fe78febe7634875426a08" + integrity sha512-Slf2s69+2/uAD79pVVQo8uSiC34+g8GWY8UH2Qtqv34ZfhYrxpYpfzs9Js9d6O0mbDmALuxaTlplnBTnSELcrw== node-sass@^4.13.0: version "4.14.1" @@ -10460,10 +10190,10 @@ object-copy@^0.1.0: define-property "^0.2.5" kind-of "^3.0.3" -object-inspect@^1.9.0: - version "1.10.3" - resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.10.3.tgz#c2aa7d2d09f50c99375704f7a0adf24c5782d369" - integrity sha512-e5mCJlSH7poANfC8z8S9s9S2IN5/4Zb3aZ33f5s8YqoazCFzNLloLU8r5VCG+G7WoqLvAAZoVMcy3tp/3X0Plw== +object-inspect@^1.8.0, object-inspect@^1.9.0: + version "1.9.0" + resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.9.0.tgz#c90521d74e1127b67266ded3394ad6116986533a" + integrity sha512-i3Bp9iTqwhaLZBxGkRfo5ZbE07BQRT7MGu8+nNgwW9ItGp1TzCTw2DLEoWwjClxBjOFI/hWljTAmYGCEwmtnOw== object-inspect@~1.7.0: version "1.7.0" @@ -10471,11 +10201,11 @@ object-inspect@~1.7.0: integrity 
sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw== object-is@^1.0.1: - version "1.1.5" - resolved "https://registry.yarnpkg.com/object-is/-/object-is-1.1.5.tgz#b9deeaa5fc7f1846a0faecdceec138e5778f53ac" - integrity sha512-3cyDsyHgtmi7I7DfSSI2LDp6SK2lwvtbg0p0R1e0RvTqF5ceGx+K2dfSjm1bKDMVCFEDAQvy+o8c6a7VujOddw== + version "1.1.4" + resolved "https://registry.yarnpkg.com/object-is/-/object-is-1.1.4.tgz#63d6c83c00a43f4cbc9434eb9757c8a5b8565068" + integrity sha512-1ZvAZ4wlF7IyPVOcE1Omikt7UpaFlOQq0HlSti+ZvDH3UiD2brwGMwDbyV43jao2bKJ+4+WdPJHSd7kgzKYVqg== dependencies: - call-bind "^1.0.2" + call-bind "^1.0.0" define-properties "^1.1.3" object-keys@^1.0.11, object-keys@^1.0.12, object-keys@^1.1.1: @@ -10505,7 +10235,7 @@ object.assign@4.1.0: has-symbols "^1.0.0" object-keys "^1.0.11" -object.assign@^4.1.0, object.assign@^4.1.2: +object.assign@^4.1.0, object.assign@^4.1.1, object.assign@^4.1.2: version "4.1.2" resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.2.tgz#0ed54a342eceb37b38ff76eb831a0e788cb63940" integrity sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ== @@ -10516,13 +10246,13 @@ object.assign@^4.1.0, object.assign@^4.1.2: object-keys "^1.1.1" object.getownpropertydescriptors@^2.0.3, object.getownpropertydescriptors@^2.1.0, object.getownpropertydescriptors@^2.1.1: - version "2.1.2" - resolved "https://registry.yarnpkg.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.2.tgz#1bd63aeacf0d5d2d2f31b5e393b03a7c601a23f7" - integrity sha512-WtxeKSzfBjlzL+F9b7M7hewDzMwy+C8NRssHd1YrNlzHzIDrXcXiNOMrezdAEM4UXixgV+vvnyBeN7Rygl2ttQ== + version "2.1.1" + resolved "https://registry.yarnpkg.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.1.tgz#0dfda8d108074d9c563e80490c883b6661091544" + integrity sha512-6DtXgZ/lIZ9hqx4GtZETobXLR/ZLaa0aqV0kzbn80Rf8Z2e/XFnhA0I7p07N2wH8bBBltr2xQPi6sbKWAY2Eng== dependencies: - call-bind 
"^1.0.2" + call-bind "^1.0.0" define-properties "^1.1.3" - es-abstract "^1.18.0-next.2" + es-abstract "^1.18.0-next.1" object.pick@^1.3.0: version "1.3.0" @@ -10532,20 +10262,15 @@ object.pick@^1.3.0: isobject "^3.0.1" object.values@^1.1.0: - version "1.1.3" - resolved "https://registry.yarnpkg.com/object.values/-/object.values-1.1.3.tgz#eaa8b1e17589f02f698db093f7c62ee1699742ee" - integrity sha512-nkF6PfDB9alkOUxpf1HNm/QlkeW3SReqL5WXeBLpEJJnlPSvRaDQpW3gQTksTN3fgJX4hL42RzKyOin6ff3tyw== + version "1.1.2" + resolved "https://registry.yarnpkg.com/object.values/-/object.values-1.1.2.tgz#7a2015e06fcb0f546bd652486ce8583a4731c731" + integrity sha512-MYC0jvJopr8EK6dPBiO8Nb9mvjdypOachO5REGk6MXzujbBrAisKo3HmdEI6kZDL6fC31Mwee/5YbtMebixeag== dependencies: - call-bind "^1.0.2" + call-bind "^1.0.0" define-properties "^1.1.3" - es-abstract "^1.18.0-next.2" + es-abstract "^1.18.0-next.1" has "^1.0.3" -obliterator@^1.6.1: - version "1.6.1" - resolved "https://registry.yarnpkg.com/obliterator/-/obliterator-1.6.1.tgz#dea03e8ab821f6c4d96a299e17aef6a3af994ef3" - integrity sha512-9WXswnqINnnhOG/5SLimUlzuU1hFJUc8zkwyD59Sd+dPOMf05PmnYG/d6Q7HZ+KmgkZJa1PxRso6QdM3sTNHig== - oboe@2.1.4: version "2.1.4" resolved "https://registry.yarnpkg.com/oboe/-/oboe-2.1.4.tgz#20c88cdb0c15371bb04119257d4fdd34b0aa49f6" @@ -10598,14 +10323,6 @@ open@^6.3.0: dependencies: is-wsl "^1.1.0" -open@^7.4.2: - version "7.4.2" - resolved "https://registry.yarnpkg.com/open/-/open-7.4.2.tgz#b8147e26dcf3e426316c730089fd71edd29c2321" - integrity sha512-MVHddDVweXZF3awtlAS+6pgKLlm/JgxZ90+/NBurBoQctVOOB/zDdVjcyPzQ+0laDGbsWgrRkflI65sQeOgT9Q== - dependencies: - is-docker "^2.0.0" - is-wsl "^2.1.1" - opener@^1.5.1: version "1.5.2" resolved "https://registry.yarnpkg.com/opener/-/opener-1.5.2.tgz#5d37e1f35077b9dcac4301372271afdeb2a13598" @@ -10723,7 +10440,7 @@ p-limit@^1.1.0: dependencies: p-try "^1.0.0" -p-limit@^2.0.0, p-limit@^2.2.0, p-limit@^2.2.1: +p-limit@^2.0.0, p-limit@^2.2.0, p-limit@^2.2.1, p-limit@^2.3.0: version 
"2.3.0" resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== @@ -10770,6 +10487,13 @@ p-map@^2.0.0: resolved "https://registry.yarnpkg.com/p-map/-/p-map-2.1.0.tgz#310928feef9c9ecc65b68b17693018a665cea175" integrity sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw== +p-map@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/p-map/-/p-map-3.0.0.tgz#d704d9af8a2ba684e2600d9a215983d4141a979d" + integrity sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ== + dependencies: + aggregate-error "^3.0.0" + p-retry@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/p-retry/-/p-retry-3.0.1.tgz#316b4c8893e2c8dc1cfa891f406c4b422bebf328" @@ -10890,7 +10614,7 @@ pascalcase@^0.1.1: resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14" integrity sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ= -patch-package@6.2.2: +patch-package@6.2.2, patch-package@^6.2.2: version "6.2.2" resolved "https://registry.yarnpkg.com/patch-package/-/patch-package-6.2.2.tgz#71d170d650c65c26556f0d0fbbb48d92b6cc5f39" integrity sha512-YqScVYkVcClUY0v8fF0kWOjDYopzIM8e3bj/RU1DPeEF14+dCGm6UeOYm4jvCyxqIEQ5/eJzmbWfDWnUleFNMg== @@ -10908,25 +10632,6 @@ patch-package@6.2.2: slash "^2.0.0" tmp "^0.0.33" -patch-package@^6.2.2: - version "6.4.7" - resolved "https://registry.yarnpkg.com/patch-package/-/patch-package-6.4.7.tgz#2282d53c397909a0d9ef92dae3fdeb558382b148" - integrity sha512-S0vh/ZEafZ17hbhgqdnpunKDfzHQibQizx9g8yEf5dcVk3KOflOfdufRXQX8CSEkyOQwuM/bNz1GwKvFj54kaQ== - dependencies: - "@yarnpkg/lockfile" "^1.1.0" - chalk "^2.4.2" - cross-spawn "^6.0.5" - find-yarn-workspace-root "^2.0.0" - fs-extra "^7.0.1" - is-ci "^2.0.0" - klaw-sync "^6.0.0" - minimist "^1.2.0" - open "^7.4.2" - rimraf "^2.6.3" 
- semver "^5.6.0" - slash "^2.0.0" - tmp "^0.0.33" - path-browserify@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/path-browserify/-/path-browserify-0.0.1.tgz#e6c4ddd7ed3aa27c68a20cc4e50e1a4ee83bbc4a" @@ -11018,15 +10723,15 @@ path@^0.12.7: process "^0.11.1" util "^0.10.3" -pathval@^1.1.1: +pathval@^1.1.0: version "1.1.1" resolved "https://registry.yarnpkg.com/pathval/-/pathval-1.1.1.tgz#8534e77a77ce7ac5a2512ea21e0fdb8fcf6c3d8d" integrity sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ== pbkdf2@^3.0.17, pbkdf2@^3.0.3, pbkdf2@^3.0.9: - version "3.1.2" - resolved "https://registry.yarnpkg.com/pbkdf2/-/pbkdf2-3.1.2.tgz#dd822aa0887580e52f1a039dc3eda108efae3075" - integrity sha512-iuh7L6jA7JEGu2WxDwtQP1ddOpaJNC4KlDEFfdQajSGgGPNi4OyDc2R7QnbY2bR9QjBVGwgvTdNJZoE7RaxUMA== + version "3.1.1" + resolved "https://registry.yarnpkg.com/pbkdf2/-/pbkdf2-3.1.1.tgz#cb8724b0fada984596856d1a6ebafd3584654b94" + integrity sha512-4Ejy1OPxi9f2tt1rRV7Go7zmfDQ+ZectEQz3VGUQhgq62HtIRPDyG/JtnwIxs6x3uNMwo2V7q1fMvKjb+Tnpqg== dependencies: create-hash "^1.1.2" create-hmac "^1.1.4" @@ -11039,10 +10744,10 @@ performance-now@^2.1.0: resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" integrity sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns= -picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.2, picomatch@^2.2.3: - version "2.2.3" - resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.2.3.tgz#465547f359ccc206d3c48e46a1bcb89bf7ee619d" - integrity sha512-KpELjfwcCDUb9PeigTs2mBJzXUPzAuP2oPcA989He8Rte0+YUAjw1JVedDhuTKPkHjSYzMN3npC9luThGYEKdg== +picomatch@^2.0.4, picomatch@^2.0.5, picomatch@^2.2.1, picomatch@^2.2.2: + version "2.2.2" + resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.2.2.tgz#21f333e9b6b8eaff02468f5146ea406d345f4dad" + integrity sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg== pify@^2.0.0, pify@^2.3.0: 
version "2.3.0" @@ -11405,18 +11110,21 @@ postcss-selector-parser@^3.0.0: uniq "^1.0.1" postcss-selector-parser@^6.0.0, postcss-selector-parser@^6.0.2: - version "6.0.6" - resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.0.6.tgz#2c5bba8174ac2f6981ab631a42ab0ee54af332ea" - integrity sha512-9LXrvaaX3+mcv5xkg5kFwqSzSH1JIObIx51PrndZwlmznwXRfxMddDvo9gve3gVR8ZTKgoFDdWkbRFmEhT4PMg== + version "6.0.4" + resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.0.4.tgz#56075a1380a04604c38b063ea7767a129af5c2b3" + integrity sha512-gjMeXBempyInaBqpp8gODmwZ52WaYsVOsfr4L4lDQ7n3ncD6mEyySiDtgzCT+NYC0mmeOLvtsF8iaEf0YT6dBw== dependencies: cssesc "^3.0.0" + indexes-of "^1.0.1" + uniq "^1.0.1" util-deprecate "^1.0.2" -postcss-svgo@^4.0.3: - version "4.0.3" - resolved "https://registry.yarnpkg.com/postcss-svgo/-/postcss-svgo-4.0.3.tgz#343a2cdbac9505d416243d496f724f38894c941e" - integrity sha512-NoRbrcMWTtUghzuKSoIm6XV+sJdvZ7GZSc3wdBN0W19FTtp2ko8NqLsgoh/m9CzNhU3KLPvQmjIwtaNFkaFTvw== +postcss-svgo@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/postcss-svgo/-/postcss-svgo-4.0.2.tgz#17b997bc711b333bab143aaed3b8d3d6e3d38258" + integrity sha512-C6wyjo3VwFm0QgBy+Fu7gCYOkCmgmClghO+pjcxvrcBKtiKt0uCF+hvbMO1fyv5BMImRK90SMb+dwUnfbGd+jw== dependencies: + is-svg "^3.0.0" postcss "^7.0.0" postcss-value-parser "^3.0.0" svgo "^1.0.0" @@ -11480,18 +11188,18 @@ prepend-http@^2.0.0: integrity sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc= prettier-plugin-solidity@^1.0.0-alpha.27, prettier-plugin-solidity@^1.0.0-alpha.60: - version "1.0.0-beta.10" - resolved "https://registry.yarnpkg.com/prettier-plugin-solidity/-/prettier-plugin-solidity-1.0.0-beta.10.tgz#f2a249002733826b08d981b599335ddb7e93af8d" - integrity sha512-55UsEbeJfqYKB3RFR7Nvpi+ApEoUfgdKHVg2ZybrbOkRW4RTblyONLL3mEr8Vrxpo7wBbObVLbWodGg4YXIQ7g== + version "1.0.0-beta.5" + resolved 
"https://registry.yarnpkg.com/prettier-plugin-solidity/-/prettier-plugin-solidity-1.0.0-beta.5.tgz#04347bc3fb8deb5d097c9c823cbc01451a40da7a" + integrity sha512-Fd0a+rF/FD7dnN/ZyaSHjH9q/onw6Qd4lzU+nIPj9FoqBkt+WDUYLpiwuVZ/I0i5hZRTVAxiwErp7qmgdyqYpA== dependencies: - "@solidity-parser/parser" "^0.12.1" + "@solidity-parser/parser" "^0.11.0" dir-to-object "^2.0.0" - emoji-regex "^9.2.2" + emoji-regex "^9.2.1" escape-string-regexp "^4.0.0" prettier "^2.2.1" - semver "^7.3.5" - solidity-comments-extractor "^0.0.7" - string-width "^4.2.2" + semver "^7.3.4" + solidity-comments-extractor "^0.0.4" + string-width "^4.2.0" prettier@^1.14.3, prettier@^1.18.2: version "1.19.1" @@ -11499,9 +11207,9 @@ prettier@^1.14.3, prettier@^1.18.2: integrity sha512-s7PoyDv/II1ObgQunCbB9PdLmUcBZcnWOcxDh7O0N/UwDEsHyqkW+Qh28jW+mVuCdx7gLB0BotYI1Y6uI9iyew== prettier@^2.1.2, prettier@^2.2.1: - version "2.3.0" - resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.3.0.tgz#b6a5bf1284026ae640f17f7ff5658a7567fc0d18" - integrity sha512-kXtO4s0Lz/DW/IJ9QdWhAf7/NmPWQXkFr/r/WkR3vyI+0v8amTDxiaQSLzs8NBlytfLWX/7uQUMIW677yLKl4w== + version "2.2.1" + resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.2.1.tgz#795a1a78dd52f073da0cd42b21f9c91381923ff5" + integrity sha512-PqyhM2yCjg/oKkFPtTGUojv7gnZAoG80ttl45O6x2Ug/rMJw4wcc9k6aaf2hibP7BGVCCM33gZoGjyvt9mm16Q== pretty-error@^2.0.2: version "2.1.2" @@ -11511,11 +11219,6 @@ pretty-error@^2.0.2: lodash "^4.17.20" renderkid "^2.0.4" -printj@~1.1.0: - version "1.1.2" - resolved "https://registry.yarnpkg.com/printj/-/printj-1.1.2.tgz#d90deb2975a8b9f600fb3a1c94e3f4c53c78a222" - integrity sha512-zA2SmoLaxZyArQTOPj5LXecR+RagfPSU5Kw1qP+jkWeNlrq+eJZyY2oS68SU1Z/7/myXM4lo9716laOFAVStCQ== - private@^0.1.6, private@^0.1.8: version "0.1.8" resolved "https://registry.yarnpkg.com/private/-/private-0.1.8.tgz#2381edb3689f7a53d653190060fcf822d2f368ff" @@ -11702,11 +11405,9 @@ qs@6.7.0: integrity 
sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ== qs@^6.7.0: - version "6.10.1" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.10.1.tgz#4931482fa8d647a5aab799c5271d2133b981fb6a" - integrity sha512-M528Hph6wsSVOBiYUnGf+K/7w0hNshs/duGsNXPUCLH5XAqjEtiPGwNONLV0tBH8NoGb0mvD5JubnUTrujKDTg== - dependencies: - side-channel "^1.0.4" + version "6.9.6" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.9.6.tgz#26ed3c8243a431b2924aca84cc90471f35d5a0ee" + integrity sha512-TIRk4aqYLNoJUbd+g2lEdz5kLWIuTMRagAXxl78Q0RiVjAOugHmeKNGdd3cwo/ktpf9aL9epCfFqWDEKysUlLQ== qs@~6.5.2: version "6.5.2" @@ -11751,9 +11452,9 @@ querystringify@^2.1.1: integrity sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ== queue-microtask@^1.2.2: - version "1.2.3" - resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" - integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== + version "1.2.2" + resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.2.tgz#abf64491e6ecf0f38a6502403d4cda04f372dfd3" + integrity sha512-dB15eXv3p2jDlbOiNLyMabYg1/sXvppd8DP2J3EOCQ0AkuSXCW2tP7mnVouVLJKgUMY6yP0kcQDVpLCN13h4Xg== randombytes@^2.0.0, randombytes@^2.0.1, randombytes@^2.0.5, randombytes@^2.0.6, randombytes@^2.1.0: version "2.1.0" @@ -11855,7 +11556,7 @@ readable-stream@^1.0.33: isarray "0.0.1" string_decoder "~0.10.x" -readable-stream@^3.0.6, readable-stream@^3.1.0, readable-stream@^3.1.1, readable-stream@^3.4.0, readable-stream@^3.6.0: +readable-stream@^3.0.6, readable-stream@^3.1.1, readable-stream@^3.6.0: version "3.6.0" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198" integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA== @@ -12008,9 +11709,9 @@ 
regjsparser@^0.1.4: jsesc "~0.5.0" regjsparser@^0.6.4: - version "0.6.9" - resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.6.9.tgz#b489eef7c9a2ce43727627011429cf833a7183e6" - integrity sha512-ZqbNRz1SNjLAiYuwY0zoXW8Ne675IX5q+YHioAGbCw4X96Mjl2+dcX9B2ciaeyYjViDAfvIjFpQjJgLttTEERQ== + version "0.6.7" + resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.6.7.tgz#c00164e1e6713c2e3ee641f1701c4b7aa0a7f86c" + integrity sha512-ib77G0uxsA2ovgiYbCVGx4Pv3PSttAx2vIwidqQzbL2U5S4Q+j00HdSAneSBuyVcMvEnTXMjiGgB+DlXozVhpQ== dependencies: jsesc "~0.5.0" @@ -12036,9 +11737,9 @@ renderkid@^2.0.4: strip-ansi "^3.0.0" repeat-element@^1.1.2: - version "1.1.4" - resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.4.tgz#be681520847ab58c7568ac75fbfad28ed42d39e9" - integrity sha512-LFiNfRcSu7KK3evMyYOuCzv3L10TW7yC1G2/+StMjK8Y6Vqd2MG7r/Qjw4ghtuCOjFvlnms/iMmLqpvW/ES/WQ== + version "1.1.3" + resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.3.tgz#782e0d825c0c5a3bb39731f84efee6b742e6b1ce" + integrity sha512-ahGq0ZnV5m5XtZLMb+vP76kcAM5nkLqk0lpqAuojSKGgQtn4eRi4ZZGm2olo2zKFH+sMsWaqOCW1dqAnOru72g== repeat-string@^1.6.1: version "1.6.1" @@ -12137,7 +11838,7 @@ resolve@1.17.0, resolve@~1.17.0: dependencies: path-parse "^1.0.6" -resolve@^1.10.0, resolve@^1.12.0, resolve@^1.14.2, resolve@^1.17.0, resolve@^1.3.2, resolve@^1.8.1: +resolve@^1.10.0, resolve@^1.12.0, resolve@^1.17.0, resolve@^1.3.2, resolve@^1.8.1: version "1.20.0" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.20.0.tgz#629a013fb3f70755d6f0b7935cc1c2c5378b1975" integrity sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A== @@ -12192,7 +11893,7 @@ rgba-regex@^1.0.0: resolved "https://registry.yarnpkg.com/rgba-regex/-/rgba-regex-1.0.0.tgz#43374e2e2ca0968b0ef1523460b7d730ff22eeb3" integrity sha1-QzdOLiyglosO8VI0YLfXMP8i7rM= -rimraf@2, rimraf@^2.2.8, rimraf@^2.5.4, rimraf@^2.6.3: +rimraf@2, rimraf@^2.2.8, 
rimraf@^2.5.4, rimraf@^2.6.3, rimraf@^2.7.1: version "2.7.1" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec" integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== @@ -12229,9 +11930,9 @@ rlp@^2.0.0, rlp@^2.2.1, rlp@^2.2.2, rlp@^2.2.3, rlp@^2.2.4: bn.js "^4.11.1" rollup-plugin-copy@^3.3.0: - version "3.4.0" - resolved "https://registry.yarnpkg.com/rollup-plugin-copy/-/rollup-plugin-copy-3.4.0.tgz#f1228a3ffb66ffad8606e2f3fb7ff23141ed3286" - integrity sha512-rGUmYYsYsceRJRqLVlE9FivJMxJ7X6jDlP79fmFkL8sJs7VVMSVyA2yfyL+PGyO/vJs4A87hwhgVfz61njI+uQ== + version "3.3.0" + resolved "https://registry.yarnpkg.com/rollup-plugin-copy/-/rollup-plugin-copy-3.3.0.tgz#5ba230047f86b9f703a29288f242948a5580e7b9" + integrity sha512-euDjCUSBXZa06nqnwCNADbkAcYDfzwowfZQkto9K/TFhiH+QG7I4PUsEMwM9tDgomGWJc//z7KLW8t+tZwxADA== dependencies: "@types/fs-extra" "^8.0.1" colorette "^1.1.0" @@ -12250,9 +11951,9 @@ rollup-plugin-terser@^7.0.0: terser "^5.0.0" rollup@^2.32.1: - version "2.48.0" - resolved "https://registry.yarnpkg.com/rollup/-/rollup-2.48.0.tgz#fceb01ed771f991f29f7bd2ff7838146e55acb74" - integrity sha512-wl9ZSSSsi5579oscSDYSzGn092tCS076YB+TQrzsGuSfYyJeep8eEWj0eaRjuC5McuMNmcnR8icBqiE/FWNB1A== + version "2.39.0" + resolved "https://registry.yarnpkg.com/rollup/-/rollup-2.39.0.tgz#be4f98c9e421793a8fec82c854fb567c35e22ab6" + integrity sha512-+WR3bttcq7zE+BntH09UxaW3bQo3vItuYeLsyk4dL2tuwbeSKJuvwiawyhEnvRdRgrII0Uzk00FpctHO/zB1kw== optionalDependencies: fsevents "~2.3.1" @@ -12281,9 +11982,9 @@ rustbn.js@~0.2.0: integrity sha512-4VlvkRUuCJvr2J6Y0ImW7NvTCriMi7ErOAqWk1y69vAdoNIzCF3yPmgeNzx+RQTLEDFq5sHfscn1MwHxP9hNfA== rxjs@^6.4.0: - version "6.6.7" - resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-6.6.7.tgz#90ac018acabf491bf65044235d5863c4dab804c9" - integrity sha512-hTdwr+7yYNIT5n4AMYp85KA6yw2Va0FLa3Rguvbpa4W3I5xynaBZo41cM3XM+4Q6fRMj3sBYIR1VAmZMXYJvRQ== + version "6.6.3" + resolved 
"https://registry.yarnpkg.com/rxjs/-/rxjs-6.6.3.tgz#8ca84635c4daa900c0d3967a6ee7ac60271ee552" + integrity sha512-trsQc+xYYXZ3urjOiJOuCOa5N3jAZ3eiSpQB5hIT8zGlL2QfnHLJ2r7GMkBGuIausdJN1OneaI6gQlsqNHHmZQ== dependencies: tslib "^1.9.0" @@ -12351,7 +12052,7 @@ schema-utils@^1.0.0: ajv-errors "^1.0.0" ajv-keywords "^3.1.0" -schema-utils@^2.0.0, schema-utils@^2.5.0, schema-utils@^2.6.1, schema-utils@^2.6.5, schema-utils@^2.7.0: +schema-utils@^2.0.0, schema-utils@^2.5.0, schema-utils@^2.6.1, schema-utils@^2.6.5, schema-utils@^2.6.6, schema-utils@^2.7.0: version "2.7.1" resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-2.7.1.tgz#1ca4f32d1b24c590c203b8e7a50bf0ea4cd394d7" integrity sha512-SHiNtMOUGWBQJwzISiVYKu82GiV4QYGePp3odlY1tuKO7gPtphAT5R/py0fA6xtbgLL/RvtJZnU9b8s0F1q0Xg== @@ -12400,23 +12101,18 @@ select-hose@^2.0.0: integrity sha1-Yl2GWPhlr0Psliv8N2o3NZpJlMo= selfsigned@^1.10.8: - version "1.10.11" - resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-1.10.11.tgz#24929cd906fe0f44b6d01fb23999a739537acbe9" - integrity sha512-aVmbPOfViZqOZPgRBT0+3u4yZFHpmnIghLMlAcb5/xhp5ZtB/RVnKhz5vl2M32CLXAqR4kha9zfhNg0Lf/sxKA== + version "1.10.8" + resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-1.10.8.tgz#0d17208b7d12c33f8eac85c41835f27fc3d81a30" + integrity sha512-2P4PtieJeEwVgTU9QEcwIRDQ/mXJLX8/+I3ur+Pg16nS8oNbrGxEso9NyYWy8NAmXiNl4dlAp5MwoNeCWzON4w== dependencies: node-forge "^0.10.0" -semaphore-async-await@^1.5.1: - version "1.5.1" - resolved "https://registry.yarnpkg.com/semaphore-async-await/-/semaphore-async-await-1.5.1.tgz#857bef5e3644601ca4b9570b87e9df5ca12974fa" - integrity sha1-hXvvXjZEYBykuVcLh+nfXKEpdPo= - semaphore@>=1.0.1, semaphore@^1.0.3, semaphore@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/semaphore/-/semaphore-1.1.0.tgz#aaad8b86b20fe8e9b32b16dc2ee682a8cd26a8aa" integrity sha512-O4OZEaNtkMd/K0i6js9SL+gqy0ZCBMgUvlSqHKi4IBdjhe7wB8pwztUk1BbZ1fmrvpwFrPbHzqd2w5pTcJH6LA== -"semver@2 || 3 || 4 || 5", semver@^5.3.0, 
semver@^5.5.0, semver@^5.5.1, semver@^5.6.0, semver@^5.7.0: +"semver@2 || 3 || 4 || 5", semver@^5.3.0, semver@^5.4.1, semver@^5.5.0, semver@^5.5.1, semver@^5.6.0, semver@^5.7.0: version "5.7.1" resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== @@ -12426,15 +12122,15 @@ semver@7.0.0: resolved "https://registry.yarnpkg.com/semver/-/semver-7.0.0.tgz#5f3ca35761e47e05b206c6daff2cf814f0316b8e" integrity sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A== -semver@^6.0.0, semver@^6.1.0, semver@^6.1.1, semver@^6.1.2, semver@^6.3.0: +semver@^6.0.0, semver@^6.1.0, semver@^6.3.0: version "6.3.0" resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== -semver@^7.2.1, semver@^7.3.2, semver@^7.3.5: - version "7.3.5" - resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.5.tgz#0b621c879348d8998e4b0e4be94b3f12e6018ef7" - integrity sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ== +semver@^7.2.1, semver@^7.3.2, semver@^7.3.4: + version "7.3.4" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.4.tgz#27aaa7d2e4ca76452f98d3add093a72c943edc97" + integrity sha512-tCfb2WLjqFAtXn4KEdxIhalnRtoKFN7nAwj0B3ZXCbQloV2tq5eDbcTmT68JJD3nRJq24/XgxtQKFIpQdtvmVw== dependencies: lru-cache "^6.0.0" @@ -12594,15 +12290,6 @@ shell-quote@^1.6.1: resolved "https://registry.yarnpkg.com/shell-quote/-/shell-quote-1.7.2.tgz#67a7d02c76c9da24f99d20808fcaded0e0e04be2" integrity sha512-mRz/m/JVscCrkMyPqHc/bczi3OQHkLTqXHEFu0zDhK/qfv3UcOA4SVmRCLmos4bhjr9ekVQubj/R7waKapmiQg== -side-channel@^1.0.4: - version "1.0.4" - resolved 
"https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" - integrity sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw== - dependencies: - call-bind "^1.0.0" - get-intrinsic "^1.0.2" - object-inspect "^1.9.0" - signal-exit@^3.0.0, signal-exit@^3.0.2: version "3.0.3" resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.3.tgz#a1410c2edd8f077b08b4e253c8eacfcaf057461c" @@ -12693,16 +12380,16 @@ snapdragon@^0.8.1: use "^3.1.0" sockjs-client@^1.5.0: - version "1.5.1" - resolved "https://registry.yarnpkg.com/sockjs-client/-/sockjs-client-1.5.1.tgz#256908f6d5adfb94dabbdbd02c66362cca0f9ea6" - integrity sha512-VnVAb663fosipI/m6pqRXakEOw7nvd7TUgdr3PlR/8V2I95QIdwT8L4nMxhyU8SmDBHYXU1TOElaKOmKLfYzeQ== + version "1.5.0" + resolved "https://registry.yarnpkg.com/sockjs-client/-/sockjs-client-1.5.0.tgz#2f8ff5d4b659e0d092f7aba0b7c386bd2aa20add" + integrity sha512-8Dt3BDi4FYNrCFGTL/HtwVzkARrENdwOUf1ZoW/9p3M8lZdFT35jVdrHza+qgxuG9H3/shR4cuX/X9umUrjP8Q== dependencies: debug "^3.2.6" eventsource "^1.0.7" faye-websocket "^0.11.3" inherits "^2.0.4" json3 "^3.3.3" - url-parse "^1.5.1" + url-parse "^1.4.7" sockjs@^0.3.21: version "0.3.21" @@ -12769,11 +12456,11 @@ solc@^0.6.3: tmp "0.0.33" solhint@^3.3.2: - version "3.3.4" - resolved "https://registry.yarnpkg.com/solhint/-/solhint-3.3.4.tgz#81770c60eeb027e6e447cb91ed599baf5e888e09" - integrity sha512-AEyjshF/PC6kox1c1l79Pji+DK9WVuk5u2WEh6bBKt188gWa63NBOAgYg0fBRr5CTUmsuGc1sGH7dgUVs83mKw== + version "3.3.2" + resolved "https://registry.yarnpkg.com/solhint/-/solhint-3.3.2.tgz#ebd7270bb50fd378b427d7a6fc9f2a7fd00216c0" + integrity sha512-8tHCkIAk1axLLG6Qu2WIH3GgNABonj9eAWejJbov3o3ujkZQRNHeHU1cC4/Dmjsh3Om7UzFFeADUHu2i7ZJeiw== dependencies: - "@solidity-parser/parser" "^0.12.0" + "@solidity-parser/parser" "^0.8.2" ajv "^6.6.1" antlr4 "4.7.1" ast-parents "0.0.1" @@ -12790,10 +12477,10 @@ solhint@^3.3.2: optionalDependencies: prettier 
"^1.14.3" -solidity-comments-extractor@^0.0.7: - version "0.0.7" - resolved "https://registry.yarnpkg.com/solidity-comments-extractor/-/solidity-comments-extractor-0.0.7.tgz#99d8f1361438f84019795d928b931f4e5c39ca19" - integrity sha512-wciNMLg/Irp8OKGrh3S2tfvZiZ0NEyILfcRCXCD4mp7SgK/i9gzLfhY2hY7VMCQJ3kH9UB9BzNdibIVMchzyYw== +solidity-comments-extractor@^0.0.4: + version "0.0.4" + resolved "https://registry.yarnpkg.com/solidity-comments-extractor/-/solidity-comments-extractor-0.0.4.tgz#ce420aef23641ffd0131c7d80ba85b6e1e42147e" + integrity sha512-58glBODwXIKMaQ7rfcJOrWtFQMMOK28tJ0/LcB5Xhu7WtAxk4UX2fpgKPuaL41XjMp/y0gAa1MTLqk018wuSzA== solpp@^0.10.1: version "0.10.2" @@ -12910,9 +12597,9 @@ spdx-expression-parse@^3.0.0: spdx-license-ids "^3.0.0" spdx-license-ids@^3.0.0: - version "3.0.8" - resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.8.tgz#eb1e97ad99b11bf3f82a3b71a0472dd9a00f2ecf" - integrity sha512-NDgA96EnaLSvtbM7trJj+t1LUR3pirkDCcz9nOUlPb5DMBGsH7oES6C3hs3j7R9oHEa1EMvReS/BUAIT5Tcr0g== + version "3.0.7" + resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.7.tgz#e9c18a410e5ed7e12442a549fbd8afa767038d65" + integrity sha512-U+MTEOO0AiDzxwFvoa4JVnMV6mZlJKk2sBLt90s7G0Gd0Mlknc7kxEn3nuDPNZRta7O2uy8oLcZLVT+4sqNZHQ== spdy-transport@^3.0.0: version "3.0.0" @@ -12965,17 +12652,18 @@ sshpk@^1.7.0: tweetnacl "~0.14.0" ssri@^6.0.1: - version "6.0.2" - resolved "https://registry.yarnpkg.com/ssri/-/ssri-6.0.2.tgz#157939134f20464e7301ddba3e90ffa8f7728ac5" - integrity sha512-cepbSq/neFK7xB6A50KHN0xHDotYzq58wWCa5LeWqnPrHG8GzfEjO/4O8kpmcGW+oaxkvhEJCWgbgNk4/ZV93Q== + version "6.0.1" + resolved "https://registry.yarnpkg.com/ssri/-/ssri-6.0.1.tgz#2a3c41b28dd45b62b63676ecb74001265ae9edd8" + integrity sha512-3Wge10hNcT1Kur4PDFwEieXSCMCJs/7WvSACcrMYrNp+b8kDL1/0wJch5Ni2WrtwEa2IO8OsVfeKIciKCDx/QA== dependencies: figgy-pudding "^3.5.1" -ssri@^8.0.1: - version "8.0.1" - resolved 
"https://registry.yarnpkg.com/ssri/-/ssri-8.0.1.tgz#638e4e439e2ffbd2cd289776d5ca457c4f51a2af" - integrity sha512-97qShzy1AiyxvPNIkLWoGua7xoQzzPjQ0HAH4B0rWKo7SZ6USuPcrUiAFrws0UH8RrbWmgq3LMTObhPIHbbBeQ== +ssri@^7.0.0, ssri@^7.1.0: + version "7.1.0" + resolved "https://registry.yarnpkg.com/ssri/-/ssri-7.1.0.tgz#92c241bf6de82365b5c7fb4bd76e975522e1294d" + integrity sha512-77/WrDZUWocK0mvA5NTRQyveUf+wsrIc6vyrxpS8tVvYBcX215QbafrJR3KtkpskIzoFLqqNuuYQvxaMjXJ/0g== dependencies: + figgy-pudding "^3.5.1" minipass "^3.1.1" stable@^0.1.8: @@ -13086,38 +12774,38 @@ string-width@^3.0.0, string-width@^3.1.0: is-fullwidth-code-point "^2.0.0" strip-ansi "^5.1.0" -string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.2: - version "4.2.2" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.2.tgz#dafd4f9559a7585cfba529c6a0a4f73488ebd4c5" - integrity sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA== +string-width@^4.1.0, string-width@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.0.tgz#952182c46cc7b2c313d1596e623992bd163b72b5" + integrity sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg== dependencies: emoji-regex "^8.0.0" is-fullwidth-code-point "^3.0.0" strip-ansi "^6.0.0" string.prototype.trim@~1.2.1: - version "1.2.4" - resolved "https://registry.yarnpkg.com/string.prototype.trim/-/string.prototype.trim-1.2.4.tgz#6014689baf5efaf106ad031a5fa45157666ed1bd" - integrity sha512-hWCk/iqf7lp0/AgTF7/ddO1IWtSNPASjlzCicV5irAVdE1grjsneK26YG6xACMBEdCvO8fUST0UzDMh/2Qy+9Q== + version "1.2.3" + resolved "https://registry.yarnpkg.com/string.prototype.trim/-/string.prototype.trim-1.2.3.tgz#d23a22fde01c1e6571a7fadcb9be11decd8061a7" + integrity sha512-16IL9pIBA5asNOSukPfxX2W68BaBvxyiRK16H3RA/lWW9BDosh+w7f+LhomPHpXJ82QEe7w7/rY/S1CV97raLg== dependencies: - call-bind "^1.0.2" + call-bind "^1.0.0" define-properties "^1.1.3" - es-abstract 
"^1.18.0-next.2" + es-abstract "^1.18.0-next.1" -string.prototype.trimend@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.4.tgz#e75ae90c2942c63504686c18b287b4a0b1a45f80" - integrity sha512-y9xCjw1P23Awk8EvTpcyL2NIr1j7wJ39f+k6lvRnSMz+mz9CGz9NYPelDk42kOz6+ql8xjfK8oYzy3jAP5QU5A== +string.prototype.trimend@^1.0.1, string.prototype.trimend@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.3.tgz#a22bd53cca5c7cf44d7c9d5c732118873d6cd18b" + integrity sha512-ayH0pB+uf0U28CtjlLvL7NaohvR1amUvVZk+y3DYb0Ey2PUV5zPkkKy9+U1ndVEIXO8hNg18eIv9Jntbii+dKw== dependencies: - call-bind "^1.0.2" + call-bind "^1.0.0" define-properties "^1.1.3" -string.prototype.trimstart@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.4.tgz#b36399af4ab2999b4c9c648bd7a3fb2bb26feeed" - integrity sha512-jh6e984OBfvxS50tdY2nRZnoC5/mLFKOREQfw8t5yytkoUsJRNxvI/E39qu1sD0OtWI3OC0XgKSmcWwziwYuZw== +string.prototype.trimstart@^1.0.1, string.prototype.trimstart@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.3.tgz#9b4cb590e123bb36564401d59824298de50fd5aa" + integrity sha512-oBIBUy5lea5tt0ovtOFiEQaBkoBBkyJhZXzJYrSmDo5IUUqbOPvVezuRs/agBIdZ2p2Eo1FD6bD9USyBLfl3xg== dependencies: - call-bind "^1.0.2" + call-bind "^1.0.0" define-properties "^1.1.3" string_decoder@^1.0.0, string_decoder@^1.1.1: @@ -13309,16 +12997,14 @@ table@^5.2.3: string-width "^3.0.0" table@^6.0.4: - version "6.7.1" - resolved "https://registry.yarnpkg.com/table/-/table-6.7.1.tgz#ee05592b7143831a8c94f3cee6aae4c1ccef33e2" - integrity sha512-ZGum47Yi6KOOFDE8m223td53ath2enHcYLgOCjGr5ngu8bdIARQk6mN/wRMv4yMRcHnCSnHbCEha4sobQx5yWg== + version "6.0.7" + resolved "https://registry.yarnpkg.com/table/-/table-6.0.7.tgz#e45897ffbcc1bcf9e8a87bf420f2c9e5a7a52a34" + 
integrity sha512-rxZevLGTUzWna/qBLObOe16kB2RTnnbhciwgPbMMlazz1yZGVEgnZK762xyVdVznhqxrfCeBMmMkgOOaPwjH7g== dependencies: - ajv "^8.0.1" - lodash.clonedeep "^4.5.0" - lodash.truncate "^4.4.2" + ajv "^7.0.2" + lodash "^4.17.20" slice-ansi "^4.0.0" string-width "^4.2.0" - strip-ansi "^6.0.0" tabtab@^3.0.2: version "3.0.2" @@ -13380,7 +13066,7 @@ tar@^4.0.2: safe-buffer "^5.1.2" yallist "^3.0.3" -terser-webpack-plugin@^1.4.3, terser-webpack-plugin@^1.4.4: +terser-webpack-plugin@^1.4.3: version "1.4.5" resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-1.4.5.tgz#a217aefaea330e734ffacb6120ec1fa312d6040b" integrity sha512-04Rfe496lN8EYruwi6oPQkG0vo8C+HT49X687FZnpPF0qMAIHONI6HEXYPKDOE8e5HjXTyKfqRd/agHtH0kOtw== @@ -13395,7 +13081,22 @@ terser-webpack-plugin@^1.4.3, terser-webpack-plugin@^1.4.4: webpack-sources "^1.4.0" worker-farm "^1.7.0" -terser@^4.1.2: +terser-webpack-plugin@^2.3.6: + version "2.3.8" + resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-2.3.8.tgz#894764a19b0743f2f704e7c2a848c5283a696724" + integrity sha512-/fKw3R+hWyHfYx7Bv6oPqmk4HGQcrWLtV3X6ggvPuwPNHSnzvVV51z6OaaCOus4YLjutYGOz3pEpbhe6Up2s1w== + dependencies: + cacache "^13.0.1" + find-cache-dir "^3.3.1" + jest-worker "^25.4.0" + p-limit "^2.3.0" + schema-utils "^2.6.6" + serialize-javascript "^4.0.0" + source-map "^0.6.1" + terser "^4.6.12" + webpack-sources "^1.4.3" + +terser@^4.1.2, terser@^4.6.12: version "4.8.0" resolved "https://registry.yarnpkg.com/terser/-/terser-4.8.0.tgz#63056343d7c70bb29f3af665865a46fe03a0df17" integrity sha512-EAPipTNeWsb/3wLPeup1tVPaXfIaU68xMnVdPafIL1TV05OhASArYyIfFvnvJCNrR2NIOvDVNNTFRa+Re2MWyw== @@ -13405,9 +13106,9 @@ terser@^4.1.2: source-map-support "~0.5.12" terser@^5.0.0: - version "5.7.0" - resolved "https://registry.yarnpkg.com/terser/-/terser-5.7.0.tgz#a761eeec206bc87b605ab13029876ead938ae693" - integrity sha512-HP5/9hp2UaZt5fYkuhNBR8YyRcT8juw8+uFbAme53iN9hblvKnLUTKkmwJG6ocWpIKf8UK4DoeWG4ty0J6S6/g== + 
version "5.6.0" + resolved "https://registry.yarnpkg.com/terser/-/terser-5.6.0.tgz#138cdf21c5e3100b1b3ddfddf720962f88badcd2" + integrity sha512-vyqLMoqadC1uR0vywqOZzriDYzgEkNJFK4q9GeyOBHIbiECHiWLKcWfbQWAUaPfxkjDhapSlZB9f7fkMrvkVjA== dependencies: commander "^2.20.0" source-map "~0.7.2" @@ -13605,11 +13306,6 @@ ts-essentials@^1.0.0: resolved "https://registry.yarnpkg.com/ts-essentials/-/ts-essentials-1.0.4.tgz#ce3b5dade5f5d97cf69889c11bf7d2da8555b15a" integrity sha512-q3N1xS4vZpRouhYHDPwO0bDW3EZ6SK9CrrDHxi/D6BPReSjpVgWIOpLS2o0gSBZm+7q/wyKp6RVM1AeeW7uyfQ== -ts-essentials@^6.0.3: - version "6.0.7" - resolved "https://registry.yarnpkg.com/ts-essentials/-/ts-essentials-6.0.7.tgz#5f4880911b7581a873783740ce8b94da163d18a6" - integrity sha512-2E4HIIj4tQJlIHuATRHayv0EfMGK3ris/GRk1E3CFnsZzeNV+hUmelbaTZHLtXaZppM5oLhHRtO04gINC4Jusw== - ts-essentials@^7.0.1: version "7.0.1" resolved "https://registry.yarnpkg.com/ts-essentials/-/ts-essentials-7.0.1.tgz#d205508cae0cdadfb73c89503140cf2228389e2d" @@ -13684,9 +13380,9 @@ tsutils@^2.29.0: tslib "^1.8.1" tsutils@^3.17.1: - version "3.21.0" - resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623" - integrity sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA== + version "3.20.0" + resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-3.20.0.tgz#ea03ea45462e146b53d70ce0893de453ff24f698" + integrity sha512-RYbuQuvkhuqVeXweWT3tJLKOEJ/UUw9GjNEZGWdrLLlM+611o1gwLHBpxoFJKKl25fLprp2eVthtKs5JOrNeXg== dependencies: tslib "^1.8.1" @@ -13731,20 +13427,15 @@ type-check@~0.3.2: dependencies: prelude-ls "~1.1.2" -type-detect@4.0.8, type-detect@^4.0.0, type-detect@^4.0.5: +type-detect@^4.0.0, type-detect@^4.0.5: version "4.0.8" resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== 
-type-fest@^0.20.2: - version "0.20.2" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" - integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== - -type-fest@^0.21.3: - version "0.21.3" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.21.3.tgz#d260a24b0198436e133fa26a524a6d65fa3b2e37" - integrity sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w== +type-fest@^0.11.0: + version "0.11.0" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.11.0.tgz#97abf0872310fed88a5c466b25681576145e33f1" + integrity sha512-OdjXJxnCN1AvyLSzeKIgXTXxV+99ZuXl3Hpo9XpJAv9MBcHrrJOQ5kV7ypXOuQie+AmWG25hLbiKdwYTifzcfQ== type-fest@^0.6.0: version "0.6.0" @@ -13775,27 +13466,14 @@ type@^1.0.1: integrity sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg== type@^2.0.0: - version "2.5.0" - resolved "https://registry.yarnpkg.com/type/-/type-2.5.0.tgz#0a2e78c2e77907b252abe5f298c1b01c63f0db3d" - integrity sha512-180WMDQaIMm3+7hGXWf12GtdniDEy7nYcyFMKJn/eZz/6tSLXrUN9V0wKSbMjej0I1WHWbpREDEKHtqPQa9NNw== - -typechain@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/typechain/-/typechain-3.0.0.tgz#d5a47700831f238e43f7429b987b4bb54849b92e" - integrity sha512-ft4KVmiN3zH4JUFu2WJBrwfHeDf772Tt2d8bssDTo/YcckKW2D+OwFrHXRC6hJvO3mHjFQTihoMV6fJOi0Hngg== - dependencies: - command-line-args "^4.0.7" - debug "^4.1.1" - fs-extra "^7.0.0" - js-sha3 "^0.8.0" - lodash "^4.17.15" - ts-essentials "^6.0.3" - ts-generator "^0.1.1" + version "2.2.0" + resolved "https://registry.yarnpkg.com/type/-/type-2.2.0.tgz#3edd448793f517d8b9dd108b486a043f5befd91f" + integrity sha512-M/u37b4oSGlusaU8ZB96BfFPWQ8MbsZYXB+kXGMiDj6IKinkcNaQvmirBuWj8mAXqP6LYn1rQvbTYum3yPhaOA== typechain@^4.0.0: - version "4.0.3" - resolved 
"https://registry.yarnpkg.com/typechain/-/typechain-4.0.3.tgz#e8fcd6c984676858c64eeeb155ea783a10b73779" - integrity sha512-tmoHQeXZWHxIdeLK+i6dU0CU0vOd9Cndr3jFTZIMzak5/YpFZ8XoiYpTZcngygGBqZo+Z1EUmttLbW9KkFZLgQ== + version "4.0.1" + resolved "https://registry.yarnpkg.com/typechain/-/typechain-4.0.1.tgz#b40eaf5ede15588d97a4b9a5f85120f7ea1cf262" + integrity sha512-H/1VpRmplp1qhCTVLU9PCgzyVCQ7Lth7YvaaI1hTvT31IpWnLLNpDpQD4vXJGr26T9BsZ0ZIceOwieAbcoywXw== dependencies: command-line-args "^4.0.7" debug "^4.1.1" @@ -13818,9 +13496,9 @@ typedarray@^0.0.6: integrity sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c= typescript@^4.0.2, typescript@^4.0.5: - version "4.2.4" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.2.4.tgz#8610b59747de028fda898a8aef0e103f156d0961" - integrity sha512-V+evlYHZnQkaz8TRBuxTA92yZBPotr5H+WhQ7bD3hZUndx5tGOa1fuCgeSjxAzM1RiN5IzvadIXTVefuuwZCRg== + version "4.1.5" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.1.5.tgz#123a3b214aaff3be32926f0d8f1f6e704eb89a72" + integrity sha512-6OSu9PTIzmn9TCDiovULTnET6BgXtDYL4Gg4szY+cGsc3JP1dQL8qvE8kShTRx1NIw4Q9IBHlwODjkjWEtMUyA== typewise-core@^1.2, typewise-core@^1.2.0: version "1.2.0" @@ -13858,30 +13536,15 @@ uglify-js@3.4.x: source-map "~0.6.1" uglify-js@^3.1.4: - version "3.13.7" - resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.13.7.tgz#25468a3b39b1c875df03f0937b2b7036a93f3fee" - integrity sha512-1Psi2MmnZJbnEsgJJIlfnd7tFlJfitusmR7zDI8lXlFI0ACD4/Rm/xdrU8bh6zF0i74aiVoBtkRiFulkrmh3AA== + version "3.12.8" + resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.12.8.tgz#a82e6e53c9be14f7382de3d068ef1e26e7d4aaf8" + integrity sha512-fvBeuXOsvqjecUtF/l1dwsrrf5y2BCUk9AOJGzGcm6tE7vegku5u/YvqjyDaAGr422PLoLnrxg3EnRvTqsdC1w== ultron@~1.1.0: version "1.1.1" resolved "https://registry.yarnpkg.com/ultron/-/ultron-1.1.1.tgz#9fe1536a10a664a65266a1e3ccf85fd36302bc9c" integrity sha512-UIEXBNeYmKptWH6z8ZnqTeS8fV74zG0/eRU9VGkpzz+LIJNs8W/zM/L+7ctCkRrgbNnnR0xxw4bKOr0cW0N0Og== 
-unbox-primitive@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/unbox-primitive/-/unbox-primitive-1.0.1.tgz#085e215625ec3162574dc8859abee78a59b14471" - integrity sha512-tZU/3NqK3dA5gpE1KtyiJUrEB0lxnGkMFHptJ7q6ewdZ8s12QrODwNbhIJStmJkd1QDXa1NRA8aF2A1zk/Ypyw== - dependencies: - function-bind "^1.1.1" - has-bigints "^1.0.1" - has-symbols "^1.0.2" - which-boxed-primitive "^1.0.2" - -underscore@1.12.1: - version "1.12.1" - resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.12.1.tgz#7bb8cc9b3d397e201cf8553336d262544ead829e" - integrity sha512-hEQt0+ZLDVUMhebKxL4x1BTtDY7bavVofhZ9KZ4aI26X9SRaE+Y3m83XUL1UP2jn8ynjndwCCpEHdUG+9pP1Tw== - underscore@1.9.1: version "1.9.1" resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.9.1.tgz#06dce34a0e68a7babc29b365b8e74b8925203961" @@ -14022,10 +13685,10 @@ url-parse-lax@^3.0.0: dependencies: prepend-http "^2.0.0" -url-parse@^1.4.3, url-parse@^1.5.1: - version "1.5.1" - resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.5.1.tgz#d5fa9890af8a5e1f274a2c98376510f6425f6e3b" - integrity sha512-HOfCOUJt7iSYzEx/UqgtwKRMC6EU91NFhsCHMv9oM03VJcVo2Qrp8T8kI9D7amFf1cu+/3CEhgb3rF9zL7k85Q== +url-parse@^1.4.3, url-parse@^1.4.7: + version "1.4.7" + resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.4.7.tgz#a8a83535e8c00a316e403a5db4ac1b9b853ae278" + integrity sha512-d3uaVyzDB9tQoSXFvuSUNFibTd9zxd2bkVrDRvF5TmvWWQwqE4lgYJ5m+x1DbecWkw+LK4RNl2CU1hHuOKPVlg== dependencies: querystringify "^2.1.1" requires-port "^1.0.0" @@ -14054,9 +13717,9 @@ use@^3.1.0: integrity sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ== utf-8-validate@^5.0.2: - version "5.0.5" - resolved "https://registry.yarnpkg.com/utf-8-validate/-/utf-8-validate-5.0.5.tgz#dd32c2e82c72002dc9f02eb67ba6761f43456ca1" - integrity sha512-+pnxRYsS/axEpkrrEpzYfNZGXp0IjC/9RIxwM5gntY4Koi8SHmUGSfxfWqxZdRxrtaoVstuOzUp/rbs3JSPELQ== + version "5.0.4" + resolved 
"https://registry.yarnpkg.com/utf-8-validate/-/utf-8-validate-5.0.4.tgz#72a1735983ddf7a05a43a9c6b67c5ce1c910f9b8" + integrity sha512-MEF05cPSq3AwJ2C7B7sHAA6i53vONoZbMGX8My5auEVm6W+dJ2Jd/TZPyGJ5CH42V2XtbI5FD28HeHeqlPzZ3Q== dependencies: node-gyp-build "^4.2.0" @@ -14078,7 +13741,7 @@ util.promisify@1.0.0: define-properties "^1.1.2" object.getownpropertydescriptors "^2.0.3" -util.promisify@^1.0.0, util.promisify@^1.0.1: +util.promisify@^1.0.0: version "1.1.1" resolved "https://registry.yarnpkg.com/util.promisify/-/util.promisify-1.1.1.tgz#77832f57ced2c9478174149cae9b96e9918cd54b" integrity sha512-/s3UsZUrIfa6xDhr7zZhnE9SLQ5RIXyYfiVnMMyMDzOc8WhWN4Nbh36H842OyurKbCDAesZOJaVyvmSl6fhGQw== @@ -14146,9 +13809,9 @@ v-clipboard@^2.2.2: integrity sha512-Wg+ObZoYK6McHb5OOCFWvm0R7xHp0/p0G1ocx/8bO22jvA/yVY05rADbfiztwCokXBNfQuGv/XSd1ozcTFgekw== v8-compile-cache@^2.0.3: - version "2.3.0" - resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz#2de19618c66dc247dcfb6f99338035d8245a2cee" - integrity sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA== + version "2.2.0" + resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.2.0.tgz#9471efa3ef9128d2f7c6a7ca39c4dd6b5055b132" + integrity sha512-gTpR5XQNKFwOd4clxfnhaqvfqMpqEwr4tOtCyz4MtYZX2JYhfr1JvBFKdS+7K/9rfpZR3VLX+YWBbKoxCgS43Q== validate-npm-package-license@^3.0.1: version "3.0.4" @@ -14198,18 +13861,18 @@ vue-hot-reload-api@^2.3.0: integrity sha512-BXq3jwIagosjgNVae6tkHzzIk6a8MHFtzAdwhnV5VlvPTFxDCvIttgSiHWjdGoTJvXtmRu5HacExfdarRcFhog== "vue-loader-v16@npm:vue-loader@^16.1.0": - version "16.2.0" - resolved "https://registry.yarnpkg.com/vue-loader/-/vue-loader-16.2.0.tgz#046a53308dd47e58efe20ddec1edec027ce3b46e" - integrity sha512-TitGhqSQ61RJljMmhIGvfWzJ2zk9m1Qug049Ugml6QP3t0e95o0XJjk29roNEiPKJQBEi8Ord5hFuSuELzSp8Q== + version "16.1.2" + resolved 
"https://registry.yarnpkg.com/vue-loader/-/vue-loader-16.1.2.tgz#5c03b6c50d2a5f983c7ceba15c50d78ca2b298f4" + integrity sha512-8QTxh+Fd+HB6fiL52iEVLKqE9N1JSlMXLR92Ijm6g8PZrwIxckgpqjPDWRP5TWxdiPaHR+alUWsnu1ShQOwt+Q== dependencies: chalk "^4.1.0" hash-sum "^2.0.0" loader-utils "^2.0.0" vue-loader@^15.9.2: - version "15.9.7" - resolved "https://registry.yarnpkg.com/vue-loader/-/vue-loader-15.9.7.tgz#15b05775c3e0c38407679393c2ce6df673b01044" - integrity sha512-qzlsbLV1HKEMf19IqCJqdNvFJRCI58WNbS6XbPqK13MrLz65es75w392MSQ5TsARAfIjUw+ATm3vlCXUJSOH9Q== + version "15.9.6" + resolved "https://registry.yarnpkg.com/vue-loader/-/vue-loader-15.9.6.tgz#f4bb9ae20c3a8370af3ecf09b8126d38ffdb6b8b" + integrity sha512-j0cqiLzwbeImIC6nVIby2o/ABAWhlppyL/m5oJ67R5MloP0hj/DtFgb0Zmq3J9CG7AJ+AXIvHVnJAPBvrLyuDg== dependencies: "@vue/component-compiler-utils" "^3.1.0" hash-sum "^1.0.2" @@ -14223,9 +13886,9 @@ vue-router@^3.1.3: integrity sha512-RRQNLT8Mzr8z7eL4p7BtKvRaTSGdCbTy2+Mm5HTJvLGYSSeG9gDzNasJPP/yOYKLy+/cLG/ftrqq5fvkFwBJEw== vue-style-loader@^4.1.0, vue-style-loader@^4.1.2: - version "4.1.3" - resolved "https://registry.yarnpkg.com/vue-style-loader/-/vue-style-loader-4.1.3.tgz#6d55863a51fa757ab24e89d9371465072aa7bc35" - integrity sha512-sFuh0xfbtpRlKfm39ss/ikqs9AbKCoXZBpHeVZ8Tx650o0k0q/YCM7FRvigtxpACezfq6af+a7JeqVTWvncqDg== + version "4.1.2" + resolved "https://registry.yarnpkg.com/vue-style-loader/-/vue-style-loader-4.1.2.tgz#dedf349806f25ceb4e64f3ad7c0a44fba735fcf8" + integrity sha512-0ip8ge6Gzz/Bk0iHovU9XAUQaFt/G2B61bnWa2tCcqqdgfHs1lF9xXorFbE55Gmy92okFT+8bfmySuUOu13vxQ== dependencies: hash-sum "^1.0.2" loader-utils "^1.0.2" @@ -14538,9 +14201,9 @@ web3-utils@1.2.11: utf8 "3.0.0" web3-utils@^1.0.0-beta.31: - version "1.3.6" - resolved "https://registry.yarnpkg.com/web3-utils/-/web3-utils-1.3.6.tgz#390bc9fa3a7179746963cfaca55bb80ac4d8dc10" - integrity sha512-hHatFaQpkQgjGVER17gNx8u1qMyaXFZtM0y0XLGH1bzsjMPlkMPLRcYOrZ00rOPfTEuYFOdrpGOqZXVmGrMZRg== + version "1.3.4" + resolved 
"https://registry.yarnpkg.com/web3-utils/-/web3-utils-1.3.4.tgz#9b1aa30d7549f860b573e7bb7e690999e7192198" + integrity sha512-/vC2v0MaZNpWooJfpRw63u0Y3ag2gNjAWiLtMSL6QQLmCqCy4SQIndMt/vRyx0uMoeGt1YTwSXEcHjUzOhLg0A== dependencies: bn.js "^4.11.9" eth-lib "0.2.8" @@ -14548,7 +14211,7 @@ web3-utils@^1.0.0-beta.31: ethjs-unit "0.1.6" number-to-bn "1.7.0" randombytes "^2.1.0" - underscore "1.12.1" + underscore "1.9.1" utf8 "3.0.0" web3@1.2.11: @@ -14656,7 +14319,7 @@ webpack-merge@^4.2.2: dependencies: lodash "^4.17.15" -webpack-sources@^1.1.0, webpack-sources@^1.4.0, webpack-sources@^1.4.1: +webpack-sources@^1.1.0, webpack-sources@^1.4.0, webpack-sources@^1.4.1, webpack-sources@^1.4.3: version "1.4.3" resolved "https://registry.yarnpkg.com/webpack-sources/-/webpack-sources-1.4.3.tgz#eedd8ec0b928fbf1cbfe994e22d2d890f330a933" integrity sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ== @@ -14729,9 +14392,9 @@ websocket@1.0.32: yaeti "^0.0.6" websocket@^1.0.30, websocket@^1.0.31: - version "1.0.34" - resolved "https://registry.yarnpkg.com/websocket/-/websocket-1.0.34.tgz#2bdc2602c08bf2c82253b730655c0ef7dcab3111" - integrity sha512-PRDso2sGwF6kM75QykIesBijKSVceR6jL2G8NGYyq2XrItNC2P5/qL5XeR056GhA+Ly7JMFvJb9I312mJfmqnQ== + version "1.0.33" + resolved "https://registry.yarnpkg.com/websocket/-/websocket-1.0.33.tgz#407f763fc58e74a3fa41ca3ae5d78d3f5e3b82a5" + integrity sha512-XwNqM2rN5eh3G2CUQE3OHZj+0xfdH42+OFK6LdC2yqiC0YU8e5UK0nYre220T0IyyN031V/XOvtHvXozvJYFWA== dependencies: bufferutil "^4.0.1" debug "^2.2.0" @@ -14746,20 +14409,9 @@ whatwg-fetch@2.0.4: integrity sha512-dcQ1GWpOD/eEQ97k66aiEVpNnapVj90/+R+SXTPYGHpYBBypfKJEQjLrvMZ7YXbKm21gXd4NcuxUTjiv1YtLng== whatwg-fetch@^3.4.1: - version "3.6.2" - resolved "https://registry.yarnpkg.com/whatwg-fetch/-/whatwg-fetch-3.6.2.tgz#dced24f37f2624ed0281725d51d0e2e3fe677f8c" - integrity sha512-bJlen0FcuU/0EMLrdbJ7zOnW6ITZLrZMIarMUVmdKtsGvZna8vxKYaexICWPfZ8qwf9fzNq+UEIZrnSaApt6RA== - 
-which-boxed-primitive@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz#13757bc89b209b049fe5d86430e21cf40a89a8e6" - integrity sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg== - dependencies: - is-bigint "^1.0.1" - is-boolean-object "^1.1.0" - is-number-object "^1.0.4" - is-string "^1.0.5" - is-symbol "^1.0.3" + version "3.5.0" + resolved "https://registry.yarnpkg.com/whatwg-fetch/-/whatwg-fetch-3.5.0.tgz#605a2cd0a7146e5db141e29d1c62ab84c0c4c868" + integrity sha512-jXkLtsR42xhXg7akoDKvKWE40eJeI+2KZqcp2h3NsOrRnDvtWX36KcKl30dy+hxECivdk2BVUHVNrPtoMBUx6A== which-module@^1.0.0: version "1.0.0" @@ -14895,9 +14547,9 @@ ws@^6.0.0, ws@^6.2.1: async-limiter "~1.0.0" ws@^7.2.1: - version "7.4.5" - resolved "https://registry.yarnpkg.com/ws/-/ws-7.4.5.tgz#a484dd851e9beb6fdb420027e3885e8ce48986c1" - integrity sha512-xzyu3hFvomRfXKH8vOFMU3OguG6oOvhXMo3xsGy3xWExqaM2dxBbVxuD99O7m3ZUFMvvscsZDqxfgMaRr/Nr1g== + version "7.4.3" + resolved "https://registry.yarnpkg.com/ws/-/ws-7.4.3.tgz#1f9643de34a543b8edb124bdcbc457ae55a6e5cd" + integrity sha512-hr6vCR76GsossIRsr8OLR9acVVm1jyfEWvhbNjtgPOrfvAlKzvyeg/P6r8RuDjRyrcQoPQT7K0DGEPc7Ae6jzA== xhr-request-promise@^0.1.2: version "0.1.3" @@ -14941,7 +14593,7 @@ xhr@^2.0.4, xhr@^2.2.0, xhr@^2.3.3: parse-headers "^2.0.0" xtend "^4.0.0" -xtend@^4.0.0, xtend@^4.0.1, xtend@^4.0.2, xtend@~4.0.0, xtend@~4.0.1: +xtend@^4.0.0, xtend@^4.0.1, xtend@~4.0.0, xtend@~4.0.1: version "4.0.2" resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== @@ -14959,14 +14611,14 @@ y18n@^3.2.1: integrity sha512-uGZHXkHnhF0XeeAPgnKfPv1bgKAYyVvmNL1xlKsPYZPaIHxGti2hHqvOCQv71XMsLxu1QjergkqogUnms5D3YQ== y18n@^4.0.0: - version "4.0.3" - resolved 
"https://registry.yarnpkg.com/y18n/-/y18n-4.0.3.tgz#b5f259c82cd6e336921efd7bfd8bf560de9eeedf" - integrity sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ== + version "4.0.1" + resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.1.tgz#8db2b83c31c5d75099bb890b23f3094891e247d4" + integrity sha512-wNcy4NvjMYL8gogWWYAO7ZFWFfHcbdbE57tZO8e4cbpj8tfUcwrwqSl3ad8HxpYWCdXcJUCeKKZS62Av1affwQ== y18n@^5.0.5: - version "5.0.8" - resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55" - integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA== + version "5.0.5" + resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.5.tgz#8769ec08d03b1ea2df2500acef561743bbb9ab18" + integrity sha512-hsRUr4FFrvhhRH12wOdfs38Gy7k2FFzB9qgN9v3aLykRq0dRcdcpz5C9FxdS2NuhOrI/628b/KSTJ3rwHysYSg== yaeti@^0.0.6: version "0.0.6" @@ -15010,9 +14662,9 @@ yargs-parser@^2.4.1: lodash.assign "^4.0.6" yargs-parser@^20.2.2: - version "20.2.7" - resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.7.tgz#61df85c113edfb5a7a4e36eb8aa60ef423cbc90a" - integrity sha512-FiNkvbeHzB/syOjIUxFDCnhSfzAL8R5vs40MgLFBorXACCOAEaWu0gRZl14vG8MR9AOJIZbmkjhusqBYZ3HTHw== + version "20.2.5" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.5.tgz#5d37729146d3f894f39fc94b6796f5b239513186" + integrity sha512-jYRGS3zWy20NtDtK2kBgo/TlAoy5YUuhD9/LZ7z7W4j1Fdw2cqD0xEEclf8fxc8xjD6X5Qr+qQQwCEsP8iRiYg== yargs-unparser@1.6.0: version "1.6.0" @@ -15093,9 +14745,9 @@ yocto-queue@^0.1.0: integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== "zksync@link:sdk/zksync.js": - version "0.11.0-beta.6" + version "0.11.0-beta.7" dependencies: axios "^0.21.1" websocket "^1.0.30" websocket-as-promised "^1.1.0" - zksync-crypto "^0.6.0-beta.0" + zksync-crypto "^0.6.0-beta.1"