diff --git a/.prettierignore b/.prettierignore
index f268596e5..6f1427edb 100644
--- a/.prettierignore
+++ b/.prettierignore
@@ -3,3 +3,4 @@ artifacts
 cache
 coverage*
 gasReporterOutput.json
+dist
diff --git a/src/clients/HubPoolClient.ts b/src/clients/HubPoolClient.ts
index d4cf7d293..e078185c0 100644
--- a/src/clients/HubPoolClient.ts
+++ b/src/clients/HubPoolClient.ts
@@ -37,6 +37,10 @@ export class HubPoolClient {
     return this.pendingRootBundle !== undefined && this.pendingRootBundle.unclaimedPoolRebalanceLeafCount > 0;
   }
 
+  getProposedRootBundles() {
+    return this.proposedRootBundles;
+  }
+
   getSpokePoolForBlock(block: number, chain: number): string {
     if (!this.crossChainContracts[chain]) throw new Error(`No cross chain contracts set for ${chain}`);
     const mostRecentSpokePoolUpdatebeforeBlock = (
@@ -182,15 +186,17 @@ export class HubPoolClient {
     ) as ExecutedRootBundle[];
   }
 
-  getNextBundleStartBlockNumber(chainIdList: number[], latestMainnetBlock: number, chainId: number): number {
+  getLatestFullyExecutedRootBundle(latestMainnetBlock: number): ProposedRootBundle | undefined {
     // Search for the latest ProposeRootBundle event followed by all of its RootBundleExecuted events, suggesting
     // that all pool rebalance leaves were executed. This ignores any proposed bundles that were partially executed.
-    const latestFullyExecutedPoolRebalanceRoot = sortEventsDescending(this.proposedRootBundles).find(
-      (rootBundle: ProposedRootBundle) => {
-        if (rootBundle.blockNumber > latestMainnetBlock) return false;
-        return this.isRootBundleValid(rootBundle, latestMainnetBlock);
-      }
-    ) as ProposedRootBundle;
+    return sortEventsDescending(this.proposedRootBundles).find((rootBundle: ProposedRootBundle) => {
+      if (rootBundle.blockNumber > latestMainnetBlock) return false;
+      return this.isRootBundleValid(rootBundle, latestMainnetBlock);
+    });
+  }
+
+  getNextBundleStartBlockNumber(chainIdList: number[], latestMainnetBlock: number, chainId: number): number {
+    const latestFullyExecutedPoolRebalanceRoot = this.getLatestFullyExecutedRootBundle(latestMainnetBlock);
 
     // If no event, then we can return a conservative default starting block like 0,
     // or we could throw an Error.
diff --git a/src/clients/MultiCallerClient.ts b/src/clients/MultiCallerClient.ts
index 9e2b26269..1d751cd03 100644
--- a/src/clients/MultiCallerClient.ts
+++ b/src/clients/MultiCallerClient.ts
@@ -145,7 +145,7 @@ export class MultiCallerClient {
   buildMultiCallBundle(transactions: AugmentedTransaction[]) {
     // Validate all transactions in the batch have the same target contract.
     const target = transactions[0].contract;
-    if (transactions.every((tx) => tx.contract.address != target.address)) {
+    if (transactions.some((tx) => tx.contract.address !== target.address)) {
       this.logger.error({
         at: "MultiCallerClient",
         message: "some transactions in the bundle contain different targets",
diff --git a/src/clients/SpokePoolClient.ts b/src/clients/SpokePoolClient.ts
index 41a0a4a9b..7c4034eb6 100644
--- a/src/clients/SpokePoolClient.ts
+++ b/src/clients/SpokePoolClient.ts
@@ -3,6 +3,7 @@ import { toBN, Event, ZERO_ADDRESS, winston, paginatedEventQuery, spreadEventWit
 import { AcrossConfigStoreClient } from "./ConfigStoreClient";
 import { Deposit, DepositWithBlock, Fill, SpeedUp, FillWithBlock, TokensBridged } from "../interfaces/SpokePool";
+import { RootBundleRelayWithBlock, RelayerRefundExecutionWithBlock } from "../interfaces/SpokePool";
 
 export class SpokePoolClient {
   private deposits: { [DestinationChainId: number]: Deposit[] } = {};
@@ -10,6 +11,8 @@ export class SpokePoolClient {
   private speedUps: { [depositorAddress: string]: { [depositId: number]: SpeedUp[] } } = {};
   private depositRoutes: { [originToken: string]: { [DestinationChainId: number]: boolean } } = {};
   private tokensBridged: TokensBridged[] = [];
+  private rootBundleRelays: RootBundleRelayWithBlock[] = [];
+  private relayerRefundExecutions: RelayerRefundExecutionWithBlock[] = [];
   public isUpdated: boolean = false;
   public firstBlockToSearch: number;
 
@@ -76,6 +79,14 @@ export class SpokePoolClient {
     return this.fills.filter((fill: Fill) => fill.relayer === relayer);
   }
 
+  getRootBundleRelays() {
+    return this.rootBundleRelays;
+  }
+
+  getRelayerRefundExecutions() {
+    return this.relayerRefundExecutions;
+  }
+
   appendMaxSpeedUpSignatureToDeposit(deposit: Deposit) {
     const maxSpeedUp = this.speedUps[deposit.depositor]?.[deposit.depositId].reduce((prev, current) =>
       prev.newRelayerFeePct.gt(current.newRelayerFeePct) ? prev : current
@@ -141,12 +152,22 @@ export class SpokePoolClient {
     this.log("debug", "Updating client", { searchConfig, depositRouteSearchConfig, spokePool: this.spokePool.address });
     if (searchConfig.fromBlock > searchConfig.toBlock) return; // If the starting block is greater than the ending block return.
-    const [depositEvents, speedUpEvents, fillEvents, enableDepositsEvents, tokensBridgedEvents] = await Promise.all([
+    const [
+      depositEvents,
+      speedUpEvents,
+      fillEvents,
+      enableDepositsEvents,
+      tokensBridgedEvents,
+      relayedRootBundleEvents,
+      executedRelayerRefundRootEvents,
+    ] = await Promise.all([
       paginatedEventQuery(this.spokePool, this.spokePool.filters.FundsDeposited(), searchConfig),
       paginatedEventQuery(this.spokePool, this.spokePool.filters.RequestedSpeedUpDeposit(), searchConfig),
       paginatedEventQuery(this.spokePool, this.spokePool.filters.FilledRelay(), searchConfig),
       paginatedEventQuery(this.spokePool, this.spokePool.filters.EnabledDepositRoute(), depositRouteSearchConfig),
       paginatedEventQuery(this.spokePool, this.spokePool.filters.TokensBridged(), depositRouteSearchConfig),
+      paginatedEventQuery(this.spokePool, this.spokePool.filters.RelayedRootBundle(), searchConfig),
+      paginatedEventQuery(this.spokePool, this.spokePool.filters.ExecutedRelayerRefundRoot(), searchConfig),
     ]);
 
     for (const event of tokensBridgedEvents) {
@@ -196,11 +217,21 @@ export class SpokePoolClient {
       const enableDeposit = spreadEvent(event);
       assign(this.depositRoutes, [enableDeposit.originToken, enableDeposit.destinationChainId], enableDeposit.enabled);
     }
+
+    for (const event of relayedRootBundleEvents) {
+      this.rootBundleRelays.push(spreadEvent(event));
+    }
+
+    for (const event of executedRelayerRefundRootEvents) {
+      this.relayerRefundExecutions.push(spreadEvent(event));
+    }
+
     this.firstBlockToSearch = searchConfig.toBlock + 1; // Next iteration should start off from where this one ended.
 
     this.isUpdated = true;
     this.log("debug", "Client updated!");
   }
+
   public hubPoolClient() {
     return this.configStoreClient.hubPoolClient;
   }
diff --git a/src/dataworker/Dataworker.ts b/src/dataworker/Dataworker.ts
index 334ea1773..85fc94de9 100644
--- a/src/dataworker/Dataworker.ts
+++ b/src/dataworker/Dataworker.ts
@@ -1,8 +1,8 @@
-import { winston, EMPTY_MERKLE_ROOT } from "../utils";
+import { winston, EMPTY_MERKLE_ROOT, sortEventsDescending } from "../utils";
 import { UnfilledDeposit, Deposit, DepositWithBlock, RootBundle } from "../interfaces";
-import { UnfilledDepositsForOriginChain, RunningBalances } from "../interfaces";
-import { FillWithBlock, PoolRebalanceLeaf } from "../interfaces";
-import { BigNumberForToken, FillsToRefund } from "../interfaces";
+import { UnfilledDepositsForOriginChain, TreeData, RunningBalances } from "../interfaces";
+import { FillWithBlock, PoolRebalanceLeaf, RelayerRefundLeaf, RelayerRefundLeafWithGroup } from "../interfaces";
+import { BigNumberForToken, FillsToRefund, RelayData } from "../interfaces";
 import { DataworkerClients } from "./DataworkerClientHelper";
 import { SpokePoolClient } from "../clients";
 import * as PoolRebalanceUtils from "./PoolRebalanceUtils";
@@ -26,7 +26,8 @@ export class Dataworker {
     readonly maxRefundCountOverride: number = undefined,
     readonly maxL1TokenCountOverride: number = undefined,
     readonly tokenTransferThreshold: BigNumberForToken = {},
-    readonly blockRangeEndBlockBuffer: { [chainId: number]: number } = {}
+    readonly blockRangeEndBlockBuffer: { [chainId: number]: number } = {},
+    readonly spokeRootsLookbackCount = 0
   ) {
     if (
       maxRefundCountOverride !== undefined ||
@@ -425,7 +426,15 @@ export class Dataworker {
     hubPoolChainId: number,
     widestPossibleExpectedBlockRange: number[][],
     rootBundle: RootBundle
-  ): Promise<{ valid: boolean; reason?: string }> {
+  ): Promise<{
+    valid: boolean;
+    reason?: string;
+    expectedTrees?: {
+      poolRebalanceTree: TreeData<PoolRebalanceLeaf>;
+      relayerRefundTree: TreeData<RelayerRefundLeaf>;
+      slowRelayTree: TreeData<RelayData>;
+    };
+  }> {
     // If pool rebalance root is empty, always dispute. There should never be a bundle with an empty rebalance root.
     if (rootBundle.poolRebalanceRoot === EMPTY_MERKLE_ROOT) {
       this.logger.debug({
@@ -590,7 +599,14 @@ export class Dataworker {
         : this.clients.configStoreClient.getMaxRefundCountForRelayerRefundLeafForBlock(endBlockForMainnet),
       this.tokenTransferThreshold
     );
+    const expectedSlowRelayRoot = _buildSlowRelayRoot(unfilledDeposits);
+
+    const expectedTrees = {
+      poolRebalanceTree: expectedPoolRebalanceRoot,
+      relayerRefundTree: expectedRelayerRefundRoot,
+      slowRelayTree: expectedSlowRelayRoot,
+    };
     if (
       expectedPoolRebalanceRoot.leaves.length !== rootBundle.unclaimedPoolRebalanceLeafCount ||
       expectedPoolRebalanceRoot.tree.getHexRoot() !== rootBundle.poolRebalanceRoot
@@ -628,6 +644,7 @@ export class Dataworker {
       });
       return {
         valid: true,
+        expectedTrees,
       };
     }
 
@@ -648,21 +665,335 @@ export class Dataworker {
         [...expectedSlowRelayRoot.leaves],
         expectedSlowRelayRoot.tree.getHexRoot()
       ),
+      expectedTrees,
     };
   }
 
+  // TODO: this method and executeRelayerRefundLeaves have a lot of similarities, but they have some key differences
+  // in both the events they search for and the comparisons they make. We should try to generalize this in the future,
+  // but keeping them separate is probably the simplest for the initial implementation.
   async executeSlowRelayLeaves() {
-    // TODO: Caller should grab `bundleBlockNumbers` from ProposeRootBundle event, recreate root and execute
-    // all leaves for root. To locate `rootBundleId`, look up `SpokePool.RelayedRootBundle` events and find event
-    // with matching roots.
+    const spokePoolClients = await constructSpokePoolClientsForBlockAndUpdate(
+      this.chainIdListForBundleEvaluationBlockNumbers,
+      this.clients,
+      this.logger,
+      this.clients.hubPoolClient.latestBlockNumber
+    );
+
+    Object.entries(spokePoolClients).forEach(([chainId, client]) => {
+      let rootBundleRelays = sortEventsDescending(client.getRootBundleRelays());
+
+      // Only grab the most recent n roots that have been sent if configured to do so.
+      if (this.spokeRootsLookbackCount !== 0)
+        rootBundleRelays = rootBundleRelays.slice(0, this.spokeRootsLookbackCount);
+
+      const slowFillsForChain = client.getFills().filter((fill) => fill.isSlowRelay);
+      for (const rootBundleRelay of rootBundleRelays) {
+        const matchingRootBundle = this.clients.hubPoolClient.getProposedRootBundles().find((bundle) => {
+          if (bundle.slowRelayRoot !== rootBundleRelay.slowRelayRoot) return false;
+
+          const followingBlockNumber =
+            this.clients.hubPoolClient.getFollowingRootBundle(bundle)?.blockNumber ||
+            this.clients.hubPoolClient.latestBlockNumber;
+
+          const leaves = this.clients.hubPoolClient.getExecutedLeavesForRootBundle(bundle, followingBlockNumber);
+
+          // Only use this bundle if it had valid leaves returned (meaning it was at least partially executed).
+          return leaves.length > 0;
+        });
+
+        if (!matchingRootBundle) {
+          this.logger.warn({
+            at: "Dataworker#executeSlowRelayLeaves",
+            message: "Couldn't find a mainnet root bundle for a slowRelayRoot on L2!",
+            chainId,
+            slowRelayRoot: rootBundleRelay.slowRelayRoot,
+          });
+          continue;
+        }
+
+        const prevRootBundle = this.clients.hubPoolClient.getLatestFullyExecutedRootBundle(
+          matchingRootBundle.blockNumber
+        );
+
+        const blockNumberRanges = matchingRootBundle.bundleEvaluationBlockNumbers.map((endBlock, i) => {
+          const fromBlock = prevRootBundle?.bundleEvaluationBlockNumbers?.[i]
+            ? prevRootBundle.bundleEvaluationBlockNumbers[i].toNumber() + 1
+            : 0;
+          return [fromBlock, endBlock.toNumber()];
+        });
+
+        const { tree, leaves } = this.buildSlowRelayRoot(blockNumberRanges, spokePoolClients);
+        if (tree.getHexRoot() !== rootBundleRelay.slowRelayRoot) {
+          this.logger.warn({
+            at: "Dataworker#executeSlowRelayLeaves",
+            message: "Constructed a different root for the block range!",
+            chainId,
+            publishedSlowRelayRoot: rootBundleRelay.slowRelayRoot,
+            constructedSlowRelayRoot: tree.getHexRoot(),
+          });
+          continue;
+        }
+
+        const executableLeaves = leaves.filter((leaf) => {
+          if (leaf.destinationChainId !== Number(chainId)) return false;
+          const executedLeaf = slowFillsForChain.find(
+            (event) => event.originChainId === leaf.originChainId && event.depositId === leaf.depositId
+          );
+
+          // Only return true if no leaf was found in the list of executed leaves.
+          if (executedLeaf) return false;
+
+          const fullFill = client.getFills().find((fill) => {
+            return (
+              fill.depositId === leaf.depositId &&
+              fill.originChainId === leaf.originChainId &&
+              fill.depositor === leaf.depositor &&
+              fill.destinationChainId === leaf.destinationChainId &&
+              fill.destinationToken === leaf.destinationToken &&
+              fill.amount.eq(leaf.amount) &&
+              fill.realizedLpFeePct.eq(leaf.realizedLpFeePct) &&
+              fill.relayerFeePct.eq(leaf.relayerFeePct) &&
+              fill.recipient === leaf.recipient &&
+              fill.totalFilledAmount.eq(fill.amount) // Full fill
+            );
+          });
+
+          // If no previous full fill was found, we should try to fill.
+          return !fullFill;
+        });
+
+        executableLeaves.forEach((leaf) => {
+          this.clients.multiCallerClient.enqueueTransaction({
+            contract: client.spokePool,
+            chainId: Number(chainId),
+            method: "executeSlowRelayLeaf",
+            args: [
+              leaf.depositor,
+              leaf.recipient,
+              leaf.destinationToken,
+              leaf.amount,
+              leaf.originChainId,
+              leaf.realizedLpFeePct,
+              leaf.relayerFeePct,
+              leaf.depositId,
+              rootBundleRelay.rootBundleId,
+              tree.getHexProof(leaf),
+            ],
+            message: "Executed SlowRelayLeaf 🌿!",
+            mrkdwn: `rootBundleId: ${rootBundleRelay.rootBundleId}\nslowRelayRoot: ${rootBundleRelay.slowRelayRoot}\nOrigin chain: ${leaf.originChainId}\nDestination chain: ${leaf.destinationChainId}\nDeposit Id: ${leaf.depositId}\n`, // Just a placeholder
+          });
+        });
+      }
+    });
   }
 
   async executePoolRebalanceLeaves() {
-    // TODO:
+    if (!this.clients.hubPoolClient.isUpdated) throw new Error(`HubPoolClient not updated`);
+    const hubPoolChainId = (await this.clients.hubPoolClient.hubPool.provider.getNetwork()).chainId;
+
+    // Exit early if a bundle is not pending.
+    if (!this.clients.hubPoolClient.hasPendingProposal()) {
+      this.logger.debug({
+        at: "Dataworker#executePoolRebalanceLeaves",
+        message: "No pending proposal, nothing to execute",
+      });
+      return;
+    }
+
+    const pendingRootBundle = this.clients.hubPoolClient.getPendingRootBundleProposal();
+    this.logger.debug({
+      at: "Dataworker#executePoolRebalanceLeaves",
+      message: "Found pending proposal",
+      pendingRootBundle,
+    });
+
+    // Exit early if challenge period timestamp has not passed:
+    if (this.clients.hubPoolClient.currentTime <= pendingRootBundle.challengePeriodEndTimestamp) {
+      this.logger.debug({
+        at: "Dataworker#executePoolRebalanceLeaves",
+        message: "Challenge period not passed, cannot execute",
+      });
+      return;
+    }
+
+    const widestPossibleExpectedBlockRange = await PoolRebalanceUtils.getWidestPossibleExpectedBlockRange(
+      this.chainIdListForBundleEvaluationBlockNumbers,
+      this.clients,
+      this.clients.hubPoolClient.latestBlockNumber
+    );
+    const { valid, reason, expectedTrees } = await this.validateRootBundle(
+      hubPoolChainId,
+      widestPossibleExpectedBlockRange,
+      pendingRootBundle
+    );
+
+    if (!valid) {
+      this.logger.error({
+        at: "Dataworker#executePoolRebalanceLeaves",
+        message: "Found invalid proposal after challenge period!",
+        reason,
+      });
+      return;
+    }
+
+    if (valid && !expectedTrees) {
+      this.logger.error({
+        at: "Dataworker#executePoolRebalanceLeaves",
+        message:
+          "Found valid proposal, but no trees could be generated. This probably means that the proposal was never evaluated during liveness due to an odd block range!",
+        reason,
+      });
+      return;
+    }
+
+    const executedLeaves = this.clients.hubPoolClient.getExecutedLeavesForRootBundle(
+      this.clients.hubPoolClient.getMostRecentProposedRootBundle(this.clients.hubPoolClient.latestBlockNumber),
+      this.clients.hubPoolClient.latestBlockNumber
+    );
+
+    // Filter out previously executed leaves.
+    const unexecutedLeaves = expectedTrees.poolRebalanceTree.leaves.filter((leaf) =>
+      executedLeaves.every(({ leafId }) => leafId !== leaf.leafId)
+    );
+
+    const chainId = (await this.clients.hubPoolClient.hubPool.provider.getNetwork()).chainId;
+    unexecutedLeaves.forEach((leaf) => {
+      const proof = expectedTrees.poolRebalanceTree.tree.getHexProof(leaf);
+
+      this.clients.multiCallerClient.enqueueTransaction({
+        contract: this.clients.hubPoolClient.hubPool,
+        chainId,
+        method: "executeRootBundle",
+        args: [
+          leaf.chainId,
+          leaf.groupIndex,
+          leaf.bundleLpFees,
+          leaf.netSendAmounts,
+          leaf.runningBalances,
+          leaf.leafId,
+          leaf.l1Tokens,
+          proof,
+        ],
+        message: "Executed PoolRebalanceLeaf 🌿!",
+        mrkdwn: `Root hash: ${expectedTrees.poolRebalanceTree.tree.getHexRoot()}\nLeaf: ${leaf.leafId}`, // Just a placeholder
+      });
+    });
   }
 
   async executeRelayerRefundLeaves() {
-    // TODO:
+    const spokePoolClients = await constructSpokePoolClientsForBlockAndUpdate(
+      this.chainIdListForBundleEvaluationBlockNumbers,
+      this.clients,
+      this.logger,
+      this.clients.hubPoolClient.latestBlockNumber
+    );
+
+    Object.entries(spokePoolClients).forEach(([chainId, client]) => {
+      let rootBundleRelays = sortEventsDescending(client.getRootBundleRelays());
+
+      // Only grab the most recent n roots that have been sent if configured to do so.
+      if (this.spokeRootsLookbackCount !== 0)
+        rootBundleRelays = rootBundleRelays.slice(0, this.spokeRootsLookbackCount);
+
+      const executedLeavesForChain = client.getRelayerRefundExecutions();
+      for (const rootBundleRelay of rootBundleRelays) {
+        const matchingRootBundle = this.clients.hubPoolClient.getProposedRootBundles().find((bundle) => {
+          if (bundle.relayerRefundRoot !== rootBundleRelay.relayerRefundRoot) return false;
+
+          const followingBlockNumber =
+            this.clients.hubPoolClient.getFollowingRootBundle(bundle)?.blockNumber ||
+            this.clients.hubPoolClient.latestBlockNumber;
+
+          const leaves = this.clients.hubPoolClient.getExecutedLeavesForRootBundle(bundle, followingBlockNumber);
+
+          // Only use this bundle if it had valid leaves returned (meaning it was at least partially executed).
+          return leaves.length > 0;
+        });
+
+        if (!matchingRootBundle) {
+          this.logger.warn({
+            at: "Dataworker#executeRelayerRefundLeaves",
+            message: "Couldn't find a mainnet root bundle for a relayerRefundRoot on L2!",
+            chainId,
+            relayerRefundRoot: rootBundleRelay.relayerRefundRoot,
+          });
+          continue;
+        }
+
+        const prevRootBundle = this.clients.hubPoolClient.getLatestFullyExecutedRootBundle(
+          matchingRootBundle.blockNumber
+        );
+
+        const blockNumberRanges = matchingRootBundle.bundleEvaluationBlockNumbers.map((endBlock, i) => {
+          const fromBlock = prevRootBundle?.bundleEvaluationBlockNumbers?.[i]
+            ? prevRootBundle.bundleEvaluationBlockNumbers[i].toNumber() + 1
+            : 0;
+          return [fromBlock, endBlock.toNumber()];
+        });
+
+        const { fillsToRefund, deposits, allValidFills, unfilledDeposits } = this._loadData(
+          blockNumberRanges,
+          spokePoolClients
+        );
+
+        const endBlockForMainnet = getBlockRangeForChain(
+          blockNumberRanges,
+          1,
+          this.chainIdListForBundleEvaluationBlockNumbers
+        )[1];
+
+        const expectedPoolRebalanceRoot = _buildPoolRebalanceRoot(
+          endBlockForMainnet,
+          fillsToRefund,
+          deposits,
+          allValidFills,
+          unfilledDeposits,
+          this.clients,
+          this.chainIdListForBundleEvaluationBlockNumbers,
+          this.maxL1TokenCountOverride,
+          this.tokenTransferThreshold
+        );
+
+        const { tree, leaves } = this.buildRelayerRefundRoot(
+          blockNumberRanges,
+          spokePoolClients,
+          expectedPoolRebalanceRoot.leaves,
+          expectedPoolRebalanceRoot.runningBalances
+        );
+
+        if (tree.getHexRoot() !== rootBundleRelay.relayerRefundRoot) {
+          this.logger.warn({
+            at: "Dataworker#executeRelayerRefundLeaves",
+            message: "Constructed a different root for the block range!",
+            chainId,
+            publishedRelayerRefundRoot: rootBundleRelay.relayerRefundRoot,
+            constructedRelayerRefundRoot: tree.getHexRoot(),
+          });
+          continue;
+        }
+
+        const executableLeaves = leaves.filter((leaf) => {
+          if (leaf.chainId !== Number(chainId)) return false;
+          const executedLeaf = executedLeavesForChain.find(
+            (event) => event.rootBundleId === rootBundleRelay.rootBundleId && event.leafId === leaf.leafId
+          );
+          // Only return true if no leaf was found in the list of executed leaves.
+          return !executedLeaf;
+        });
+
+        executableLeaves.forEach((leaf) => {
+          this.clients.multiCallerClient.enqueueTransaction({
+            contract: client.spokePool,
+            chainId: Number(chainId),
+            method: "executeRelayerRefundLeaf",
+            args: [rootBundleRelay.rootBundleId, leaf, tree.getHexProof(leaf)],
+            message: "Executed RelayerRefundLeaf 🌿!",
+            mrkdwn: `rootBundleId: ${rootBundleRelay.rootBundleId}\nrelayerRefundRoot: ${rootBundleRelay.relayerRefundRoot}\nLeaf: ${leaf.leafId}`, // Just a placeholder
+          });
+        });
+      }
+    });
   }
 
   _proposeRootBundle(
diff --git a/src/dataworker/index.ts b/src/dataworker/index.ts
index de89a2356..3cf8ecc76 100644
--- a/src/dataworker/index.ts
+++ b/src/dataworker/index.ts
@@ -40,6 +40,13 @@ export async function runDataworker(_logger: winston.Logger): Promise<void> {
 
       await dataworker.proposeRootBundle();
 
+      await dataworker.executePoolRebalanceLeaves();
+
+      // Execute slow relays before relayer refunds to give them priority for any L2 funds.
+      await dataworker.executeSlowRelayLeaves();
+
+      await dataworker.executeRelayerRefundLeaves();
+
       await clients.multiCallerClient.executeTransactionQueue();
 
       if (await processEndPollingLoop(logger, "Dataworker", config.pollingDelay)) break;
diff --git a/src/interfaces/Common.ts b/src/interfaces/Common.ts
index 009f67cbf..974d34702 100644
--- a/src/interfaces/Common.ts
+++ b/src/interfaces/Common.ts
@@ -1,4 +1,4 @@
-import { BigNumber } from "../utils";
+import { BigNumber, MerkleTree } from "../utils";
 
 export interface SortableEvent {
   blockNumber: number;
@@ -9,3 +9,8 @@ export interface SortableEvent {
 export interface BigNumberForToken {
   [l1TokenAddress: string]: BigNumber;
 }
+
+export interface TreeData<T> {
+  tree: MerkleTree<T>;
+  leaves: T[];
+}
diff --git a/src/interfaces/SpokePool.ts b/src/interfaces/SpokePool.ts
index 5ae1aae0c..6ed03c472 100644
--- a/src/interfaces/SpokePool.ts
+++ b/src/interfaces/SpokePool.ts
@@ -61,6 +61,27 @@ export interface SlowFill {
   recipient: string;
 }
 
+export interface RootBundleRelay {
+  rootBundleId: number;
+  relayerRefundRoot: string;
+  slowRelayRoot: string;
+}
+
+export interface RootBundleRelayWithBlock extends RootBundleRelay, SortableEvent {}
+
+export interface RelayerRefundExecution {
+  amountToReturn: BigNumber;
+  chainId: number;
+  refundAmounts: BigNumber[];
+  rootBundleId: number;
+  leafId: number;
+  l2TokenAddress: string;
+  refundAddresses: string[];
+  caller: string;
+}
+
+export interface RelayerRefundExecutionWithBlock extends RelayerRefundExecution, SortableEvent {}
+
 // Used by the SpokePool to execute a slow relay.
 export interface RelayData {
   depositor: string;
diff --git a/src/utils/EventUtils.ts b/src/utils/EventUtils.ts
index 6b52d5511..c45610198 100644
--- a/src/utils/EventUtils.ts
+++ b/src/utils/EventUtils.ts
@@ -15,6 +15,7 @@ export function spreadEvent(event: Event) {
   if (returnedObject.originChainId) returnedObject.originChainId = Number(returnedObject.originChainId);
   if (returnedObject.repaymentChainId) returnedObject.repaymentChainId = Number(returnedObject.repaymentChainId);
   if (returnedObject.l2ChainId) returnedObject.l2ChainId = Number(returnedObject.l2ChainId);
+  if (returnedObject.rootBundleId) returnedObject.rootBundleId = Number(returnedObject.rootBundleId);
 
   return returnedObject;
 }
diff --git a/test/Dataworker.executePoolRebalances.ts b/test/Dataworker.executePoolRebalances.ts
new file mode 100644
index 000000000..c378af41b
--- /dev/null
+++ b/test/Dataworker.executePoolRebalances.ts
@@ -0,0 +1,109 @@
+import { buildFillForRepaymentChain } from "./utils";
+import { SignerWithAddress, expect, ethers, Contract, buildDeposit } from "./utils";
+import { HubPoolClient, AcrossConfigStoreClient, MultiCallerClient } from "../src/clients";
+import { amountToDeposit, destinationChainId } from "./constants";
+import { MAX_REFUNDS_PER_RELAYER_REFUND_LEAF, MAX_L1_TOKENS_PER_POOL_REBALANCE_LEAF } from "./constants";
+import { DEFAULT_POOL_BALANCE_TOKEN_TRANSFER_THRESHOLD } from "./constants";
+import { setupDataworker } from "./fixtures/Dataworker.Fixture";
+import { MAX_UINT_VAL } from "../src/utils";
+
+// Tested
+import { Dataworker } from "../src/dataworker/Dataworker";
+
+let spokePool_1: Contract, erc20_1: Contract, spokePool_2: Contract;
+let l1Token_1: Contract, hubPool: Contract;
+let depositor: SignerWithAddress;
+
+let hubPoolClient: HubPoolClient, configStoreClient: AcrossConfigStoreClient;
+let dataworkerInstance: Dataworker, multiCallerClient: MultiCallerClient;
+
+let updateAllClients: () => Promise<void>;
+
+describe("Dataworker: Execute pool rebalances", async function () {
+  beforeEach(async function () {
+    ({
+      hubPool,
+      spokePool_1,
+      erc20_1,
+      spokePool_2,
+      configStoreClient,
+      hubPoolClient,
+      l1Token_1,
+      depositor,
+      dataworkerInstance,
+      multiCallerClient,
+      updateAllClients,
+    } = await setupDataworker(
+      ethers,
+      MAX_REFUNDS_PER_RELAYER_REFUND_LEAF,
+      MAX_L1_TOKENS_PER_POOL_REBALANCE_LEAF,
+      DEFAULT_POOL_BALANCE_TOKEN_TRANSFER_THRESHOLD,
+      0
+    ));
+  });
+  it("Simple lifecycle", async function () {
+    await updateAllClients();
+
+    // Send a deposit and a fill so that dataworker builds simple roots.
+    const deposit = await buildDeposit(
+      configStoreClient,
+      hubPoolClient,
+      spokePool_1,
+      erc20_1,
+      l1Token_1,
+      depositor,
+      destinationChainId,
+      amountToDeposit
+    );
+    await updateAllClients();
+    await buildFillForRepaymentChain(spokePool_2, depositor, deposit, 0.5, destinationChainId);
+    await updateAllClients();
+
+    await dataworkerInstance.proposeRootBundle();
+
+    // Execute queue and check that root bundle is pending:
+    await l1Token_1.approve(hubPool.address, MAX_UINT_VAL);
+    await multiCallerClient.executeTransactionQueue();
+
+    // Advance time and execute leaves:
+    await hubPool.setCurrentTime(Number(await hubPool.getCurrentTime()) + Number(await hubPool.liveness()) + 1);
+    await updateAllClients();
+    await dataworkerInstance.executePoolRebalanceLeaves();
+
+    // Should be 2 transactions: 1 for the to chain and 1 for the from chain.
+    expect(multiCallerClient.transactionCount()).to.equal(2);
+    await multiCallerClient.executeTransactionQueue();
+
+    // TEST 3:
+    // Submit another root bundle proposal and check bundle block range. There should be no leaves in the new range
+    // yet. In the bundle block range, all chains should have increased their start block, including those without
+    // pool rebalance leaves because they should use the chain's end block from the latest fully executed proposed
+    // root bundle, which should be the bundle block in expectedPoolRebalanceRoot2 + 1.
+    await updateAllClients();
+    await dataworkerInstance.proposeRootBundle();
+
+    // Advance time and execute leaves:
+    await hubPool.setCurrentTime(Number(await hubPool.getCurrentTime()) + Number(await hubPool.liveness()) + 1);
+    await updateAllClients();
+    await dataworkerInstance.executePoolRebalanceLeaves();
+    expect(multiCallerClient.transactionCount()).to.equal(0);
+
+    // TEST 4:
+    // Submit another fill and check that dataworker proposes another root:
+    await buildFillForRepaymentChain(spokePool_2, depositor, deposit, 1, destinationChainId);
+    await updateAllClients();
+    await dataworkerInstance.proposeRootBundle();
+
+    // Execute queue and execute leaves:
+    await multiCallerClient.executeTransactionQueue();
+
+    // Advance time and execute leaves:
+    await hubPool.setCurrentTime(Number(await hubPool.getCurrentTime()) + Number(await hubPool.liveness()) + 1);
+    await updateAllClients();
+    await dataworkerInstance.executePoolRebalanceLeaves();
+
+    // Should be 1 leaf since this is _only_ a second partial fill repayment and doesn't involve the deposit chain.
+    expect(multiCallerClient.transactionCount()).to.equal(1);
+    await multiCallerClient.executeTransactionQueue();
+  });
+});
diff --git a/test/Dataworker.executeRelayerRefunds.ts b/test/Dataworker.executeRelayerRefunds.ts
new file mode 100644
index 000000000..c8c1d983e
--- /dev/null
+++ b/test/Dataworker.executeRelayerRefunds.ts
@@ -0,0 +1,112 @@
+import { buildFillForRepaymentChain } from "./utils";
+import { SignerWithAddress, expect, ethers, Contract, buildDeposit } from "./utils";
+import { HubPoolClient, AcrossConfigStoreClient, MultiCallerClient } from "../src/clients";
+import { amountToDeposit, destinationChainId } from "./constants";
+import { MAX_REFUNDS_PER_RELAYER_REFUND_LEAF, MAX_L1_TOKENS_PER_POOL_REBALANCE_LEAF } from "./constants";
+import { setupDataworker } from "./fixtures/Dataworker.Fixture";
+import { MAX_UINT_VAL } from "../src/utils";
+
+// Tested
+import { Dataworker } from "../src/dataworker/Dataworker";
+
+let spokePool_1: Contract, erc20_1: Contract, spokePool_2: Contract, erc20_2: Contract;
+let l1Token_1: Contract, hubPool: Contract;
+let depositor: SignerWithAddress;
+
+let hubPoolClient: HubPoolClient, configStoreClient: AcrossConfigStoreClient;
+let dataworkerInstance: Dataworker, multiCallerClient: MultiCallerClient;
+
+let updateAllClients: () => Promise<void>;
+
+describe("Dataworker: Execute relayer refunds", async function () {
+  beforeEach(async function () {
+    ({
+      hubPool,
+      spokePool_1,
+      erc20_1,
+      spokePool_2,
+      erc20_2,
+      configStoreClient,
+      hubPoolClient,
+      l1Token_1,
+      depositor,
+      dataworkerInstance,
+      multiCallerClient,
+      updateAllClients,
+    } = await setupDataworker(
+      ethers,
+      MAX_REFUNDS_PER_RELAYER_REFUND_LEAF,
+      MAX_L1_TOKENS_PER_POOL_REBALANCE_LEAF,
+      ethers.BigNumber.from(0),
+      0
+    ));
+  });
+  it("Simple lifecycle", async function () {
+    await updateAllClients();
+
+    // Send a deposit and a fill so that dataworker builds simple roots.
+    const deposit = await buildDeposit(
+      configStoreClient,
+      hubPoolClient,
+      spokePool_1,
+      erc20_1,
+      l1Token_1,
+      depositor,
+      destinationChainId,
+      amountToDeposit
+    );
+    await updateAllClients();
+    await buildFillForRepaymentChain(spokePool_2, depositor, deposit, 0.5, destinationChainId);
+    await updateAllClients();
+
+    await dataworkerInstance.proposeRootBundle();
+
+    // Execute queue and check that root bundle is pending:
+    await l1Token_1.approve(hubPool.address, MAX_UINT_VAL);
+    await multiCallerClient.executeTransactionQueue();
+
+    // Advance time and execute rebalance leaves:
+    await hubPool.setCurrentTime(Number(await hubPool.getCurrentTime()) + Number(await hubPool.liveness()) + 1);
+    await updateAllClients();
+    await dataworkerInstance.executePoolRebalanceLeaves();
+    await multiCallerClient.executeTransactionQueue();
+
+    // TEST 3:
+    // Submit another root bundle proposal and check bundle block range. There should be no leaves in the new range
+    // yet. In the bundle block range, all chains should have increased their start block, including those without
+    // pool rebalance leaves because they should use the chain's end block from the latest fully executed proposed
+    // root bundle, which should be the bundle block in expectedPoolRebalanceRoot2 + 1.
+    await updateAllClients();
+    await dataworkerInstance.proposeRootBundle();
+
+    // Advance time and execute leaves:
+    await hubPool.setCurrentTime(Number(await hubPool.getCurrentTime()) + Number(await hubPool.liveness()) + 1);
+    await updateAllClients();
+    await dataworkerInstance.executePoolRebalanceLeaves();
+
+    // TEST 4:
+    // Submit another fill and check that dataworker proposes another root:
+    await buildFillForRepaymentChain(spokePool_2, depositor, deposit, 1, destinationChainId);
+    await updateAllClients();
+    await dataworkerInstance.proposeRootBundle();
+
+    // Execute queue and execute leaves:
+    await multiCallerClient.executeTransactionQueue();
+
+    // Advance time and execute leaves:
+    await hubPool.setCurrentTime(Number(await hubPool.getCurrentTime()) + Number(await hubPool.liveness()) + 1);
+    await updateAllClients();
+    await dataworkerInstance.executePoolRebalanceLeaves();
+
+    // Should be 1 leaf since this is _only_ a second partial fill repayment and doesn't involve the deposit chain.
+    await multiCallerClient.executeTransactionQueue();
+
+    await updateAllClients();
+    await dataworkerInstance.executeRelayerRefundLeaves();
+    expect(multiCallerClient.transactionCount()).to.equal(3);
+
+    // Note: we need to manually supply the tokens since the L1 tokens won't be recognized in the spoke pool.
+    await erc20_2.mint(spokePool_2.address, amountToDeposit);
+    await multiCallerClient.executeTransactionQueue();
+  });
+});
diff --git a/test/Dataworker.executeSlowRelay.ts b/test/Dataworker.executeSlowRelay.ts
new file mode 100644
index 000000000..6db0589e9
--- /dev/null
+++ b/test/Dataworker.executeSlowRelay.ts
@@ -0,0 +1,114 @@
+import { buildFillForRepaymentChain } from "./utils";
+import { SignerWithAddress, expect, ethers, Contract, buildDeposit } from "./utils";
+import { HubPoolClient, AcrossConfigStoreClient, MultiCallerClient } from "../src/clients";
+import { amountToDeposit, destinationChainId } from "./constants";
+import { MAX_REFUNDS_PER_RELAYER_REFUND_LEAF, MAX_L1_TOKENS_PER_POOL_REBALANCE_LEAF } from "./constants";
+import { setupDataworker } from "./fixtures/Dataworker.Fixture";
+import { MAX_UINT_VAL } from "../src/utils";
+
+// Tested
+import { Dataworker } from "../src/dataworker/Dataworker";
+
+let spokePool_1: Contract, erc20_1: Contract, spokePool_2: Contract, erc20_2: Contract;
+let l1Token_1: Contract, hubPool: Contract;
+let depositor: SignerWithAddress;
+
+let hubPoolClient: HubPoolClient, configStoreClient: AcrossConfigStoreClient;
+let dataworkerInstance: Dataworker, multiCallerClient: MultiCallerClient;
+
+let updateAllClients: () => Promise<void>;
+
+describe("Dataworker: Execute slow relays", async function () {
+  beforeEach(async function () {
+    ({
+      hubPool,
+      spokePool_1,
+      erc20_1,
+      spokePool_2,
+      erc20_2,
+      configStoreClient,
+      hubPoolClient,
+      l1Token_1,
+      depositor,
+      dataworkerInstance,
+      multiCallerClient,
+      updateAllClients,
+    } = await setupDataworker(
+      ethers,
+      MAX_REFUNDS_PER_RELAYER_REFUND_LEAF,
+      MAX_L1_TOKENS_PER_POOL_REBALANCE_LEAF,
+      ethers.BigNumber.from(0),
+      0
+    ));
+  });
+  it("Simple lifecycle", async function () {
+    await updateAllClients();
+
+    // Send a deposit and a fill so that dataworker builds simple roots.
+    const deposit = await buildDeposit(
+      configStoreClient,
+      hubPoolClient,
+      spokePool_1,
+      erc20_1,
+      l1Token_1,
+      depositor,
+      destinationChainId,
+      amountToDeposit
+    );
+    await updateAllClients();
+    await buildFillForRepaymentChain(spokePool_2, depositor, deposit, 0.5, destinationChainId);
+    await updateAllClients();
+
+    await dataworkerInstance.proposeRootBundle();
+
+    // Execute queue and check that root bundle is pending:
+    await l1Token_1.approve(hubPool.address, MAX_UINT_VAL);
+    await multiCallerClient.executeTransactionQueue();
+
+    // Advance time and execute rebalance leaves:
+    await hubPool.setCurrentTime(Number(await hubPool.getCurrentTime()) + Number(await hubPool.liveness()) + 1);
+    await updateAllClients();
+    await dataworkerInstance.executePoolRebalanceLeaves();
+    await multiCallerClient.executeTransactionQueue();
+
+    // TEST 3:
+    // Submit another root bundle proposal and check bundle block range. There should be no leaves in the new range
+    // yet. In the bundle block range, all chains should have increased their start block, including those without
+    // pool rebalance leaves because they should use the chain's end block from the latest fully executed proposed
+    // root bundle, which should be the bundle block in expectedPoolRebalanceRoot2 + 1.
+    await updateAllClients();
+    await dataworkerInstance.proposeRootBundle();
+
+    // Advance time and execute leaves:
+    await hubPool.setCurrentTime(Number(await hubPool.getCurrentTime()) + Number(await hubPool.liveness()) + 1);
+    await updateAllClients();
+    await dataworkerInstance.executePoolRebalanceLeaves();
+
+    // TEST 4:
+    // Submit a new root with no additional actions taken to make sure that this doesn't break anything.
+    await updateAllClients();
+    await dataworkerInstance.proposeRootBundle();
+
+    // Execute queue and execute leaves:
+    await multiCallerClient.executeTransactionQueue();
+
+    // Advance time and execute leaves:
+    await hubPool.setCurrentTime(Number(await hubPool.getCurrentTime()) + Number(await hubPool.liveness()) + 1);
+    await updateAllClients();
+    await dataworkerInstance.executePoolRebalanceLeaves();
+
+    // Execute any remaining transactions in the queue.
+    await multiCallerClient.executeTransactionQueue();
+
+    await updateAllClients();
+    await dataworkerInstance.executeSlowRelayLeaves();
+
+    // There should be one slow relay to execute.
+    expect(multiCallerClient.transactionCount()).to.equal(1);
+
+    // Note: we need to manually supply the tokens since the L1 tokens won't be recognized in the spoke pool.
+    // It should only require ~1/2 of the amount because there was a prev fill that provided the other half.
+    await erc20_2.mint(spokePool_2.address, amountToDeposit.div(2).sub(1));
+    await multiCallerClient.executeTransactionQueue();
+  });
+});
diff --git a/test/utils/utils.ts b/test/utils/utils.ts
index 24419b1ea..db44b72bf 100644
--- a/test/utils/utils.ts
+++ b/test/utils/utils.ts
@@ -129,7 +129,7 @@ export async function deployAndConfigureHubPool(
     await utils.getContractFactory("HubPool", signer)
   ).deploy(lpTokenFactory.address, finderAddress, zeroAddress, timerAddress);
 
-  const mockAdapter = await (await utils.getContractFactory("Mock_Adapter", signer)).deploy();
+  const mockAdapter = await (await utils.getContractFactory("Ethereum_Adapter", signer)).deploy();
 
   for (const spokePool of spokePools) {
     await hubPool.setCrossChainContracts(spokePool.l2ChainId, mockAdapter.address, spokePool.spokePool.address);
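
Reviewer note on the new executor methods: `executeSlowRelayLeaves` and `executeRelayerRefundLeaves` share the same skeleton — match the L2 `RelayedRootBundle` event back to the mainnet proposal with the same root, derive the bundle's per-chain block ranges from the previous fully executed proposal, rebuild the tree, and enqueue only the leaves that have not yet been executed. The sketch below illustrates that shared matching-and-range logic under simplified assumptions; `ProposedBundle` and `RootBundleRelay` here are illustrative stand-ins, not the repo's actual client interfaces.

```ts
import { BigNumber } from "ethers";

// Hypothetical, simplified shapes standing in for the real client interfaces.
interface ProposedBundle {
  blockNumber: number;
  relayerRefundRoot: string;
  bundleEvaluationBlockNumbers: BigNumber[];
}

interface RootBundleRelay {
  rootBundleId: number;
  relayerRefundRoot: string;
}

// Match an L2 RelayedRootBundle event back to the L1 proposal that produced it, then
// derive the per-chain [fromBlock, toBlock] ranges the proposal covered: each range
// starts one block after the previous fully executed bundle's end block, or at 0.
function deriveBlockRanges(
  relay: RootBundleRelay,
  proposals: ProposedBundle[],
  prevFullyExecuted: ProposedBundle | undefined
): number[][] | undefined {
  const matching = proposals.find((p) => p.relayerRefundRoot === relay.relayerRefundRoot);
  if (!matching) return undefined; // Root was never proposed on L1; nothing to execute.

  return matching.bundleEvaluationBlockNumbers.map((endBlock, i) => {
    const fromBlock = prevFullyExecuted?.bundleEvaluationBlockNumbers?.[i]
      ? prevFullyExecuted.bundleEvaluationBlockNumbers[i].toNumber() + 1
      : 0;
    return [fromBlock, endBlock.toNumber()];
  });
}
```

If the TODO at the top of `executeSlowRelayLeaves` about generalizing the two methods is picked up later, a helper of roughly this shape, parameterized on the root field and a leaf comparator, is one plausible way to deduplicate them.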