diff --git a/.github/workflows/docker.yaml b/.github/workflows/docker.yaml
index b78fc4861..b5d44671f 100644
--- a/.github/workflows/docker.yaml
+++ b/.github/workflows/docker.yaml
@@ -38,6 +38,4 @@ jobs:
           context: ./
           file: ./Dockerfile
           push: true
-          tags:
-            - latest
-            - ${{ steps.docker-image.outputs.image }}
+          tags: latest,${{ steps.docker-image.outputs.image }}
diff --git a/.prettierignore b/.prettierignore
index 725b09ed0..71c49694d 100644
--- a/.prettierignore
+++ b/.prettierignore
@@ -39,4 +39,7 @@ dist/
 *.txt
 
 # env example files
-*.env.example
\ No newline at end of file
+*.env.example
+
+# local files
+.secret
\ No newline at end of file
diff --git a/contracts/MockPolygonEvents.sol b/contracts/MockPolygonEvents.sol
new file mode 100644
index 000000000..7068081d7
--- /dev/null
+++ b/contracts/MockPolygonEvents.sol
@@ -0,0 +1,31 @@
+// This file contains contracts that can be used to unit test the src/adapter/bridges/PolygonERC20Bridge.ts
+// and PolygonWethBridge.ts code which reads events from Polygon contracts facilitating cross chain transfers.
+
+pragma solidity ^0.8.0;
+
+contract Polygon_L1Bridge {
+    event LockedERC20(
+        address indexed depositor,
+        address indexed depositReceiver,
+        address indexed rootToken,
+        uint256 amount
+    );
+
+    event LockedEther(address indexed depositor, address indexed depositReceiver, uint256 amount);
+
+    function depositFor(address depositor, address depositReceiver, address rootToken, uint256 amount) external {
+        emit LockedERC20(depositor, depositReceiver, rootToken, amount);
+    }
+
+    function depositEtherFor(address depositor, address depositReceiver, uint256 amount) external {
+        emit LockedEther(depositor, depositReceiver, amount);
+    }
+}
+
+contract Polygon_L2Bridge {
+    event Transfer(address indexed from, address indexed to, uint256 value);
+
+    function transfer(address from, address to, uint256 value) external {
+        emit Transfer(from, to, value);
+    }
+}
diff --git a/package.json b/package.json
index d9d3febf8..29ca59c5a 100644
--- a/package.json
+++ b/package.json
@@ -12,7 +12,7 @@
   "dependencies": {
     "@across-protocol/constants": "^3.1.14",
     "@across-protocol/contracts": "^3.0.10",
-    "@across-protocol/sdk": "^3.1.31",
+    "@across-protocol/sdk": "^3.1.36",
     "@arbitrum/sdk": "^3.1.3",
     "@aws-sdk/client-kms": "^3.592.0",
     "@aws-sdk/client-s3": "^3.592.0",
@@ -30,7 +30,7 @@
     "@types/express": "^4.17.21",
     "@uma/common": "2.33.0",
     "@uma/logger": "^1.3.0",
-    "axios": "^1.6.1",
+    "axios": "^1.7.4",
     "dotenv": "^16.3.1",
     "ethers": "^5.7.2",
     "express": "^4.19.2",
diff --git a/scripts/runMainnet.sh b/scripts/runMainnet.sh
index 4c17f067d..a65deefc4 100644
--- a/scripts/runMainnet.sh
+++ b/scripts/runMainnet.sh
@@ -36,6 +36,9 @@ echo "SLACK_CONFIG=$SLACK_CONFIG" >> ${app_dir}/.env
 
 echo "All env vars from secrets are set."
 
+# Set the bot identifier
+echo "BOT_IDENTIFIER=LISK_ACROSS_RELAYER" >> ${app_dir}/.env
+
 # Simulation mode OFF
 echo "SEND_RELAYS=true" >> ${app_dir}/.env
 echo "SEND_REBALANCES=true" >> ${app_dir}/.env
diff --git a/scripts/runSepolia.sh b/scripts/runSepolia.sh
index 398ad98d0..370993ffc 100644
--- a/scripts/runSepolia.sh
+++ b/scripts/runSepolia.sh
@@ -30,8 +30,12 @@ echo "RPC_PROVIDER_GELATO_4202=$RPC_PROVIDER_GELATO_4202" >> ${app_dir}/.env
 
 echo "All env vars from secrets are set."
 
+# Set the bot identifier
+echo "BOT_IDENTIFIER=LISK_ACROSS_RELAYER" >> ${app_dir}/.env
+
 # Simulation mode ON
 echo "SEND_RELAYS=true" >> ${app_dir}/.env
+
 # RPC provider configuration
 echo "RPC_PROVIDERS=TENDERLY,GELATO,DRPC" >> ${app_dir}/.env
 echo "RPC_PROVIDERS_11155111=TENDERLY,DRPC" >> ${app_dir}/.env
@@ -42,8 +46,10 @@ echo "RELAYER_IGNORE_LIMITS=true" >> ${app_dir}/.env
 echo "HUB_CHAIN_ID=11155111" >> ${app_dir}/.env
 echo "RELAYER_ORIGIN_CHAINS=[11155111,4202]" >> ${app_dir}/.env
 echo "RELAYER_DESTINATION_CHAINS=[11155111,4202]" >> ${app_dir}/.env
+
 # Redis settings
 echo "REDIS_URL='redis://127.0.0.1:6379'" >> ${app_dir}/.env
+
 # Supported token settings
 echo RELAYER_TOKENS=\'[\"0x16B840bA01e2b05fc2268eAf6d18892a11EC29D6\", \"0xaA8E23Fb1079EA71e0a56F48a2aA51851D8433D0\", \"0xfFf9976782d46CC05630D1f6eBAb18b2324d6B14\"]\' >> ${app_dir}/.env
 echo MIN_DEPOSIT_CONFIRMATIONS=\'{ \"1000000\": { \"919\": 1, \"4202\": 1, \"80002\": 1, \"84532\": 1, \"421614\": 1, \"11155111\": 1, \"11155420\": 1 } }\' >> ${app_dir}/.env
diff --git a/scripts/spokepool.ts b/scripts/spokepool.ts
index 00b227618..cef9687cd 100644
--- a/scripts/spokepool.ts
+++ b/scripts/spokepool.ts
@@ -231,7 +231,7 @@ async function deposit(args: Record, signer: Signer): P
 }
 
 async function fillDeposit(args: Record, signer: Signer): Promise {
-  const { txnHash, depositId: depositIdArg, execute } = args;
+  const { txnHash, depositId: depositIdArg, execute, slow } = args;
   const originChainId = Number(args.chainId);
 
   if (txnHash === undefined || typeof txnHash !== "string" || txnHash.length != 66 || !txnHash.startsWith("0x")) {
@@ -295,7 +295,9 @@ async function fillDeposit(args: Record, sign
     fromLiteChain: false, // Not relevant
     toLiteChain: false, // Not relevant
   };
-  const fill = await sdkUtils.populateV3Relay(destSpokePool, deposit, relayer);
+  const fill = isDefined(slow)
+    ? await destSpokePool.populateTransaction.requestV3SlowFill(deposit)
+    : await sdkUtils.populateV3Relay(destSpokePool, deposit, relayer);
 
   console.group("Fill Txn Info");
   console.log(`to: ${fill.to}`);
@@ -459,7 +461,7 @@ function usage(badInput?: string): boolean {
 
   const dumpConfigArgs = "--chainId";
   const fetchArgs = "--chainId [--depositId | --txnHash ]";
-  const fillArgs = "--chainId --txnHash [--depositId ] [--execute]";
+  const fillArgs = "--chainId --txnHash [--depositId ] [--slow] [--execute]";
 
   const pad = "deposit".length;
   usageStr += `
@@ -491,7 +493,7 @@ async function run(argv: string[]): Promise {
   const fetchDepositOpts = ["chainId", "depositId"];
   const opts = {
     string: ["wallet", ...configOpts, ...depositOpts, ...fetchOpts, ...fillOpts, ...fetchDepositOpts],
-    boolean: ["decimals", "execute"], // @dev tbd whether this is good UX or not...may need to change.
+    boolean: ["decimals", "execute", "slow"], // @dev tbd whether this is good UX or not...may need to change.
     default: {
       wallet: "secret",
       decimals: false,
diff --git a/src/adapter/BaseChainAdapter.ts b/src/adapter/BaseChainAdapter.ts
index f43692ced..4ef899f3b 100644
--- a/src/adapter/BaseChainAdapter.ts
+++ b/src/adapter/BaseChainAdapter.ts
@@ -219,7 +219,12 @@ export class BaseChainAdapter {
     const augmentedTxn = { contract, chainId: this.chainId, method, args: [], value, mrkdwn, message };
     if (simMode) {
       const { succeed, reason } = (await this.transactionClient.simulate([augmentedTxn]))[0];
-      this.log("Simulation result", { succeed, reason, contract, value }, "debug", "wrapEthIfAboveThreshold");
+      this.log(
+        "Simulation result",
+        { succeed, reason, contract: contract.address, value },
+        "debug",
+        "wrapEthIfAboveThreshold"
+      );
       return { hash: ZERO_ADDRESS } as TransactionResponse;
     } else {
       (await this.transactionClient.submit(this.chainId, [augmentedTxn]))[0];
diff --git a/src/adapter/bridges/LineaBridge.ts b/src/adapter/bridges/LineaBridge.ts
index 09f1e1e4d..9c6a27e8e 100644
--- a/src/adapter/bridges/LineaBridge.ts
+++ b/src/adapter/bridges/LineaBridge.ts
@@ -42,7 +42,7 @@ export class LineaBridge extends BaseBridgeAdapter {
   ): Promise {
     const events = await paginatedEventQuery(
       this.getL1Bridge(),
-      this.getL1Bridge().filters.BridgingInitiatedV2(undefined, fromAddress, l1Token),
+      this.getL1Bridge().filters.BridgingInitiatedV2(undefined, toAddress, l1Token),
       eventConfig
     );
     return {
@@ -60,7 +60,7 @@ export class LineaBridge extends BaseBridgeAdapter {
   ): Promise {
     const events = await paginatedEventQuery(
       this.getL2Bridge(),
-      this.getL2Bridge().filters.BridgingFinalizedV2(l1Token, undefined, undefined, fromAddress),
+      this.getL2Bridge().filters.BridgingFinalizedV2(l1Token, undefined, undefined, toAddress),
       eventConfig
     );
     // There is no "from" field in this event, so we set it to the L2 token received.
diff --git a/src/adapter/bridges/LineaUSDCBridge.ts b/src/adapter/bridges/LineaUSDCBridge.ts
index dc339dbe9..c3e03c2eb 100644
--- a/src/adapter/bridges/LineaUSDCBridge.ts
+++ b/src/adapter/bridges/LineaUSDCBridge.ts
@@ -42,7 +42,7 @@ export class LineaUSDCBridge extends BaseBridgeAdapter {
   ): Promise {
     const events = await paginatedEventQuery(
       this.getL1Bridge(),
-      this.getL1Bridge().filters.Deposited(undefined, undefined, fromAddress),
+      this.getL1Bridge().filters.Deposited(undefined, undefined, toAddress),
       eventConfig
     );
     return {
@@ -58,7 +58,7 @@ export class LineaUSDCBridge extends BaseBridgeAdapter {
   ): Promise {
     const events = await paginatedEventQuery(
       this.getL2Bridge(),
-      this.getL2Bridge().filters.ReceivedFromOtherLayer(fromAddress),
+      this.getL2Bridge().filters.ReceivedFromOtherLayer(toAddress),
       eventConfig
     );
     // There is no "from" address in this event.
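All of the fromAddress-to-toAddress swaps in these bridge adapters follow the same ethers v5 pattern: positional arguments to a generated `filters.*` helper bind the event's indexed parameters in declaration order, so changing the argument changes which side of the transfer is matched. Below is a minimal standalone sketch of that pattern, using the LockedERC20 event defined in contracts/MockPolygonEvents.sol above; the RPC URL and addresses are placeholders, not values from this repo.

```typescript
import { ethers } from "ethers";

// Event signature copied from contracts/MockPolygonEvents.sol in this PR.
const l1BridgeAbi = [
  "event LockedERC20(address indexed depositor, address indexed depositReceiver, address indexed rootToken, uint256 amount)",
];

// Placeholder connection details for illustration only.
const provider = new ethers.providers.JsonRpcProvider("http://localhost:8545");
const bridgeAddress = "0x0000000000000000000000000000000000000001";
const toAddress = "0x0000000000000000000000000000000000000002";
const l1Token = "0x0000000000000000000000000000000000000003";

async function main(): Promise<void> {
  const bridge = new ethers.Contract(bridgeAddress, l1BridgeAbi, provider);

  // Positional filter arguments bind to the indexed parameters in order:
  // (depositor, depositReceiver, rootToken). Leaving `depositor` undefined
  // wildcards the sender and pins the *recipient*, which is the behavior the
  // adapters above switch to by passing toAddress instead of fromAddress.
  const filter = bridge.filters.LockedERC20(undefined, toAddress, l1Token);
  const events = await bridge.queryFilter(filter, 0, "latest");
  console.log(`Found ${events.length} LockedERC20 event(s) sent to ${toAddress}`);
}

main().catch(console.error);
```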
diff --git a/src/adapter/bridges/LineaWethBridge.ts b/src/adapter/bridges/LineaWethBridge.ts
index 696c46250..d2f837c50 100644
--- a/src/adapter/bridges/LineaWethBridge.ts
+++ b/src/adapter/bridges/LineaWethBridge.ts
@@ -1,10 +1,22 @@
-import { Contract, BigNumber, paginatedEventQuery, bnZero, Signer, EventSearchConfig, Provider } from "../../utils";
+import {
+  Contract,
+  BigNumber,
+  paginatedEventQuery,
+  bnZero,
+  Signer,
+  EventSearchConfig,
+  Provider,
+  getBlockForTimestamp,
+  BlockFinder,
+  isDefined,
+} from "../../utils";
 import { CONTRACT_ADDRESSES } from "../../common";
 import { BridgeTransactionDetails, BaseBridgeAdapter, BridgeEvents } from "./BaseBridgeAdapter";
 import { processEvent } from "../utils";
 
 export class LineaWethBridge extends BaseBridgeAdapter {
   protected atomicDepositor: Contract;
+  protected blockFinder: BlockFinder;
 
   constructor(
     l2chainId: number,
@@ -46,9 +58,12 @@ export class LineaWethBridge extends BaseBridgeAdapter {
   ): Promise {
     const events = await paginatedEventQuery(
       this.getL1Bridge(),
-      this.getL1Bridge().filters.MessageSent(undefined, fromAddress),
+      this.getL1Bridge().filters.MessageSent(undefined, toAddress),
       eventConfig
     );
+
+    // @dev There will be a MessageSent to the SpokePool address for each RelayedRootBundle so remove
+    // those with 0 value.
     return {
       [this.resolveL2TokenAddress(l1Token)]: events
         .map((event) => processEvent(event, "_value", "_to", "_from"))
@@ -62,16 +77,33 @@ export class LineaWethBridge extends BaseBridgeAdapter {
     toAddress: string,
     eventConfig: EventSearchConfig
   ): Promise {
-    // TODO: This can probably be refactored to save an RPC call since this is called in parallel with
-    // queryL1BridgeInitiationEvents in the BaseChainAdapter class.
+    const l2Provider = this.getL2Bridge().provider;
+
+    const [fromBlock, toBlock] = await Promise.all([
+      l2Provider.getBlock(eventConfig.fromBlock),
+      l2Provider.getBlock(eventConfig.toBlock),
+    ]);
+
+    const [l1FromBlock, l1ToBlock] = [
+      await getBlockForTimestamp(this.hubChainId, fromBlock.timestamp, this.blockFinder),
+      await getBlockForTimestamp(this.hubChainId, toBlock.timestamp, this.blockFinder),
+    ];
+    const l1SearchConfig = {
+      fromBlock: l1FromBlock,
+      toBlock: l1ToBlock,
+    };
     const initiatedQueryResult = await paginatedEventQuery(
       this.getL1Bridge(),
-      this.getL1Bridge().filters.MessageSent(undefined, fromAddress),
-      eventConfig
+      this.getL1Bridge().filters.MessageSent(undefined, toAddress),
+      l1SearchConfig
     );
-    // @dev There will be a MessageSent to the SpokePool address for each RelayedRootBundle so remove
-    // those with 0 value.
+
+    // If there are no initiations, then exit early, since there will be no finalized events to match.
+    // This can happen if the from/toAddress is the hub pool.
+    if (initiatedQueryResult.length === 0) {
+      return Promise.resolve({});
+    }
+
     const internalMessageHashes = initiatedQueryResult
       .filter(({ args }) => args._value.gt(0))
       .map(({ args }) => args._messageHash);
     const events = await paginatedEventQuery(
       this.getL2Bridge(),
       this.getL2Bridge().filters.MessageClaimed(internalMessageHashes),
       eventConfig
     );
-    const finalizedHashes = events.map(({ args }) => args._messageHash);
+    const matchedEvents = events
+      .map((finalized) => {
+        const queryEvent = initiatedQueryResult.find(
+          (initiated) => initiated.args._messageHash === finalized.args._messageHash
+        );
+        // It is possible for a finalized event to be observed without the corresponding initiation event
+        // when the finalization event approaches the max look back value. In this case, we filter those out.
+        return isDefined(queryEvent)
+          ? {
+              ...processEvent(queryEvent, "_value", "_to", "_from"),
+              blockNumber: finalized.blockNumber,
+              transactionIndex: finalized.transactionIndex,
+              logIndex: finalized.logIndex,
+              transactionHash: finalized.transactionHash,
+            }
+          : undefined;
+      })
+      .filter(isDefined);
     return {
-      [this.resolveL2TokenAddress(l1Token)]: initiatedQueryResult
-        .filter(({ args }) => finalizedHashes.includes(args._messageHash))
-        .map((event) => processEvent(event, "_value", "_to", "_from")),
+      [this.resolveL2TokenAddress(l1Token)]: matchedEvents,
     };
   }
 }
diff --git a/src/adapter/bridges/PolygonERC20Bridge.ts b/src/adapter/bridges/PolygonERC20Bridge.ts
index 77d443d12..a7e5c653b 100644
--- a/src/adapter/bridges/PolygonERC20Bridge.ts
+++ b/src/adapter/bridges/PolygonERC20Bridge.ts
@@ -7,7 +7,6 @@ import {
   Provider,
   bnToHex,
   ZERO_ADDRESS,
-  getL2TokenAddresses,
 } from "../../utils";
 import { CONTRACT_ADDRESSES } from "../../common";
 import { BridgeTransactionDetails, BaseBridgeAdapter, BridgeEvents } from "./BaseBridgeAdapter";
@@ -31,7 +30,6 @@ export class PolygonERC20Bridge extends BaseBridgeAdapter {
     // TOKEN_SYMBOLS_MAP. This constructor will therefore break if
     // either the SDK, or the constants dependency in the SDK, is not
     // up-to-date.
-    const l2TokenAddresses = getL2TokenAddresses(l1Token);
     const { address: l1Address, abi: l1Abi } = CONTRACT_ADDRESSES[hubChainId].polygonBridge;
     const { address: l1GatewayAddress, abi: l1GatewayAbi } = CONTRACT_ADDRESSES[hubChainId].polygonRootChainManager;
     super(l2chainId, hubChainId, l1Signer, l2SignerOrProvider, [l1Address]);
@@ -41,7 +39,8 @@ export class PolygonERC20Bridge extends BaseBridgeAdapter {
 
     // For Polygon, we look for mint events triggered by the L2 token, not the L2 Bridge.
     const l2Abi = CONTRACT_ADDRESSES[l2chainId].withdrawableErc20.abi;
-    this.l2Bridge = new Contract(l2TokenAddresses[l2chainId], l2Abi, l2SignerOrProvider);
+    const l2TokenAddress = this.resolveL2TokenAddress(l1Token);
+    this.l2Bridge = new Contract(l2TokenAddress, l2Abi, l2SignerOrProvider);
   }
 
   async constructL1ToL2Txn(
@@ -65,12 +64,12 @@ export class PolygonERC20Bridge extends BaseBridgeAdapter {
   ): Promise {
     const events = await paginatedEventQuery(
       this.getL1Bridge(),
-      this.getL1Bridge().filters.LockedERC20(undefined, fromAddress, l1Token),
+      this.getL1Bridge().filters.LockedERC20(undefined, toAddress, l1Token),
       eventConfig
     );
     return {
       [this.resolveL2TokenAddress(l1Token)]: events.map((event) =>
-        processEvent(event, "amount", "depositorReceiver", "depositor")
+        processEvent(event, "amount", "depositReceiver", "depositor")
       ),
     };
   }
@@ -83,7 +82,7 @@ export class PolygonERC20Bridge extends BaseBridgeAdapter {
   ): Promise {
     const events = await paginatedEventQuery(
       this.getL2Bridge(),
-      this.getL2Bridge().filters.Transfer(ZERO_ADDRESS, fromAddress),
+      this.getL2Bridge().filters.Transfer(ZERO_ADDRESS, toAddress),
       eventConfig
     );
     return {
diff --git a/src/adapter/bridges/PolygonWethBridge.ts b/src/adapter/bridges/PolygonWethBridge.ts
index 0c6435b7a..a076ea700 100644
--- a/src/adapter/bridges/PolygonWethBridge.ts
+++ b/src/adapter/bridges/PolygonWethBridge.ts
@@ -64,7 +64,7 @@ export class PolygonWethBridge extends BaseBridgeAdapter {
   ): Promise {
     const events = await paginatedEventQuery(
       this.getL1Bridge(),
-      this.getL1Bridge().filters.LockedEther(undefined, fromAddress),
+      this.getL1Bridge().filters.LockedEther(undefined, toAddress),
       eventConfig
     );
     return {
@@ -82,7 +82,7 @@ export class PolygonWethBridge extends BaseBridgeAdapter {
   ): Promise {
     const events = await paginatedEventQuery(
       this.getL2Bridge(),
-      this.getL2Bridge().filters.Transfer(ZERO_ADDRESS, fromAddress),
+      this.getL2Bridge().filters.Transfer(ZERO_ADDRESS, toAddress),
       eventConfig
     );
     return {
diff --git a/src/clients/BundleDataClient.ts b/src/clients/BundleDataClient.ts
deleted file mode 100644
index aeaaa0b80..000000000
--- a/src/clients/BundleDataClient.ts
+++ /dev/null
@@ -1,1301 +0,0 @@
-import * as _ from "lodash";
-import {
-  ProposedRootBundle,
-  SlowFillRequestWithBlock,
-  SpokePoolClientsByChain,
-  V3DepositWithBlock,
-  V3FillWithBlock,
-  FillType,
-  FillStatus,
-} from "../interfaces";
-import { ConfigStoreClient, SpokePoolClient } from "../clients";
-import {
-  winston,
-  BigNumber,
-  bnZero,
-  getRefundInformationFromFill,
-  queryHistoricalDepositForFill,
-  assign,
-  assert,
-  fixedPointAdjustment,
-  isDefined,
-  toBN,
-} from "../utils";
-import { Clients, INFINITE_FILL_DEADLINE } from "../common";
-import {
-  getBlockRangeForChain,
-  getImpliedBundleBlockRanges,
-  getEndBlockBuffers,
-  prettyPrintV3SpokePoolEvents,
-  getRefundsFromBundle,
-  CombinedRefunds,
-  _buildPoolRebalanceRoot,
-} from "../dataworker/DataworkerUtils";
-import { getWidestPossibleExpectedBlockRange, isChainDisabled } from "../dataworker/PoolRebalanceUtils";
-import { utils } from "@across-protocol/sdk";
-import {
-  BundleDepositsV3,
-  BundleExcessSlowFills,
-  BundleFillsV3,
-  BundleFillV3,
-  BundleSlowFills,
-  ExpiredDepositsToRefundV3,
-  LoadDataReturnValue,
-} from "../interfaces/BundleData";
-import { BundleDataSS } from "../utils/SuperstructUtils";
-import { PoolRebalanceRoot } from "../dataworker/Dataworker";
-
-type DataCache = Record>;
-
-// V3 dictionary helper functions
-function updateExpiredDepositsV3(dict: ExpiredDepositsToRefundV3, deposit: V3DepositWithBlock): void {
-  const { originChainId, inputToken } = deposit;
-  if (!dict?.[originChainId]?.[inputToken]) {
-    assign(dict, [originChainId, inputToken], []);
-  }
-  dict[originChainId][inputToken].push(deposit);
-}
-
-function updateBundleDepositsV3(dict: BundleDepositsV3, deposit: V3DepositWithBlock): void {
-  const { originChainId, inputToken } = deposit;
-  if (!dict?.[originChainId]?.[inputToken]) {
-    assign(dict, [originChainId, inputToken], []);
-  }
-  dict[originChainId][inputToken].push(deposit);
-}
-
-function updateBundleFillsV3(
-  dict: BundleFillsV3,
-  fill: V3FillWithBlock,
-  lpFeePct: BigNumber,
-  repaymentChainId: number,
-  repaymentToken: string
-): void {
-  if (!dict?.[repaymentChainId]?.[repaymentToken]) {
-    assign(dict, [repaymentChainId, repaymentToken], {
-      fills: [],
-      totalRefundAmount: bnZero,
-      realizedLpFees: bnZero,
-      refunds: {},
-    });
-  }
-
-  const bundleFill: BundleFillV3 = { ...fill, lpFeePct };
-
-  // Add all fills, slow and fast, to dictionary.
-  assign(dict, [repaymentChainId, repaymentToken, "fills"], [bundleFill]);
-
-  // All fills update the bundle LP fees.
-  const refundObj = dict[repaymentChainId][repaymentToken];
-  const realizedLpFee = fill.inputAmount.mul(bundleFill.lpFeePct).div(fixedPointAdjustment);
-  refundObj.realizedLpFees = refundObj.realizedLpFees ? refundObj.realizedLpFees.add(realizedLpFee) : realizedLpFee;
-
-  // Only fast fills get refunded.
-  if (!utils.isSlowFill(fill)) {
-    const refundAmount = fill.inputAmount.mul(fixedPointAdjustment.sub(lpFeePct)).div(fixedPointAdjustment);
-    refundObj.totalRefundAmount = refundObj.totalRefundAmount
-      ? refundObj.totalRefundAmount.add(refundAmount)
-      : refundAmount;
-
-    // Instantiate dictionary if it doesn't exist.
-    refundObj.refunds ??= {};
-
-    if (refundObj.refunds[fill.relayer]) {
-      refundObj.refunds[fill.relayer] = refundObj.refunds[fill.relayer].add(refundAmount);
-    } else {
-      refundObj.refunds[fill.relayer] = refundAmount;
-    }
-  }
-}
-
-function updateBundleExcessSlowFills(
-  dict: BundleExcessSlowFills,
-  deposit: V3DepositWithBlock & { lpFeePct: BigNumber }
-): void {
-  const { destinationChainId, outputToken } = deposit;
-  if (!dict?.[destinationChainId]?.[outputToken]) {
-    assign(dict, [destinationChainId, outputToken], []);
-  }
-  dict[destinationChainId][outputToken].push(deposit);
-}
-
-function updateBundleSlowFills(dict: BundleSlowFills, deposit: V3DepositWithBlock & { lpFeePct: BigNumber }): void {
-  const { destinationChainId, outputToken } = deposit;
-  if (!dict?.[destinationChainId]?.[outputToken]) {
-    assign(dict, [destinationChainId, outputToken], []);
-  }
-  dict[destinationChainId][outputToken].push(deposit);
-}
-
-// @notice Shared client for computing data needed to construct or validate a bundle.
-export class BundleDataClient {
-  private loadDataCache: DataCache = {};
-  private arweaveDataCache: Record> = {};
-
-  private bundleTimestampCache: Record = {};
-
-  // eslint-disable-next-line no-useless-constructor
-  constructor(
-    readonly logger: winston.Logger,
-    readonly clients: Clients,
-    readonly spokePoolClients: { [chainId: number]: SpokePoolClient },
-    readonly chainIdListForBundleEvaluationBlockNumbers: number[],
-    readonly blockRangeEndBlockBuffer: { [chainId: number]: number } = {}
-  ) {}
-
-  // This should be called whenever it's possible that the loadData information for a block range could have changed.
-  // For instance, if the spoke or hub clients have been updated, it probably makes sense to clear this to be safe.
-  clearCache(): void {
-    this.loadDataCache = {};
-  }
-
-  private async loadDataFromCache(key: string): Promise {
-    // Always return a deep cloned copy of object stored in cache. Since JS passes by reference instead of value, we
-    // want to minimize the risk that the programmer accidentally mutates data in the cache.
-    return _.cloneDeep(await this.loadDataCache[key]);
-  }
-
-  getBundleTimestampsFromCache(key: string): undefined | { [chainId: number]: number[] } {
-    if (this.bundleTimestampCache[key]) {
-      return _.cloneDeep(this.bundleTimestampCache[key]);
-    }
-    return undefined;
-  }
-
-  setBundleTimestampsInCache(key: string, timestamps: { [chainId: number]: number[] }): void {
-    this.bundleTimestampCache[key] = timestamps;
-  }
-
-  private getArweaveClientKey(blockRangesForChains: number[][]): string {
-    return `bundles-${blockRangesForChains}`;
-  }
-
-  private async loadPersistedDataFromArweave(
-    blockRangesForChains: number[][]
-  ): Promise {
-    if (!isDefined(this.clients?.arweaveClient)) {
-      return undefined;
-    }
-    const start = performance.now();
-    const persistedData = await this.clients.arweaveClient.getByTopic(
-      this.getArweaveClientKey(blockRangesForChains),
-      BundleDataSS
-    );
-    // If there is no data or the data is empty, return undefined because we couldn't
-    // pull info from the Arweave persistence layer.
-    if (!isDefined(persistedData) || persistedData.length < 1) {
-      return undefined;
-    }
-
-    // A converter function to account for the fact that our SuperStruct schema does not support numeric
-    // keys in records. Fundamentally, this is a limitation of superstruct itself.
-    const convertTypedStringRecordIntoNumericRecord = (
-      data: Record>
-    ): Record> =>
-      Object.keys(data).reduce((acc, chainId) => {
-        acc[Number(chainId)] = data[chainId];
-        return acc;
-      }, {} as Record>);
-
-    const data = persistedData[0].data;
-    const bundleData = {
-      bundleFillsV3: convertTypedStringRecordIntoNumericRecord(data.bundleFillsV3),
-      expiredDepositsToRefundV3: convertTypedStringRecordIntoNumericRecord(data.expiredDepositsToRefundV3),
-      bundleDepositsV3: convertTypedStringRecordIntoNumericRecord(data.bundleDepositsV3),
-      unexecutableSlowFills: convertTypedStringRecordIntoNumericRecord(data.unexecutableSlowFills),
-      bundleSlowFillsV3: convertTypedStringRecordIntoNumericRecord(data.bundleSlowFillsV3),
-    };
-    this.logger.debug({
-      at: "BundleDataClient#loadPersistedDataFromArweave",
-      message: `Loaded persisted data from Arweave in ${Math.round(performance.now() - start) / 1000}s.`,
-      blockRanges: JSON.stringify(blockRangesForChains),
-      bundleData: prettyPrintV3SpokePoolEvents(
-        bundleData.bundleDepositsV3,
-        bundleData.bundleFillsV3,
-        [], // Invalid fills are not persisted to Arweave.
-        bundleData.bundleSlowFillsV3,
-        bundleData.expiredDepositsToRefundV3,
-        bundleData.unexecutableSlowFills
-      ),
-    });
-    return bundleData;
-  }
-
-  // @dev This function should probably be moved to the InventoryClient since it bypasses loadData completely now.
-  async getPendingRefundsFromValidBundles(): Promise {
-    const refunds = [];
-    if (!this.clients.hubPoolClient.isUpdated) {
-      throw new Error("BundleDataClient::getPendingRefundsFromValidBundles HubPoolClient not updated.");
-    }
-
-    const bundle = this.clients.hubPoolClient.getLatestFullyExecutedRootBundle(
-      this.clients.hubPoolClient.latestBlockSearched
-    );
-    if (bundle !== undefined) {
-      refunds.push(await this.getPendingRefundsFromBundle(bundle));
-    } // No more valid bundles in history!
-    return refunds;
-  }
-
-  // @dev This function should probably be moved to the InventoryClient since it bypasses loadData completely now.
-  // Return refunds from input bundle.
-  async getPendingRefundsFromBundle(bundle: ProposedRootBundle): Promise {
-    const nextBundleMainnetStartBlock = this.clients.hubPoolClient.getNextBundleStartBlockNumber(
-      this.chainIdListForBundleEvaluationBlockNumbers,
-      this.clients.hubPoolClient.latestBlockSearched,
-      this.clients.hubPoolClient.chainId
-    );
-    const chainIds = this.clients.configStoreClient.getChainIdIndicesForBlock(nextBundleMainnetStartBlock);
-
-    // Reconstruct latest bundle block range.
-    const bundleEvaluationBlockRanges = getImpliedBundleBlockRanges(
-      this.clients.hubPoolClient,
-      this.clients.configStoreClient,
-      bundle
-    );
-    let combinedRefunds: CombinedRefunds;
-    // Here we don't call loadData because our fallback is to approximate refunds if we don't have arweave data, rather
-    // than use the much slower loadData to compute all refunds. We don't need to consider slow fills or deposit
-    // expiries here so we can skip some steps. We also don't need to compute LP fees as they should be small enough
-    // so as not to affect this approximate refund count.
-    const arweaveData = await this.loadArweaveData(bundleEvaluationBlockRanges);
-    if (arweaveData === undefined) {
-      combinedRefunds = this.getApproximateRefundsForBlockRange(chainIds, bundleEvaluationBlockRanges);
-    } else {
-      const { bundleFillsV3, expiredDepositsToRefundV3 } = arweaveData;
-      combinedRefunds = getRefundsFromBundle(bundleFillsV3, expiredDepositsToRefundV3);
-      // If we don't have a spoke pool client for a chain, then we won't be able to deduct refunds correctly for this
-      // chain. For most of the pending bundle's liveness period, these past refunds are already executed so this is
-      // a reasonable assumption. This empty refund chain also matches what the alternative
-      // `getApproximateRefundsForBlockRange` would return.
-      Object.keys(combinedRefunds).forEach((chainId) => {
-        if (this.spokePoolClients[Number(chainId)] === undefined) {
-          delete combinedRefunds[Number(chainId)];
-        }
-      });
-    }
-
-    // The latest proposed bundle's refund leaves might have already been partially or entirely executed.
-    // We have to deduct the executed amounts from the total refund amounts.
-    return this.deductExecutedRefunds(combinedRefunds, bundle);
-  }
-
-  // @dev This helper function should probably be moved to the InventoryClient
-  getApproximateRefundsForBlockRange(chainIds: number[], blockRanges: number[][]): CombinedRefunds {
-    const refundsForChain: CombinedRefunds = {};
-    for (const chainId of chainIds) {
-      if (this.spokePoolClients[chainId] === undefined) {
-        continue;
-      }
-      const chainIndex = chainIds.indexOf(chainId);
-      this.spokePoolClients[chainId]
-        .getFills()
-        .filter((fill) => {
-          if (fill.blockNumber < blockRanges[chainIndex][0] || fill.blockNumber > blockRanges[chainIndex][1]) {
-            return false;
-          }
-
-          // If origin spoke pool client isn't defined, we can't validate it.
-          if (this.spokePoolClients[fill.originChainId] === undefined) {
-            return false;
-          }
-          const matchingDeposit = this.spokePoolClients[fill.originChainId].getDeposit(fill.depositId);
-          const hasMatchingDeposit =
-            matchingDeposit !== undefined &&
-            this.getRelayHashFromEvent(fill) === this.getRelayHashFromEvent(matchingDeposit);
-          return hasMatchingDeposit;
-        })
-        .forEach((fill) => {
-          const matchingDeposit = this.spokePoolClients[fill.originChainId].getDeposit(fill.depositId);
-          assert(isDefined(matchingDeposit));
-          const { chainToSendRefundTo, repaymentToken } = getRefundInformationFromFill(
-            fill,
-            this.clients.hubPoolClient,
-            blockRanges,
-            this.chainIdListForBundleEvaluationBlockNumbers,
-            matchingDeposit.fromLiteChain
-          );
-          // Assume that lp fees are 0 for the sake of speed. In the future we could batch compute
-          // these or make hardcoded assumptions based on the origin-repayment chain direction. This might result
-          // in slight over estimations of refunds, but its not clear whether underestimating or overestimating is
-          // worst from the relayer's perspective.
-          const { relayer, inputAmount: refundAmount } = fill;
-          refundsForChain[chainToSendRefundTo] ??= {};
-          refundsForChain[chainToSendRefundTo][repaymentToken] ??= {};
-          const existingRefundAmount = refundsForChain[chainToSendRefundTo][repaymentToken][relayer] ?? bnZero;
-          refundsForChain[chainToSendRefundTo][repaymentToken][relayer] = existingRefundAmount.add(refundAmount);
-        });
-    }
-    return refundsForChain;
-  }
-
-  getUpcomingDepositAmount(chainId: number, l2Token: string, latestBlockToSearch: number): BigNumber {
-    if (this.spokePoolClients[chainId] === undefined) {
-      return toBN(0);
-    }
-    return this.spokePoolClients[chainId]
-      .getDeposits()
-      .filter((deposit) => deposit.blockNumber > latestBlockToSearch && deposit.inputToken === l2Token)
-      .reduce((acc, deposit) => {
-        return acc.add(deposit.inputAmount);
-      }, toBN(0));
-  }
-
-  private async getLatestProposedBundleData(): Promise<{ bundleData: LoadDataReturnValue; blockRanges: number[][] }> {
-    const hubPoolClient = this.clients.hubPoolClient;
-    // Determine which bundle we should fetch from arweave, either the pending bundle or the latest
-    // executed one. Both should have arweave data but if for some reason the arweave data is missing,
-    // this function will have to compute the bundle data from scratch which will be slow. We have to fallback
-    // to computing the bundle from scratch since this function needs to return the full bundle data so that
-    // it can be used to get the running balance proposed using its data.
-    const bundleBlockRanges = getImpliedBundleBlockRanges(
-      hubPoolClient,
-      this.clients.configStoreClient,
-      hubPoolClient.hasPendingProposal()
-        ? hubPoolClient.getLatestProposedRootBundle()
-        : hubPoolClient.getLatestFullyExecutedRootBundle(hubPoolClient.latestBlockSearched)
-    );
-    return {
-      blockRanges: bundleBlockRanges,
-      bundleData: await this.loadData(
-        bundleBlockRanges,
-        this.spokePoolClients,
-        true // this bundle data should have been published to arweave
-      ),
-    };
-  }
-
-  async getLatestPoolRebalanceRoot(): Promise<{ root: PoolRebalanceRoot; blockRanges: number[][] }> {
-    const { bundleData, blockRanges } = await this.getLatestProposedBundleData();
-    const hubPoolClient = this.clients.hubPoolClient;
-    const root = await _buildPoolRebalanceRoot(
-      hubPoolClient.latestBlockSearched,
-      blockRanges[0][1],
-      bundleData.bundleDepositsV3,
-      bundleData.bundleFillsV3,
-      bundleData.bundleSlowFillsV3,
-      bundleData.unexecutableSlowFills,
-      bundleData.expiredDepositsToRefundV3,
-      {
-        hubPoolClient,
-        configStoreClient: hubPoolClient.configStoreClient as ConfigStoreClient,
-      }
-    );
-    return {
-      root,
-      blockRanges,
-    };
-  }
-
-  // @dev This function should probably be moved to the InventoryClient since it bypasses loadData completely now.
-  // Return refunds from the next valid bundle. This will contain any refunds that have been sent but are not included
-  // in a valid bundle with all of its leaves executed. This contains refunds from:
-  // - Bundles that passed liveness but have not had all of their pool rebalance leaves executed.
-  // - Bundles that are pending liveness
-  // - Fills sent after the pending, but not validated, bundle
-  async getNextBundleRefunds(): Promise {
-    const hubPoolClient = this.clients.hubPoolClient;
-    const nextBundleMainnetStartBlock = hubPoolClient.getNextBundleStartBlockNumber(
-      this.chainIdListForBundleEvaluationBlockNumbers,
-      hubPoolClient.latestBlockSearched,
-      hubPoolClient.chainId
-    );
-    const chainIds = this.clients.configStoreClient.getChainIdIndicesForBlock(nextBundleMainnetStartBlock);
-    const combinedRefunds: CombinedRefunds[] = [];
-
-    // @dev: If spoke pool client is undefined for a chain, then the end block will be null or undefined, which
-    // should be handled gracefully and effectively cause this function to ignore refunds for the chain.
-    let widestBundleBlockRanges = getWidestPossibleExpectedBlockRange(
-      chainIds,
-      this.spokePoolClients,
-      getEndBlockBuffers(chainIds, this.blockRangeEndBlockBuffer),
-      this.clients,
-      this.clients.hubPoolClient.latestBlockSearched,
-      this.clients.configStoreClient.getEnabledChains(this.clients.hubPoolClient.latestBlockSearched)
-    );
-    // Return block ranges for blocks after _pendingBlockRanges and up to widestBlockRanges.
-    // If a chain is disabled or doesn't have a spoke pool client, return a range of 0
-    function getBlockRangeDelta(_pendingBlockRanges: number[][]): number[][] {
-      return widestBundleBlockRanges.map((blockRange, index) => {
-        // If pending block range doesn't have an entry for the widest range, which is possible when a new chain
-        // is added to the CHAIN_ID_INDICES list, then simply set the initial block range to the widest block range.
-        // This will produce a block range delta of 0 where the returned range for this chain is [widest[1], widest[1]].
-        const initialBlockRange = _pendingBlockRanges[index] ?? blockRange;
-        // If chain is disabled, return disabled range
-        if (initialBlockRange[0] === initialBlockRange[1]) {
-          return initialBlockRange;
-        }
-        // If pending bundle end block exceeds widest end block or if widest end block is undefined
-        // (which is possible if the spoke pool client for the chain is not defined), return an empty range since there are no
-        // "new" events to consider for this chain.
-        if (!isDefined(blockRange[1]) || initialBlockRange[1] >= blockRange[1]) {
-          return [initialBlockRange[1], initialBlockRange[1]];
-        }
-        // If initialBlockRange][0] > widestBlockRange[0], then we'll ignore any blocks
-        // between initialBlockRange[0] and widestBlockRange[0] (inclusive) for simplicity reasons. In practice
-        // this should not happen.
-        return [initialBlockRange[1] + 1, blockRange[1]];
-      });
-    }
-
-    // If there is a pending bundle that has not been fully executed, then it should have arweave
-    // data so we can load it from there.
-    if (hubPoolClient.hasPendingProposal()) {
-      const pendingBundleBlockRanges = getImpliedBundleBlockRanges(
-        hubPoolClient,
-        this.clients.configStoreClient,
-        hubPoolClient.getLatestProposedRootBundle()
-      );
-      // Similar to getAppoximateRefundsForBlockRange, we'll skip the full bundle reconstruction if the arweave
-      // data is undefined and use the much faster approximation method which doesn't consider LP fees which is
-      // ok for this use case.
-      const arweaveData = await this.loadArweaveData(pendingBundleBlockRanges);
-      if (arweaveData === undefined) {
-        combinedRefunds.push(this.getApproximateRefundsForBlockRange(chainIds, pendingBundleBlockRanges));
-      } else {
-        const { bundleFillsV3, expiredDepositsToRefundV3 } = arweaveData;
-        combinedRefunds.push(getRefundsFromBundle(bundleFillsV3, expiredDepositsToRefundV3));
-      }
-
-      // Shorten the widestBundleBlockRanges now to not double count the pending bundle blocks.
-      widestBundleBlockRanges = getBlockRangeDelta(pendingBundleBlockRanges);
-    }
-
-    // Next, load all refunds sent after the last bundle proposal. This can be expensive so we'll skip the full
-    // bundle reconstruction and make some simplifying assumptions:
-    // - Only look up fills sent after the pending bundle's end blocks
-    // - Skip LP fee computations and just assume the relayer is being refunded the full deposit.inputAmount
-    const start = performance.now();
-    combinedRefunds.push(this.getApproximateRefundsForBlockRange(chainIds, widestBundleBlockRanges));
-    this.logger.debug({
-      at: "BundleDataClient#getNextBundleRefunds",
-      message: `Loading approximate refunds for next bundle in ${Math.round(performance.now() - start) / 1000}s.`,
-      blockRanges: JSON.stringify(widestBundleBlockRanges),
-    });
-    return combinedRefunds;
-  }
-
-  // @dev This helper function should probably be moved to the InventoryClient
-  getExecutedRefunds(
-    spokePoolClient: SpokePoolClient,
-    relayerRefundRoot: string
-  ): {
-    [tokenAddress: string]: {
-      [relayer: string]: BigNumber;
-    };
-  } {
-    if (!isDefined(spokePoolClient)) {
-      return {};
-    }
-    // @dev Search from right to left since there can be multiple root bundles with the same relayer refund root.
-    // The caller should take caution if they're trying to use this function to find matching refunds for older
-    // root bundles as opposed to more recent ones.
-    const bundle = _.findLast(
-      spokePoolClient.getRootBundleRelays(),
-      (bundle) => bundle.relayerRefundRoot === relayerRefundRoot
-    );
-    if (bundle === undefined) {
-      return {};
-    }
-
-    const executedRefundLeaves = spokePoolClient
-      .getRelayerRefundExecutions()
-      .filter((leaf) => leaf.rootBundleId === bundle.rootBundleId);
-    const executedRefunds: { [tokenAddress: string]: { [relayer: string]: BigNumber } } = {};
-    for (const refundLeaf of executedRefundLeaves) {
-      const tokenAddress = refundLeaf.l2TokenAddress;
-      if (executedRefunds[tokenAddress] === undefined) {
-        executedRefunds[tokenAddress] = {};
-      }
-      const executedTokenRefunds = executedRefunds[tokenAddress];
-
-      for (let i = 0; i < refundLeaf.refundAddresses.length; i++) {
-        const relayer = refundLeaf.refundAddresses[i];
-        const refundAmount = refundLeaf.refundAmounts[i];
-        if (executedTokenRefunds[relayer] === undefined) {
-          executedTokenRefunds[relayer] = bnZero;
-        }
-        executedTokenRefunds[relayer] = executedTokenRefunds[relayer].add(refundAmount);
-      }
-    }
-    return executedRefunds;
-  }
-
-  // @dev This helper function should probably be moved to the InventoryClient
-  private deductExecutedRefunds(
-    allRefunds: CombinedRefunds,
-    bundleContainingRefunds: ProposedRootBundle
-  ): CombinedRefunds {
-    for (const chainIdStr of Object.keys(allRefunds)) {
-      const chainId = Number(chainIdStr);
-      if (!isDefined(this.spokePoolClients[chainId])) {
-        continue;
-      }
-      const executedRefunds = this.getExecutedRefunds(
-        this.spokePoolClients[chainId],
-        bundleContainingRefunds.relayerRefundRoot
-      );
-
-      for (const tokenAddress of Object.keys(allRefunds[chainId])) {
-        const refunds = allRefunds[chainId][tokenAddress];
-        if (executedRefunds[tokenAddress] === undefined || refunds === undefined) {
-          continue;
-        }
-
-        for (const relayer of Object.keys(refunds)) {
-          const executedAmount = executedRefunds[tokenAddress][relayer];
-          if (executedAmount === undefined) {
-            continue;
-          }
-          // Since there should only be a single executed relayer refund leaf for each relayer-token-chain combination,
-          // we can deduct this refund and mark it as executed if the executed amount is > 0.
-          refunds[relayer] = bnZero;
-        }
-      }
-    }
-    return allRefunds;
-  }
-
-  getRefundsFor(bundleRefunds: CombinedRefunds, relayer: string, chainId: number, token: string): BigNumber {
-    if (!bundleRefunds[chainId] || !bundleRefunds[chainId][token]) {
-      return BigNumber.from(0);
-    }
-    const allRefunds = bundleRefunds[chainId][token];
-    return allRefunds && allRefunds[relayer] ? allRefunds[relayer] : BigNumber.from(0);
-  }
-
-  getTotalRefund(refunds: CombinedRefunds[], relayer: string, chainId: number, refundToken: string): BigNumber {
-    return refunds.reduce((totalRefund, refunds) => {
-      return totalRefund.add(this.getRefundsFor(refunds, relayer, chainId, refundToken));
-    }, bnZero);
-  }
-
-  private async loadArweaveData(blockRangesForChains: number[][]): Promise {
-    const arweaveKey = this.getArweaveClientKey(blockRangesForChains);
-    // eslint-disable-next-line @typescript-eslint/no-misused-promises
-    if (!this.arweaveDataCache[arweaveKey]) {
-      this.arweaveDataCache[arweaveKey] = this.loadPersistedDataFromArweave(blockRangesForChains);
-    }
-    const arweaveData = _.cloneDeep(await this.arweaveDataCache[arweaveKey]);
-    return arweaveData;
-  }
-
-  // Common data re-formatting logic shared across all data worker public functions.
-  // User must pass in spoke pool to search event data against. This allows the user to refund relays and fill deposits
-  // on deprecated spoke pools.
-  async loadData(
-    blockRangesForChains: number[][],
-    spokePoolClients: SpokePoolClientsByChain,
-    attemptArweaveLoad = false
-  ): Promise {
-    const key = JSON.stringify(blockRangesForChains);
-    // eslint-disable-next-line @typescript-eslint/no-misused-promises
-    if (!this.loadDataCache[key]) {
-      let arweaveData;
-      if (attemptArweaveLoad) {
-        arweaveData = await this.loadArweaveData(blockRangesForChains);
-      } else {
-        arweaveData = undefined;
-      }
-      const data = isDefined(arweaveData)
-        ? // We can return the data to a Promise to keep the return type consistent.
-          // Note: this is now a fast operation since we've already loaded the data from Arweave.
-          Promise.resolve(arweaveData)
-        : this.loadDataFromScratch(blockRangesForChains, spokePoolClients);
-      this.loadDataCache[key] = data;
-    }
-
-    return this.loadDataFromCache(key);
-  }
-
-  private async loadDataFromScratch(
-    blockRangesForChains: number[][],
-    spokePoolClients: SpokePoolClientsByChain
-  ): Promise {
-    let start = performance.now();
-    const key = JSON.stringify(blockRangesForChains);
-
-    if (!this.clients.configStoreClient.isUpdated) {
-      throw new Error("ConfigStoreClient not updated");
-    } else if (!this.clients.hubPoolClient.isUpdated) {
-      throw new Error("HubPoolClient not updated");
-    }
-
-    const chainIds = this.clients.configStoreClient.getChainIdIndicesForBlock(blockRangesForChains[0][0]);
-
-    if (blockRangesForChains.length > chainIds.length) {
-      throw new Error(
-        `Unexpected block range list length of ${blockRangesForChains.length}, should be <= ${chainIds.length}`
-      );
-    }
-
-    // V3 specific objects:
-    const bundleDepositsV3: BundleDepositsV3 = {}; // Deposits in bundle block range.
-    const bundleFillsV3: BundleFillsV3 = {}; // Fills to refund in bundle block range.
-    const bundleInvalidFillsV3: V3FillWithBlock[] = []; // Fills that are not valid in this bundle.
-    const bundleSlowFillsV3: BundleSlowFills = {}; // Deposits that we need to send slow fills
-    // for in this bundle.
-    const expiredDepositsToRefundV3: ExpiredDepositsToRefundV3 = {};
-    // Newly expired deposits in this bundle that need to be refunded.
-    const unexecutableSlowFills: BundleExcessSlowFills = {};
-    // Deposit data for all Slowfills that was included in a previous
-    // bundle and can no longer be executed because (1) they were replaced with a FastFill in this bundle or
-    // (2) the fill deadline has passed. We'll need to decrement running balances for these deposits on the
-    // destination chain where the slow fill would have been executed.
-
-    const _isChainDisabled = (chainId: number): boolean => {
-      const blockRangeForChain = getBlockRangeForChain(blockRangesForChains, chainId, chainIds);
-      return isChainDisabled(blockRangeForChain);
-    };
-
-    // Infer chain ID's to load from number of block ranges passed in.
-    const allChainIds = blockRangesForChains
-      .map((_blockRange, index) => chainIds[index])
-      .filter((chainId) => !_isChainDisabled(chainId) && spokePoolClients[chainId] !== undefined);
-    allChainIds.forEach((chainId) => {
-      const spokePoolClient = spokePoolClients[chainId];
-      if (!spokePoolClient.isUpdated) {
-        throw new Error(`SpokePoolClient for chain ${chainId} not updated.`);
-      }
-    });
-
-    // If spoke pools are V3 contracts, then we need to compute start and end timestamps for block ranges to
-    // determine whether fillDeadlines have expired.
-    // @dev Going to leave this in so we can see impact on run-time in prod. This makes (allChainIds.length * 2) RPC
-    // calls in parallel.
-    const _cachedBundleTimestamps = this.getBundleTimestampsFromCache(key);
-    let bundleBlockTimestamps: { [chainId: string]: number[] } = {};
-    if (!_cachedBundleTimestamps) {
-      bundleBlockTimestamps = await this.getBundleBlockTimestamps(chainIds, blockRangesForChains, spokePoolClients);
-      this.setBundleTimestampsInCache(key, bundleBlockTimestamps);
-      this.logger.debug({
-        at: "BundleDataClient#loadData",
-        message: "Bundle block timestamps",
-        bundleBlockTimestamps,
-        blockRangesForChains: JSON.stringify(blockRangesForChains),
-      });
-    } else {
-      bundleBlockTimestamps = _cachedBundleTimestamps;
-    }
-
-    /** *****************************
-     *
-     * Handle V3 events
-     *
-     * *****************************/
-
-    // The methodology here is roughly as follows
-    // - Query all deposits from SpokePoolClients
-    //   - If deposit is in origin chain block range, add it to bundleDepositsV3
-    //   - If deposit is expired or from an older bundle, stash it away as a deposit that may require an expired
-    //     deposit refund.
-    // - Query fills from SpokePoolClients
-    //   - If fill is in destination chain block range, then validate fill
-    //   - Fill is valid if its RelayData hash is identical to a deposit's relay data hash that we've already seen.
-    //     If we haven't seen a deposit with a matching hash, then we need to query for an older deposit earlier than
-    //     the SpokePoolClient's lookback window via queryHistoricalDepositForFill().
-    //   - If fill is valid, then add it to bundleFillsV3. If it's a slow fill execution, we won't
-    //     add a relayer refund for it, but all fills accumulate realized LP fees.
-    //     - If fill replaced a slow fill request, then stash it away as one that potentially created an
-    //       unexecutable slow fill.
-    // - Query slow fills from SpokePoolClients
-    //   - If slow fill is in destination chain block range, then validate slow fill
-    //   - Slow fill is valid if its RelayData hash is identical to a deposit's relay data hash that we've already seen,
-    //     and it does not match with a Fill that we've seen, and its input and output tokens are equivalent,
-    //     and the deposit that is being slow filled has not expired.
-    //   - Note that if we haven't can't match the slow fill with a deposit, then we need to query for an older
-    //     deposit earlier than the SpokePoolClient's lookback window via queryHistoricalDepositForFill().
-    //   - input and output tokens are considered equivalent if they map to the same L1 token via a PoolRebalanceRoute
-    //     at the deposit.quoteBlockNumber.
-    // - To validate fills that replaced slow fills, we should check that there is no slow fill request in the
-    //   current destination chain bundle block range with a matching relay hash. Additionally, the
-    //   fast fill replacing a slow fill must have filled a slow-fill eligible deposit meaning that
-    //   its input and output tokens are equivalent. We don't need to check that the slow fill was created
-    //   before the deposit expired by definition because the deposit was fast-filled, meaning that it did not
-    //   expire.
-    // - To validate deposits in the current bundle block range that expired newly in this destination
-    //   chain's current bundle block range, we only have to check that the deposit was not filled in the current
-    //   destination chain block range.
-    // - To validate deposits from a prior bundle that expired newly, we need to make sure that the deposit
-    //   was not filled. If we can't find a fill, then we should check its FillStatus on-chain via eth_call.
-    //   This will return either Unfilled, RequestedSlowFill, or Filled. If the deposit is Filled, then
-    //   then the fill happened a long time ago and we should do nothing. If the deposit is Unfilled, then
-    //   we should refund it as an expired deposit. If the deposit is RequestedSlowFill then we need to validate
-    //   that the deposit is eligible for a slow fill (its input and output tokens are equivalent) and that
-    //   the slow fill request was not sent in the current destination chain's bundle block range.
-
-    // Using the above rules, we will create a list of:
-    // - deposits in the current bundle
-    // - fast fills to refund in the current bundle
-    // - fills creating bundle LP fees in the current bundle
-    // - slow fills to create for the current bundle
-    // - deposits that expired in the current bundle
-
-    // Use this dictionary to conveniently unite all events with the same relay data hash which will make
-    // secondary lookups faster. The goal is to lazily fill up this dictionary with all events in the SpokePool
-    // client's in-memory event cache.
-    const v3RelayHashes: {
-      [relayHash: string]: {
-        // Note: Since there are no partial fills in v3, there should only be one fill per relay hash.
-        // There should also only be one deposit per relay hash since deposit ID's can't be re-used on the
-        // same spoke pool. Moreover, the SpokePool blocks multiple slow fill requests, so
-        // there should also only be one slow fill request per relay hash.
-        deposit?: V3DepositWithBlock;
-        fill?: V3FillWithBlock;
-        slowFillRequest?: SlowFillRequestWithBlock;
-      };
-    } = {};
-
-    // Process all deposits first and keep track of deposits that may be refunded as an expired deposit:
-    // - expiredBundleDepositHashes: Deposits sent in this bundle that expired.
-    const expiredBundleDepositHashes: Set = new Set();
-    // - olderDepositHashes: Deposits sent in a prior bundle that newly expired in this bundle
-    const olderDepositHashes: Set = new Set();
-
-    let depositCounter = 0;
-    for (const originChainId of allChainIds) {
-      const originClient = spokePoolClients[originChainId];
-      const originChainBlockRange = getBlockRangeForChain(blockRangesForChains, originChainId, chainIds);
-
-      for (const destinationChainId of allChainIds) {
-        if (originChainId === destinationChainId) {
-          continue;
-        }
-
-        originClient.getDepositsForDestinationChain(destinationChainId).forEach((deposit) => {
-          depositCounter++;
-          const relayDataHash = this.getRelayHashFromEvent(deposit);
-          if (v3RelayHashes[relayDataHash]) {
-            // If we've seen this deposit before, then skip this deposit. This can happen if our RPC provider
-            // gives us bad data.
-            return;
-          }
-          // Even if deposit is not in bundle block range, store all deposits we can see in memory in this
-          // convenient dictionary.
-          v3RelayHashes[relayDataHash] = {
-            deposit: deposit,
-            fill: undefined,
-            slowFillRequest: undefined,
-          };
-
-          // If deposit block is within origin chain bundle block range, then save as bundle deposit.
-          // If deposit is in bundle and it has expired, additionally save it as an expired deposit.
-          // If deposit is not in the bundle block range, then save it as an older deposit that
-          // may have expired.
-          if (deposit.blockNumber >= originChainBlockRange[0] && deposit.blockNumber <= originChainBlockRange[1]) {
-            // Deposit is a V3 deposit in this origin chain's bundle block range and is not a duplicate.
-            updateBundleDepositsV3(bundleDepositsV3, deposit);
-            // We don't check that fillDeadline >= bundleBlockTimestamps[destinationChainId][0] because
-            // that would eliminate any deposits in this bundle with a very low fillDeadline like equal to 0
-            // for example. Those should be impossible to create but technically should be included in this
-            // bundle of refunded deposits.
-            if (deposit.fillDeadline < bundleBlockTimestamps[destinationChainId][1]) {
-              expiredBundleDepositHashes.add(relayDataHash);
-            }
-          } else if (deposit.blockNumber < originChainBlockRange[0]) {
-            olderDepositHashes.add(relayDataHash);
-          }
-        });
-      }
-    }
-    this.logger.debug({
-      at: "BundleDataClient#loadData",
-      message: `Processed ${depositCounter} deposits in ${performance.now() - start}ms.`,
-    });
-    start = performance.now();
-
-    // Process fills now that we've populated relay hash dictionary with deposits:
-    const validatedBundleV3Fills: (V3FillWithBlock & { quoteTimestamp: number })[] = [];
-    const validatedBundleSlowFills: V3DepositWithBlock[] = [];
-    const validatedBundleUnexecutableSlowFills: V3DepositWithBlock[] = [];
-    let fillCounter = 0;
-    for (const originChainId of allChainIds) {
-      const originClient = spokePoolClients[originChainId];
-      for (const destinationChainId of allChainIds) {
-        if (originChainId === destinationChainId) {
-          continue;
-        }
-
-        const destinationClient = spokePoolClients[destinationChainId];
-        const destinationChainBlockRange = getBlockRangeForChain(blockRangesForChains, destinationChainId, chainIds);
-
-        // Keep track of fast fills that replaced slow fills, which we'll use to create "unexecutable" slow fills
-        // if the slow fill request was sent in a prior bundle.
-        const fastFillsReplacingSlowFills: string[] = [];
-        await utils.forEachAsync(
-          destinationClient
-            .getFillsForOriginChain(originChainId)
-            .filter((fill) => fill.blockNumber <= destinationChainBlockRange[1]),
-          async (fill) => {
-            const relayDataHash = this.getRelayHashFromEvent(fill);
-            fillCounter++;
-
-            if (v3RelayHashes[relayDataHash]) {
-              if (!v3RelayHashes[relayDataHash].fill) {
-                assert(v3RelayHashes[relayDataHash].deposit, "Deposit should exist in relay hash dictionary.");
-                // At this point, the v3RelayHashes entry already existed meaning that there is a matching deposit,
-                // so this fill is validated.
-                v3RelayHashes[relayDataHash].fill = fill;
-                if (fill.blockNumber >= destinationChainBlockRange[0]) {
-                  validatedBundleV3Fills.push({
-                    ...fill,
-                    quoteTimestamp: v3RelayHashes[relayDataHash].deposit.quoteTimestamp,
-                  });
-                  // If fill replaced a slow fill request, then mark it as one that might have created an
-                  // unexecutable slow fill. We can't know for sure until we check the slow fill request
-                  // events.
-                  // slow fill requests for deposits from or to lite chains are considered invalid
-                  if (
-                    fill.relayExecutionInfo.fillType === FillType.ReplacedSlowFill &&
-                    !v3RelayHashes[relayDataHash].deposit.fromLiteChain &&
-                    !v3RelayHashes[relayDataHash].deposit.toLiteChain
-                  ) {
-                    fastFillsReplacingSlowFills.push(relayDataHash);
-                  }
-                }
-              }
-              return;
-            }
-
-            // At this point, there is no relay hash dictionary entry for this fill, so we need to
-            // instantiate the entry.
-            v3RelayHashes[relayDataHash] = {
-              deposit: undefined,
-              fill: fill,
-              slowFillRequest: undefined,
-            };
-
-            // TODO: We might be able to remove the following historical query once we deprecate the deposit()
-            // function since there won't be any old, unexpired deposits anymore assuming the spoke pool client
-            // lookbacks have been validated, which they should be before we run this function.
-
-            // Since there was no deposit matching the relay hash, we need to do a historical query for an
-            // older deposit in case the spoke pool client's lookback isn't old enough to find the matching deposit.
-            // We can skip this step if the fill's fill deadline is not infinite, because we can assume that the
-            // spoke pool clients have loaded deposits old enough to cover all fills with a non-infinite fill deadline.
-            if (fill.blockNumber >= destinationChainBlockRange[0]) {
-              // Fill has a non-infinite expiry, and we can assume our spoke pool clients have old enough deposits
-              // to conclude that this fill is invalid if we haven't found a matching deposit in memory, so
-              // skip the historical query.
-              if (!INFINITE_FILL_DEADLINE.eq(fill.fillDeadline)) {
-                bundleInvalidFillsV3.push(fill);
-                return;
-              }
-              const historicalDeposit = await queryHistoricalDepositForFill(originClient, fill);
-              if (!historicalDeposit.found) {
-                bundleInvalidFillsV3.push(fill);
-              } else {
-                const matchedDeposit = historicalDeposit.deposit;
-                // @dev Since queryHistoricalDepositForFill validates the fill by checking individual
-                // object property values against the deposit's, we
-                // sanity check it here by comparing the full relay hashes. If there's an error here then the
-                // historical deposit query is not working as expected.
-                assert(this.getRelayHashFromEvent(matchedDeposit) === relayDataHash);
-                validatedBundleV3Fills.push({
-                  ...fill,
-                  quoteTimestamp: matchedDeposit.quoteTimestamp,
-                });
-                v3RelayHashes[relayDataHash].deposit = matchedDeposit;
-                // slow fill requests for deposits from or to lite chains are considered invalid
-                if (
-                  fill.relayExecutionInfo.fillType === FillType.ReplacedSlowFill &&
-                  !matchedDeposit.fromLiteChain &&
-                  !matchedDeposit.toLiteChain
-                ) {
-                  fastFillsReplacingSlowFills.push(relayDataHash);
-                }
-              }
-            }
-          }
-        );
-
-        await utils.forEachAsync(
-          destinationClient
-            .getSlowFillRequestsForOriginChain(originChainId)
-            .filter((request) => request.blockNumber <= destinationChainBlockRange[1]),
-          async (slowFillRequest: SlowFillRequestWithBlock) => {
-            const relayDataHash = this.getRelayHashFromEvent(slowFillRequest);
-
-            if (v3RelayHashes[relayDataHash]) {
-              if (!v3RelayHashes[relayDataHash].slowFillRequest) {
-                // At this point, the v3RelayHashes entry already existed meaning that there is either a matching
-                // fill or deposit.
-                v3RelayHashes[relayDataHash].slowFillRequest = slowFillRequest;
-                if (v3RelayHashes[relayDataHash].fill) {
-                  // If there is a fill matching the relay hash, then this slow fill request can't be used
-                  // to create a slow fill for a filled deposit.
-                  return;
-                }
-                assert(v3RelayHashes[relayDataHash].deposit, "Deposit should exist in relay hash dictionary.");
-                const matchedDeposit = v3RelayHashes[relayDataHash].deposit;
-
-                // Input and Output tokens must be equivalent on the deposit for this to be slow filled.
- if ( - !this.clients.hubPoolClient.areTokensEquivalent( - matchedDeposit.inputToken, - matchedDeposit.originChainId, - matchedDeposit.outputToken, - matchedDeposit.destinationChainId, - matchedDeposit.quoteBlockNumber - ) - ) { - return; - } - - // slow fill requests for deposits from or to lite chains are considered invalid - if ( - v3RelayHashes[relayDataHash].deposit.fromLiteChain || - v3RelayHashes[relayDataHash].deposit.toLiteChain - ) { - return; - } - - // If there is no fill matching the relay hash, then this might be a valid slow fill request - // that we should produce a slow fill leaf for. Check if the slow fill request is in the - // destination chain block range and that the underlying deposit has not expired yet. - if ( - slowFillRequest.blockNumber >= destinationChainBlockRange[0] && - // Deposit must not have expired in this bundle. - slowFillRequest.fillDeadline >= bundleBlockTimestamps[destinationChainId][1] - ) { - // At this point, the v3RelayHashes entry already existed meaning that there is a matching deposit, - // so this slow fill request relay data is correct. - validatedBundleSlowFills.push(matchedDeposit); - } - } - return; - } - - // Instantiate dictionary if there is neither a deposit nor fill matching it. - v3RelayHashes[relayDataHash] = { - deposit: undefined, - fill: undefined, - slowFillRequest: slowFillRequest, - }; - - // TODO: We might be able to remove the following historical query once we deprecate the deposit() - // function since there won't be any old, unexpired deposits anymore assuming the spoke pool client - // lookbacks have been validated, which they should be before we run this function. - - // Since there was no deposit matching the relay hash, we need to do a historical query for an - // older deposit in case the spoke pool client's lookback isn't old enough to find the matching deposit. - // We can skip this step if the deposit's fill deadline is not infinite, because we can assume that the - // spoke pool clients have loaded deposits old enough to cover all fills with a non-infinite fill deadline. - if ( - INFINITE_FILL_DEADLINE.eq(slowFillRequest.fillDeadline) && - slowFillRequest.blockNumber >= destinationChainBlockRange[0] - ) { - const historicalDeposit = await queryHistoricalDepositForFill(originClient, slowFillRequest); - if (!historicalDeposit.found) { - // TODO: Invalid slow fill request. Maybe worth logging. - return; - } - const matchedDeposit: V3DepositWithBlock = historicalDeposit.deposit; - // @dev Since queryHistoricalDepositForFill validates the slow fill request by checking individual - // object property values against the deposit's, we - // sanity check it here by comparing the full relay hashes. If there's an error here then the - // historical deposit query is not working as expected. - assert(this.getRelayHashFromEvent(matchedDeposit) === relayDataHash); - - // slow fill requests for deposits from or to lite chains are considered invalid - if (matchedDeposit.fromLiteChain || matchedDeposit.toLiteChain) { - return; - } - - v3RelayHashes[relayDataHash].deposit = matchedDeposit; - - // Note: we don't need to query for a historical fill at this point because a fill - // cannot precede a slow fill request and if the fill came after the slow fill request, - // we would have seen it already because we would have processed it in the loop above. - if ( - // Input and Output tokens must be equivalent on the deposit for this to be slow filled. 
- !this.clients.hubPoolClient.areTokensEquivalent( - matchedDeposit.inputToken, - matchedDeposit.originChainId, - matchedDeposit.outputToken, - matchedDeposit.destinationChainId, - matchedDeposit.quoteBlockNumber - ) || - // Deposit must not have expired in this bundle. - slowFillRequest.fillDeadline < bundleBlockTimestamps[destinationChainId][1] - ) { - // TODO: Invalid slow fill request. Maybe worth logging. - return; - } - validatedBundleSlowFills.push(matchedDeposit); - } - } - ); - - // For all fills that came after a slow fill request, we can now check if the slow fill request - // was a valid one and whether it was created in a previous bundle. If so, then it created a slow fill - // leaf that is now unexecutable. - fastFillsReplacingSlowFills.forEach((relayDataHash) => { - const { deposit, slowFillRequest, fill } = v3RelayHashes[relayDataHash]; - assert( - fill.relayExecutionInfo.fillType === FillType.ReplacedSlowFill, - "Fill type should be ReplacedSlowFill." - ); - // We should never push fast fills involving lite chains here because slow fill requests for them are invalid: - assert( - !deposit.fromLiteChain && !deposit.toLiteChain, - "fastFillsReplacingSlowFills should not contain lite chain deposits" - ); - const destinationBlockRange = getBlockRangeForChain(blockRangesForChains, destinationChainId, chainIds); - if ( - // If the slow fill request that was replaced by this fill was in an older bundle, then we don't - // need to check if the slow fill request was valid since we can assume all bundles in the past - // were validated. However, we might as well double check. - this.clients.hubPoolClient.areTokensEquivalent( - deposit.inputToken, - deposit.originChainId, - deposit.outputToken, - deposit.destinationChainId, - deposit.quoteBlockNumber - ) && - // If there is a slow fill request in this bundle that matches the relay hash, then there was no slow fill - // created that would be considered excess. - (!slowFillRequest || slowFillRequest.blockNumber < destinationBlockRange[0]) - ) { - validatedBundleUnexecutableSlowFills.push(deposit); - } - }); - } - } - this.logger.debug({ - at: "BundleDataClient#loadData", - message: `Processed ${fillCounter} fills in ${performance.now() - start}ms.`, - }); - start = performance.now(); - - // Go through expired deposits in this bundle and now prune those that we have seen a fill for to construct - // the list of expired deposits we need to refund in this bundle. - expiredBundleDepositHashes.forEach((relayDataHash) => { - const { deposit, fill } = v3RelayHashes[relayDataHash]; - assert(deposit, "Deposit should exist in relay hash dictionary."); - if (!fill) { - updateExpiredDepositsV3(expiredDepositsToRefundV3, deposit); - } - }); - - // For all deposits older than this bundle, we need to check if they expired in this bundle and if they did, - // whether there was a slow fill created for it in a previous bundle that is now unexecutable and replaced - // by a new expired deposit refund. - await utils.forEachAsync([...olderDepositHashes], async (relayDataHash) => { - const { deposit, slowFillRequest, fill } = v3RelayHashes[relayDataHash]; - assert(deposit, "Deposit should exist in relay hash dictionary."); - const { destinationChainId } = deposit; - const destinationBlockRange = getBlockRangeForChain(blockRangesForChains, destinationChainId, chainIds); - - // Only look for deposits that were mined before this bundle and that are newly expired. 
- // If the fill deadline is lower than the bundle start block on the destination chain, then - // we should assume it was marked "newly expired" and refunded in a previous bundle. - if ( - // If there is a valid fill that we saw matching this deposit, then it does not need a refund. - !fill && - deposit.fillDeadline < bundleBlockTimestamps[destinationChainId][1] && - deposit.fillDeadline >= bundleBlockTimestamps[destinationChainId][0] && - spokePoolClients[destinationChainId] !== undefined - ) { - // If we haven't seen a fill matching this deposit, then we need to rule out that it was filled a long time ago - // by checking its on-chain fill status. - const fillStatus = await utils.relayFillStatus( - spokePoolClients[destinationChainId].spokePool, - deposit, - // We can assume that in production - // the block ranges passed into this function would never contain blocks where the spoke pool client - // hasn't queried. This is because this function will usually be called - // in production with block ranges that were validated by - // DataworkerUtils.blockRangesAreInvalidForSpokeClients - Math.min(destinationBlockRange[1], spokePoolClients[destinationChainId].latestBlockSearched), - destinationChainId - ); - - // If there is no matching fill and the deposit expired in this bundle and the fill status on-chain is not - // Filled, then we can refund it as an expired deposit. - if (fillStatus !== FillStatus.Filled) { - updateExpiredDepositsV3(expiredDepositsToRefundV3, deposit); - } - // If fill status is RequestedSlowFill, then we might need to mark down an unexecutable - // slow fill that we're going to replace with an expired deposit refund. - // If deposit cannot be slow filled, then exit early. - // slow fill requests for deposits from or to lite chains are considered invalid - if (fillStatus !== FillStatus.RequestedSlowFill || deposit.fromLiteChain || deposit.toLiteChain) { - return; - } - // Now, check if there was a slow fill created for this deposit in a previous bundle which would now be - // unexecutable. Mark this deposit as having created an unexecutable slow fill if there is no matching - // slow fill request or the matching slow fill request took place in a previous bundle. - - // If there is a slow fill request in this bundle, then the expired deposit refund will supersede - // the slow fill request. If there is no slow fill request seen, or it is older than this bundle, then we can - // assume a slow fill leaf was created for it because its tokens are equivalent. The slow fill request was - // also sent before the fill deadline expired since we checked that above. - if ( - // Since this deposit was requested for a slow fill in an older bundle at this point, we don't - // technically need to check if the slow fill request was valid since we can assume all bundles in the past - // were validated. However, we might as well double check. - this.clients.hubPoolClient.areTokensEquivalent( - deposit.inputToken, - deposit.originChainId, - deposit.outputToken, - deposit.destinationChainId, - deposit.quoteBlockNumber - ) && - (!slowFillRequest || slowFillRequest.blockNumber < destinationBlockRange[0]) - ) { - validatedBundleUnexecutableSlowFills.push(deposit); - } - } - }); - - // Batch compute V3 LP fees. - start = performance.now(); - const promises = [ - validatedBundleV3Fills.length > 0 - ?
this.clients.hubPoolClient.batchComputeRealizedLpFeePct( - validatedBundleV3Fills.map((fill) => { - const matchedDeposit = v3RelayHashes[this.getRelayHashFromEvent(fill)].deposit; - assert(isDefined(matchedDeposit)); - const { chainToSendRefundTo: paymentChainId } = getRefundInformationFromFill( - fill, - this.clients.hubPoolClient, - blockRangesForChains, - chainIds, - matchedDeposit.fromLiteChain - ); - return { - ...fill, - paymentChainId, - }; - }) - ) - : [], - validatedBundleSlowFills.length > 0 - ? this.clients.hubPoolClient.batchComputeRealizedLpFeePct( - validatedBundleSlowFills.map((deposit) => { - return { - ...deposit, - paymentChainId: deposit.destinationChainId, - }; - }) - ) - : [], - validatedBundleUnexecutableSlowFills.length > 0 - ? this.clients.hubPoolClient.batchComputeRealizedLpFeePct( - validatedBundleUnexecutableSlowFills.map((deposit) => { - return { - ...deposit, - paymentChainId: deposit.destinationChainId, - }; - }) - ) - : [], - ]; - const [v3FillLpFees, v3SlowFillLpFees, v3UnexecutableSlowFillLpFees] = await Promise.all(promises); - this.logger.debug({ - at: "BundleDataClient#loadData", - message: `Computed batch async LP fees in ${performance.now() - start}ms.`, - }); - v3FillLpFees.forEach(({ realizedLpFeePct }, idx) => { - const fill = validatedBundleV3Fills[idx]; - const associatedDeposit = v3RelayHashes[this.getRelayHashFromEvent(fill)].deposit; - assert(isDefined(associatedDeposit)); - const { chainToSendRefundTo, repaymentToken } = getRefundInformationFromFill( - fill, - this.clients.hubPoolClient, - blockRangesForChains, - chainIds, - associatedDeposit.fromLiteChain - ); - updateBundleFillsV3(bundleFillsV3, fill, realizedLpFeePct, chainToSendRefundTo, repaymentToken); - }); - v3SlowFillLpFees.forEach(({ realizedLpFeePct: lpFeePct }, idx) => { - const deposit = validatedBundleSlowFills[idx]; - updateBundleSlowFills(bundleSlowFillsV3, { ...deposit, lpFeePct }); - }); - v3UnexecutableSlowFillLpFees.forEach(({ realizedLpFeePct: lpFeePct }, idx) => { - const deposit = validatedBundleUnexecutableSlowFills[idx]; - updateBundleExcessSlowFills(unexecutableSlowFills, { ...deposit, lpFeePct }); - }); - - const v3SpokeEventsReadable = prettyPrintV3SpokePoolEvents( - bundleDepositsV3, - bundleFillsV3, - bundleInvalidFillsV3, - bundleSlowFillsV3, - expiredDepositsToRefundV3, - unexecutableSlowFills - ); - - if (bundleInvalidFillsV3.length > 0) { - this.logger.debug({ - at: "BundleDataClient#loadData", - message: "Finished loading V3 spoke pool data and found some invalid V3 fills in range", - blockRangesForChains, - bundleInvalidFillsV3, - }); - } - - this.logger.debug({ - at: "BundleDataClient#loadDataFromScratch", - message: `Computed bundle data in ${Math.round(performance.now() - start) / 1000}s.`, - blockRangesForChains: JSON.stringify(blockRangesForChains), - v3SpokeEventsReadable, - }); - return { - bundleDepositsV3, - expiredDepositsToRefundV3, - bundleFillsV3, - unexecutableSlowFills, - bundleSlowFillsV3, - }; - } - - // Internal function to uniquely identify a bridge event. This is preferred over `SDK.getRelayDataHash`, which returns - // the keccak256 hash of the relay data; that hash can be used as input into the on-chain `fillStatuses()` function in the - // spoke pool contract. This internal function is preferred for speed, since it's easier to build a string from the - // event data than to hash it.
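// Illustrative sketch (not part of this diff): the comment above trades the SDK's keccak256
// relay-data hash for a cheap delimiter-joined string, because this key only ever lives in
// memory. Assuming ethers v5 and a trimmed-down set of relay fields (the real key joins all
// of the relay data fields), the two approaches compare like this:
import { utils as ethersUtils } from "ethers";

type RelayKeyFields = { depositor: string; recipient: string; originChainId: number; depositId: number };

// In-memory key: fast to build, fine for dictionary lookups, unusable on chain.
const memoryKey = (r: RelayKeyFields): string =>
  `${r.depositor}-${r.recipient}-${r.originChainId}-${r.depositId}`;

// Hash-style key: keccak256 over ABI-encoded fields, analogous in shape to the hash
// that fillStatuses() consumes (field list abbreviated here).
const hashedKey = (r: RelayKeyFields): string =>
  ethersUtils.keccak256(
    ethersUtils.defaultAbiCoder.encode(
      ["address", "address", "uint256", "uint32"],
      [r.depositor, r.recipient, r.originChainId, r.depositId]
    )
  );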
- private getRelayHashFromEvent(event: V3DepositWithBlock | V3FillWithBlock | SlowFillRequestWithBlock): string { - return `${event.depositor}-${event.recipient}-${event.exclusiveRelayer}-${event.inputToken}-${event.outputToken}-${event.inputAmount}-${event.outputAmount}-${event.originChainId}-${event.depositId}-${event.fillDeadline}-${event.exclusivityDeadline}-${event.message}-${event.destinationChainId}`; - } - - async getBundleBlockTimestamps( - chainIds: number[], - blockRangesForChains: number[][], - spokePoolClients: SpokePoolClientsByChain - ): Promise<{ [chainId: string]: number[] }> { - return Object.fromEntries( - ( - await utils.mapAsync(chainIds, async (chainId, index) => { - const blockRangeForChain = blockRangesForChains[index]; - if (!isDefined(blockRangeForChain) || isChainDisabled(blockRangeForChain)) { - return; - } - const [_startBlockForChain, _endBlockForChain] = blockRangeForChain; - const spokePoolClient = spokePoolClients[chainId]; - - // Relayer instances using the BundleDataClient for repayment estimates may only relay on a subset of chains. - if (!isDefined(spokePoolClient)) { - return; - } - - // We can assume that in production the block ranges passed into this function would never - // contain blocks where the spoke pool client hasn't queried. This is because this function - // will usually be called in production with block ranges that were validated by - // DataworkerUtils.blockRangesAreInvalidForSpokeClients. - const startBlockForChain = Math.min(_startBlockForChain, spokePoolClient.latestBlockSearched); - const endBlockForChain = Math.min(_endBlockForChain, spokePoolClient.latestBlockSearched); - const [startTime, endTime] = [ - Number((await spokePoolClient.spokePool.provider.getBlock(startBlockForChain)).timestamp), - Number((await spokePoolClient.spokePool.provider.getBlock(endBlockForChain)).timestamp), - ]; - // Sanity checks: - assert(endTime >= startTime, "End time should be greater than start time."); - assert(startTime > 0, "Start time should be greater than 0."); - return [chainId, [startTime, endTime]]; - }) - ).filter(isDefined) - ); - } -} diff --git a/src/clients/InventoryClient.ts b/src/clients/InventoryClient.ts index f81a218e7..8b83b28b6 100644 --- a/src/clients/InventoryClient.ts +++ b/src/clients/InventoryClient.ts @@ -27,7 +27,7 @@ import { getUsdcSymbol, } from "../utils"; import { HubPoolClient, TokenClient, BundleDataClient } from "."; -import { V3Deposit } from "../interfaces"; +import { Deposit } from "../interfaces"; import { InventoryConfig, isAliasConfig, TokenBalanceConfig } from "../interfaces/InventoryManagement"; import lodash from "lodash"; import { SLOW_WITHDRAWAL_CHAINS } from "../common"; @@ -340,10 +340,10 @@ export class InventoryClient { * so that it can batch compute LP fees for all possible repayment chains. By locating this function * here it ensures that the relayer and the inventory client are in sync as to which chains are possible * repayment chains for a given deposit. - * @param deposit V3Deposit + * @param deposit Deposit * @returns list of chain IDs that are possible repayment chains for the deposit. */ - getPossibleRepaymentChainIds(deposit: V3Deposit): number[] { + getPossibleRepaymentChainIds(deposit: Deposit): number[] { // Destination and Origin chain are always included in the repayment chain list. 
const { originChainId, destinationChainId, inputToken } = deposit; const chainIds = [originChainId, destinationChainId]; @@ -364,7 +364,7 @@ export class InventoryClient { * @returns boolean True if output and input tokens are equivalent or if input token is USDC and output token * is Bridged USDC. */ - validateOutputToken(deposit: V3Deposit): boolean { + validateOutputToken(deposit: Deposit): boolean { const { inputToken, outputToken, originChainId, destinationChainId } = deposit; // Return true if input and output tokens are mapped to the same L1 token via PoolRebalanceRoutes @@ -411,7 +411,7 @@ export class InventoryClient { * @returns list of chain IDs that are possible repayment chains for the deposit, sorted from highest * to lowest priority. */ - async determineRefundChainId(deposit: V3Deposit, l1Token?: string): Promise { + async determineRefundChainId(deposit: Deposit, l1Token?: string): Promise { const { originChainId, destinationChainId, inputToken, outputToken, inputAmount } = deposit; const hubChainId = this.hubPoolClient.chainId; diff --git a/src/clients/ProfitClient.ts b/src/clients/ProfitClient.ts index 7a35dec3a..70a6be144 100644 --- a/src/clients/ProfitClient.ts +++ b/src/clients/ProfitClient.ts @@ -29,14 +29,7 @@ import { TOKEN_EQUIVALENCE_REMAPPING, ZERO_ADDRESS, } from "../utils"; -import { - Deposit, - DepositWithBlock, - L1Token, - SpokePoolClientsByChain, - V3Deposit, - V3DepositWithBlock, -} from "../interfaces"; +import { Deposit, DepositWithBlock, L1Token, SpokePoolClientsByChain } from "../interfaces"; import { HubPoolClient } from "."; type TransactionCostEstimate = sdkUtils.TransactionCostEstimate; @@ -207,7 +200,7 @@ export class ProfitClient { return price; } - private async _getTotalGasCost(deposit: V3Deposit, relayer: string): Promise { + private async _getTotalGasCost(deposit: Deposit, relayer: string): Promise { try { return await this.relayerFeeQueries[deposit.destinationChainId].getGasCosts(deposit, relayer); } catch (err) { @@ -223,7 +216,7 @@ export class ProfitClient { } } - async getTotalGasCost(deposit: V3Deposit): Promise { + async getTotalGasCost(deposit: Deposit): Promise { const { destinationChainId: chainId } = deposit; // If there's no attached message, gas consumption from previous fills can be used in most cases. @@ -237,7 +230,7 @@ export class ProfitClient { // Estimate the gas cost of filling this relay. async estimateFillCost( - deposit: V3Deposit + deposit: Deposit ): Promise> { const { destinationChainId: chainId } = deposit; @@ -312,13 +305,13 @@ export class ProfitClient { } /** - * @param deposit V3Deposit object. + * @param deposit Deposit object. * @param lpFeePct Predetermined LP fee as a multiplier of the deposit inputAmount. * @param minRelayerFeePct Relayer minimum fee requirements. * @returns FillProfit object detailing the profitability breakdown. 
*/ async calculateFillProfitability( - deposit: V3Deposit, + deposit: Deposit, lpFeePct: BigNumber, minRelayerFeePct: BigNumber ): Promise { @@ -426,7 +419,7 @@ export class ProfitClient { } async getFillProfitability( - deposit: V3Deposit, + deposit: Deposit, lpFeePct: BigNumber, l1Token: L1Token, repaymentChainId: number @@ -467,7 +460,7 @@ export class ProfitClient { } async isFillProfitable( - deposit: V3Deposit, + deposit: Deposit, lpFeePct: BigNumber, l1Token: L1Token, repaymentChainId: number @@ -501,7 +494,7 @@ export class ProfitClient { } captureUnprofitableFill( - deposit: V3DepositWithBlock, + deposit: DepositWithBlock, lpFeePct: BigNumber, relayerFeePct: BigNumber, gasCost: BigNumber diff --git a/src/clients/SpokePoolClient.ts b/src/clients/SpokePoolClient.ts index da7f0bdb9..e34ae24a0 100644 --- a/src/clients/SpokePoolClient.ts +++ b/src/clients/SpokePoolClient.ts @@ -260,6 +260,10 @@ export class IndexedSpokePoolClient extends clients.SpokePoolClient { } protected async _update(eventsToQuery: string[]): Promise { + if (this.pendingBlockNumber === this.deploymentBlock) { + return { success: false, reason: clients.UpdateFailureReason.NotReady }; + } + // If any events have been removed upstream, remove them first. this.pendingEventsRemoved = this.pendingEventsRemoved.filter((event) => !this.removeEvent(event)); diff --git a/src/clients/TokenClient.ts b/src/clients/TokenClient.ts index 9e707b1e1..8f3e1bd4c 100644 --- a/src/clients/TokenClient.ts +++ b/src/clients/TokenClient.ts @@ -1,6 +1,6 @@ import { utils as sdkUtils } from "@across-protocol/sdk"; import { HubPoolClient, SpokePoolClient } from "."; -import { CachingMechanismInterface, L1Token, V3Deposit } from "../interfaces"; +import { CachingMechanismInterface, L1Token, Deposit } from "../interfaces"; import { BigNumber, bnZero, @@ -64,7 +64,7 @@ export class TokenClient { return this.tokenShortfall?.[chainId]?.[token]?.deposits || []; } - hasBalanceForFill(deposit: V3Deposit): boolean { + hasBalanceForFill(deposit: Deposit): boolean { return this.getBalance(deposit.destinationChainId, deposit.outputToken).gte(deposit.outputAmount); } @@ -79,7 +79,7 @@ export class TokenClient { assign(this.tokenShortfall, [chainId, token], { deposits, totalRequirement }); } - captureTokenShortfallForFill(deposit: V3Deposit): void { + captureTokenShortfallForFill(deposit: Deposit): void { const { outputAmount: unfilledAmount } = deposit; this.logger.debug({ at: "TokenBalanceClient", message: "Handling token shortfall", deposit, unfilledAmount }); this.captureTokenShortfall(deposit.destinationChainId, deposit.outputToken, deposit.depositId, unfilledAmount); diff --git a/src/clients/bridges/CrossChainTransferClient.ts b/src/clients/bridges/CrossChainTransferClient.ts index f621c4288..def419c65 100644 --- a/src/clients/bridges/CrossChainTransferClient.ts +++ b/src/clients/bridges/CrossChainTransferClient.ts @@ -82,6 +82,10 @@ export class CrossChainTransferClient { async update(l1Tokens: string[], chainIds = this.getEnabledL2Chains()): Promise { const enabledChainIds = this.getEnabledL2Chains(); chainIds = chainIds.filter((chainId) => enabledChainIds.includes(chainId)); + if (chainIds.length === 0) { + return; + } + this.log("Updating cross chain transfers", { chainIds }); const outstandingTransfersPerChain = await Promise.all( diff --git a/src/clients/bridges/LineaAdapter.ts b/src/clients/bridges/LineaAdapter.ts index c1b5da2bf..b07db8856 100644 --- a/src/clients/bridges/LineaAdapter.ts +++ b/src/clients/bridges/LineaAdapter.ts @@ -1,417 
+1,33 @@ -import * as sdk from "@across-protocol/sdk"; -import WETH_ABI from "../../common/abi/Weth.json"; -import { CONTRACT_ADDRESSES, SUPPORTED_TOKENS } from "../../common"; -import { OutstandingTransfers } from "../../interfaces"; -import { - BigNumber, - CHAIN_IDs, - Contract, - EventSearchConfig, - Event, - TOKEN_SYMBOLS_MAP, - TransactionResponse, - assert, - bnZero, - compareAddressesSimple, - isDefined, - paginatedEventQuery, - winston, -} from "../../utils"; +import { SUPPORTED_TOKENS, CUSTOM_BRIDGE, CANONICAL_BRIDGE, DEFAULT_GAS_MULTIPLIER } from "../../common"; +import { CHAIN_IDs, TOKEN_SYMBOLS_MAP, winston } from "../../utils"; import { SpokePoolClient } from "../SpokePoolClient"; -import { BaseAdapter } from "./BaseAdapter"; - -export class LineaAdapter extends BaseAdapter { - readonly l1TokenBridge = CONTRACT_ADDRESSES[this.hubChainId].lineaL1TokenBridge.address; - readonly l1UsdcBridge = CONTRACT_ADDRESSES[this.hubChainId].lineaL1UsdcBridge.address; +import { BaseChainAdapter } from "../../adapter/BaseChainAdapter"; +export class LineaAdapter extends BaseChainAdapter { constructor( logger: winston.Logger, readonly spokePoolClients: { [chainId: number]: SpokePoolClient }, monitoredAddresses: string[] ) { - const { LINEA } = CHAIN_IDs; - super(spokePoolClients, LINEA, monitoredAddresses, logger, SUPPORTED_TOKENS[LINEA]); - } - async checkTokenApprovals(l1Tokens: string[]): Promise { - const address = await this.getSigner(this.hubChainId).getAddress(); - // Note: Linea has two bridges: one for - const associatedL1Bridges = l1Tokens - .map((l1Token) => { - if (!this.isSupportedToken(l1Token)) { - return null; - } - return this.getL1Bridge(l1Token).address; - }) - .filter(isDefined); - await this.checkAndSendTokenApprovals(address, l1Tokens, associatedL1Bridges); - } - - async wrapEthIfAboveThreshold( - threshold: BigNumber, - target: BigNumber, - simMode: boolean - ): Promise { - const { chainId } = this; - assert(sdk.utils.chainIsLinea(chainId), `ChainId ${chainId} is not supported as a Linea chain`); - const weth = TOKEN_SYMBOLS_MAP.WETH.addresses[chainId]; - const ethBalance = await this.getSigner(chainId).getBalance(); - if (ethBalance.gt(threshold)) { - const l2Signer = this.getSigner(chainId); - const contract = new Contract(weth, WETH_ABI, l2Signer); - const value = ethBalance.sub(target); - this.logger.debug({ at: this.getName(), message: "Wrapping ETH", threshold, target, value, ethBalance }); - return this._wrapEthIfAboveThreshold(threshold, contract, value, simMode); - } else { - this.logger.debug({ - at: this.getName(), - message: "ETH balance below threshold", - threshold, - ethBalance, - }); - } - return null; - } - - getL2MessageService(): Contract { - const chainId = this.chainId; - return new Contract( - CONTRACT_ADDRESSES[chainId].l2MessageService.address, - CONTRACT_ADDRESSES[chainId].l2MessageService.abi, - this.getSigner(chainId) - ); - } - - getL1MessageService(): Contract { - const { hubChainId } = this; - return new Contract( - CONTRACT_ADDRESSES[hubChainId].lineaMessageService.address, - CONTRACT_ADDRESSES[hubChainId].lineaMessageService.abi, - this.getSigner(hubChainId) - ); - } - - getL1TokenBridge(): Contract { - const { hubChainId } = this; - return new Contract( - this.l1TokenBridge, - CONTRACT_ADDRESSES[hubChainId].lineaL1TokenBridge.abi, - this.getSigner(hubChainId) - ); - } - - getL1UsdcBridge(): Contract { - const { hubChainId } = this; - return new Contract( - this.l1UsdcBridge, - CONTRACT_ADDRESSES[hubChainId].lineaL1UsdcBridge.abi, - 
this.getSigner(hubChainId) - ); - } - - getL2TokenBridge(): Contract { - const chainId = this.chainId; - return new Contract( - CONTRACT_ADDRESSES[chainId].lineaL2TokenBridge.address, - CONTRACT_ADDRESSES[chainId].lineaL2TokenBridge.abi, - this.getSigner(chainId) - ); - } - - getL2UsdcBridge(): Contract { - const chainId = this.chainId; - return new Contract( - CONTRACT_ADDRESSES[chainId].lineaL2UsdcBridge.address, - CONTRACT_ADDRESSES[chainId].lineaL2UsdcBridge.abi, - this.getSigner(chainId) - ); - } - - getL2Bridge(l1Token: string): Contract { - return this.isUsdc(l1Token) ? this.getL2UsdcBridge() : this.getL2TokenBridge(); - } - - isUsdc(l1Token: string): boolean { - return compareAddressesSimple(l1Token, TOKEN_SYMBOLS_MAP.USDC.addresses[this.hubChainId]); - } - - getL1Bridge(l1Token: string): Contract { - return this.isWeth(l1Token) - ? this.getAtomicDepositor() - : this.isUsdc(l1Token) - ? this.getL1UsdcBridge() - : this.getL1TokenBridge(); - } - - async getOutstandingCrossChainTransfers(l1Tokens: string[]): Promise { - const outstandingTransfers: OutstandingTransfers = {}; - const { l1SearchConfig, l2SearchConfig } = this.getUpdatedSearchConfigs(); - const supportedL1Tokens = this.filterSupportedTokens(l1Tokens); - await sdk.utils.mapAsync(this.monitoredAddresses, async (address) => { - // We can only support monitoring the spoke pool contract, not the hub pool. - if (address === CONTRACT_ADDRESSES[this.hubChainId]?.hubPool?.address) { - return; - } - await sdk.utils.mapAsync(supportedL1Tokens, async (l1Token) => { - if (this.isWeth(l1Token)) { - const l1MessageService = this.getL1MessageService(); - const l2MessageService = this.getL2MessageService(); - - // We need to do the following sequential steps. - // 1. Get all initiated MessageSent events from the L1MessageService where the 'to' address is the - // user's address. - // 2. Pipe the resulting _messageHash argument from step 1 into the MessageClaimed event filter - // 3. For each MessageSent, match the _messageHash to the _messageHash in the MessageClaimed event - // any unmatched MessageSent events are considered outstanding transfers. - const initiatedQueryResult = await this.getWethDepositInitiatedEvents( - l1MessageService, - address, - l1SearchConfig - ); - const internalMessageHashes = initiatedQueryResult.map(({ args }) => args._messageHash); - const finalizedQueryResult = await this.getWethDepositFinalizedEvents( - l2MessageService, - internalMessageHashes, - l2SearchConfig - ); - this.matchWethDepositEvents( - initiatedQueryResult, - finalizedQueryResult, - outstandingTransfers, - address, - l1Token - ); - } else { - const isUsdc = this.isUsdc(l1Token); - const l1Bridge = this.getL1Bridge(l1Token); - const l2Bridge = this.getL2Bridge(l1Token); - - // Define the initialized and finalized event filters for the L1 and L2 bridges. We only filter - // on the recipient so that the filters work both to track Hub-->Spoke transfers and EOA transfers, and - // because some filters like ReceivedFromOtherLayer only index the recipient. - const [initiatedQueryResult, finalizedQueryResult] = await Promise.all([ - isUsdc - ? this.getUsdcDepositInitiatedEvents(l1Bridge, address, l1SearchConfig) - : this.getErc20DepositInitiatedEvents(l1Bridge, address, l1Token, l1SearchConfig), - isUsdc - ? 
this.getUsdcDepositFinalizedEvents(l2Bridge, address, l2SearchConfig) - : this.getErc20DepositFinalizedEvents(l2Bridge, address, l1Token, l2SearchConfig), - ]); - if (isUsdc) { - this.matchUsdcDepositEvents( - initiatedQueryResult, - finalizedQueryResult, - outstandingTransfers, - address, - l1Token - ); - } else { - this.matchErc20DepositEvents( - initiatedQueryResult, - finalizedQueryResult, - outstandingTransfers, - address, - l1Token - ); - } - } - }); + const { LINEA, MAINNET } = CHAIN_IDs; + const l2Signer = spokePoolClients[LINEA].spokePool.signer; + const l1Signer = spokePoolClients[MAINNET].spokePool.signer; + const bridges = {}; + SUPPORTED_TOKENS[LINEA]?.forEach((symbol) => { + const l1Token = TOKEN_SYMBOLS_MAP[symbol].addresses[MAINNET]; + const bridgeConstructor = CUSTOM_BRIDGE[LINEA]?.[l1Token] ?? CANONICAL_BRIDGE[LINEA]; + bridges[l1Token] = new bridgeConstructor(LINEA, MAINNET, l1Signer, l2Signer, l1Token); }); - return outstandingTransfers; - } - - async getWethDepositInitiatedEvents( - l1MessageService: Contract, - l2RecipientAddress: string, - l1SearchConfig: EventSearchConfig - ): Promise { - const _initiatedQueryResult = await paginatedEventQuery( - l1MessageService, - l1MessageService.filters.MessageSent(null, l2RecipientAddress), - l1SearchConfig - ); - // @dev There will be a MessageSent to the SpokePool address for each RelayedRootBundle so remove - // those with 0 value. - return _initiatedQueryResult.filter(({ args }) => args._value.gt(0)); - } - - async getWethDepositFinalizedEvents( - l2MessageService: Contract, - internalMessageHashes: string[], - l2SearchConfig: EventSearchConfig - ): Promise { - return await paginatedEventQuery( - l2MessageService, - // Passing in an array of message hashes results in an OR filter - l2MessageService.filters.MessageClaimed(internalMessageHashes), - l2SearchConfig - ); - } - - matchWethDepositEvents( - initiatedQueryResult: Event[], - finalizedQueryResult: Event[], - outstandingTransfers: OutstandingTransfers, - monitoredAddress: string, - l1Token: string - ): void { - const transferEvents = initiatedQueryResult.filter( - ({ args }) => - !finalizedQueryResult.some( - (finalizedEvent) => args._messageHash.toLowerCase() === finalizedEvent.args._messageHash.toLowerCase() - ) - ); - this.computeOutstandingTransfers(outstandingTransfers, monitoredAddress, l1Token, transferEvents); - } - - computeOutstandingTransfers( - outstandingTransfers: OutstandingTransfers, - monitoredAddress: string, - l1Token: string, - transferEvents: Event[] - ): void { - const l2Token = this.resolveL2TokenAddress(l1Token, false); // There's no native USDC on Linea - assert(!isDefined(TOKEN_SYMBOLS_MAP.USDC.addresses[this.chainId])); // We can blow up if this eventually stops being true - transferEvents.forEach((event) => { - const txHash = event.transactionHash; - // @dev WETH events have a _value field, while ERC20 events have an amount field. - const amount = event.args._value ?? 
event.args.amount; - outstandingTransfers[monitoredAddress] ??= {}; - outstandingTransfers[monitoredAddress][l1Token] ??= {}; - outstandingTransfers[monitoredAddress][l1Token][l2Token] ??= { totalAmount: bnZero, depositTxHashes: [] }; - outstandingTransfers[monitoredAddress][l1Token][l2Token] = { - totalAmount: outstandingTransfers[monitoredAddress][l1Token][l2Token].totalAmount.add(amount), - depositTxHashes: [...outstandingTransfers[monitoredAddress][l1Token][l2Token].depositTxHashes, txHash], - }; - }); - } - - async getErc20DepositInitiatedEvents( - l1Bridge: Contract, - monitoredAddress: string, - l1Token: string, - l1SearchConfig: EventSearchConfig - ): Promise { - const initiatedQueryResult = await paginatedEventQuery( - l1Bridge, - l1Bridge.filters.BridgingInitiatedV2(null /* sender */, monitoredAddress /* recipient */, l1Token), - l1SearchConfig - ); - return initiatedQueryResult; - } - - async getErc20DepositFinalizedEvents( - l2Bridge: Contract, - monitoredAddress: string, - l1Token: string, - l2SearchConfig: EventSearchConfig - ): Promise { - const finalizedQueryResult = await paginatedEventQuery( - l2Bridge, - l2Bridge.filters.BridgingFinalizedV2( - l1Token, - null /* bridgedToken */, - null /* bridgedToken */, - monitoredAddress /* recipient */ - ), - l2SearchConfig - ); - return finalizedQueryResult; - } - - matchErc20DepositEvents( - initiatedQueryResult: Event[], - finalizedQueryResult: Event[], - outstandingTransfers: OutstandingTransfers, - monitoredAddress: string, - l1Token: string - ): void { - const transferEvents = initiatedQueryResult.filter( - (initialEvent) => - !isDefined( - finalizedQueryResult.find( - (finalEvent) => - finalEvent.args.amount.eq(initialEvent.args.amount) && - compareAddressesSimple(initialEvent.args.recipient, finalEvent.args.recipient) && - compareAddressesSimple(finalEvent.args.nativeToken, initialEvent.args.token) - ) - ) - ); - - this.computeOutstandingTransfers(outstandingTransfers, monitoredAddress, l1Token, transferEvents); - } - - getUsdcDepositInitiatedEvents( - l1Bridge: Contract, - monitoredAddress: string, - l1SearchConfig: EventSearchConfig - ): Promise { - return paginatedEventQuery( - l1Bridge, - l1Bridge.filters.Deposited(null /* depositor */, null /* amount */, monitoredAddress /* to */), - l1SearchConfig - ); - } - - getUsdcDepositFinalizedEvents( - l2Bridge: Contract, - monitoredAddress: string, - l2SearchConfig: EventSearchConfig - ): Promise { - return paginatedEventQuery( - l2Bridge, - l2Bridge.filters.ReceivedFromOtherLayer(monitoredAddress /* recipient */), - l2SearchConfig - ); - } - - matchUsdcDepositEvents( - initiatedQueryResult: Event[], - finalizedQueryResult: Event[], - outstandingTransfers: OutstandingTransfers, - monitoredAddress: string, - l1Token: string - ): void { - const transferEvents = initiatedQueryResult.filter( - (initialEvent) => - !isDefined( - finalizedQueryResult.find( - (finalEvent) => - finalEvent.args.amount.eq(initialEvent.args.amount) && - compareAddressesSimple(initialEvent.args.to, finalEvent.args.recipient) - ) - ) - ); - this.computeOutstandingTransfers(outstandingTransfers, monitoredAddress, l1Token, transferEvents); - } - - sendTokenToTargetChain( - address: string, - l1Token: string, - l2Token: string, - amount: BigNumber, - simMode: boolean - ): Promise { - const isWeth = this.isWeth(l1Token); - const isUsdc = this.isUsdc(l1Token); - const l1Bridge = this.getL1Bridge(l1Token); - const l1BridgeMethod = isWeth ? "bridgeWethToLinea" : isUsdc ? 
"depositTo" : "bridgeToken"; - // prettier-ignore - const l1BridgeArgs = isUsdc - ? [amount, address] - : isWeth - ? [address, amount] - : [l1Token, amount, address]; - return this._sendTokenToTargetChain( - l1Token, - l2Token, - amount, - l1Bridge, - l1BridgeMethod, - l1BridgeArgs, - 2, - bnZero, - simMode + super( + spokePoolClients, + LINEA, + MAINNET, + monitoredAddresses, + logger, + SUPPORTED_TOKENS[LINEA], + bridges, + DEFAULT_GAS_MULTIPLIER[LINEA] ?? 1 ); } } diff --git a/src/clients/bridges/PolygonAdapter.ts b/src/clients/bridges/PolygonAdapter.ts index 22fbfeecf..163005a71 100644 --- a/src/clients/bridges/PolygonAdapter.ts +++ b/src/clients/bridges/PolygonAdapter.ts @@ -1,341 +1,32 @@ -import { - assign, - Contract, - BigNumber, - bnToHex, - winston, - Event, - isDefined, - BigNumberish, - TransactionResponse, - ZERO_ADDRESS, - spreadEventWithBlockNumber, - paginatedEventQuery, - CHAIN_IDs, - TOKEN_SYMBOLS_MAP, - bnZero, - assert, -} from "../../utils"; -import { SpokePoolClient } from "../../clients"; -import { SortableEvent, OutstandingTransfers } from "../../interfaces"; -import { CONTRACT_ADDRESSES, SUPPORTED_TOKENS } from "../../common"; -import { CCTPAdapter } from "./CCTPAdapter"; +import { SUPPORTED_TOKENS, CUSTOM_BRIDGE, CANONICAL_BRIDGE, DEFAULT_GAS_MULTIPLIER } from "../../common"; +import { CHAIN_IDs, TOKEN_SYMBOLS_MAP, winston } from "../../utils"; +import { SpokePoolClient } from "../SpokePoolClient"; +import { BaseChainAdapter } from "../../adapter/BaseChainAdapter"; -// ether bridge = 0x8484Ef722627bf18ca5Ae6BcF031c23E6e922B30 -// erc20 bridge = 0x40ec5B33f54e0E8A33A975908C5BA1c14e5BbbDf -// matic bridge = 0x401f6c983ea34274ec46f84d70b31c151321188b - -// When bridging ETH to Polygon we MUST send ETH which is then wrapped in the bridge to WETH. We are unable to send WETH -// directly over the bridge, just like in the Optimism/Boba cases. 
- -// TODO: Move to ../../common/ContractAddresses.ts -const tokenToBridge = { - [TOKEN_SYMBOLS_MAP.USDC.addresses[CHAIN_IDs.MAINNET]]: { - l1BridgeAddress: "0x40ec5B33f54e0E8A33A975908C5BA1c14e5BbbDf", - l2TokenAddress: TOKEN_SYMBOLS_MAP.USDC.addresses[CHAIN_IDs.POLYGON], - l1Method: "LockedERC20", - l1AmountProp: "amount", - l2AmountProp: "value", - }, // USDC - [TOKEN_SYMBOLS_MAP.USDT.addresses[CHAIN_IDs.MAINNET]]: { - l1BridgeAddress: "0x40ec5B33f54e0E8A33A975908C5BA1c14e5BbbDf", - l2TokenAddress: TOKEN_SYMBOLS_MAP.USDT.addresses[CHAIN_IDs.POLYGON], - l1Method: "LockedERC20", - l1AmountProp: "amount", - l2AmountProp: "value", - }, // USDT - [TOKEN_SYMBOLS_MAP.DAI.addresses[CHAIN_IDs.MAINNET]]: { - l1BridgeAddress: "0x40ec5B33f54e0E8A33A975908C5BA1c14e5BbbDf", - l2TokenAddress: TOKEN_SYMBOLS_MAP.DAI.addresses[CHAIN_IDs.POLYGON], - l1Method: "LockedERC20", - l1AmountProp: "amount", - l2AmountProp: "value", - }, // DAI - [TOKEN_SYMBOLS_MAP.WBTC.addresses[CHAIN_IDs.MAINNET]]: { - l1BridgeAddress: "0x40ec5B33f54e0E8A33A975908C5BA1c14e5BbbDf", - l2TokenAddress: TOKEN_SYMBOLS_MAP.WBTC.addresses[CHAIN_IDs.POLYGON], - l1Method: "LockedERC20", - l1AmountProp: "amount", - l2AmountProp: "value", - }, // WBTC - [TOKEN_SYMBOLS_MAP.UMA.addresses[CHAIN_IDs.MAINNET]]: { - l1BridgeAddress: "0x40ec5B33f54e0E8A33A975908C5BA1c14e5BbbDf", - l2TokenAddress: TOKEN_SYMBOLS_MAP.UMA.addresses[CHAIN_IDs.POLYGON], - l1Method: "LockedERC20", - l1AmountProp: "amount", - l2AmountProp: "value", - }, // UMA - [TOKEN_SYMBOLS_MAP.BADGER.addresses[CHAIN_IDs.MAINNET]]: { - l1BridgeAddress: "0x40ec5B33f54e0E8A33A975908C5BA1c14e5BbbDf", - l2TokenAddress: TOKEN_SYMBOLS_MAP.BADGER.addresses[CHAIN_IDs.POLYGON], - l1Method: "LockedERC20", - l1AmountProp: "amount", - l2AmountProp: "value", - }, // BADGER - [TOKEN_SYMBOLS_MAP.BAL.addresses[CHAIN_IDs.MAINNET]]: { - l1BridgeAddress: "0x40ec5B33f54e0E8A33A975908C5BA1c14e5BbbDf", - l2TokenAddress: TOKEN_SYMBOLS_MAP.BAL.addresses[CHAIN_IDs.POLYGON], - l1Method: "LockedERC20", - l1AmountProp: "amount", - l2AmountProp: "value", - }, // BAL - [TOKEN_SYMBOLS_MAP.ACX.addresses[CHAIN_IDs.MAINNET]]: { - l1BridgeAddress: "0x40ec5B33f54e0E8A33A975908C5BA1c14e5BbbDf", - l2TokenAddress: TOKEN_SYMBOLS_MAP.ACX.addresses[CHAIN_IDs.POLYGON], - l1Method: "LockedERC20", - l1AmountProp: "amount", - l2AmountProp: "value", - }, // ACX - [TOKEN_SYMBOLS_MAP.POOL.addresses[CHAIN_IDs.MAINNET]]: { - l1BridgeAddress: "0x40ec5B33f54e0E8A33A975908C5BA1c14e5BbbDf", - l2TokenAddress: TOKEN_SYMBOLS_MAP.POOL.addresses[CHAIN_IDs.POLYGON], - l1Method: "LockedERC20", - l1AmountProp: "amount", - l2AmountProp: "value", - }, // POOL - [TOKEN_SYMBOLS_MAP.WETH.addresses[CHAIN_IDs.MAINNET]]: { - l1BridgeAddress: "0x8484Ef722627bf18ca5Ae6BcF031c23E6e922B30", - l2TokenAddress: TOKEN_SYMBOLS_MAP.WETH.addresses[CHAIN_IDs.POLYGON], - l1Method: "LockedEther", - l1AmountProp: "amount", - l2AmountProp: "value", - }, // WETH - [TOKEN_SYMBOLS_MAP.MATIC.addresses[CHAIN_IDs.MAINNET]]: { - l1BridgeAddress: "0x401f6c983ea34274ec46f84d70b31c151321188b", - l2TokenAddress: ZERO_ADDRESS, - l1Method: "NewDepositBlock", - l1AmountProp: "amountOrNFTId", - l2AmountProp: "amount", - }, // MATIC -} as const; - -type SupportedL1Token = string; - -export class PolygonAdapter extends CCTPAdapter { +export class PolygonAdapter extends BaseChainAdapter { constructor( logger: winston.Logger, readonly spokePoolClients: { [chainId: number]: SpokePoolClient }, monitoredAddresses: string[] ) { - const { POLYGON } = CHAIN_IDs; - super(spokePoolClients, 
POLYGON, monitoredAddresses, logger, SUPPORTED_TOKENS[POLYGON]); - } - - // On Polygon a bridge transaction looks like a transfer from address(0) to the target. - async getOutstandingCrossChainTransfers(l1Tokens: string[]): Promise { - const { l1SearchConfig, l2SearchConfig } = this.getUpdatedSearchConfigs(); - - // Skip the tokens if we can't find the corresponding bridge. - // This is a valid use case as it's more convenient to check cross chain transfers for all tokens - // rather than maintaining a list of native bridge-supported tokens. - const availableTokens = this.filterSupportedTokens(l1Tokens); - - const promises: Promise[] = []; - const cctpOutstandingTransfersPromise: Record> = {}; - // Fetch bridge events for all monitored addresses. This function will not work to monitor the hub pool contract, - // only the spoke pool address and EOAs. - const monitoredAddresses = this.monitoredAddresses.filter((address) => address !== this.getHubPool().address); - for (const monitoredAddress of monitoredAddresses) { - for (const l1Token of availableTokens) { - if (this.isL1TokenUsdc(l1Token)) { - cctpOutstandingTransfersPromise[monitoredAddress] = this.getOutstandingCctpTransfers(monitoredAddress); - } - - const l1Bridge = this.getL1Bridge(l1Token); - const l2Token = this.getL2Token(l1Token); - - const l1Method = tokenToBridge[l1Token].l1Method; - let l1SearchFilter: (string | undefined)[] = []; - if (l1Method === "LockedERC20") { - l1SearchFilter = [undefined /* depositor */, monitoredAddress /* depositReceiver */, l1Token]; - } - if (l1Method === "LockedEther") { - l1SearchFilter = [undefined /* depositor */, monitoredAddress /* depositReceiver */]; - } - if (l1Method === "NewDepositBlock") { - // @dev This won't work for tracking Hub to Spoke transfers since the l1 "owner" will be different - // from the L2 "user". We leave it in here for future EOA relayer rebalancing of Matic. - l1SearchFilter = [monitoredAddress /* owner */, TOKEN_SYMBOLS_MAP.MATIC.addresses[CHAIN_IDs.MAINNET]]; - } - - const l2Method = - l1Token === TOKEN_SYMBOLS_MAP.MATIC.addresses[CHAIN_IDs.MAINNET] ? "TokenDeposited" : "Transfer"; - let l2SearchFilter: (string | undefined)[] = []; - if (l2Method === "Transfer") { - l2SearchFilter = [ZERO_ADDRESS, monitoredAddress]; - } - if (l2Method === "TokenDeposited") { - l2SearchFilter = [ - TOKEN_SYMBOLS_MAP.MATIC.addresses[CHAIN_IDs.MAINNET], - ZERO_ADDRESS, - monitoredAddress /* user */, - ]; - } - - promises.push( - paginatedEventQuery(l1Bridge, l1Bridge.filters[l1Method](...l1SearchFilter), l1SearchConfig), - paginatedEventQuery(l2Token, l2Token.filters[l2Method](...l2SearchFilter), l2SearchConfig) - ); - } - } - - const [results, resolvedCCTPEvents] = await Promise.all([ - Promise.all(promises), - Promise.all(monitoredAddresses.map((monitoredAddress) => cctpOutstandingTransfersPromise[monitoredAddress])), - ]); - const resultingCCTPEvents: Record = Object.fromEntries( - monitoredAddresses.map((monitoredAddress, idx) => [monitoredAddress, resolvedCCTPEvents[idx]]) + const { POLYGON, MAINNET } = CHAIN_IDs; + const bridges = {}; + const l2Signer = spokePoolClients[POLYGON].spokePool.signer; + const l1Signer = spokePoolClients[MAINNET].spokePool.signer; + SUPPORTED_TOKENS[POLYGON]?.map((symbol) => { + const l1Token = TOKEN_SYMBOLS_MAP[symbol].addresses[MAINNET]; + const bridgeConstructor = CUSTOM_BRIDGE[POLYGON]?.[l1Token] ??
CANONICAL_BRIDGE[POLYGON]; + bridges[l1Token] = new bridgeConstructor(POLYGON, MAINNET, l1Signer, l2Signer, l1Token); + }); + super( + spokePoolClients, + POLYGON, + MAINNET, + monitoredAddresses, + logger, + SUPPORTED_TOKENS[POLYGON], + bridges, + DEFAULT_GAS_MULTIPLIER[POLYGON] ?? 1 ); - - // 2 events per token. - const numEventsPerMonitoredAddress = 2 * availableTokens.length; - - // Segregate the events list by monitored address. - const resultsByMonitoredAddress = Object.fromEntries( - monitoredAddresses.map((monitoredAddress, index) => { - const start = index * numEventsPerMonitoredAddress; - return [monitoredAddress, results.slice(start, start + numEventsPerMonitoredAddress)]; - }) - ); - - // Process events for each monitored address. - for (const monitoredAddress of monitoredAddresses) { - const eventsToProcess = resultsByMonitoredAddress[monitoredAddress]; - eventsToProcess.forEach((result, index) => { - if (eventsToProcess.length === 0) { - return; - } - assert(eventsToProcess.length % 2 === 0, "Events list length should be even"); - const l1Token = availableTokens[Math.floor(index / 2)]; - const amountProp = index % 2 === 0 ? tokenToBridge[l1Token].l1AmountProp : tokenToBridge[l1Token].l2AmountProp; - const events = result.map((event) => { - // Hacky typing here. We should probably rework the structure of this function to improve. - const eventSpread = spreadEventWithBlockNumber(event) as unknown as SortableEvent & { - [amount in typeof amountProp]?: BigNumberish; - } & { depositReceiver: string }; - return { - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - amount: eventSpread[amountProp]!, - to: eventSpread["depositReceiver"], - ...eventSpread, - }; - }); - const eventsStorage = index % 2 === 0 ? this.l1DepositInitiatedEvents : this.l2DepositFinalizedEvents; - const l2Token = this.resolveL2TokenAddress(l1Token, false); // these are all either normal L2 tokens or bridged USDC - assign(eventsStorage, [monitoredAddress, l1Token, l2Token], events); - }); - if (isDefined(resultingCCTPEvents[monitoredAddress])) { - assign( - this.l1DepositInitiatedEvents, - [ - monitoredAddress, - TOKEN_SYMBOLS_MAP.USDC.addresses[this.hubChainId], - TOKEN_SYMBOLS_MAP.USDC.addresses[this.chainId], // Must map to the USDC Native L2 token address - ], - resultingCCTPEvents[monitoredAddress] - ); - } - } - - this.baseL1SearchConfig.fromBlock = l1SearchConfig.toBlock + 1; - this.baseL2SearchConfig.fromBlock = l2SearchConfig.toBlock + 1; - - return this.computeOutstandingCrossChainTransfers(availableTokens); - } - - sendTokenToTargetChain( - address: string, - l1Token: string, - l2Token: string, - amount: BigNumber, - simMode = false - ): Promise { - // If both the L1 & L2 tokens are native USDC, we use the CCTP bridge. - if (this.isL1TokenUsdc(l1Token) && this.isL2TokenUsdc(l2Token)) { - return this.sendCctpTokenToTargetChain(address, l1Token, l2Token, amount, simMode); - } else { - let method = "depositFor"; - // note that the amount is the bytes 32 encoding of the amount. - let args = [address, l1Token, bnToHex(amount)]; - - // If this token is WETH (the tokenToEvent maps to the ETH method) then we modify the params to deposit ETH. 
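// Illustrative sketch (not part of this diff, helper name hypothetical): the branch below
// swaps both the method and its argument shape. The ERC20 path calls depositFor(user,
// rootToken, depositData), where depositData is the amount encoded as 32 bytes (what
// bnToHex appears to produce here), while the WETH path routes through the atomic depositor:
import { BigNumber, utils as ethersUtils } from "ethers";

function polygonDepositArgs(isWeth: boolean, user: string, l1Token: string, amount: BigNumber) {
  return isWeth
    ? { method: "bridgeWethToPolygon", args: [user, amount.toString()] }
    : // hexZeroPad yields the 32-byte big-endian amount encoding the PoS bridge expects.
      { method: "depositFor", args: [user, l1Token, ethersUtils.hexZeroPad(amount.toHexString(), 32)] };
}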
- if (this.isWeth(l1Token)) { - method = "bridgeWethToPolygon"; - args = [address, amount.toString()]; - } - return this._sendTokenToTargetChain( - l1Token, - l2Token, - amount, - this.getL1TokenGateway(l1Token), - method, - args, - 1, - bnZero, - simMode - ); - } - } - - async checkTokenApprovals(l1Tokens: string[]): Promise { - const address = await this.getSigner(this.hubChainId).getAddress(); - const l1TokenListToApprove = []; - - const associatedL1Bridges = l1Tokens - .flatMap((l1Token) => { - if (!this.isSupportedToken(l1Token)) { - return []; - } - if (this.isWeth(l1Token)) { - l1TokenListToApprove.push(l1Token); - return [this.getL1TokenGateway(l1Token)?.address]; - } - const bridgeAddresses: string[] = []; - if (this.isL1TokenUsdc(l1Token)) { - bridgeAddresses.push(this.getL1CCTPTokenMessengerBridge().address); - } - bridgeAddresses.push(this.getL1Bridge(l1Token).address); - - // Push the l1 token to the list of tokens to approve N times, where N is the number of bridges. - // I.e. the arrays have to be parallel. - l1TokenListToApprove.push(...Array(bridgeAddresses.length).fill(l1Token)); - - return bridgeAddresses; - }) - .filter(isDefined); - await this.checkAndSendTokenApprovals(address, l1TokenListToApprove, associatedL1Bridges); - } - - getL1Bridge(l1Token: SupportedL1Token): Contract { - return new Contract( - tokenToBridge[l1Token].l1BridgeAddress, - CONTRACT_ADDRESSES[1].polygonBridge.abi, - this.getSigner(this.hubChainId) - ); - } - - getL1TokenGateway(l1Token: string): Contract { - if (this.isWeth(l1Token)) { - return this.getAtomicDepositor(); - } else { - return new Contract( - CONTRACT_ADDRESSES[1].polygonRootChainManager.address, - CONTRACT_ADDRESSES[1].polygonRootChainManager.abi, - this.getSigner(this.hubChainId) - ); - } - } - - // Note that on Polygon we don't query events on the L2 bridge; rather, we look for mint events on the L2 token.
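// Illustrative sketch (not part of this diff): per the note above, deposit finalization on
// Polygon is detected on the L2 token itself rather than on a bridge contract. The PoS bridge
// mints tokens to the recipient, which surfaces as a standard ERC20 Transfer from address(0).
// Assuming an ethers v5 Contract for the L2 token:
import { Contract, constants } from "ethers";

function l2MintEventFilter(l2Token: Contract, recipient: string) {
  // Transfer(from = address(0), to = recipient) is the mint emitted when the bridge finalizes.
  return l2Token.filters.Transfer(constants.AddressZero, recipient);
}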
- getL2Token(l1Token: SupportedL1Token): Contract { - return new Contract( - tokenToBridge[l1Token].l2TokenAddress, - CONTRACT_ADDRESSES[137].withdrawableErc20.abi, - this.getSigner(this.chainId) - ); - } - - async wrapEthIfAboveThreshold(): Promise { - throw new Error("Unnecessary to wrap ETH on Polygon"); } } diff --git a/src/clients/index.ts b/src/clients/index.ts index bcfefbb3e..3e4a39109 100644 --- a/src/clients/index.ts +++ b/src/clients/index.ts @@ -5,9 +5,9 @@ export type SpokePoolUpdate = clients.SpokePoolUpdate; export const { SpokePoolClient } = clients; export { IndexedSpokePoolClient, SpokePoolClientMessage } from "./SpokePoolClient"; +export class BundleDataClient extends clients.BundleDataClient.BundleDataClient {} export * from "./BalanceAllocator"; -export * from "./BundleDataClient"; export * from "./HubPoolClient"; export * from "./ConfigStoreClient"; export * from "./MultiCallerClient"; diff --git a/src/dataworker/Dataworker.ts b/src/dataworker/Dataworker.ts index 965f807eb..a800eb1c5 100644 --- a/src/dataworker/Dataworker.ts +++ b/src/dataworker/Dataworker.ts @@ -15,6 +15,9 @@ import { ZERO_ADDRESS, chainIsMatic, CHAIN_IDs, + getWidestPossibleExpectedBlockRange, + getEndBlockBuffers, + _buildPoolRebalanceRoot, } from "../utils"; import { ProposedRootBundle, @@ -24,7 +27,7 @@ import { RunningBalances, PoolRebalanceLeaf, RelayerRefundLeaf, - V3SlowFillLeaf, + SlowFillLeaf, FillStatus, } from "../interfaces"; import { DataworkerClients } from "./DataworkerClientHelper"; @@ -37,12 +40,7 @@ import { l2TokensToCountTowardsSpokePoolLeafExecutionCapital, persistDataToArweave, } from "../dataworker/DataworkerUtils"; -import { - getEndBlockBuffers, - _buildPoolRebalanceRoot, - _buildRelayerRefundRoot, - _buildSlowRelayRoot, -} from "./DataworkerUtils"; +import { _buildRelayerRefundRoot, _buildSlowRelayRoot } from "./DataworkerUtils"; import _ from "lodash"; import { CONTRACT_ADDRESSES, spokePoolClientsToProviders } from "../common"; import * as sdk from "@across-protocol/sdk"; @@ -64,8 +62,8 @@ const ERROR_DISPUTE_REASONS = new Set(["insufficient-dataworker-lookback", "out- // Create a type for storing a collection of roots type SlowRootBundle = { - leaves: V3SlowFillLeaf[]; - tree: MerkleTree; + leaves: SlowFillLeaf[]; + tree: MerkleTree; }; type ProposeRootBundleReturnType = { @@ -73,8 +71,8 @@ type ProposeRootBundleReturnType = { poolRebalanceTree: MerkleTree; relayerRefundLeaves: RelayerRefundLeaf[]; relayerRefundTree: MerkleTree; - slowFillLeaves: V3SlowFillLeaf[]; - slowFillTree: MerkleTree; + slowFillLeaves: SlowFillLeaf[]; + slowFillTree: MerkleTree; bundleData: BundleData; }; @@ -85,7 +83,7 @@ export type PoolRebalanceRoot = { tree: MerkleTree; }; -type PoolRebalanceRootCache = Record>; +type PoolRebalanceRootCache = Record; // @notice Constructs roots to submit to HubPool on L1. Fetches all data synchronously from SpokePool/HubPool clients // so this class assumes that those upstream clients are already updated and have fetched on-chain data from RPC's. @@ -189,7 +187,7 @@ export class Dataworker { this.chainIdListForBundleEvaluationBlockNumbers )[1]; - return await this._getPoolRebalanceRoot( + return this._getPoolRebalanceRoot( blockRangesForChains, latestMainnetBlock ?? 
mainnetBundleEndBlock, mainnetBundleEndBlock, @@ -485,7 +483,7 @@ export class Dataworker { }; const [, mainnetBundleEndBlock] = blockRangesForProposal[0]; - const poolRebalanceRoot = await this._getPoolRebalanceRoot( + const poolRebalanceRoot = this._getPoolRebalanceRoot( blockRangesForProposal, latestMainnetBundleEndBlock, mainnetBundleEndBlock, @@ -682,8 +680,8 @@ export class Dataworker { leaves: RelayerRefundLeaf[]; }; slowRelayTree: { - tree: MerkleTree; - leaves: V3SlowFillLeaf[]; + tree: MerkleTree; + leaves: SlowFillLeaf[]; }; }; bundleData?: BundleData; @@ -702,8 +700,8 @@ export class Dataworker { leaves: RelayerRefundLeaf[]; }; slowRelayTree: { - tree: MerkleTree; - leaves: V3SlowFillLeaf[]; + tree: MerkleTree; + leaves: SlowFillLeaf[]; }; }; bundleData: BundleData; @@ -1136,10 +1134,10 @@ export class Dataworker { } async _executeSlowFillLeaf( - _leaves: V3SlowFillLeaf[], + _leaves: SlowFillLeaf[], balanceAllocator: BalanceAllocator, client: SpokePoolClient, - slowRelayTree: MerkleTree, + slowRelayTree: MerkleTree, submitExecution: boolean, rootBundleId?: number ): Promise { @@ -1152,7 +1150,7 @@ export class Dataworker { // If there is a message, we ignore the leaf and log an error. if (!sdk.utils.isMessageEmpty(message)) { - const { method, args } = this.encodeV3SlowFillLeaf(slowRelayTree, rootBundleId, leaf); + const { method, args } = this.encodeSlowFillLeaf(slowRelayTree, rootBundleId, leaf); this.logger.warn({ at: "Dataworker#_executeSlowFillLeaf", @@ -1287,7 +1285,7 @@ export class Dataworker { `amount: ${outputAmount.toString()}`; if (submitExecution) { - const { method, args } = this.encodeV3SlowFillLeaf(slowRelayTree, rootBundleId, leaf); + const { method, args } = this.encodeSlowFillLeaf(slowRelayTree, rootBundleId, leaf); this.clients.multiCallerClient.enqueueTransaction({ contract: client.spokePool, @@ -1312,11 +1310,11 @@ export class Dataworker { }); } - encodeV3SlowFillLeaf( - slowRelayTree: MerkleTree, + encodeSlowFillLeaf( + slowRelayTree: MerkleTree, rootBundleId: number, - leaf: V3SlowFillLeaf - ): { method: string; args: (number | string[] | V3SlowFillLeaf)[] } { + leaf: SlowFillLeaf + ): { method: string; args: (number | string[] | SlowFillLeaf)[] } { const method = "executeV3SlowRelayLeaf"; const proof = slowRelayTree.getHexProof(leaf); const args = [leaf, rootBundleId, proof]; @@ -2209,7 +2207,7 @@ export class Dataworker { poolRebalanceRoot: string, relayerRefundLeaves: RelayerRefundLeaf[], relayerRefundRoot: string, - slowRelayLeaves: V3SlowFillLeaf[], + slowRelayLeaves: SlowFillLeaf[], slowRelayRoot: string ): void { try { @@ -2264,7 +2262,7 @@ export class Dataworker { } } - async _getPoolRebalanceRoot( + _getPoolRebalanceRoot( blockRangesForChains: number[][], latestMainnetBlock: number, mainnetBundleEndBlock: number, @@ -2273,7 +2271,7 @@ export class Dataworker { bundleSlowFills: BundleSlowFills, unexecutableSlowFills: BundleExcessSlowFills, expiredDepositsToRefundV3: ExpiredDepositsToRefundV3 - ): Promise { + ): PoolRebalanceRoot { const key = JSON.stringify(blockRangesForChains); // FIXME: Temporary fix to disable root cache rebalancing and to keep the // executor running for tonight (2023-08-28) until we can fix the @@ -2293,7 +2291,14 @@ export class Dataworker { ); } - return _.cloneDeep(await this.rootCache[key]); + this.logger.debug({ + at: "Dataworker#_getPoolRebalanceRoot", + message: "Constructed new pool rebalance root", + key, + root: this.rootCache[key], + }); + + return _.cloneDeep(this.rootCache[key]); } 
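// Illustrative sketch (not part of this diff): _getPoolRebalanceRoot above memoizes roots by
// JSON.stringify(blockRangesForChains) and hands out deep clones so callers cannot mutate the
// cached entry. The same pattern in isolation (generic names assumed):
import _ from "lodash";

class RootCache<T> {
  private readonly cache: Record<string, T> = {};

  getOrBuild(blockRangesForChains: number[][], build: () => T): T {
    const key = JSON.stringify(blockRangesForChains);
    this.cache[key] ??= build(); // compute once per distinct set of block ranges
    return _.cloneDeep(this.cache[key]); // defensive copy keeps the cached root pristine
  }
}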
_getRequiredEthForArbitrumPoolRebalanceLeaf(leaf: PoolRebalanceLeaf): BigNumber { @@ -2368,7 +2373,7 @@ export class Dataworker { mainnetBundleStartBlock: number ): number[][] { const chainIds = this.clients.configStoreClient.getChainIdIndicesForBlock(mainnetBundleStartBlock); - return PoolRebalanceUtils.getWidestPossibleExpectedBlockRange( + return getWidestPossibleExpectedBlockRange( // We only want as many block ranges as there are chains enabled at the time of the bundle start block. chainIds, spokePoolClients, diff --git a/src/dataworker/DataworkerUtils.ts b/src/dataworker/DataworkerUtils.ts index 55df531ca..a87f3ae8e 100644 --- a/src/dataworker/DataworkerUtils.ts +++ b/src/dataworker/DataworkerUtils.ts @@ -8,69 +8,32 @@ import { RelayerRefundLeaf, RelayerRefundLeafWithGroup, RunningBalances, - V3FillWithBlock, - V3SlowFillLeaf, + SlowFillLeaf, } from "../interfaces"; import { - AnyObject, BigNumber, bnZero, - buildPoolRebalanceLeafTree, buildRelayerRefundTree, buildSlowRelayTree, - count2DDictionaryValues, - count3DDictionaryValues, fixedPointAdjustment, + getRefundsFromBundle, getTimestampsForBundleEndBlocks, - groupObjectCountsByTwoProps, isDefined, MerkleTree, TOKEN_SYMBOLS_MAP, winston, } from "../utils"; -import { PoolRebalanceRoot } from "./Dataworker"; import { DataworkerClients } from "./DataworkerClientHelper"; -import { - addLastRunningBalance, - constructPoolRebalanceLeaves, - updateRunningBalance, - updateRunningBalanceForDeposit, -} from "./PoolRebalanceUtils"; import { getAmountToReturnForRelayerRefundLeaf, sortRefundAddresses, sortRelayerRefundLeaves, } from "./RelayerRefundUtils"; -import { - BundleDepositsV3, - BundleExcessSlowFills, - BundleFillsV3, - BundleSlowFills, - ExpiredDepositsToRefundV3, -} from "../interfaces/BundleData"; +import { BundleFillsV3, BundleSlowFills, ExpiredDepositsToRefundV3 } from "../interfaces/BundleData"; export const { getImpliedBundleBlockRanges, getBlockRangeForChain, getBlockForChain, parseWinston, formatWinston } = utils; import { any } from "superstruct"; -export function getEndBlockBuffers( - chainIdListForBundleEvaluationBlockNumbers: number[], - blockRangeEndBlockBuffer: { [chainId: number]: number } -): number[] { - // These buffers can be configured by the bot runner. They have two use cases: - // 1) Validate the end blocks specified in the pending root bundle. If the end block is greater than the latest - // block for its chain, then we should dispute the bundle because we can't look up events in the future for that - // chain. However, there are some cases where the proposer's node for that chain is returning a higher HEAD block - // than the bot-runner is seeing, so we can use this buffer to allow the proposer some margin of error. If - // the bundle end block is less than HEAD but within this buffer, then we won't dispute and we'll just exit - // early from this function. - // 2) Subtract from the latest block in a new root bundle proposal. This can be used to reduce the chance that - // bot runs using different providers see different contract state close to the HEAD block for a chain. - // Reducing the latest block that we query also gives partially filled deposits slightly more buffer for relayers - // to fully fill the deposit and reduces the chance that the data worker includes a slow fill payment that gets - // filled during the challenge period. - return chainIdListForBundleEvaluationBlockNumbers.map((chainId: number) => blockRangeEndBlockBuffer[chainId] ?? 
0); -} - // TODO: Move to SDK since this implements UMIP logic about validating block ranges. // Return true if we won't be able to construct a root bundle for the bundle block ranges ("blockRanges") because // the bundle wants to look up data for events that weren't in the spoke pool client's search range. @@ -156,33 +119,11 @@ export async function blockRangesAreInvalidForSpokeClients( }); } -export function prettyPrintV3SpokePoolEvents( - bundleDepositsV3: BundleDepositsV3, - bundleFillsV3: BundleFillsV3, - bundleInvalidFillsV3: V3FillWithBlock[], - bundleSlowFillsV3: BundleSlowFills, - expiredDepositsToRefundV3: ExpiredDepositsToRefundV3, - unexecutableSlowFills: BundleExcessSlowFills -): AnyObject { - return { - bundleDepositsV3: count2DDictionaryValues(bundleDepositsV3), - bundleFillsV3: count3DDictionaryValues(bundleFillsV3, "fills"), - bundleSlowFillsV3: count2DDictionaryValues(bundleSlowFillsV3), - expiredDepositsToRefundV3: count2DDictionaryValues(expiredDepositsToRefundV3), - unexecutableSlowFills: count2DDictionaryValues(unexecutableSlowFills), - allInvalidFillsInRangeByDestinationChainAndRelayer: groupObjectCountsByTwoProps( - bundleInvalidFillsV3, - "destinationChainId", - (fill) => `${fill.relayer}` - ), - }; -} - export function _buildSlowRelayRoot(bundleSlowFillsV3: BundleSlowFills): { - leaves: V3SlowFillLeaf[]; - tree: MerkleTree; + leaves: SlowFillLeaf[]; + tree: MerkleTree; } { - const slowRelayLeaves: V3SlowFillLeaf[] = []; + const slowRelayLeaves: SlowFillLeaf[] = []; // Append V3 slow fills to the V2 leaf list Object.values(bundleSlowFillsV3).forEach((depositsForChain) => { @@ -211,7 +152,7 @@ export function _buildSlowRelayRoot(bundleSlowFillsV3: BundleSlowFills): { }; } -function buildV3SlowFillLeaf(deposit: interfaces.Deposit, lpFeePct: BigNumber): V3SlowFillLeaf { +function buildV3SlowFillLeaf(deposit: interfaces.Deposit, lpFeePct: BigNumber): SlowFillLeaf { const lpFee = deposit.inputAmount.mul(lpFeePct).div(fixedPointAdjustment); return { @@ -240,56 +181,6 @@ export type CombinedRefunds = { }; }; -// Create a combined `refunds` object containing refunds for V2 + V3 fills -// and expired deposits. -export function getRefundsFromBundle( - bundleFillsV3: BundleFillsV3, - expiredDepositsToRefundV3: ExpiredDepositsToRefundV3 -): CombinedRefunds { - const combinedRefunds: { - [repaymentChainId: number]: { - [repaymentToken: string]: interfaces.Refund; - }; - } = {}; - Object.entries(bundleFillsV3).forEach(([repaymentChainId, fillsForChain]) => { - combinedRefunds[repaymentChainId] ??= {}; - Object.entries(fillsForChain).forEach(([l2TokenAddress, { refunds }]) => { - // refunds can be undefined if these fills were all slow fill executions. - if (refunds === undefined) { - return; - } - // @dev use shallow copy so that modifying combinedRefunds doesn't modify the original refunds object. - const refundsShallowCopy = { ...refunds }; - if (combinedRefunds[repaymentChainId][l2TokenAddress] === undefined) { - combinedRefunds[repaymentChainId][l2TokenAddress] = refundsShallowCopy; - } else { - // Each refunds object should have a unique refund address so we can add new ones to the - // existing dictionary. 
- combinedRefunds[repaymentChainId][l2TokenAddress] = { - ...combinedRefunds[repaymentChainId][l2TokenAddress], - ...refundsShallowCopy, - }; - } - }); - }); - Object.entries(expiredDepositsToRefundV3).forEach(([originChainId, depositsForChain]) => { - combinedRefunds[originChainId] ??= {}; - Object.entries(depositsForChain).forEach(([l2TokenAddress, deposits]) => { - deposits.forEach((deposit) => { - if (combinedRefunds[originChainId][l2TokenAddress] === undefined) { - combinedRefunds[originChainId][l2TokenAddress] = { [deposit.depositor]: deposit.inputAmount }; - } else { - const existingRefundAmount = combinedRefunds[originChainId][l2TokenAddress][deposit.depositor]; - combinedRefunds[originChainId][l2TokenAddress][deposit.depositor] = deposit.inputAmount.add( - existingRefundAmount ?? bnZero - ); - } - }); - }); - }); - return combinedRefunds; -} - export function _buildRelayerRefundRoot( endBlockForMainnet: number, bundleFillsV3: BundleFillsV3, @@ -402,152 +293,6 @@ export function _buildRelayerRefundRoot( }; } -export async function _buildPoolRebalanceRoot( - latestMainnetBlock: number, - mainnetBundleEndBlock: number, - bundleV3Deposits: BundleDepositsV3, - bundleFillsV3: BundleFillsV3, - bundleSlowFillsV3: BundleSlowFills, - unexecutableSlowFills: BundleExcessSlowFills, - expiredDepositsToRefundV3: ExpiredDepositsToRefundV3, - clients: Pick, - maxL1TokenCountOverride?: number -): Promise { - // Running balances are the amount of tokens that we need to send to each SpokePool to pay for all instant and - // slow relay refunds. They are decreased by the amount of funds already held by the SpokePool. Balances are keyed - // by the SpokePool's network and L1 token equivalent of the L2 token to refund. - // Realized LP fees are keyed the same as running balances and represent the amount of LP fees that should be paid - // to LP's for each running balance. - - // For each FilledRelay group, identified by { repaymentChainId, L1TokenAddress }, initialize a "running balance" - // to the total refund amount for that group. - const runningBalances: RunningBalances = {}; - const realizedLpFees: RunningBalances = {}; - - /** - * REFUNDS FOR FAST FILLS - */ - - // Add running balances and lp fees for v3 relayer refunds using BundleDataClient.bundleFillsV3. Refunds - // should be equal to inputAmount - lpFees so that relayers get to keep the relayer fee. Add the refund amount - // to the running balance for the repayment chain. - Object.entries(bundleFillsV3).forEach(([_repaymentChainId, fillsForChain]) => { - const repaymentChainId = Number(_repaymentChainId); - Object.entries(fillsForChain).forEach( - ([l2TokenAddress, { realizedLpFees: totalRealizedLpFee, totalRefundAmount }]) => { - const l1TokenCounterpart = clients.hubPoolClient.getL1TokenForL2TokenAtBlock( - l2TokenAddress, - repaymentChainId, - mainnetBundleEndBlock - ); - - updateRunningBalance(runningBalances, repaymentChainId, l1TokenCounterpart, totalRefundAmount); - updateRunningBalance(realizedLpFees, repaymentChainId, l1TokenCounterpart, totalRealizedLpFee); - } - ); - }); - - /** - * PAYMENTS SLOW FILLS - */ - - // Add running balances and lp fees for v3 slow fills using BundleDataClient.bundleSlowFillsV3. - // Slow fills should still increment bundleLpFees and updatedOutputAmount should be equal to inputAmount - lpFees. - // Increment the updatedOutputAmount to the destination chain. 
- Object.entries(bundleSlowFillsV3).forEach(([_destinationChainId, depositsForChain]) => { - const destinationChainId = Number(_destinationChainId); - Object.entries(depositsForChain).forEach(([outputToken, deposits]) => { - deposits.forEach((deposit) => { - const l1TokenCounterpart = clients.hubPoolClient.getL1TokenForL2TokenAtBlock( - outputToken, - destinationChainId, - mainnetBundleEndBlock - ); - const lpFee = deposit.lpFeePct.mul(deposit.inputAmount).div(fixedPointAdjustment); - updateRunningBalance(runningBalances, destinationChainId, l1TokenCounterpart, deposit.inputAmount.sub(lpFee)); - // Slow fill LP fees are accounted for when the slow fill executes and a V3FilledRelay is emitted. i.e. when - // the slow fill execution is included in bundleFillsV3. - }); - }); - }); - - /** - * EXCESSES FROM UNEXECUTABLE SLOW FILLS - */ - - // Subtract destination chain running balances for BundleDataClient.unexecutableSlowFills. - // These are all slow fills that are impossible to execute and therefore the amount to return would be - // the updatedOutputAmount = inputAmount - lpFees. - Object.entries(unexecutableSlowFills).forEach(([_destinationChainId, slowFilledDepositsForChain]) => { - const destinationChainId = Number(_destinationChainId); - Object.entries(slowFilledDepositsForChain).forEach(([outputToken, slowFilledDeposits]) => { - slowFilledDeposits.forEach((deposit) => { - const l1TokenCounterpart = clients.hubPoolClient.getL1TokenForL2TokenAtBlock( - outputToken, - destinationChainId, - mainnetBundleEndBlock - ); - const lpFee = deposit.lpFeePct.mul(deposit.inputAmount).div(fixedPointAdjustment); - updateRunningBalance(runningBalances, destinationChainId, l1TokenCounterpart, lpFee.sub(deposit.inputAmount)); - // Slow fills don't add to lpFees, only when the slow fill is executed and a V3FilledRelay is emitted, so - // we don't need to subtract it here. Moreover, the HubPoole expects bundleLpFees to be > 0. - }); - }); - }); - - /** - * DEPOSITS - */ - - // Handle v3Deposits. These decrement running balances from the origin chain equal to the inputAmount. - // There should not be early deposits in v3. - Object.entries(bundleV3Deposits).forEach(([, depositsForChain]) => { - Object.entries(depositsForChain).forEach(([, deposits]) => { - deposits.forEach((deposit) => { - updateRunningBalanceForDeposit(runningBalances, clients.hubPoolClient, deposit, deposit.inputAmount.mul(-1)); - }); - }); - }); - - /** - * REFUNDS FOR EXPIRED DEPOSITS - */ - - // Add origin chain running balance for expired v3 deposits. These should refund the inputAmount. - Object.entries(expiredDepositsToRefundV3).forEach(([_originChainId, depositsForChain]) => { - const originChainId = Number(_originChainId); - Object.entries(depositsForChain).forEach(([inputToken, deposits]) => { - deposits.forEach((deposit) => { - const l1TokenCounterpart = clients.hubPoolClient.getL1TokenForL2TokenAtBlock( - inputToken, - originChainId, - mainnetBundleEndBlock - ); - updateRunningBalance(runningBalances, originChainId, l1TokenCounterpart, deposit.inputAmount); - }); - }); - }); - - // Add to the running balance value from the last valid root bundle proposal for {chainId, l1Token} - // combination if found. 
- addLastRunningBalance(latestMainnetBlock, runningBalances, clients.hubPoolClient); - - const leaves: PoolRebalanceLeaf[] = constructPoolRebalanceLeaves( - mainnetBundleEndBlock, - runningBalances, - realizedLpFees, - clients.configStoreClient, - maxL1TokenCountOverride - ); - - return { - runningBalances, - realizedLpFees, - leaves, - tree: buildPoolRebalanceLeafTree(leaves), - }; -} - /** * @notice Returns WETH and ETH token addresses for chain if defined, or throws an error if they're not * in the hardcoded dictionary. diff --git a/src/dataworker/PoolRebalanceUtils.ts b/src/dataworker/PoolRebalanceUtils.ts index 703baf084..4c37916ae 100644 --- a/src/dataworker/PoolRebalanceUtils.ts +++ b/src/dataworker/PoolRebalanceUtils.ts @@ -1,20 +1,11 @@ import { utils as sdkUtils } from "@across-protocol/sdk"; -import { ConfigStoreClient, HubPoolClient, SpokePoolClient } from "../clients"; -import { Clients } from "../common"; -import * as interfaces from "../interfaces"; -import { - PendingRootBundle, - PoolRebalanceLeaf, - RelayerRefundLeaf, - SpokePoolTargetBalance, - V3SlowFillLeaf, -} from "../interfaces"; +import { HubPoolClient } from "../clients"; +import { PendingRootBundle, PoolRebalanceLeaf, RelayerRefundLeaf, SlowFillLeaf } from "../interfaces"; import { bnZero, BigNumber, fixedPointAdjustment as fixedPoint, MerkleTree, - compareAddresses, convertFromWei, formatFeePct, shortenHexString, @@ -24,60 +15,10 @@ import { winston, assert, getNetworkName, + isChainDisabled, } from "../utils"; import { DataworkerClients } from "./DataworkerClientHelper"; -export function updateRunningBalance( - runningBalances: interfaces.RunningBalances, - l2ChainId: number, - l1Token: string, - updateAmount: BigNumber -): void { - // Initialize dictionary if empty. - if (!runningBalances[l2ChainId]) { - runningBalances[l2ChainId] = {}; - } - const runningBalance = runningBalances[l2ChainId][l1Token]; - if (runningBalance) { - runningBalances[l2ChainId][l1Token] = runningBalance.add(updateAmount); - } else { - runningBalances[l2ChainId][l1Token] = updateAmount; - } -} - -export function addLastRunningBalance( - latestMainnetBlock: number, - runningBalances: interfaces.RunningBalances, - hubPoolClient: HubPoolClient -): void { - Object.keys(runningBalances).forEach((repaymentChainId) => { - Object.keys(runningBalances[repaymentChainId]).forEach((l1TokenAddress) => { - const { runningBalance } = hubPoolClient.getRunningBalanceBeforeBlockForChain( - latestMainnetBlock, - Number(repaymentChainId), - l1TokenAddress - ); - if (!runningBalance.eq(bnZero)) { - updateRunningBalance(runningBalances, Number(repaymentChainId), l1TokenAddress, runningBalance); - } - }); - }); -} - -export function updateRunningBalanceForDeposit( - runningBalances: interfaces.RunningBalances, - hubPoolClient: HubPoolClient, - deposit: interfaces.V3DepositWithBlock, - updateAmount: BigNumber -): void { - const l1TokenCounterpart = hubPoolClient.getL1TokenForL2TokenAtBlock( - deposit.inputToken, - deposit.originChainId, - deposit.quoteBlockNumber - ); - updateRunningBalance(runningBalances, deposit.originChainId, l1TokenCounterpart, updateAmount); -} - // TODO: Is summing up absolute values really the best way to compute a root bundle's "volume"? Said another way, // how do we measure a root bundle's "impact" or importance? 
export async function computePoolRebalanceUsdVolume( @@ -115,174 +56,6 @@ export async function computePoolRebalanceUsdVolume( }, bnZero); } -export function constructPoolRebalanceLeaves( - latestMainnetBlock: number, - runningBalances: interfaces.RunningBalances, - realizedLpFees: interfaces.RunningBalances, - configStoreClient: ConfigStoreClient, - maxL1TokenCount?: number -): interfaces.PoolRebalanceLeaf[] { - // Create one leaf per L2 chain ID. First we'll create a leaf with all L1 tokens for each chain ID, and then - // we'll split up any leaves with too many L1 tokens. - const leaves: interfaces.PoolRebalanceLeaf[] = []; - Object.keys(runningBalances) - .map((chainId) => Number(chainId)) - // Leaves should be sorted by ascending chain ID - .sort((chainIdA, chainIdB) => chainIdA - chainIdB) - .map((chainId) => { - // Sort addresses. - const sortedL1Tokens = Object.keys(runningBalances[chainId]).sort((addressA, addressB) => { - return compareAddresses(addressA, addressB); - }); - - // This begins at 0 and increments for each leaf for this { chainId, L1Token } combination. - let groupIndexForChainId = 0; - - // Split addresses into multiple leaves if there are more L1 tokens than allowed per leaf. - const maxL1TokensPerLeaf = - maxL1TokenCount || configStoreClient.getMaxRefundCountForRelayerRefundLeafForBlock(latestMainnetBlock); - for (let i = 0; i < sortedL1Tokens.length; i += maxL1TokensPerLeaf) { - const l1TokensToIncludeInThisLeaf = sortedL1Tokens.slice(i, i + maxL1TokensPerLeaf); - - const spokeTargetBalances = l1TokensToIncludeInThisLeaf.map((l1Token) => - configStoreClient.getSpokeTargetBalancesForBlock(l1Token, chainId, latestMainnetBlock) - ); - - // Build leaves using running balances and realized lp fees data for l1Token + chain, or default to - // zero if undefined. - const leafBundleLpFees = l1TokensToIncludeInThisLeaf.map( - (l1Token) => realizedLpFees[chainId]?.[l1Token] ?? bnZero - ); - const leafNetSendAmounts = l1TokensToIncludeInThisLeaf.map((l1Token, index) => - runningBalances[chainId] && runningBalances[chainId][l1Token] - ? getNetSendAmountForL1Token(spokeTargetBalances[index], runningBalances[chainId][l1Token]) - : bnZero - ); - const leafRunningBalances = l1TokensToIncludeInThisLeaf.map((l1Token, index) => - runningBalances[chainId]?.[l1Token] - ? getRunningBalanceForL1Token(spokeTargetBalances[index], runningBalances[chainId][l1Token]) - : bnZero - ); - - leaves.push({ - chainId: chainId, - bundleLpFees: leafBundleLpFees, - netSendAmounts: leafNetSendAmounts, - runningBalances: leafRunningBalances, - groupIndex: groupIndexForChainId++, - leafId: leaves.length, - l1Tokens: l1TokensToIncludeInThisLeaf, - }); - } - }); - return leaves; -} - -// Note: this function computes the intended transfer amount before considering the transfer threshold. -// A positive number indicates a transfer from hub to spoke. -export function computeDesiredTransferAmountToSpoke( - runningBalance: BigNumber, - spokePoolTargetBalance: SpokePoolTargetBalance -): BigNumber { - // Transfer is always desired if hub owes spoke. - if (runningBalance.gte(0)) { - return runningBalance; - } - - // Running balance is negative, but its absolute value is less than the spoke pool target balance threshold. - // In this case, we transfer nothing. - if (runningBalance.abs().lt(spokePoolTargetBalance.threshold)) { - return bnZero; - } - - // We are left with the case where the spoke pool is beyond the threshold. - // A transfer needs to be initiated to bring it down to the target. 
- const transferSize = runningBalance.abs().sub(spokePoolTargetBalance.target); - - // If the transferSize is < 0, this indicates that the target is still above the running balance. - // This can only happen if the threshold is less than the target. This is likely due to a misconfiguration. - // In this case, we transfer nothing until the target is exceeded. - if (transferSize.lt(0)) { - return bnZero; - } - - // Negate the transfer size because a transfer from spoke to hub is indicated by a negative number. - return transferSize.mul(-1); -} - -// If the running balance is greater than the token transfer threshold, then set the net send amount -// equal to the running balance and reset the running balance to 0. Otherwise, the net send amount should be -// 0, indicating that we do not want the data worker to trigger a token transfer between hub pool and spoke -// pool when executing this leaf. -export function getNetSendAmountForL1Token( - spokePoolTargetBalance: SpokePoolTargetBalance, - runningBalance: BigNumber -): BigNumber { - return computeDesiredTransferAmountToSpoke(runningBalance, spokePoolTargetBalance); -} - -export function getRunningBalanceForL1Token( - spokePoolTargetBalance: SpokePoolTargetBalance, - runningBalance: BigNumber -): BigNumber { - const desiredTransferAmount = computeDesiredTransferAmountToSpoke(runningBalance, spokePoolTargetBalance); - return runningBalance.sub(desiredTransferAmount); -} - -// This returns a possible next block range that could be submitted as a new root bundle, or used as a reference -// when evaluating pending root bundle. The block end numbers must be less than the latest blocks for each chain ID -// (because we can't evaluate events in the future), and greater than the expected start blocks, which are the -// greater of 0 and the latest bundle end block for an executed root bundle proposal + 1. -export function getWidestPossibleExpectedBlockRange( - chainIdListForBundleEvaluationBlockNumbers: number[], - spokeClients: { [chainId: number]: SpokePoolClient }, - endBlockBuffers: number[], - clients: Clients, - latestMainnetBlock: number, - enabledChains: number[] -): number[][] { - // We impose a buffer on the head of the chain to increase the probability that the received blocks are final. - // Reducing the latest block that we query also gives partially filled deposits slightly more buffer for relayers - // to fully fill the deposit and reduces the chance that the data worker includes a slow fill payment that gets - // filled during the challenge period. - const latestPossibleBundleEndBlockNumbers = chainIdListForBundleEvaluationBlockNumbers.map( - (chainId: number, index) => - spokeClients[chainId] && Math.max(spokeClients[chainId].latestBlockSearched - endBlockBuffers[index], 0) - ); - return chainIdListForBundleEvaluationBlockNumbers.map((chainId: number, index) => { - const lastEndBlockForChain = clients.hubPoolClient.getLatestBundleEndBlockForChain( - chainIdListForBundleEvaluationBlockNumbers, - latestMainnetBlock, - chainId - ); - - // If chain is disabled, re-use the latest bundle end block for the chain as both the start - // and end block. - if (!enabledChains.includes(chainId)) { - return [lastEndBlockForChain, lastEndBlockForChain]; - } else { - // If the latest block hasn't advanced enough from the previous proposed end block, then re-use it. It will - // be regarded as disabled by the Dataworker clients. Otherwise, add 1 to the previous proposed end block. 
- if (lastEndBlockForChain >= latestPossibleBundleEndBlockNumbers[index]) { - // @dev: Without this check, then `getNextBundleStartBlockNumber` could return `latestBlock+1` even when the - // latest block for the chain hasn't advanced, resulting in an invalid range being produced. - return [lastEndBlockForChain, lastEndBlockForChain]; - } else { - // Chain has advanced far enough including the buffer, return range from previous proposed end block + 1 to - // latest block for chain minus buffer. - return [ - clients.hubPoolClient.getNextBundleStartBlockNumber( - chainIdListForBundleEvaluationBlockNumbers, - latestMainnetBlock, - chainId - ), - latestPossibleBundleEndBlockNumbers[index], - ]; - } - } - }); -} - export function generateMarkdownForDisputeInvalidBundleBlocks( chainIdListForBundleEvaluationBlockNumbers: number[], pendingRootBundle: PendingRootBundle, @@ -315,10 +88,6 @@ export function generateMarkdownForDispute(pendingRootBundle: PendingRootBundle) ); } -export function isChainDisabled(blockRangeForChain: number[]): boolean { - return blockRangeForChain[0] === blockRangeForChain[1]; -} - export function generateMarkdownForRootBundle( hubPoolClient: HubPoolClient, chainIdListForBundleEvaluationBlockNumbers: number[], @@ -330,7 +99,7 @@ export function generateMarkdownForRootBundle( // eslint-disable-next-line @typescript-eslint/no-explicit-any relayerRefundLeaves: any[], relayerRefundRoot: string, - slowRelayLeaves: V3SlowFillLeaf[], + slowRelayLeaves: SlowFillLeaf[], slowRelayRoot: string ): string { // Create helpful logs to send to slack transport @@ -442,8 +211,8 @@ export function generateMarkdownForRootBundle( export function prettyPrintLeaves( logger: winston.Logger, - tree: MerkleTree | MerkleTree | MerkleTree, - leaves: PoolRebalanceLeaf[] | RelayerRefundLeaf[] | V3SlowFillLeaf[], + tree: MerkleTree | MerkleTree | MerkleTree, + leaves: PoolRebalanceLeaf[] | RelayerRefundLeaf[] | SlowFillLeaf[], logType = "Pool rebalance" ): void { leaves.forEach((leaf, index) => { diff --git a/src/dataworker/RelayerRefundUtils.ts b/src/dataworker/RelayerRefundUtils.ts index 120fb72c1..2a9eaaf1c 100644 --- a/src/dataworker/RelayerRefundUtils.ts +++ b/src/dataworker/RelayerRefundUtils.ts @@ -1,6 +1,5 @@ import { Refund, RelayerRefundLeaf, RelayerRefundLeafWithGroup, SpokePoolTargetBalance } from "../interfaces"; -import { BigNumber, bnZero, compareAddresses } from "../utils"; -import { getNetSendAmountForL1Token } from "./PoolRebalanceUtils"; +import { BigNumber, bnZero, compareAddresses, getNetSendAmountForL1Token } from "../utils"; export function getAmountToReturnForRelayerRefundLeaf( spokePoolTargetBalance: SpokePoolTargetBalance, diff --git a/src/finalizer/utils/linea/common.ts b/src/finalizer/utils/linea/common.ts index 75ada8abf..5de1ffe90 100644 --- a/src/finalizer/utils/linea/common.ts +++ b/src/finalizer/utils/linea/common.ts @@ -28,7 +28,7 @@ export type MessageWithStatus = Message & { txHash: string; }; -export const lineaAdapterIface = Linea_Adapter__factory.createInterface(); +export const lineaAdapterIface = Linea_Adapter__factory.createInterface() as ethers.utils.Interface; export function initLineaSdk(l1ChainId: number, l2ChainId: number): LineaSDK { return new LineaSDK({ diff --git a/src/interfaces/index.ts b/src/interfaces/index.ts index 96d133566..5d3bfc8e4 100644 --- a/src/interfaces/index.ts +++ b/src/interfaces/index.ts @@ -59,6 +59,7 @@ export type FillWithBlock = interfaces.FillWithBlock; export type SpeedUp = interfaces.SpeedUp; export type SlowFillRequest = 
interfaces.SlowFillRequest; export type SlowFillRequestWithBlock = interfaces.SlowFillRequestWithBlock; +export type SlowFillLeaf = interfaces.SlowFillLeaf; export type RootBundleRelay = interfaces.RootBundleRelay; export type RootBundleRelayWithBlock = interfaces.RootBundleRelayWithBlock; export type RelayerRefundExecution = interfaces.RelayerRefundExecution; @@ -69,12 +70,3 @@ export type TokensBridged = interfaces.TokensBridged; export const { FillType, FillStatus } = interfaces; export type CachingMechanismInterface = interfaces.CachingMechanismInterface; - -// V3 shims (to be removed later) -export type V3RelayData = interfaces.RelayData; -export type V3Deposit = interfaces.Deposit; -export type V3DepositWithBlock = interfaces.DepositWithBlock; -export type V3SpeedUp = interfaces.SpeedUp; -export type V3Fill = interfaces.Fill; -export type V3FillWithBlock = interfaces.FillWithBlock; -export type V3SlowFillLeaf = interfaces.SlowFillLeaf; diff --git a/src/relayer/Relayer.ts b/src/relayer/Relayer.ts index b1a430a2a..31826ce55 100644 --- a/src/relayer/Relayer.ts +++ b/src/relayer/Relayer.ts @@ -1,7 +1,8 @@ import assert from "assert"; import { utils as sdkUtils } from "@across-protocol/sdk"; import { utils as ethersUtils } from "ethers"; -import { FillStatus, L1Token, V3Deposit, V3DepositWithBlock } from "../interfaces"; +import { FillStatus, L1Token, Deposit, DepositWithBlock } from "../interfaces"; +import { updateSpokePoolClients } from "../common"; import { averageBlockTime, BigNumber, @@ -42,9 +43,12 @@ export class Relayer { public readonly fillStatus: { [depositHash: string]: number } = {}; private pendingTxnReceipts: { [chainId: number]: Promise } = {}; private lastLogTime = 0; + private lastMaintenance = 0; private hubPoolBlockBuffer: number; protected fillLimits: { [originChainId: number]: { fromBlock: number; limit: BigNumber }[] }; + protected inventoryChainIds: number[]; + protected updated = 0; constructor( relayerAddress: string, @@ -66,6 +70,108 @@ export class Relayer { }); this.relayerAddress = getAddress(relayerAddress); + this.inventoryChainIds = + this.config.pollingDelay === 0 ? Object.values(clients.spokePoolClients).map(({ chainId }) => chainId) : []; + } + + /** + * @description Perform one-time relayer init. Handle (for example) token approvals. + */ + async init(): Promise { + const { inventoryClient, tokenClient } = this.clients; + await tokenClient.update(); + + if (this.config.sendingRelaysEnabled) { + await tokenClient.setOriginTokenApprovals(); + } + + if (this.config.sendingRebalancesEnabled) { + await inventoryClient.setL1TokenApprovals(); + } + + this.logger.debug({ + at: "Relayer::init", + message: "Completed one-time init.", + }); + } + + /** + * @description Perform per-loop updates. + * @return True if all SpokePoolClients updated successfully, otherwise false. + */ + async update(): Promise { + const { + acrossApiClient, + configStoreClient, + hubPoolClient, + inventoryClient, + profitClient, + spokePoolClients, + tokenClient, + } = this.clients; + + // Some steps can be skipped on the first run. + if (this.updated++ > 0) { + // Clear state from profit and token clients. These should start fresh on each iteration. 
+      profitClient.clearUnprofitableFills();
+      tokenClient.clearTokenShortfall();
+      tokenClient.clearTokenData();
+
+      await configStoreClient.update();
+      await hubPoolClient.update();
+    }
+
+    await updateSpokePoolClients(spokePoolClients, [
+      "V3FundsDeposited",
+      "RequestedSpeedUpV3Deposit",
+      "FilledV3Relay",
+      "RelayedRootBundle",
+      "ExecutedRelayerRefundRoot",
+    ]);
+
+    await Promise.all([
+      acrossApiClient.update(this.config.ignoreLimits),
+      inventoryClient.update(this.inventoryChainIds),
+      tokenClient.update(),
+    ]);
+
+    return Object.values(spokePoolClients).every((spokePoolClient) => spokePoolClient.isUpdated);
+  }
+
+  /**
+   * @description Perform inventory management as needed. This is capped to 1/minute in looping mode.
+   */
+  async runMaintenance(): Promise<void> {
+    const { inventoryClient, tokenClient } = this.clients;
+
+    const currentTime = getCurrentTime();
+    if (currentTime < this.lastMaintenance + this.config.maintenanceInterval) {
+      return; // Nothing to do.
+    }
+
+    tokenClient.clearTokenData();
+    await tokenClient.update();
+    await inventoryClient.wrapL2EthIfAboveThreshold();
+
+    if (this.config.sendingRebalancesEnabled) {
+      // It's necessary to update token balances in case WETH was wrapped.
+      tokenClient.clearTokenData();
+      await tokenClient.update();
+      await inventoryClient.rebalanceInventoryIfNeeded();
+    }
+
+    // Unwrap WETH after filling deposits and after any rebalances, so unwrapped balances are current for the next run.
+    await inventoryClient.unwrapWeth();
+
+    // Placeholder: flush any stale state (e.g. deposit/fill events that are outside of the configured lookback window).
+
+    // The gap to the next maintenance run may be less than maintenanceInterval if these blocking calls are slow.
+    this.lastMaintenance = currentTime;
+
+    this.logger.debug({
+      at: "Relayer::runMaintenance",
+      message: "Completed relayer maintenance.",
+    });
  }

  /**
@@ -108,7 +214,7 @@ export class Relayer {
     const fillAmountUsd = profitClient.getFillAmountInUsd(deposit);
     if (!isDefined(fillAmountUsd)) {
       this.logger.debug({
-        at: "Relayer::evaluateFill",
+        at: "Relayer::filterDeposit",
         message: `Skipping ${srcChain} deposit due to uncertain fill amount.`,
         destinationChainId,
         outputToken: deposit.outputToken,
@@ -430,10 +536,7 @@ export class Relayer {
   }

   // @note: This method is flagged for removal after computeFillLimits() has been proven.
-  computeRequiredDepositConfirmations(
-    deposits: V3Deposit[],
-    destinationChainId: number
-  ): { [chainId: number]: number } {
+  computeRequiredDepositConfirmations(deposits: Deposit[], destinationChainId: number): { [chainId: number]: number } {
     const { profitClient, tokenClient } = this.clients;
     const { minDepositConfirmations } = this.config;
@@ -477,18 +580,29 @@ export class Relayer {
   // If all hold true then complete the fill. If there is insufficient balance to complete the fill and slow fills are
   // enabled then request a slow fill instead.
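The decision flow in the comment above reduces to three outcomes per deposit: fill, request a slow fill, or skip. A hedged sketch, where the predicates are hypothetical stand-ins for the confirmation, profitability and balance checks the relayer performs via its spoke pool, profit and token clients:

type EvalDeposit = { blockNumber: number; fromLiteChain?: boolean; toLiteChain?: boolean };

// Hypothetical predicates; the real checks live on the relayer's clients.
declare function meetsConfirmations(deposit: EvalDeposit, maxBlockNumber: number): boolean;
declare function isProfitable(deposit: EvalDeposit): boolean;
declare function hasSufficientBalance(deposit: EvalDeposit): boolean;

function classify(deposit: EvalDeposit, maxBlockNumber: number, sendSlowRelays: boolean): "fill" | "slowFill" | "skip" {
  if (!meetsConfirmations(deposit, maxBlockNumber) || !isProfitable(deposit)) {
    return "skip";
  }
  if (!hasSufficientBalance(deposit)) {
    // Slow fills are never requested for lite-chain deposits (see requestSlowFill below).
    const liteChain = Boolean(deposit.fromLiteChain || deposit.toLiteChain);
    return sendSlowRelays && !liteChain ? "slowFill" : "skip";
  }
  return "fill";
}

The method below implements this flow, and this diff additionally short-circuits it when the destination chain already has fills queued for submission: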
async evaluateFill( - deposit: V3DepositWithBlock, + deposit: DepositWithBlock, fillStatus: number, lpFees: RepaymentFee[], maxBlockNumber: number, sendSlowRelays: boolean ): Promise { - const { depositId, depositor, recipient, destinationChainId, originChainId, inputToken } = deposit; + const { depositId, depositor, recipient, destinationChainId, originChainId, inputToken, transactionHash } = deposit; const { hubPoolClient, profitClient, spokePoolClients, tokenClient } = this.clients; const { slowDepositors } = this.config; + const [originChain, destChain] = [getNetworkName(originChainId), getNetworkName(destinationChainId)]; + + if (isDefined(this.pendingTxnReceipts[destinationChainId])) { + this.logger.info({ + at: "Relayer::evaluateFill", + message: `${destChain} transaction queue has pending fills; skipping ${originChain} deposit ${depositId}...`, + originChainId, + depositId, + transactionHash, + }); + return; + } // If the deposit does not meet the minimum number of block confirmations, skip it. - const originChain = getNetworkName(originChainId); if (deposit.blockNumber > maxBlockNumber) { this.logger.debug({ at: "Relayer::evaluateFill", @@ -496,7 +610,7 @@ export class Relayer { depositId, blockNumber: deposit.blockNumber, maxBlockNumber, - transactionHash: deposit.transactionHash, + transactionHash, }); // If we're in simulation mode, skip this early exit so that the user can evaluate // the full simulation run. @@ -526,13 +640,12 @@ export class Relayer { const depositAge = Math.floor(avgBlockTime * (originSpoke.latestBlockSearched - deposit.blockNumber)); if (minFillTime > depositAge) { - const dstChain = getNetworkName(destinationChainId); this.logger.debug({ at: "Relayer::evaluateFill", - message: `Skipping ${originChain} deposit due to insufficient fill time for ${dstChain}.`, + message: `Skipping ${originChain} deposit due to insufficient fill time for ${destChain}.`, depositAge, minFillTime, - transactionHash: deposit.transactionHash, + transactionHash, }); return; } @@ -563,7 +676,7 @@ export class Relayer { blockNumber, fillAmountUsd, limits, - transactionHash: deposit.transactionHash, + transactionHash, }); return; } @@ -609,7 +722,7 @@ export class Relayer { * @param relayData An object consisting of an originChainId, inputToken, inputAmount and quoteTimestamp. * @returns A string identifying the deposit in a BatchLPFees object. */ - getLPFeeKey(relayData: Pick): string { + getLPFeeKey(relayData: Pick): string { return `${relayData.originChainId}-${relayData.inputToken}-${relayData.inputAmount}-${relayData.quoteTimestamp}`; } @@ -621,7 +734,7 @@ export class Relayer { * @returns void */ async evaluateFills( - deposits: (V3DepositWithBlock & { fillStatus: number })[], + deposits: (DepositWithBlock & { fillStatus: number })[], lpFees: BatchLPFees, maxBlockNumbers: { [chainId: number]: number }, sendSlowRelays: boolean @@ -645,7 +758,7 @@ export class Relayer { * @param deposits An array of deposits. * @returns A BatchLPFees object uniquely identifying LP fees per unique input deposit. */ - async batchComputeLpFees(deposits: V3DepositWithBlock[]): Promise { + async batchComputeLpFees(deposits: DepositWithBlock[]): Promise { const { hubPoolClient, inventoryClient } = this.clients; // We need to compute LP fees for any possible repayment chain the inventory client could select @@ -784,12 +897,12 @@ export class Relayer { * @param deposit Deposit object. * @param status Fill status (Unfilled, Filled, RequestedSlowFill). 
*/ - protected setFillStatus(deposit: V3Deposit, status: number): void { + protected setFillStatus(deposit: Deposit, status: number): void { const depositHash = this.clients.spokePoolClients[deposit.destinationChainId].getDepositHash(deposit); this.fillStatus[depositHash] = status; } - requestSlowFill(deposit: V3Deposit): void { + requestSlowFill(deposit: Deposit): void { // don't request slow fill if origin/destination chain is a lite chain if (deposit.fromLiteChain || deposit.toLiteChain) { this.logger.debug({ @@ -846,7 +959,7 @@ export class Relayer { this.setFillStatus(deposit, FillStatus.RequestedSlowFill); } - fillRelay(deposit: V3Deposit, repaymentChainId: number, realizedLpFeePct: BigNumber, gasLimit?: BigNumber): void { + fillRelay(deposit: Deposit, repaymentChainId: number, realizedLpFeePct: BigNumber, gasLimit?: BigNumber): void { const { spokePoolClients } = this.clients; this.logger.debug({ at: "Relayer::fillRelay", @@ -898,7 +1011,7 @@ export class Relayer { * or the profitability data of the most preferred repayment chain otherwise. */ protected async resolveRepaymentChain( - deposit: V3DepositWithBlock, + deposit: DepositWithBlock, hubPoolToken: L1Token, repaymentFees: RepaymentFee[] ): Promise<{ @@ -1207,11 +1320,7 @@ export class Relayer { } } - private constructRelayFilledMrkdwn( - deposit: V3Deposit, - repaymentChainId: number, - realizedLpFeePct: BigNumber - ): string { + private constructRelayFilledMrkdwn(deposit: Deposit, repaymentChainId: number, realizedLpFeePct: BigNumber): string { let mrkdwn = this.constructBaseFillMarkdown(deposit, realizedLpFeePct) + ` Relayer repayment: ${getNetworkName(repaymentChainId)}.`; @@ -1228,7 +1337,7 @@ export class Relayer { return mrkdwn; } - private constructBaseFillMarkdown(deposit: V3Deposit, _realizedLpFeePct: BigNumber): string { + private constructBaseFillMarkdown(deposit: Deposit, _realizedLpFeePct: BigNumber): string { const { symbol, decimals } = this.clients.hubPoolClient.getTokenInfoForDeposit(deposit); const srcChain = getNetworkName(deposit.originChainId); const dstChain = getNetworkName(deposit.destinationChainId); diff --git a/src/relayer/RelayerClientHelper.ts b/src/relayer/RelayerClientHelper.ts index 611f66577..b45cbdb34 100644 --- a/src/relayer/RelayerClientHelper.ts +++ b/src/relayer/RelayerClientHelper.ts @@ -17,7 +17,6 @@ import { constructSpokePoolClientsWithLookback, resolveSpokePoolActivationBlock, updateClients, - updateSpokePoolClients, } from "../common"; import { SpokePoolClientsByChain } from "../interfaces"; import { getBlockForTimestamp, getCurrentTime, getProvider, getRedisCache, Signer, SpokePool } from "../utils"; @@ -191,37 +190,3 @@ export async function constructRelayerClients( tryMulticallClient, }; } - -export async function updateRelayerClients(clients: RelayerClients, config: RelayerConfig): Promise { - // SpokePoolClient client requires up to date HubPoolClient and ConfigStore client. - const { spokePoolClients } = clients; - - // TODO: the code below can be refined by grouping with promise.all. however you need to consider the inter - // dependencies of the clients. some clients need to be updated before others. when doing this refactor consider - // having a "first run" update and then a "normal" update that considers this. see previous implementation here - // https://github.com/across-protocol/relayer/pull/37/files#r883371256 as a reference. 
- await updateSpokePoolClients(spokePoolClients, [ - "V3FundsDeposited", - "RequestedSpeedUpV3Deposit", - "FilledV3Relay", - "RelayedRootBundle", - "ExecutedRelayerRefundRoot", - ]); - - // Update the token client first so that inventory client has latest balances. - await clients.tokenClient.update(); - - // We can update the inventory client in parallel with checking for eth wrapping as these do not depend on each other. - // Cross-chain deposit tracking produces duplicates in looping mode, so in that case don't attempt it. This does not - // disable inventory management, but does make it ignorant of in-flight cross-chain transfers. The rebalancer is - // assumed to run separately from the relayer and with pollingDelay 0, so it doesn't loop and will track transfers - // correctly to avoid repeat rebalances. - const inventoryChainIds = - config.pollingDelay === 0 ? Object.values(spokePoolClients).map(({ chainId }) => chainId) : []; - await Promise.all([ - clients.acrossApiClient.update(config.ignoreLimits), - clients.inventoryClient.update(inventoryChainIds), - clients.inventoryClient.wrapL2EthIfAboveThreshold(), - config.sendingRelaysEnabled ? clients.tokenClient.setOriginTokenApprovals() : Promise.resolve(), - ]); -} diff --git a/src/relayer/RelayerConfig.ts b/src/relayer/RelayerConfig.ts index ec9437a0b..0d4dba09f 100644 --- a/src/relayer/RelayerConfig.ts +++ b/src/relayer/RelayerConfig.ts @@ -53,6 +53,10 @@ export class RelayerConfig extends CommonConfig { // The amount of runs the looping relayer will make before it logs shortfalls and unprofitable fills again. If set to the one-shot // relayer, then this environment variable will do nothing. readonly loggingInterval: number; + + // Maintenance interval (in seconds). + readonly maintenanceInterval: number; + // Set to false to skip querying max deposit limit from /limits Vercel API endpoint. Otherwise relayer will not // fill any deposit over the limit which is based on liquidReserves in the HubPool. readonly ignoreLimits: boolean; @@ -87,7 +91,8 @@ export class RelayerConfig extends CommonConfig { RELAYER_SPOKEPOOL_INDEXER_PATH, RELAYER_TRY_MULTICALL_CHAINS, RELAYER_USE_GENERIC_ADAPTER, - RELAYER_LOGGING_INTERVAL, + RELAYER_LOGGING_INTERVAL = "30", + RELAYER_MAINTENANCE_INTERVAL = "60", } = env; super(env); @@ -112,7 +117,8 @@ export class RelayerConfig extends CommonConfig { this.minRelayerFeePct = toBNWei(MIN_RELAYER_FEE_PCT || Constants.RELAYER_MIN_FEE_PCT); this.tryMulticallChains = JSON.parse(RELAYER_TRY_MULTICALL_CHAINS ?? "[]"); - this.loggingInterval = Number(RELAYER_LOGGING_INTERVAL ?? 
30); + this.loggingInterval = Number(RELAYER_LOGGING_INTERVAL); + this.maintenanceInterval = Number(RELAYER_MAINTENANCE_INTERVAL); assert( !isDefined(RELAYER_EXTERNAL_INVENTORY_CONFIG) || !isDefined(RELAYER_INVENTORY_CONFIG), diff --git a/src/relayer/index.ts b/src/relayer/index.ts index f197240c9..81306e210 100644 --- a/src/relayer/index.ts +++ b/src/relayer/index.ts @@ -1,12 +1,23 @@ import { utils as sdkUtils } from "@across-protocol/sdk"; -import { config, delay, disconnectRedisClients, getCurrentTime, getNetworkName, Signer, winston } from "../utils"; +import { + config, + delay, + disconnectRedisClients, + getCurrentTime, + getNetworkName, + getRedisCache, + Signer, + winston, +} from "../utils"; import { Relayer } from "./Relayer"; import { RelayerConfig } from "./RelayerConfig"; -import { constructRelayerClients, updateRelayerClients } from "./RelayerClientHelper"; +import { constructRelayerClients } from "./RelayerClientHelper"; import { runAPIServer } from "../api"; config(); let logger: winston.Logger; +const ACTIVE_RELAYER_EXPIRY = 600; // 10 minutes. +const { RUN_IDENTIFIER: runIdentifier, BOT_IDENTIFIER: botIdentifier = "across-relayer" } = process.env; const randomNumber = () => Math.floor(Math.random() * 1_000_000); export async function runRelayer(_logger: winston.Logger, baseSigner: Signer): Promise { @@ -15,9 +26,10 @@ export async function runRelayer(_logger: winston.Logger, baseSigner: Signer): P logger = _logger; const config = new RelayerConfig(process.env); + const { externalIndexer, pollingDelay, sendingRelaysEnabled, sendingSlowRelaysEnabled } = config; - const loop = config.pollingDelay > 0; - let stop = !loop; + const loop = pollingDelay > 0; + let stop = false; process.on("SIGHUP", () => { logger.debug({ at: "Relayer#run", @@ -26,62 +38,73 @@ export async function runRelayer(_logger: winston.Logger, baseSigner: Signer): P stop = true; }); + const redis = await getRedisCache(logger); + let activeRelayerUpdated = false; + // Explicitly don't log ignoredAddresses because it can be huge and can overwhelm log transports. const { ignoredAddresses: _ignoredConfig, ...loggedConfig } = config; logger.debug({ at: "Relayer#run", message: "Relayer started 🏃‍♂️", loggedConfig, relayerRun }); const relayerClients = await constructRelayerClients(logger, config, baseSigner); const relayer = new Relayer(await baseSigner.getAddress(), logger, relayerClients, config); - const simulate = !config.sendingRelaysEnabled; - const enableSlowFills = config.sendingSlowRelaysEnabled; + await relayer.init(); + + const { spokePoolClients } = relayerClients; + const simulate = !sendingRelaysEnabled; + let txnReceipts: { [chainId: number]: Promise } = {}; logger.info({ at: "Relayer#run", message: "Starting relayer API server." }); await runAPIServer(logger); - let run = 1; - let txnReceipts: { [chainId: number]: Promise }; try { - do { + for (let run = 1; !stop; ++run) { if (loop) { logger.debug({ at: "relayer#run", message: `Starting relayer execution loop ${run}.` }); } const tLoopStart = performance.now(); - if (run !== 1) { - await relayerClients.configStoreClient.update(); - await relayerClients.hubPoolClient.update(); + const ready = await relayer.update(); + const activeRelayer = redis ? await redis.get(botIdentifier) : undefined; + + // If there is another active relayer, allow up to 10 update cycles for this instance to be ready, + // then proceed unconditionally to protect against any RPC outages blocking the relayer. 
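Alongside this readiness backoff, the rewritten loop treats a Redis key as a soft lock for zero-downtime handover: the key is named by BOT_IDENTIFIER, holds the claiming instance's RUN_IDENTIFIER, and expires after ACTIVE_RELAYER_EXPIRY seconds. A sketch of the protocol, assuming only a minimal get/set-with-TTL cache interface (the names below are illustrative):

interface Cache {
  get(key: string): Promise<string | undefined>;
  set(key: string, value: string, ttlSeconds: number): Promise<void>;
}

// Returns true once a newer instance has overwritten this instance's claim.
async function handoverRequested(redis: Cache, botId: string, runId: string, state: { claimed: boolean }): Promise<boolean> {
  const active = await redis.get(botId);
  if (active === runId) {
    return false; // This instance holds the key; keep relaying.
  }
  if (!state.claimed) {
    await redis.set(botId, runId, 600); // Claim the key; the incumbent sees the change and winds down.
    state.claimed = true;
    return false;
  }
  return true; // Our claim was overwritten by a newer instance; exit after this iteration.
}

Both the backoff described in the comment above and this handover check run at the top of each loop iteration: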
+ if (!ready && activeRelayer && run < 10) { + const runTime = Math.round((performance.now() - tLoopStart) / 1000); + const delta = pollingDelay - runTime; + logger.debug({ at: "Relayer#run", message: `Not ready to relay, waiting ${delta} seconds.` }); + await delay(delta); + continue; } - await updateRelayerClients(relayerClients, config); - - // Since the above spoke pool updates are slow, refresh token client before sending rebalances now: - relayerClients.tokenClient.clearTokenData(); - await relayerClients.tokenClient.update(); - txnReceipts = await relayer.checkForUnfilledDepositsAndFill(enableSlowFills, simulate); - - // Unwrap WETH after filling deposits so we don't mess up slow fill logic, but before rebalancing - // any tokens so rebalancing can take into account unwrapped WETH balances. - await relayerClients.inventoryClient.unwrapWeth(); - - if (config.sendingRebalancesEnabled) { - // Since the above spoke pool updates are slow, refresh token client before sending rebalances now: - relayerClients.tokenClient.clearTokenData(); - await relayerClients.tokenClient.update(); - await relayerClients.inventoryClient.setL1TokenApprovals(); - await relayerClients.inventoryClient.rebalanceInventoryIfNeeded(); + + // Signal to any existing relayer that a handover is underway, or alternatively + // check for handover initiated by another (newer) relayer instance. + if (loop && runIdentifier && redis) { + if (activeRelayer !== runIdentifier) { + if (!activeRelayerUpdated) { + await redis.set(botIdentifier, runIdentifier, ACTIVE_RELAYER_EXPIRY); + activeRelayerUpdated = true; + } else { + logger.debug({ at: "Relayer#run", message: `Handing over to ${botIdentifier} instance ${activeRelayer}.` }); + stop = true; + } + } } - // Clear state from profit and token clients. These are updated on every iteration and should start fresh. - relayerClients.profitClient.clearUnprofitableFills(); - relayerClients.tokenClient.clearTokenShortfall(); + if (!stop) { + txnReceipts = await relayer.checkForUnfilledDepositsAndFill(sendingSlowRelaysEnabled, simulate); + await relayer.runMaintenance(); + } - if (loop) { + if (!loop) { + stop = true; + } else { const runTime = Math.round((performance.now() - tLoopStart) / 1000); logger.debug({ at: "Relayer#run", - message: `Completed relayer execution loop ${run++} in ${runTime} seconds.`, + message: `Completed relayer execution loop ${run} in ${runTime} seconds.`, }); - if (!stop && runTime < config.pollingDelay) { - const delta = config.pollingDelay - runTime; + if (!stop && runTime < pollingDelay) { + const delta = pollingDelay - runTime; logger.debug({ at: "relayer#run", message: `Waiting ${delta} s before next loop.`, @@ -89,7 +112,7 @@ export async function runRelayer(_logger: winston.Logger, baseSigner: Signer): P await delay(delta); } } - } while (!stop); + } // Before exiting, wait for transaction submission to complete. 
for (const [chainId, submission] of Object.entries(txnReceipts)) { @@ -105,8 +128,8 @@ export async function runRelayer(_logger: winston.Logger, baseSigner: Signer): P } finally { await disconnectRedisClients(logger); - if (config.externalIndexer) { - Object.values(relayerClients.spokePoolClients).map((spokePoolClient) => spokePoolClient.stopWorker()); + if (externalIndexer) { + Object.values(spokePoolClients).map((spokePoolClient) => spokePoolClient.stopWorker()); } } diff --git a/src/scripts/validateRootBundle.ts b/src/scripts/validateRootBundle.ts index ce87868fc..b21d3255c 100644 --- a/src/scripts/validateRootBundle.ts +++ b/src/scripts/validateRootBundle.ts @@ -22,15 +22,16 @@ import { getDisputeForTimestamp, disconnectRedisClients, Signer, + getEndBlockBuffers, + getWidestPossibleExpectedBlockRange, } from "../utils"; import { constructSpokePoolClientsForFastDataworker, getSpokePoolClientEventSearchConfigsForFastDataworker, } from "../dataworker/DataworkerClientHelper"; import { PendingRootBundle, ProposedRootBundle } from "../interfaces"; -import { getWidestPossibleExpectedBlockRange } from "../dataworker/PoolRebalanceUtils"; import { createDataworker } from "../dataworker"; -import { getBlockForChain, getEndBlockBuffers } from "../dataworker/DataworkerUtils"; +import { getBlockForChain } from "../dataworker/DataworkerUtils"; config(); let logger: winston.Logger; @@ -47,7 +48,7 @@ export async function validate(_logger: winston.Logger, baseSigner: Signer): Pro // enough data to limit # of excess historical deposit queries. // - SPOKE_ROOTS_LOOKBACK_COUNT unused in this script so set to something < DATAWORKER_FAST_LOOKBACK_COUNT // to avoid configuration error. - process.env.DATAWORKER_FAST_LOOKBACK_COUNT = "8"; + process.env.DATAWORKER_FAST_LOOKBACK_COUNT = "10"; process.env.SPOKE_ROOTS_LOOKBACK_COUNT = "1"; const { clients, config, dataworker } = await createDataworker(logger, baseSigner); logger[startupLogLevel(config)]({ diff --git a/src/scripts/validateRunningBalances.ts b/src/scripts/validateRunningBalances.ts index d09fd131a..9c6c6593a 100644 --- a/src/scripts/validateRunningBalances.ts +++ b/src/scripts/validateRunningBalances.ts @@ -49,13 +49,14 @@ import { disconnectRedisClients, Signer, getSigner, + getEndBlockBuffers, + getWidestPossibleExpectedBlockRange, assert, CHAIN_IDs, } from "../utils"; import { createDataworker } from "../dataworker"; -import { getWidestPossibleExpectedBlockRange } from "../dataworker/PoolRebalanceUtils"; -import { getBlockForChain, getEndBlockBuffers } from "../dataworker/DataworkerUtils"; -import { ProposedRootBundle, SpokePoolClientsByChain, V3SlowFillLeaf } from "../interfaces"; +import { getBlockForChain } from "../dataworker/DataworkerUtils"; +import { ProposedRootBundle, SpokePoolClientsByChain, SlowFillLeaf } from "../interfaces"; import { CONTRACT_ADDRESSES, constructSpokePoolClientsWithStartBlocks, updateSpokePoolClients } from "../common"; import { createConsoleTransport } from "@uma/logger"; @@ -469,7 +470,7 @@ export async function runScript(_logger: winston.Logger, baseSigner: Signer): Pr bundle: ProposedRootBundle, olderBundle: ProposedRootBundle, futureBundle: ProposedRootBundle - ): Promise<{ slowFills: V3SlowFillLeaf[]; bundleSpokePoolClients: SpokePoolClientsByChain }> { + ): Promise<{ slowFills: SlowFillLeaf[]; bundleSpokePoolClients: SpokePoolClientsByChain }> { // Construct custom spoke pool clients to query events needed to build slow roots. 
const spokeClientFromBlocks = Object.fromEntries( dataworker.chainIdListForBundleEvaluationBlockNumbers.map((chainId, i) => { diff --git a/src/utils/FillUtils.ts b/src/utils/FillUtils.ts index d79028ad3..cce7d59b9 100644 --- a/src/utils/FillUtils.ts +++ b/src/utils/FillUtils.ts @@ -1,53 +1,9 @@ -import { utils as sdkUtils } from "@across-protocol/sdk"; import { HubPoolClient } from "../clients"; -import { Fill, FillStatus, SpokePoolClientsByChain, V3DepositWithBlock } from "../interfaces"; +import { Fill, FillStatus, SpokePoolClientsByChain, DepositWithBlock } from "../interfaces"; import { bnZero } from "../utils"; -import { getBlockRangeForChain } from "../dataworker/DataworkerUtils"; -export function getRefundInformationFromFill( - fill: Fill, - hubPoolClient: HubPoolClient, - blockRangesForChains: number[][], - chainIdListForBundleEvaluationBlockNumbers: number[], - fromLiteChain: boolean -): { - chainToSendRefundTo: number; - repaymentToken: string; -} { - // Handle slow relay where repaymentChainId = 0. Slow relays always pay recipient on destination chain. - // So, save the slow fill under the destination chain, and save the fast fill under its repayment chain. - let chainToSendRefundTo = sdkUtils.isSlowFill(fill) ? fill.destinationChainId : fill.repaymentChainId; - // If the fill is for a deposit originating from the lite chain, the repayment chain is the origin chain - // regardless of whether it is a slow or fast fill (we ignore slow fills but this is for posterity). - if (fromLiteChain) { - chainToSendRefundTo = fill.originChainId; - } - - // Save fill data and associate with repayment chain and L2 token refund should be denominated in. - const endBlockForMainnet = getBlockRangeForChain( - blockRangesForChains, - hubPoolClient.chainId, - chainIdListForBundleEvaluationBlockNumbers - )[1]; - - const l1TokenCounterpart = hubPoolClient.getL1TokenForL2TokenAtBlock( - fill.inputToken, - fill.originChainId, - endBlockForMainnet - ); - - const repaymentToken = hubPoolClient.getL2TokenForL1TokenAtBlock( - l1TokenCounterpart, - chainToSendRefundTo, - endBlockForMainnet - ); - return { - chainToSendRefundTo, - repaymentToken, - }; -} export type RelayerUnfilledDeposit = { - deposit: V3DepositWithBlock; + deposit: DepositWithBlock; version: number; invalidFills: Fill[]; }; diff --git a/src/utils/MerkleTreeUtils.ts b/src/utils/MerkleTreeUtils.ts index 3b299d436..6d83ff3c6 100644 --- a/src/utils/MerkleTreeUtils.ts +++ b/src/utils/MerkleTreeUtils.ts @@ -1,9 +1,9 @@ import { MerkleTree, EMPTY_MERKLE_ROOT } from "@across-protocol/contracts"; -import { PoolRebalanceLeaf, RelayerRefundLeaf, RelayerRefundLeafWithGroup, V3SlowFillLeaf } from "../interfaces"; +import { RelayerRefundLeaf, RelayerRefundLeafWithGroup, SlowFillLeaf } from "../interfaces"; import { getParamType, utils } from "."; -export function buildSlowRelayTree(relays: V3SlowFillLeaf[]): MerkleTree { - const hashFn = (input: V3SlowFillLeaf) => { +export function buildSlowRelayTree(relays: SlowFillLeaf[]): MerkleTree { + const hashFn = (input: SlowFillLeaf) => { const verifyFn = "verifyV3SlowRelayFulfillment"; const paramType = getParamType("MerkleLibTest", verifyFn, "slowFill"); return utils.keccak256(utils.defaultAbiCoder.encode([paramType], [input])); @@ -11,28 +11,6 @@ export function buildSlowRelayTree(relays: V3SlowFillLeaf[]): MerkleTree { - for (let i = 0; i < poolRebalanceLeaves.length; i++) { - // The 4 provided parallel arrays must be of equal length. 
Running Balances can optionally be 2x the length - if ( - poolRebalanceLeaves[i].l1Tokens.length !== poolRebalanceLeaves[i].bundleLpFees.length || - poolRebalanceLeaves[i].netSendAmounts.length !== poolRebalanceLeaves[i].bundleLpFees.length - ) { - throw new Error("Provided lef arrays are not of equal length"); - } - if ( - poolRebalanceLeaves[i].runningBalances.length !== poolRebalanceLeaves[i].bundleLpFees.length * 2 && - poolRebalanceLeaves[i].runningBalances.length !== poolRebalanceLeaves[i].bundleLpFees.length - ) { - throw new Error("Running balances length unexpected"); - } - } - - const paramType = getParamType("MerkleLibTest", "verifyPoolRebalance", "rebalance"); - const hashFn = (input: PoolRebalanceLeaf) => utils.keccak256(utils.defaultAbiCoder.encode([paramType], [input])); - return new MerkleTree(poolRebalanceLeaves, hashFn); -} - export function buildRelayerRefundTree(relayerRefundLeaves: RelayerRefundLeaf[]): MerkleTree { for (let i = 0; i < relayerRefundLeaves.length; i++) { // The 2 provided parallel arrays must be of equal length. diff --git a/src/utils/ProviderUtils.ts b/src/utils/ProviderUtils.ts index be7a41458..108f37e7a 100644 --- a/src/utils/ProviderUtils.ts +++ b/src/utils/ProviderUtils.ts @@ -3,7 +3,7 @@ import { providers as sdkProviders } from "@across-protocol/sdk"; import { ethers } from "ethers"; import winston from "winston"; import { CHAIN_CACHE_FOLLOW_DISTANCE, DEFAULT_NO_TTL_DISTANCE } from "../common"; -import { delay, getOriginFromURL } from "./"; +import { delay, getOriginFromURL, Logger } from "./"; import { getRedisCache } from "./RedisUtils"; import { isDefined } from "./TypeGuards"; @@ -45,7 +45,11 @@ export function getChainQuorum(chainId: number): number { * with a redis client attached so that all RPC requests are cached. Will load the provider from an in memory * "provider cache" if this function was called once before with the same chain ID. */ -export async function getProvider(chainId: number, logger?: winston.Logger, useCache = true): Promise { +export async function getProvider( + chainId: number, + logger: winston.Logger = Logger, + useCache = true +): Promise { const redisClient = await getRedisCache(logger); if (useCache) { const cachedProvider = providerCache[getProviderCacheKey(chainId, redisClient !== undefined)]; @@ -152,6 +156,7 @@ export async function getProvider(chainId: number, logger?: winston.Logger, useC allowGzip: true, throttleSlotInterval: 1, // Effectively disables ethers' internal backoff algorithm. throttleCallback: rpcRateLimited({ nodeMaxConcurrency, logger }), + errorPassThrough: true, }, chainId, ] @@ -169,7 +174,8 @@ export async function getProvider(chainId: number, logger?: winston.Logger, useC redisClient, disableProviderCache ? undefined : standardTtlBlockDistance, disableNoTtlCaching ? 
undefined : noTtlBlockDistance, - providerCacheTtl + providerCacheTtl, + logger ); if (useCache) { diff --git a/src/utils/SDKUtils.ts b/src/utils/SDKUtils.ts index 1ff02bf1c..a417c6de7 100644 --- a/src/utils/SDKUtils.ts +++ b/src/utils/SDKUtils.ts @@ -51,3 +51,13 @@ export const { getL1TokenInfo, getUsdcSymbol, } = sdk.utils; + +export const { + getRefundsFromBundle, + isChainDisabled, + getWidestPossibleExpectedBlockRange, + getEndBlockBuffers, + buildPoolRebalanceLeafTree, + getNetSendAmountForL1Token, + _buildPoolRebalanceRoot, +} = sdk.clients.BundleDataClient; diff --git a/src/utils/SuperstructUtils.ts b/src/utils/SuperstructUtils.ts index aa6537434..49415b0aa 100644 --- a/src/utils/SuperstructUtils.ts +++ b/src/utils/SuperstructUtils.ts @@ -1,136 +1,4 @@ -import { - object, - min, - number, - optional, - string, - array, - record, - coerce, - instance, - integer, - pattern, - boolean, -} from "superstruct"; -import { BigNumber } from "ethers"; - -const PositiveIntegerStringSS = pattern(string(), /\d+/); -const Web3AddressSS = pattern(string(), /^0x[a-fA-F0-9]{40}$/); - -const BigNumberType = coerce(instance(BigNumber), string(), (value) => { - try { - // Attempt to convert the string to a BigNumber - return BigNumber.from(value); - } catch (error) { - // In case of any error during conversion, return the original value - // This will lead to a validation error, as the resulting value won't match the expected BigNumber type - return value; - } -}); - -const FillTypeSS = number(); - -const V3RelayDataSS = { - inputToken: string(), - inputAmount: BigNumberType, - outputToken: string(), - outputAmount: BigNumberType, - fillDeadline: number(), - exclusiveRelayer: string(), - exclusivityDeadline: number(), - originChainId: number(), - depositor: string(), - recipient: string(), - depositId: number(), - message: string(), -}; - -const SortableEventSS = { - blockNumber: number(), - transactionIndex: number(), - logIndex: number(), - transactionHash: string(), -}; - -const V3DepositSS = { - fromLiteChain: optional(boolean()), - toLiteChain: optional(boolean()), - destinationChainId: number(), - quoteTimestamp: number(), - relayerFeePct: optional(BigNumberType), - speedUpSignature: optional(string()), - updatedRecipient: optional(string()), - updatedOutputAmount: optional(BigNumberType), - updatedMessage: optional(string()), -}; - -const _V3DepositWithBlockSS = { - quoteBlockNumber: number(), - ...V3DepositSS, - ...SortableEventSS, - ...V3RelayDataSS, -}; - -const V3DepositWithBlockSS = object(_V3DepositWithBlockSS); -const V3DepositWithBlockLpFeeSS = object({ - ..._V3DepositWithBlockSS, - lpFeePct: BigNumberType, -}); - -const V3RelayExecutionEventInfoSS = object({ - updatedOutputAmount: BigNumberType, - fillType: FillTypeSS, - updatedRecipient: string(), - updatedMessage: string(), -}); - -const V3FillSS = { - ...V3RelayDataSS, - destinationChainId: number(), - relayer: string(), - repaymentChainId: number(), - relayExecutionInfo: V3RelayExecutionEventInfoSS, - quoteTimestamp: number(), -}; - -const V3FillWithBlockSS = { - ...SortableEventSS, - ...V3FillSS, -}; - -const BundleFillV3SS = object({ - ...V3FillWithBlockSS, - lpFeePct: BigNumberType, -}); - -const nestedV3DepositRecordSS = record(PositiveIntegerStringSS, record(Web3AddressSS, array(V3DepositWithBlockSS))); -const nestedV3DepositRecordWithLpFeePctSS = record( - PositiveIntegerStringSS, - record(Web3AddressSS, array(V3DepositWithBlockLpFeeSS)) -); - -const nestedV3BundleFillsSS = record( - // Must be a chainId - 
PositiveIntegerStringSS, - record( - Web3AddressSS, - object({ - fills: array(BundleFillV3SS), - refunds: record(string(), BigNumberType), - totalRefundAmount: BigNumberType, - realizedLpFees: BigNumberType, - }) - ) -); - -export const BundleDataSS = object({ - bundleBlockRanges: array(array(number())), - bundleDepositsV3: nestedV3DepositRecordSS, - expiredDepositsToRefundV3: nestedV3DepositRecordSS, - unexecutableSlowFills: nestedV3DepositRecordWithLpFeePctSS, - bundleSlowFillsV3: nestedV3DepositRecordWithLpFeePctSS, - bundleFillsV3: nestedV3BundleFillsSS, -}); +import { object, min, string, integer } from "superstruct"; export const EventsAddedMessage = object({ blockNumber: min(integer(), 0), diff --git a/test/Dataworker.blockRangeUtils.ts b/test/Dataworker.blockRangeUtils.ts index 5dc48f564..418828bc9 100644 --- a/test/Dataworker.blockRangeUtils.ts +++ b/test/Dataworker.blockRangeUtils.ts @@ -4,13 +4,12 @@ import { setupDataworker } from "./fixtures/Dataworker.Fixture"; // Tested import { DataworkerClients } from "../src/dataworker/DataworkerClientHelper"; import { HubPoolClient, SpokePoolClient } from "../src/clients"; -import { getWidestPossibleExpectedBlockRange } from "../src/dataworker/PoolRebalanceUtils"; import { originChainId } from "./constants"; -import { blockRangesAreInvalidForSpokeClients, getEndBlockBuffers } from "../src/dataworker/DataworkerUtils"; +import { blockRangesAreInvalidForSpokeClients } from "../src/dataworker/DataworkerUtils"; import { getDeployedBlockNumber } from "@across-protocol/contracts"; import { MockHubPoolClient, MockSpokePoolClient } from "./mocks"; import { getTimestampsForBundleEndBlocks } from "../src/utils/BlockUtils"; -import { assert } from "../src/utils"; +import { assert, Contract, getEndBlockBuffers, getWidestPossibleExpectedBlockRange } from "../src/utils"; import { CONSERVATIVE_BUNDLE_FREQUENCY_SECONDS } from "../src/common"; let dataworkerClients: DataworkerClients; @@ -264,10 +263,10 @@ describe("Dataworker block range-related utility methods", async function () { // Create a fake spoke pool so we can manipulate the fill deadline buffer. Make sure it returns a realistic // current time so that computing bundle end block timestamps gives us realistic numbers. 
const fakeSpokePool = await smock.fake(originSpokePoolClient.spokePool.interface); - fakeSpokePool.getCurrentTime.returns(originSpokePoolClient.currentTime); + fakeSpokePool.getCurrentTime.returns((originSpokePoolClient as unknown as { currentTime: number }).currentTime); const mockSpokePoolClient = new MockSpokePoolClient( originSpokePoolClient.logger, - fakeSpokePool, + fakeSpokePool as unknown as Contract, originSpokePoolClient.chainId, originSpokePoolClient.eventSearchConfig.fromBlock - 1 // Set deployment block less than eventSearchConfig.fromBlock // to force blockRangesAreInvalidForSpokeClients to compare the client's oldestTime() with its diff --git a/test/InventoryClient.RefundChain.ts b/test/InventoryClient.RefundChain.ts index bb7770fbc..9906214c4 100644 --- a/test/InventoryClient.RefundChain.ts +++ b/test/InventoryClient.RefundChain.ts @@ -18,7 +18,7 @@ import { import { ConfigStoreClient, InventoryClient } from "../src/clients"; // Tested import { CrossChainTransferClient } from "../src/clients/bridges"; -import { V3Deposit, InventoryConfig } from "../src/interfaces"; +import { Deposit, InventoryConfig } from "../src/interfaces"; import { CHAIN_IDs, ZERO_ADDRESS, bnZero, getNetworkName, TOKEN_SYMBOLS_MAP } from "../src/utils"; import { MockAdapterManager, @@ -38,7 +38,7 @@ describe("InventoryClient: Refund chain selection", async function () { let bundleDataClient: MockBundleDataClient; let owner: SignerWithAddress, spy: sinon.SinonSpy, spyLogger: winston.Logger; let inventoryClient: InventoryClient; // tested - let sampleDepositData: V3Deposit; + let sampleDepositData: Deposit; let crossChainTransferClient: CrossChainTransferClient; // construct two mappings of chainId to token address. Set the l1 token address to the "real" token address. 
@@ -94,7 +94,7 @@ describe("InventoryClient: Refund chain selection", async function () { }); }; - const computeOutputAmount = async (deposit: V3Deposit) => { + const computeOutputAmount = async (deposit: Deposit) => { return deposit.inputAmount; }; diff --git a/test/ProfitClient.ConsiderProfitability.ts b/test/ProfitClient.ConsiderProfitability.ts index 4693d4ebb..e94e1d152 100644 --- a/test/ProfitClient.ConsiderProfitability.ts +++ b/test/ProfitClient.ConsiderProfitability.ts @@ -2,7 +2,7 @@ import { assert } from "chai"; import { random } from "lodash"; import { constants as sdkConstants, utils as sdkUtils } from "@across-protocol/sdk"; import { ConfigStoreClient, FillProfit, SpokePoolClient } from "../src/clients"; -import { V3Deposit } from "../src/interfaces"; +import { Deposit } from "../src/interfaces"; import { bnZero, bnOne, @@ -41,7 +41,7 @@ describe("ProfitClient: Consider relay profit", () => { const lpFeePct = toBNWei(1).div(1e4); const relayerFeePct = toBNWei(1).div(1e4); const gasFeePct = toBNWei(1).div(1e4); - const v3DepositTemplate: V3Deposit = { + const v3DepositTemplate: Deposit = { originChainId, depositId: 1, destinationChainId, @@ -56,6 +56,8 @@ describe("ProfitClient: Consider relay profit", () => { fillDeadline: now, exclusivityDeadline: 0, exclusiveRelayer: ZERO_ADDRESS, + fromLiteChain: false, + toLiteChain: false, }; const chainIds = [originChainId, destinationChainId]; diff --git a/test/Relayer.BasicFill.ts b/test/Relayer.BasicFill.ts index c3021e45e..731796c46 100644 --- a/test/Relayer.BasicFill.ts +++ b/test/Relayer.BasicFill.ts @@ -1,6 +1,6 @@ import { clients, constants, utils as sdkUtils } from "@across-protocol/sdk"; import { AcrossApiClient, ConfigStoreClient, MultiCallerClient, TokenClient } from "../src/clients"; -import { FillStatus, V3Deposit, V3RelayData } from "../src/interfaces"; +import { FillStatus, Deposit, RelayData } from "../src/interfaces"; import { CONFIG_STORE_VERSION } from "../src/common"; import { averageBlockTime, bnZero, bnOne, bnUint256Max, getNetworkName, getAllUnfilledDeposits } from "../src/utils"; import { Relayer } from "../src/relayer/Relayer"; @@ -191,7 +191,7 @@ describe("Relayer: Check for Unfilled Deposits and Fill", async function () { describe("Relayer: Check for Unfilled v3 Deposits and Fill", async function () { // Helper for quickly computing fill amounts. - const getFillAmount = (relayData: V3RelayData, tokenPrice: BigNumber): BigNumber => + const getFillAmount = (relayData: RelayData, tokenPrice: BigNumber): BigNumber => relayData.outputAmount.mul(tokenPrice).div(fixedPoint); const findOriginChainLimitIdx = ( @@ -422,7 +422,7 @@ describe("Relayer: Check for Unfilled Deposits and Fill", async function () { it("Ignores exclusive deposits", async function () { const currentTime = (await spokePool_2.getCurrentTime()).toNumber(); const exclusivityDeadline = currentTime + 7200; - const deposits: V3Deposit[] = []; + const deposits: Deposit[] = []; const { fillStatus, relayerAddress } = relayerInstance; // Make two deposits - one with the relayer as exclusiveRelayer, and one with a random address. 
diff --git a/test/cross-chain-adapters/Linea.ts b/test/cross-chain-adapters/Linea.ts deleted file mode 100644 index 55de999fe..000000000 --- a/test/cross-chain-adapters/Linea.ts +++ /dev/null @@ -1,236 +0,0 @@ -import { CHAIN_IDs, TOKEN_SYMBOLS_MAP } from "@across-protocol/constants"; -import { SpokePoolClient } from "../../src/clients"; -import { LineaAdapter } from "../../src/clients/bridges/LineaAdapter"; -import { ethers, getContractFactory, Contract, randomAddress, expect, createRandomBytes32, toBN } from "../utils"; -import { utils } from "@across-protocol/sdk"; -import { ZERO_ADDRESS } from "@uma/common"; -import { CONTRACT_ADDRESSES } from "../../src/common"; - -describe("Cross Chain Adapter: Linea", async function () { - let adapter: LineaAdapter; - let monitoredEoa: string; - let l1Token, l1USDCToken, l1WETHToken: string; - - let wethBridgeContract: Contract; - let usdcBridgeContract: Contract; - let erc20BridgeContract: Contract; - let searchConfig: utils.EventSearchConfig; - - beforeEach(async function () { - searchConfig = { - fromBlock: 0, - toBlock: 1_000_000, - }; - const [deployer] = await ethers.getSigners(); - - monitoredEoa = randomAddress(); - l1Token = TOKEN_SYMBOLS_MAP.WBTC.addresses[CHAIN_IDs.MAINNET]; - l1USDCToken = TOKEN_SYMBOLS_MAP.USDC.addresses[CHAIN_IDs.MAINNET]; - l1WETHToken = TOKEN_SYMBOLS_MAP.WETH.addresses[CHAIN_IDs.MAINNET]; - - const spokePool = await (await getContractFactory("MockSpokePool", deployer)).deploy(ZERO_ADDRESS); - - const l2SpokePoolClient = new SpokePoolClient(null, spokePool, null, CHAIN_IDs.LINEA, 0, { - fromBlock: 0, - }); - const l1SpokePoolClient = new SpokePoolClient(null, spokePool, null, CHAIN_IDs.MAINNET, 0, { - fromBlock: 0, - }); - adapter = new LineaAdapter( - null, - { - [CHAIN_IDs.LINEA]: l2SpokePoolClient, - [CHAIN_IDs.MAINNET]: l1SpokePoolClient, - }, // Don't need spoke pool clients for this test - [] // monitored address doesn't matter for this test since we inject it into the function - ); - - wethBridgeContract = await (await getContractFactory("LineaWethBridge", deployer)).deploy(); - usdcBridgeContract = await (await getContractFactory("LineaUsdcBridge", deployer)).deploy(); - erc20BridgeContract = await (await getContractFactory("LineaERC20Bridge", deployer)).deploy(); - }); - - describe("WETH", function () { - it("Get L1 initiated events", async function () { - // Emit events: - // - some with monitored address as sender - // - some with monitored address as recipient - // Function should return only events with recipient equal - // to monitored address and value greater than 0 - await wethBridgeContract.emitMessageSent(randomAddress(), monitoredEoa, 0); - await wethBridgeContract.emitMessageSent(monitoredEoa, randomAddress(), 0); - await wethBridgeContract.emitMessageSent(randomAddress(), monitoredEoa, 1); - await wethBridgeContract.emitMessageSent(monitoredEoa, randomAddress(), 1); - const result = await adapter.getWethDepositInitiatedEvents(wethBridgeContract, monitoredEoa, searchConfig); - expect(result.length).to.equal(1); - expect(result[0].args._to).to.equal(monitoredEoa); - expect(result[0].args._value).to.equal(1); - }); - it("Get L2 finalized events", async function () { - // Function should return only finalized events that match - // on message hash. 
- const messageHash = createRandomBytes32(); - const otherMessageHash = createRandomBytes32(); - await wethBridgeContract.emitMessageClaimed(messageHash); - await wethBridgeContract.emitMessageClaimed(otherMessageHash); - const result = await adapter.getWethDepositFinalizedEvents(wethBridgeContract, [messageHash], searchConfig); - expect(result.length).to.equal(1); - expect(result[0].args._messageHash).to.equal(messageHash); - }); - it("Matches L1 and L2 events", async function () { - const messageHash = createRandomBytes32(); - await wethBridgeContract.emitMessageSentWithMessageHash(randomAddress(), monitoredEoa, 1, messageHash); - await wethBridgeContract.emitMessageClaimed(messageHash); - const l1Events = await adapter.getWethDepositInitiatedEvents(wethBridgeContract, monitoredEoa, searchConfig); - const l2Events = await adapter.getWethDepositFinalizedEvents(wethBridgeContract, [messageHash], searchConfig); - - let outstandingTransfers = {}; - - // 1. If l1 and l2 events pair off, outstanding transfers will be empty - adapter.matchWethDepositEvents(l1Events, l2Events, outstandingTransfers, monitoredEoa, l1WETHToken); - expect(outstandingTransfers).to.deep.equal({}); - - // 2. If finalized event is missing, there will be an outstanding transfer. - outstandingTransfers = {}; - adapter.matchWethDepositEvents(l1Events, [], outstandingTransfers, monitoredEoa, l1WETHToken); - expect( - outstandingTransfers[monitoredEoa][l1WETHToken][TOKEN_SYMBOLS_MAP.WETH.addresses[CHAIN_IDs.LINEA]] - ).to.deep.equal({ - totalAmount: toBN(1), - depositTxHashes: l1Events.map((e) => e.transactionHash), - }); - }); - }); - describe("USDC", function () { - it("Get L1 initiated events", async function () { - await usdcBridgeContract.emitDeposited(randomAddress(), monitoredEoa); - await usdcBridgeContract.emitDeposited(monitoredEoa, randomAddress()); - const result = await adapter.getUsdcDepositInitiatedEvents(usdcBridgeContract, monitoredEoa, searchConfig); - expect(result.length).to.equal(1); - expect(result[0].args.to).to.equal(monitoredEoa); - }); - it("Get L2 finalized events", async function () { - await usdcBridgeContract.emitReceivedFromOtherLayer(randomAddress()); - await usdcBridgeContract.emitReceivedFromOtherLayer(monitoredEoa); - const result = await adapter.getUsdcDepositFinalizedEvents(usdcBridgeContract, monitoredEoa, searchConfig); - expect(result.length).to.equal(1); - expect(result[0].args.recipient).to.equal(monitoredEoa); - }); - it("Matches L1 and L2 events", async function () { - await usdcBridgeContract.emitDeposited(randomAddress(), monitoredEoa); - await usdcBridgeContract.emitReceivedFromOtherLayer(monitoredEoa); - const l1Events = await adapter.getUsdcDepositInitiatedEvents(usdcBridgeContract, monitoredEoa, searchConfig); - const l2Events = await adapter.getUsdcDepositFinalizedEvents(usdcBridgeContract, monitoredEoa, searchConfig); - - let outstandingTransfers = {}; - - // 1. If l1 and l2 events pair off, outstanding transfers will be empty - adapter.matchUsdcDepositEvents(l1Events, l2Events, outstandingTransfers, monitoredEoa, l1USDCToken); - expect(outstandingTransfers).to.deep.equal({}); - - // 2. If finalized event is missing, there will be an outstanding transfer. 
- outstandingTransfers = {}; - adapter.matchUsdcDepositEvents(l1Events, [], outstandingTransfers, monitoredEoa, l1USDCToken); - expect( - outstandingTransfers[monitoredEoa][l1USDCToken][TOKEN_SYMBOLS_MAP["USDC.e"].addresses[CHAIN_IDs.LINEA]] - ).to.deep.equal({ - totalAmount: toBN(0), - depositTxHashes: l1Events.map((e) => e.transactionHash), - }); - }); - }); - describe("ERC20", function () { - it("Get L1 initiated events", async function () { - await erc20BridgeContract.emitBridgingInitiated(randomAddress(), monitoredEoa, l1Token); - await erc20BridgeContract.emitBridgingInitiated(monitoredEoa, randomAddress(), l1Token); - await erc20BridgeContract.emitBridgingInitiated(randomAddress(), monitoredEoa, randomAddress()); - const result = await adapter.getErc20DepositInitiatedEvents( - erc20BridgeContract, - monitoredEoa, - l1Token, - searchConfig - ); - expect(result.length).to.equal(1); - expect(result[0].args.recipient).to.equal(monitoredEoa); - expect(result[0].args.token).to.equal(l1Token); - }); - it("Get L2 finalized events", async function () { - // Should return only event - await erc20BridgeContract.emitBridgingFinalized(l1Token, monitoredEoa); - await erc20BridgeContract.emitBridgingFinalized(randomAddress(), monitoredEoa); - await erc20BridgeContract.emitBridgingFinalized(l1Token, randomAddress()); - const result = await adapter.getErc20DepositFinalizedEvents( - erc20BridgeContract, - monitoredEoa, - l1Token, - searchConfig - ); - expect(result.length).to.equal(1); - expect(result[0].args.recipient).to.equal(monitoredEoa); - expect(result[0].args.nativeToken).to.equal(l1Token); - }); - it("Matches L1 and L2 events", async function () { - await erc20BridgeContract.emitBridgingInitiated(randomAddress(), monitoredEoa, l1Token); - await erc20BridgeContract.emitBridgingFinalized(l1Token, monitoredEoa); - const l1Events = await adapter.getErc20DepositInitiatedEvents( - erc20BridgeContract, - monitoredEoa, - l1Token, - searchConfig - ); - const l2Events = await adapter.getErc20DepositFinalizedEvents( - erc20BridgeContract, - monitoredEoa, - l1Token, - searchConfig - ); - - let outstandingTransfers = {}; - - // 1. If l1 and l2 events pair off, outstanding transfers will be empty - adapter.matchErc20DepositEvents(l1Events, l2Events, outstandingTransfers, monitoredEoa, l1Token); - expect(outstandingTransfers).to.deep.equal({}); - - // 2. If finalized event is missing, there will be an outstanding transfer. 
- outstandingTransfers = {}; - adapter.matchErc20DepositEvents(l1Events, [], outstandingTransfers, monitoredEoa, l1Token); - expect( - outstandingTransfers[monitoredEoa][l1Token][TOKEN_SYMBOLS_MAP.WBTC.addresses[CHAIN_IDs.LINEA]] - ).to.deep.equal({ - totalAmount: toBN(0), - depositTxHashes: l1Events.map((e) => e.transactionHash), - }); - }); - }); - - it("getL1MessageService", async function () { - const l1MessageService = adapter.getL1MessageService(); - expect(l1MessageService).to.not.be.undefined; - expect(l1MessageService.address) === CONTRACT_ADDRESSES[CHAIN_IDs.MAINNET]["lineaMessageService"].address; - }); - it("getL2MessageService", async function () { - const l2MessageService = adapter.getL2MessageService(); - expect(l2MessageService).to.not.be.undefined; - expect(l2MessageService.address) === CONTRACT_ADDRESSES[CHAIN_IDs.LINEA]["l2MessageService"].address; - }); - it("getL1Bridge: USDC", async function () { - const bridge = adapter.getL1Bridge(TOKEN_SYMBOLS_MAP.USDC.addresses[this.hubChainId]); - expect(bridge).to.not.be.undefined; - expect(bridge.address) === CONTRACT_ADDRESSES[CHAIN_IDs.MAINNET]["lineaL1UsdcBridge"].address; - }); - it("getL2Bridge: USDC", async function () { - const bridge = adapter.getL2Bridge(TOKEN_SYMBOLS_MAP.USDC.addresses[this.hubChainId]); - expect(bridge).to.not.be.undefined; - expect(bridge.address) === CONTRACT_ADDRESSES[CHAIN_IDs.LINEA]["lineaL2UsdcBridge"].address; - }); - it("getL1Bridge: ERC20", async function () { - const bridge = adapter.getL1Bridge(TOKEN_SYMBOLS_MAP.WBTC.addresses[this.hubChainId]); - expect(bridge).to.not.be.undefined; - expect(bridge.address) === CONTRACT_ADDRESSES[CHAIN_IDs.MAINNET]["lineaL1TokenBridge"].address; - }); - it("getL2Bridge: ERC20", async function () { - const bridge = adapter.getL2Bridge(TOKEN_SYMBOLS_MAP.WBTC.addresses[this.hubChainId]); - expect(bridge).to.not.be.undefined; - expect(bridge.address) === CONTRACT_ADDRESSES[CHAIN_IDs.LINEA]["lineaL2TokenBridge"].address; - }); -}); diff --git a/test/generic-adapters/Linea.ts b/test/generic-adapters/Linea.ts index 12dac5c8f..5666f0b2a 100644 --- a/test/generic-adapters/Linea.ts +++ b/test/generic-adapters/Linea.ts @@ -6,6 +6,7 @@ import { ethers, getContractFactory, Contract, randomAddress, expect, createRand import { utils } from "@across-protocol/sdk"; import { ZERO_ADDRESS } from "@uma/common"; import { CONTRACT_ADDRESSES, SUPPORTED_TOKENS } from "../../src/common"; +import { BlockFinder } from "../../src/utils/SDKUtils"; describe("Cross Chain Adapter: Linea", async function () { let adapter: BaseChainAdapter; @@ -106,7 +107,7 @@ describe("Cross Chain Adapter: Linea", async function () { await wethBridgeContract.emitMessageSent(monitoredEoa, randomAddress(), 1); const wethBridge = adapter.bridges[l1WETHToken]; - const result = await wethBridge.queryL1BridgeInitiationEvents(l1WETHToken, monitoredEoa, undefined, searchConfig); + const result = await wethBridge.queryL1BridgeInitiationEvents(l1WETHToken, undefined, monitoredEoa, searchConfig); expect(Object.keys(result).length).to.equal(1); expect(result[l2WETHToken].length).to.equal(1); expect(result[l2WETHToken][0].to).to.equal(monitoredEoa); @@ -122,19 +123,25 @@ describe("Cross Chain Adapter: Linea", async function () { await wethBridgeContract.emitMessageSentWithMessageHash(randomAddress(), monitoredEoa, 2, unfinalizedMessageHash); await wethBridgeContract.emitMessageSentWithMessageHash(monitoredEoa, randomAddress(), 1, otherMessageHash); - await wethBridgeContract.emitMessageClaimed(expectedMessageHash); + 
const expectedTxn = await wethBridgeContract.emitMessageClaimed(expectedMessageHash); await wethBridgeContract.emitMessageClaimed(otherMessageHash); + await adapter.updateSpokePoolClients(); + searchConfig = adapter.getUpdatedSearchConfigs().l2SearchConfig; + const wethBridge = adapter.bridges[l1WETHToken]; + wethBridge.blockFinder = new BlockFinder(wethBridgeContract.provider); const result = await wethBridge.queryL2BridgeFinalizationEvents( l1WETHToken, - monitoredEoa, undefined, + monitoredEoa, searchConfig ); - expect(Object.keys(result).length).to.equal(1); expect(result[l2WETHToken][0].amount).to.equal(1); + + // The transaction hash should correspond to the L2 finalization call. + expect(result[l2WETHToken][0].transactionHash).to.equal(expectedTxn.hash); }); it("Matches L1 and L2 events", async function () { const messageHash = createRandomBytes32(); @@ -148,6 +155,7 @@ describe("Cross Chain Adapter: Linea", async function () { ); await wethBridgeContract.emitMessageClaimed(messageHash); await adapter.updateSpokePoolClients(); + adapter.bridges[l1WETHToken].blockFinder = new BlockFinder(wethBridgeContract.provider); const result = await adapter.getOutstandingCrossChainTransfers([l1WETHToken]); // There should be one outstanding transfer, since there are two deposit events and one @@ -164,7 +172,7 @@ describe("Cross Chain Adapter: Linea", async function () { await usdcBridgeContract.emitDeposited(monitoredEoa, randomAddress()); const usdcBridge = adapter.bridges[l1USDCToken]; - const result = await usdcBridge.queryL1BridgeInitiationEvents(l1USDCToken, monitoredEoa, undefined, searchConfig); + const result = await usdcBridge.queryL1BridgeInitiationEvents(l1USDCToken, undefined, monitoredEoa, searchConfig); expect(Object.keys(result).length).to.equal(1); expect(result[l2USDCToken][0].to).to.equal(monitoredEoa); @@ -176,8 +184,8 @@ describe("Cross Chain Adapter: Linea", async function () { const usdcBridge = adapter.bridges[l1USDCToken]; const result = await usdcBridge.queryL2BridgeFinalizationEvents( l1USDCToken, - monitoredEoa, undefined, + monitoredEoa, searchConfig ); @@ -207,7 +215,7 @@ describe("Cross Chain Adapter: Linea", async function () { await erc20BridgeContract.emitBridgingInitiated(randomAddress(), monitoredEoa, randomAddress()); const erc20Bridge = adapter.bridges[l1Token]; - const result = await erc20Bridge.queryL1BridgeInitiationEvents(l1Token, monitoredEoa, undefined, searchConfig); + const result = await erc20Bridge.queryL1BridgeInitiationEvents(l1Token, undefined, monitoredEoa, searchConfig); expect(Object.keys(result).length).to.equal(1); expect(result[l2Token][0].to).to.equal(monitoredEoa); @@ -219,7 +227,7 @@ describe("Cross Chain Adapter: Linea", async function () { await erc20BridgeContract.emitBridgingFinalized(l1Token, randomAddress()); const erc20Bridge = adapter.bridges[l1Token]; - const result = await erc20Bridge.queryL2BridgeFinalizationEvents(l1Token, monitoredEoa, undefined, searchConfig); + const result = await erc20Bridge.queryL2BridgeFinalizationEvents(l1Token, undefined, monitoredEoa, searchConfig); expect(Object.keys(result).length).to.equal(1); expect(result[l2Token][0].to).to.equal(monitoredEoa); diff --git a/test/generic-adapters/Polygon.ts b/test/generic-adapters/Polygon.ts new file mode 100644 index 000000000..67c8ad4ec --- /dev/null +++ b/test/generic-adapters/Polygon.ts @@ -0,0 +1,1065 @@ +import { CHAIN_IDs, TOKEN_SYMBOLS_MAP } from "@across-protocol/constants"; +import { utils } from "@across-protocol/sdk"; +import { SpokePoolClient } from 
"../../src/clients"; +import { BaseChainAdapter } from "../../src/adapter/BaseChainAdapter"; +import { PolygonWethBridge, PolygonERC20Bridge, UsdcTokenSplitterBridge } from "../../src/adapter/bridges"; +import { + ethers, + expect, + BigNumber, + Contract, + createSpyLogger, + getContractFactory, + randomAddress, + toBN, +} from "../utils"; +import { ZERO_ADDRESS } from "../constants"; +import { hashCCTPSourceAndNonce } from "../../src/utils"; +import { chainIdsToCctpDomains } from "../../src/common"; + +const { MAINNET, POLYGON } = CHAIN_IDs; +const { USDC, WETH, WBTC } = TOKEN_SYMBOLS_MAP; +const l1Weth = WETH.addresses[MAINNET]; +const l1Token = WBTC.addresses[MAINNET]; +const l1Usdc = USDC.addresses[MAINNET]; +const l2Usdc = USDC.addresses[POLYGON]; +let l2Weth, l2UsdcE, l2Token; + +let l1Bridge: Contract, l2Bridge: Contract; +let l1TokenMessenger: Contract, l2MessageTransmitter: Contract; +let hubPool: Contract, spokePool: Contract; + +class TestBaseChainAdapter extends BaseChainAdapter { + public setL1Bridge(address: string, bridge: Contract) { + this.bridges[address].l1Bridge = bridge; + } + + public setL2Bridge(address: string, bridge: Contract) { + this.bridges[address].l2Bridge = bridge; + } + + public setL1CanonicalBridge(address: string, bridge: Contract) { + this.bridges[address].canonicalBridge.l1Bridge = bridge; + } + + public setL2CanonicalBridge(address: string, bridge: Contract) { + this.bridges[address].canonicalBridge.l2Bridge = bridge; + } + + public setL1UsdcBridge(address: string, bridge: Contract) { + this.bridges[address].cctpBridge.l1Bridge = bridge; + } + + public setL2UsdcBridge(address: string, bridge: Contract) { + this.bridges[address].cctpBridge.l2Bridge = bridge; + } +} + +describe("Cross Chain Adapter: Polygon", async function () { + const logger = createSpyLogger().spyLogger; + + let adapter: TestAdapter; + let monitoredEoa: string; + let randomEoa: string; + + let searchConfig: utils.EventSearchConfig; + let depositAmount: BigNumber; + + beforeEach(async function () { + const [depositor] = await ethers.getSigners(); + monitoredEoa = await depositor.getAddress(); + randomEoa = randomAddress(); + + hubPool = await (await getContractFactory("MockHubPool", depositor)).deploy(); + + spokePool = await (await getContractFactory("MockSpokePool", depositor)).deploy(ZERO_ADDRESS); + const deploymentBlock = spokePool.deployTransaction.blockNumber!; + + const hubPoolClient = null; + const l2SpokePoolClient = new SpokePoolClient(logger, spokePool, hubPoolClient, POLYGON, deploymentBlock, { + fromBlock: deploymentBlock, + }); + const l1SpokePoolClient = new SpokePoolClient(logger, spokePool, hubPoolClient, MAINNET, deploymentBlock, { + fromBlock: deploymentBlock, + }); + searchConfig = { fromBlock: deploymentBlock, toBlock: 1_000_000 }; + + const l1Signer = l1SpokePoolClient.spokePool.signer; + const l2Signer = l2SpokePoolClient.spokePool.signer; + + const bridges = { + [WETH.addresses[MAINNET]]: new PolygonWethBridge(POLYGON, MAINNET, l1Signer, l2Signer, l1Weth), + [USDC.addresses[MAINNET]]: new UsdcTokenSplitterBridge(POLYGON, MAINNET, l1Signer, l2Signer, l1Usdc), + [WBTC.addresses[MAINNET]]: new PolygonERC20Bridge(POLYGON, MAINNET, l1Signer, l2Signer, l1Token), + }; + + adapter = new TestBaseChainAdapter( + { + [MAINNET]: l1SpokePoolClient, + [POLYGON]: l2SpokePoolClient, + }, + POLYGON, + MAINNET, + [monitoredEoa, hubPool.address, spokePool.address], + logger, + ["WETH", "USDC", "WBTC"], + bridges, + 1 + ); + + // Point the adapter to the proper bridges. 
+ l1Bridge = await (await getContractFactory("Polygon_L1Bridge", depositor)).deploy(); + l2Bridge = await (await getContractFactory("Polygon_L2Bridge", depositor)).deploy(); + l1TokenMessenger = await (await getContractFactory("CctpTokenMessenger", depositor)).deploy(); + l2MessageTransmitter = await (await getContractFactory("CctpMessageTransmitter", depositor)).deploy(); + // WBTC + adapter.setL1Bridge(l1Token, l1Bridge); + adapter.setL2Bridge(l1Token, l2Bridge); + // WETH + adapter.setL1Bridge(l1Weth, l1Bridge); + adapter.setL2Bridge(l1Weth, l2Bridge); + // USDC + adapter.setL1CanonicalBridge(l1Usdc, l1Bridge); + adapter.setL2CanonicalBridge(l1Usdc, l2Bridge); + adapter.setL1UsdcBridge(l1Usdc, l1TokenMessenger); + adapter.setL2UsdcBridge(l1Usdc, l2MessageTransmitter); + + depositAmount = toBN(Math.round(Math.random() * 1e18)); + l2UsdcE = adapter.bridges[l1Usdc].resolveL2TokenAddress(l1Usdc); + l2Token = adapter.bridges[l1Token].resolveL2TokenAddress(l1Token); + l2Weth = adapter.bridges[l1Weth].resolveL2TokenAddress(l1Weth); + }); + + describe("WETH bridge", function () { + it("Get L1 deposits: EOA", async function () { + await l1Bridge.depositEtherFor(monitoredEoa, monitoredEoa, depositAmount); + await l1Bridge.depositEtherFor(randomEoa, randomEoa, depositAmount); + + const result = await adapter.bridges[l1Weth].queryL1BridgeInitiationEvents( + l1Weth, + monitoredEoa, + monitoredEoa, + searchConfig + ); + expect(result).to.exist; + expect(Object.keys(result).length).to.equal(1); + + const deposit = result[l2Weth]; + expect(deposit).to.exist; + const { from, to, amount } = deposit[0]; + expect(from).to.equal(monitoredEoa); + expect(to).to.equal(monitoredEoa); + expect(amount).to.equal(depositAmount); + }); + + it("Get L2 receipts: EOA", async function () { + await l2Bridge.transfer(ZERO_ADDRESS, monitoredEoa, depositAmount); + await l2Bridge.transfer(ZERO_ADDRESS, randomEoa, depositAmount); + + const result = await adapter.bridges[l1Weth].queryL2BridgeFinalizationEvents( + l1Weth, + monitoredEoa, + monitoredEoa, + searchConfig + ); + expect(Object.keys(result).length).to.equal(1); + + const receipt = result[l2Weth]; + expect(receipt).to.exist; + const { from, to, amount } = receipt[0]; + expect(from).to.equal(ZERO_ADDRESS); + expect(to).to.equal(monitoredEoa); + expect(amount).to.equal(depositAmount); + }); + + it("Matches L1 and L2 events: EOA", async function () { + // There should be no pre-existing outstanding transfers. + await Promise.all(Object.values(adapter.spokePoolClients).map((spokePoolClient) => spokePoolClient.update())); + let transfers = await adapter.getOutstandingCrossChainTransfers([l1Weth]); + expect(transfers).to.deep.equal({ + [monitoredEoa]: { + [l1Weth]: { + [l2Weth]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + [spokePool.address]: { + [l1Weth]: { + [l2Weth]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + [hubPool.address]: { + [l1Weth]: { + [l2Weth]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + }); + + // Make a single l1 -> l2 deposit.
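+ // The call below emits only the L1 initiation event; with no matching L2 finalization yet, the deposit should be reported as outstanding.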
+ await l1Bridge.depositEtherFor(monitoredEoa, monitoredEoa, depositAmount); + const deposits = await adapter.bridges[l1Weth].queryL1BridgeInitiationEvents( + l1Weth, + monitoredEoa, + monitoredEoa, + searchConfig + ); + expect(deposits).to.exist; + expect(deposits[l2Weth].length).to.equal(1); + + let receipts = await adapter.bridges[l1Weth].queryL2BridgeFinalizationEvents( + l1Weth, + monitoredEoa, + monitoredEoa, + searchConfig + ); + expect(receipts).to.exist; + expect(receipts[l2Weth].length).to.equal(0); + + // There should be 1 outstanding transfer. + await Promise.all(Object.values(adapter.spokePoolClients).map((spokePoolClient) => spokePoolClient.update())); + transfers = await adapter.getOutstandingCrossChainTransfers([l1Weth]); + expect(transfers).to.deep.equal({ + [monitoredEoa]: { + [l1Weth]: { + [l2Weth]: { + depositTxHashes: [deposits[l2Weth][0].transactionHash], + totalAmount: deposits[l2Weth][0].amount, + }, + }, + }, + [spokePool.address]: { + [l1Weth]: { + [l2Weth]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + [hubPool.address]: { + [l1Weth]: { + [l2Weth]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + }); + + // Finalise the ongoing deposit on the destination chain. + await l2Bridge.transfer(ZERO_ADDRESS, monitoredEoa, depositAmount); // Simulate WETH transfer to recipient EOA. + receipts = await adapter.bridges[l1Weth].queryL2BridgeFinalizationEvents( + l1Weth, + monitoredEoa, + monitoredEoa, + searchConfig + ); + expect(receipts).to.exist; + expect(receipts[l2Weth].length).to.equal(1); + + // There should be no outstanding transfers. + await Promise.all(Object.values(adapter.spokePoolClients).map((spokePoolClient) => spokePoolClient.update())); + transfers = await adapter.getOutstandingCrossChainTransfers([l1Weth]); + expect(transfers).to.deep.equal({ + [monitoredEoa]: { + [l1Weth]: { + [l2Weth]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + [spokePool.address]: { + [l1Weth]: { + [l2Weth]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + [hubPool.address]: { + [l1Weth]: { + [l2Weth]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + }); + }); + + it("Get L1 deposits: HubPool", async function () { + await l1Bridge.depositEtherFor(hubPool.address, spokePool.address, depositAmount); + + const result = await adapter.bridges[l1Weth].queryL1BridgeInitiationEvents( + l1Weth, + spokePool.address, + spokePool.address, + searchConfig + ); + expect(result).to.exist; + expect(result[l2Weth].length).to.equal(1); + + const deposit = result[l2Weth]; + expect(deposit[0]).to.exist; + const { to, amount, from } = deposit[0]; + expect(from).to.equal(hubPool.address); + expect(to).to.equal(spokePool.address); + expect(amount).to.equal(depositAmount); + }); + + it("Get L2 receipts: HubPool", async function () { + await l2Bridge.transfer(ZERO_ADDRESS, spokePool.address, depositAmount); + + const result = await adapter.bridges[l1Weth].queryL2BridgeFinalizationEvents( + l1Weth, + spokePool.address, + spokePool.address, + searchConfig + ); + expect(result[l2Weth].length).to.equal(1); + + const receipt = result[l2Weth]; + expect(receipt).to.exist; + const { from, to, amount } = receipt[0]; + expect(from).to.equal(ZERO_ADDRESS); + expect(to).to.equal(spokePool.address); + expect(amount).to.equal(depositAmount); + }); + + it("Matches L1 and L2 events: HubPool", async function () { + // There should be no pre-existing outstanding transfers. 
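+ // HubPool deposits name the SpokePool as recipient, so any outstanding amount is expected under spokePool.address rather than hubPool.address.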
+ await Promise.all(Object.values(adapter.spokePoolClients).map((spokePoolClient) => spokePoolClient.update())); + let transfers = await adapter.getOutstandingCrossChainTransfers([l1Weth]); + expect(transfers).to.deep.equal({ + [monitoredEoa]: { + [l1Weth]: { + [l2Weth]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + [spokePool.address]: { + [l1Weth]: { + [l2Weth]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + [hubPool.address]: { + [l1Weth]: { + [l2Weth]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + }); + + // Make a single l1 -> l2 deposit. + await l1Bridge.depositEtherFor(hubPool.address, spokePool.address, depositAmount); + const deposits = await adapter.bridges[l1Weth].queryL1BridgeInitiationEvents( + l1Weth, + spokePool.address, + spokePool.address, + searchConfig + ); + expect(deposits).to.exist; + expect(deposits[l2Weth].length).to.equal(1); + + let receipts = await adapter.bridges[l1Weth].queryL2BridgeFinalizationEvents( + l1Weth, + spokePool.address, + spokePool.address, + searchConfig + ); + expect(receipts).to.exist; + expect(receipts[l2Weth].length).to.equal(0); + + // There should be 1 outstanding transfer. + await Promise.all(Object.values(adapter.spokePoolClients).map((spokePoolClient) => spokePoolClient.update())); + transfers = await adapter.getOutstandingCrossChainTransfers([l1Weth]); + expect(transfers).to.deep.equal({ + [monitoredEoa]: { + [l1Weth]: { + [l2Weth]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + [spokePool.address]: { + [l1Weth]: { + [l2Weth]: { + depositTxHashes: [deposits[l2Weth][0].transactionHash], + totalAmount: deposits[l2Weth][0].amount, + }, + }, + }, + [hubPool.address]: { + [l1Weth]: { + [l2Weth]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + }); + + // Finalise the ongoing deposit on the destination chain. + await l2Bridge.transfer(ZERO_ADDRESS, spokePool.address, depositAmount); + receipts = await adapter.bridges[l1Weth].queryL2BridgeFinalizationEvents( + l1Weth, + spokePool.address, + spokePool.address, + searchConfig + ); + expect(receipts).to.exist; + expect(receipts[l2Weth].length).to.equal(1); + + // There should be no outstanding transfers. + await Promise.all(Object.values(adapter.spokePoolClients).map((spokePoolClient) => spokePoolClient.update())); + transfers = await adapter.getOutstandingCrossChainTransfers([l1Weth]); + expect(transfers).to.deep.equal({ + [monitoredEoa]: { + [l1Weth]: { + [l2Weth]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + [spokePool.address]: { + [l1Weth]: { + [l2Weth]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + [hubPool.address]: { + [l1Weth]: { + [l2Weth]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + }); + }); + }); + + describe("ERC20 bridge", function () { + it("Get L1 deposits: EOA", async function () { + await l1Bridge.depositFor(monitoredEoa, monitoredEoa, l1Token, depositAmount); + await l1Bridge.depositFor(monitoredEoa, randomEoa, l1Token, depositAmount); + + const result = await adapter.bridges[l1Token].queryL1BridgeInitiationEvents(l1Token, null, null, searchConfig); + expect(result).to.exist; + expect(result[l2Token].length).to.equal(2); + + // Ensure that the recipient address filters work. 
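+ // Two deposits were emitted above; filtering by each recipient in turn should isolate exactly one event apiece.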
+ for (const recipient of [monitoredEoa, randomEoa]) { + const result = await adapter.bridges[l1Token].queryL1BridgeInitiationEvents( + l1Token, + monitoredEoa, + recipient, + searchConfig + ); + expect(result).to.exist; + expect(result[l2Token].length).to.equal(1); + + const deposit = result[l2Token]; + expect(deposit[0]).to.exist; + const { from, to, rootToken } = deposit[0]; + expect(from).to.equal(monitoredEoa); + expect(to).to.equal(recipient); + expect(rootToken).to.equal(l1Token); + } + }); + + it("Get L2 receipts: EOA", async function () { + // Should return only event + await l2Bridge.transfer(ZERO_ADDRESS, monitoredEoa, depositAmount); + await l2Bridge.transfer(ZERO_ADDRESS, randomEoa, depositAmount); + + const result = await adapter.bridges[l1Token].queryL2BridgeFinalizationEvents(l1Token, null, null, searchConfig); + expect(result[l2Token].length).to.equal(2); + + // Ensure that the recipient address filters work. + for (const recipient of [monitoredEoa, randomEoa]) { + const result = await adapter.bridges[l1Token].queryL2BridgeFinalizationEvents( + l1Token, + monitoredEoa, + recipient, + searchConfig + ); + expect(result).to.exist; + expect(result[l2Token].length).to.equal(1); + + const deposit = result[l2Token]; + expect(deposit[0]).to.exist; + const { from, to } = deposit[0]; + expect(from).to.equal(ZERO_ADDRESS); + expect(to).to.equal(recipient); + } + }); + + it("Matches l1 deposits and l2 receipts: EOA", async function () { + // There should be no pre-existing outstanding transfers. + await Promise.all(Object.values(adapter.spokePoolClients).map((spokePoolClient) => spokePoolClient.update())); + let transfers = await adapter.getOutstandingCrossChainTransfers([l1Token]); + expect(transfers).to.deep.equal({ + [monitoredEoa]: { + [l1Token]: { + [l2Token]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + [spokePool.address]: { + [l1Token]: { + [l2Token]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + [hubPool.address]: { + [l1Token]: { + [l2Token]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + }); + + // Make a single l1 -> l2 deposit. + await l1Bridge.depositFor(monitoredEoa, monitoredEoa, l1Token, depositAmount); + const deposits = await adapter.bridges[l1Token].queryL1BridgeInitiationEvents(l1Token, null, null, searchConfig); + expect(deposits).to.exist; + expect(deposits[l2Token].length).to.equal(1); + + let receipts = await adapter.bridges[l1Token].queryL2BridgeFinalizationEvents( + l1Token, + monitoredEoa, + monitoredEoa, + searchConfig + ); + expect(receipts).to.exist; + expect(receipts[l2Token].length).to.equal(0); + + // There should be 1 outstanding transfer. + await Promise.all(Object.values(adapter.spokePoolClients).map((spokePoolClient) => spokePoolClient.update())); + transfers = await adapter.getOutstandingCrossChainTransfers([l1Token]); + expect(transfers).to.deep.equal({ + [monitoredEoa]: { + [l1Token]: { + [l2Token]: { + depositTxHashes: [deposits[l2Token][0].transactionHash], + totalAmount: deposits[l2Token][0].amount, + }, + }, + }, + [spokePool.address]: { + [l1Token]: { + [l2Token]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + [hubPool.address]: { + [l1Token]: { + [l2Token]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + }); + + // Finalise the ongoing deposit on the destination chain. 
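+ // Polygon finalizations surface as child-token mints, i.e. Transfer events from the zero address to the recipient.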
+ await l2Bridge.transfer(ZERO_ADDRESS, monitoredEoa, depositAmount); + receipts = await adapter.bridges[l1Token].queryL2BridgeFinalizationEvents( + l1Token, + monitoredEoa, + monitoredEoa, + searchConfig + ); + expect(receipts).to.exist; + expect(receipts[l2Token].length).to.equal(1); + + // There should be no outstanding transfers. + await Promise.all(Object.values(adapter.spokePoolClients).map((spokePoolClient) => spokePoolClient.update())); + transfers = await adapter.getOutstandingCrossChainTransfers([l1Token]); + expect(transfers).to.deep.equal({ + [monitoredEoa]: { + [l1Token]: { + [l2Token]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + [spokePool.address]: { + [l1Token]: { + [l2Token]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + [hubPool.address]: { + [l1Token]: { + [l2Token]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + }); + }); + + it("Get L1 deposits: HubPool", async function () { + await l1Bridge.depositFor(hubPool.address, spokePool.address, l1Token, depositAmount); + await l1Bridge.depositFor(randomEoa, monitoredEoa, l1Token, depositAmount); + + const result = await adapter.bridges[l1Token].queryL1BridgeInitiationEvents(l1Token, null, null, searchConfig); + expect(result).to.exist; + expect(result[l2Token].length).to.equal(2); + + // Ensure that the recipient address filters work. + for (const [sender, recipient] of [ + [hubPool.address, spokePool.address], + [randomEoa, monitoredEoa], + ]) { + const result = await adapter.bridges[l1Token].queryL1BridgeInitiationEvents( + l1Token, + sender, + recipient, + searchConfig + ); + expect(result).to.exist; + expect(result[l2Token].length).to.equal(1); + + const deposit = result[l2Token]; + expect(deposit[0]).to.exist; + const { from, to, rootToken } = deposit[0]; + expect(from).to.equal(sender); + expect(to).to.equal(recipient); + expect(rootToken).to.equal(l1Token); + } + }); + + it("Get L2 receipts: HubPool", async function () { + // Should return only event + await l2Bridge.transfer(ZERO_ADDRESS, spokePool.address, depositAmount); + await l2Bridge.transfer(ZERO_ADDRESS, monitoredEoa, depositAmount); + + const result = await adapter.bridges[l1Token].queryL2BridgeFinalizationEvents(l1Token, null, null, searchConfig); + expect(result[l2Token].length).to.equal(2); + + // Ensure that the recipient address filters work. + // Note: for Polygon, bridge finalization events are always mints from the ERC20 token. + for (const [sender, recipient] of [ + [ZERO_ADDRESS, spokePool.address], + [ZERO_ADDRESS, monitoredEoa], + ]) { + const result = await adapter.bridges[l1Token].queryL2BridgeFinalizationEvents( + l1Token, + sender, + recipient, + searchConfig + ); + expect(result).to.exist; + expect(result[l2Token].length).to.equal(1); + + const deposit = result[l2Token]; + expect(deposit[0]).to.exist; + const { from, to } = deposit[0]; + expect(from).to.equal(sender); + expect(to).to.equal(recipient); + } + }); + + it("Matches l1 deposits and l2 receipts: HubPool", async function () { + // There should be no pre-existing outstanding transfers. 
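+ // The ERC20 flow mirrors the WETH case: the HubPool is the depositor and the SpokePool the recipient.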
+ await Promise.all(Object.values(adapter.spokePoolClients).map((spokePoolClient) => spokePoolClient.update())); + let transfers = await adapter.getOutstandingCrossChainTransfers([l1Token]); + expect(transfers).to.deep.equal({ + [monitoredEoa]: { + [l1Token]: { + [l2Token]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + [spokePool.address]: { + [l1Token]: { + [l2Token]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + [hubPool.address]: { + [l1Token]: { + [l2Token]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + }); + + // Make a single l1 -> l2 deposit. + await l1Bridge.depositFor(hubPool.address, spokePool.address, l1Token, depositAmount); + const deposits = await adapter.bridges[l1Token].queryL1BridgeInitiationEvents(l1Token, null, null, searchConfig); + expect(deposits).to.exist; + expect(deposits[l2Token].length).to.equal(1); + + let receipts = await adapter.bridges[l1Token].queryL2BridgeFinalizationEvents( + l1Token, + null, + spokePool.address, + searchConfig + ); + expect(receipts).to.exist; + expect(receipts[l2Token].length).to.equal(0); + + // There should be 1 outstanding transfer. + await Promise.all(Object.values(adapter.spokePoolClients).map((spokePoolClient) => spokePoolClient.update())); + transfers = await adapter.getOutstandingCrossChainTransfers([l1Token]); + expect(transfers).to.deep.equal({ + [monitoredEoa]: { + [l1Token]: { + [l2Token]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + [spokePool.address]: { + [l1Token]: { + [l2Token]: { + depositTxHashes: [deposits[l2Token][0].transactionHash], + totalAmount: deposits[l2Token][0].amount, + }, + }, + }, + [hubPool.address]: { + [l1Token]: { + [l2Token]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + }); + + // Finalise the ongoing deposit on the destination chain. + await l2Bridge.transfer(ZERO_ADDRESS, spokePool.address, depositAmount); + receipts = await adapter.bridges[l1Token].queryL2BridgeFinalizationEvents( + l1Token, + null, + spokePool.address, + searchConfig + ); + expect(receipts).to.exist; + expect(receipts[l2Token].length).to.equal(1); + + // There should be no outstanding transfers. 
+ await Promise.all(Object.values(adapter.spokePoolClients).map((spokePoolClient) => spokePoolClient.update())); + transfers = await adapter.getOutstandingCrossChainTransfers([l1Token]); + expect(transfers).to.deep.equal({ + [monitoredEoa]: { + [l1Token]: { + [l2Token]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + [spokePool.address]: { + [l1Token]: { + [l2Token]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + [hubPool.address]: { + [l1Token]: { + [l2Token]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + }); + }); + }); + + describe("USDC Token Splitter Bridge", () => { + it("Queries L1 Events: CCTP Bridge", async () => { + const usdcTokenSplitterBridge = adapter.bridges[l1Usdc]; + const processedNonce = 1; + const unprocessedNonce = 2; + await l1TokenMessenger.emitDepositForBurn( + processedNonce, + l1Usdc, + 1, + monitoredEoa, + ethers.utils.hexZeroPad(monitoredEoa, 32), + chainIdsToCctpDomains[POLYGON], + ethers.utils.hexZeroPad(l2MessageTransmitter.address, 32), + ethers.utils.hexZeroPad(monitoredEoa, 32) + ); + await l1TokenMessenger.emitDepositForBurn( + unprocessedNonce, + l1Usdc, + 1, + monitoredEoa, + ethers.utils.hexZeroPad(monitoredEoa, 32), + chainIdsToCctpDomains[POLYGON], + ethers.utils.hexZeroPad(l2MessageTransmitter.address, 32), + ethers.utils.hexZeroPad(monitoredEoa, 32) + ); + await l2MessageTransmitter.setUsedNonce( + hashCCTPSourceAndNonce(chainIdsToCctpDomains[MAINNET], processedNonce), + processedNonce + ); + + const events = await usdcTokenSplitterBridge.queryL1BridgeInitiationEvents( + l1Usdc, + monitoredEoa, + monitoredEoa, + searchConfig + ); + expect(events[l2Usdc].length).to.equal(1); + expect(events[l2UsdcE].length).to.equal(0); + expect(events[l2Usdc][0].nonce.toString()).to.equal(unprocessedNonce.toString()); + }); + it("Queries L1 Events: Canonical Bridge", async () => { + const usdcTokenSplitterBridge = adapter.bridges[l1Usdc]; + await l1Bridge.depositFor(monitoredEoa, monitoredEoa, l1Usdc, depositAmount); + + const events = await usdcTokenSplitterBridge.queryL1BridgeInitiationEvents( + l1Usdc, + monitoredEoa, + monitoredEoa, + searchConfig + ); + expect(events[l2Usdc].length).to.equal(0); + expect(events[l2UsdcE].length).to.equal(1); + const { from, to, amount } = events[l2UsdcE][0]; + expect(from).to.equal(monitoredEoa); + expect(to).to.equal(monitoredEoa); + expect(amount).to.equal(depositAmount); + }); + + // L2 query events for the UsdcCCTPBridge do not exist. + it("Queries L2 Events: Canonical Bridge", async () => { + const usdcTokenSplitterBridge = adapter.bridges[l1Usdc]; + await l2Bridge.transfer(ZERO_ADDRESS, monitoredEoa, depositAmount); + + const events = await usdcTokenSplitterBridge.queryL2BridgeFinalizationEvents( + l1Usdc, + monitoredEoa, + monitoredEoa, + searchConfig + ); + expect(events[l2Usdc]).to.be.undefined; + expect(events[l2UsdcE].length).to.equal(1); + const { from, to, amount } = events[l2UsdcE][0]; + expect(from).to.equal(ZERO_ADDRESS); + expect(to).to.equal(monitoredEoa); + expect(amount).to.equal(depositAmount); + }); + + it("Determines outstanding transfers", async () => { + // There should be no pre-existing outstanding transfers.
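+ // The splitter bridge tracks two L2 tokens per address: native USDC via CCTP and bridged USDC.e via the canonical Polygon bridge.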
+ await Promise.all(Object.values(adapter.spokePoolClients).map((spokePoolClient) => spokePoolClient.update())); + let transfers = await adapter.getOutstandingCrossChainTransfers([l1Usdc]); + expect(transfers).to.deep.equal({ + [monitoredEoa]: { + [l1Usdc]: { + [l2Usdc]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + [l2UsdcE]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + [spokePool.address]: { + [l1Usdc]: { + [l2Usdc]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + [l2UsdcE]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + [hubPool.address]: { + [l1Usdc]: { + [l2Usdc]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + [l2UsdcE]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + }); + + // Make a single l1 -> l2 deposit via canonical bridge and via CCTP Bridge. + const unprocessedNonce = 2; + await l1TokenMessenger.emitDepositForBurn( + unprocessedNonce, + l1Usdc, + 1, + monitoredEoa, + ethers.utils.hexZeroPad(monitoredEoa, 32), + chainIdsToCctpDomains[POLYGON], + ethers.utils.hexZeroPad(l2MessageTransmitter.address, 32), + ethers.utils.hexZeroPad(monitoredEoa, 32) + ); + await l1Bridge.depositFor(monitoredEoa, monitoredEoa, l1Usdc, depositAmount); + const deposits = await adapter.bridges[l1Usdc].queryL1BridgeInitiationEvents(l1Usdc, null, null, searchConfig); + expect(deposits).to.exist; + expect(deposits[l2Usdc].length).to.equal(1); + expect(deposits[l2UsdcE].length).to.equal(1); + + let receipts = await adapter.bridges[l1Usdc].queryL2BridgeFinalizationEvents( + l1Usdc, + monitoredEoa, + monitoredEoa, + searchConfig + ); + expect(receipts).to.exist; + expect(receipts[l2UsdcE].length).to.equal(0); + expect(receipts[l2Usdc]).to.be.undefined; + + // There should be 1 outstanding transfer. + await Promise.all(Object.values(adapter.spokePoolClients).map((spokePoolClient) => spokePoolClient.update())); + transfers = await adapter.getOutstandingCrossChainTransfers([l1Usdc]); + expect(transfers).to.deep.equal({ + [monitoredEoa]: { + [l1Usdc]: { + [l2Usdc]: { + depositTxHashes: [deposits[l2Usdc][0].transactionHash], + totalAmount: deposits[l2Usdc][0].amount, + }, + [l2UsdcE]: { + depositTxHashes: [deposits[l2UsdcE][0].transactionHash], + totalAmount: deposits[l2UsdcE][0].amount, + }, + }, + }, + [spokePool.address]: { + [l1Usdc]: { + [l2Usdc]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + [l2UsdcE]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + [hubPool.address]: { + [l1Usdc]: { + [l2Usdc]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + [l2UsdcE]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + }); + + // Finalise the ongoing deposit on the destination chain. + await l2Bridge.transfer(ZERO_ADDRESS, monitoredEoa, depositAmount); + await l2MessageTransmitter.setUsedNonce( + hashCCTPSourceAndNonce(chainIdsToCctpDomains[MAINNET], unprocessedNonce), + unprocessedNonce + ); + receipts = await adapter.bridges[l1Usdc].queryL2BridgeFinalizationEvents( + l1Usdc, + monitoredEoa, + monitoredEoa, + searchConfig + ); + expect(receipts).to.exist; + expect(receipts[l2UsdcE].length).to.equal(1); + expect(receipts[l2Usdc]).to.be.undefined; + + // There should be no outstanding transfers.
+ await Promise.all(Object.values(adapter.spokePoolClients).map((spokePoolClient) => spokePoolClient.update())); + transfers = await adapter.getOutstandingCrossChainTransfers([l1Usdc]); + expect(transfers).to.deep.equal({ + [monitoredEoa]: { + [l1Usdc]: { + [l2Usdc]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + [l2UsdcE]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + [spokePool.address]: { + [l1Usdc]: { + [l2Usdc]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + [l2UsdcE]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + [hubPool.address]: { + [l1Usdc]: { + [l2Usdc]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + [l2UsdcE]: { + depositTxHashes: [], + totalAmount: BigNumber.from(0), + }, + }, + }, + }); + }); + }); +}); diff --git a/test/utils/SpokePoolUtils.ts b/test/utils/SpokePoolUtils.ts index 445c55dda..76b13eac6 100644 --- a/test/utils/SpokePoolUtils.ts +++ b/test/utils/SpokePoolUtils.ts @@ -5,13 +5,13 @@ import { SlowFillRequestWithBlock } from "../../src/interfaces"; import { SignerWithAddress } from "./utils"; export function V3FillFromDeposit( - deposit: interfaces.V3DepositWithBlock, + deposit: interfaces.DepositWithBlock, relayer: string, repaymentChainId?: number, fillType = interfaces.FillType.FastFill -): interfaces.V3Fill { +): interfaces.Fill { const { blockNumber, transactionHash, logIndex, transactionIndex, quoteTimestamp, ...relayData } = deposit; - const fill: interfaces.V3Fill = { + const fill: interfaces.Fill = { ...relayData, relayer, realizedLpFeePct: deposit.realizedLpFeePct ?? bnZero, @@ -29,9 +29,9 @@ export function V3FillFromDeposit( export async function fillV3( spokePool: Contract, relayer: SignerWithAddress, - deposit: interfaces.V3Deposit, + deposit: interfaces.Deposit, _repaymentChainId = repaymentChainId -): Promise { +): Promise { await spokePool .connect(relayer) .fillV3Relay( @@ -56,7 +56,7 @@ export async function fillV3( spokePool.chainId(), ]); const lastEvent = events[events.length - 1]; - const fillObject: interfaces.V3FillWithBlock = { + const fillObject: interfaces.FillWithBlock = { inputToken: lastEvent.args?.inputToken, outputToken: lastEvent.args?.outputToken, inputAmount: lastEvent.args?.inputAmount, @@ -89,7 +89,7 @@ export async function fillV3( export async function requestSlowFill( spokePool: Contract, relayer: SignerWithAddress, - deposit?: interfaces.V3Deposit + deposit?: interfaces.Deposit ): Promise { await spokePool .connect(relayer) diff --git a/test/utils/utils.ts b/test/utils/utils.ts index 6c46e8ad6..b76f84f88 100644 --- a/test/utils/utils.ts +++ b/test/utils/utils.ts @@ -9,7 +9,7 @@ import chaiExclude from "chai-exclude"; import sinon from "sinon"; import winston from "winston"; import { GLOBAL_CONFIG_STORE_KEYS } from "../../src/clients"; -import { V3Deposit, V3DepositWithBlock, V3FillWithBlock, V3SlowFillLeaf } from "../../src/interfaces"; +import { Deposit, DepositWithBlock, FillWithBlock, SlowFillLeaf } from "../../src/interfaces"; import { isDefined, spreadEvent, toBN, toBNWei, toWei, utf8ToHex, ZERO_ADDRESS } from "../../src/utils"; import { DEFAULT_BLOCK_RANGE_FOR_CHAIN, @@ -276,7 +276,7 @@ export async function depositV3( exclusivityDeadline?: number; exclusiveRelayer?: string; } = {} -): Promise { +): Promise { const depositor = signer.address; const recipient = opts.recipient ?? 
@@ -349,7 +349,7 @@ export async function depositV3(
 
 export async function updateDeposit(
   spokePool: Contract,
-  deposit: V3Deposit,
+  deposit: Deposit,
   depositor: SignerWithAddress
 ): Promise {
   const { updatedRecipient, updatedOutputAmount, updatedMessage } = deposit;
@@ -380,10 +380,10 @@ export async function updateDeposit(
 
 export async function fillV3Relay(
   spokePool: Contract,
-  deposit: Omit<V3Deposit, "destinationChainId">,
+  deposit: Omit<Deposit, "destinationChainId">,
   signer: SignerWithAddress,
   repaymentChainId?: number
-): Promise<V3FillWithBlock> {
+): Promise<FillWithBlock> {
   const destinationChainId = Number(await spokePool.chainId());
   assert.notEqual(deposit.originChainId, destinationChainId);
@@ -439,13 +439,13 @@ export async function addLiquidity(
   await hubPool.connect(signer).addLiquidity(l1Token.address, amount);
 }
 
-export function buildV3SlowRelayLeaves(deposits: interfaces.V3Deposit[], lpFeePct: BigNumber): V3SlowFillLeaf[] {
+export function buildV3SlowRelayLeaves(deposits: interfaces.Deposit[], lpFeePct: BigNumber): SlowFillLeaf[] {
   const chainId = deposits[0].destinationChainId;
   assert.isTrue(deposits.every(({ destinationChainId }) => chainId === destinationChainId));
   return deposits
     .map((deposit) => {
       const lpFee = deposit.inputAmount.mul(lpFeePct).div(toBNWei(1));
-      const slowFillLeaf: V3SlowFillLeaf = {
+      const slowFillLeaf: SlowFillLeaf = {
         relayData: {
           depositor: deposit.depositor,
           recipient: deposit.recipient,
diff --git a/yarn.lock b/yarn.lock
index 57d46af82..e9dfc0365 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -50,15 +50,16 @@
     axios "^1.6.2"
     zksync-web3 "^0.14.3"
 
-"@across-protocol/sdk@^3.1.31":
-  version "3.1.31"
-  resolved "https://registry.yarnpkg.com/@across-protocol/sdk/-/sdk-3.1.31.tgz#77e047301e446c63c85f83a603236dc161ffd482"
-  integrity sha512-2gg43gbmMuTsKSdgqv47GuzNmAMZz2oi4K/3o207Kj8A8snHRPwlg+Mgpg+I5fVXXBVfBSu3bN600aDaXLf/aw==
+"@across-protocol/sdk@^3.1.36":
+  version "3.1.36"
+  resolved "https://registry.yarnpkg.com/@across-protocol/sdk/-/sdk-3.1.36.tgz#8187f4771ca68f14c165033f0c6cc6e35e211919"
+  integrity sha512-Im9ELYj+m0WMow1zUWYerU5v+A+Tpty2jC/7pUSI2pMnHn/mdAWmMoL/EKMqpaxMFucGw6d4fUb1EvLeazBqlg==
   dependencies:
     "@across-protocol/across-token" "^1.0.0"
     "@across-protocol/constants" "^3.1.14"
     "@across-protocol/contracts" "^3.0.10"
     "@eth-optimism/sdk" "^3.3.1"
+    "@ethersproject/bignumber" "^5.7.0"
     "@pinata/sdk" "^2.1.0"
     "@types/mocha" "^10.0.1"
     "@uma/sdk" "^0.34.1"
@@ -5180,21 +5181,12 @@ axios@^0.27.2:
     follow-redirects "^1.14.9"
     form-data "^4.0.0"
 
-axios@^1.6.1:
-  version "1.6.1"
-  resolved "https://registry.yarnpkg.com/axios/-/axios-1.6.1.tgz#76550d644bf0a2d469a01f9244db6753208397d7"
-  integrity sha512-vfBmhDpKafglh0EldBEbVuoe7DyAavGSLWhuSm5ZSEKQnHhBf0xAAwybbNH1IkrJNGnS/VG4I5yxig1pCEXE4g==
+axios@^1.6.2, axios@^1.7.4:
+  version "1.7.4"
+  resolved "https://registry.yarnpkg.com/axios/-/axios-1.7.4.tgz#4c8ded1b43683c8dd362973c393f3ede24052aa2"
+  integrity sha512-DukmaFRnY6AzAALSH4J2M3k6PkaC+MfaAGdEERRWcC9q3/TWQwLpHR8ZRLKTdQ3aBDL64EdluRDjJqKw+BPZEw==
   dependencies:
-    follow-redirects "^1.15.0"
-    form-data "^4.0.0"
-    proxy-from-env "^1.1.0"
-
-axios@^1.6.2:
-  version "1.6.7"
-  resolved "https://registry.yarnpkg.com/axios/-/axios-1.6.7.tgz#7b48c2e27c96f9c68a2f8f31e2ab19f59b06b0a7"
-  integrity sha512-/hDJGff6/c7u0hDkvkGxR/oy6CbCs8ziCsC7SqmhjfozqiJGc8Z11wrv9z9lYfY4K8l+H9TpjcMDX0xOZmx+RA==
-  dependencies:
-    follow-redirects "^1.15.4"
+    follow-redirects "^1.15.6"
     form-data "^4.0.0"
     proxy-from-env "^1.1.0"
 
@@ -5400,10 +5392,10 @@ bn.js@^5.0.0, bn.js@^5.1.2, bn.js@^5.1.3, bn.js@^5.2.0, bn.js@^5.2.1:
   resolved
"https://registry.yarnpkg.com/bn.js/-/bn.js-5.2.1.tgz#0bc527a6a0d18d0aa8d5b0538ce4a77dccfa7b70" integrity sha512-eXRvHzWyYPBuB4NBy0cmYQjGitUrtqwbvlzP3G6VFnNRbsZQIxQ10PbKKHt8gZ/HW/D/747aDl+QkDqg3KQLMQ== -body-parser@1.20.2: - version "1.20.2" - resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.2.tgz#6feb0e21c4724d06de7ff38da36dad4f57a747fd" - integrity sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA== +body-parser@1.20.3, body-parser@^1.16.0: + version "1.20.3" + resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.3.tgz#1953431221c6fb5cd63c4b36d53fab0928e548c6" + integrity sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g== dependencies: bytes "3.1.2" content-type "~1.0.5" @@ -5413,29 +5405,11 @@ body-parser@1.20.2: http-errors "2.0.0" iconv-lite "0.4.24" on-finished "2.4.1" - qs "6.11.0" + qs "6.13.0" raw-body "2.5.2" type-is "~1.6.18" unpipe "1.0.0" -body-parser@^1.16.0: - version "1.20.0" - resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.0.tgz#3de69bd89011c11573d7bfee6a64f11b6bd27cc5" - integrity sha512-DfJ+q6EPcGKZD1QWUjSpqp+Q7bDQTsQIF4zfUAtZ6qk+H/3/QRhg9CEp39ss+/T2vw0+HaidC0ecJj/DRLIaKg== - dependencies: - bytes "3.1.2" - content-type "~1.0.4" - debug "2.6.9" - depd "2.0.0" - destroy "1.2.0" - http-errors "2.0.0" - iconv-lite "0.4.24" - on-finished "2.4.1" - qs "6.10.3" - raw-body "2.5.1" - type-is "~1.6.18" - unpipe "1.0.0" - boolbase@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e" @@ -5474,7 +5448,7 @@ brace-expansion@^2.0.1: dependencies: balanced-match "^1.0.0" -braces@^3.0.2, braces@~3.0.2: +braces@^3.0.3, braces@~3.0.2: version "3.0.3" resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.3.tgz#490332f40919452272d55a8480adc0c441358789" integrity sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA== @@ -5763,6 +5737,17 @@ call-bind@^1.0.0, call-bind@^1.0.2: function-bind "^1.1.1" get-intrinsic "^1.0.2" +call-bind@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.7.tgz#06016599c40c56498c18769d2730be242b6fa3b9" + integrity sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w== + dependencies: + es-define-property "^1.0.0" + es-errors "^1.3.0" + function-bind "^1.1.2" + get-intrinsic "^1.2.4" + set-function-length "^1.2.1" + callsites@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" @@ -6784,6 +6769,15 @@ deferred-leveldown@~5.3.0: abstract-leveldown "~6.2.1" inherits "^2.0.3" +define-data-property@^1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/define-data-property/-/define-data-property-1.1.4.tgz#894dc141bb7d3060ae4366f6a0107e68fbe48c5e" + integrity sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A== + dependencies: + es-define-property "^1.0.0" + es-errors "^1.3.0" + gopd "^1.0.1" + define-properties@^1.1.2, define-properties@^1.1.3, define-properties@^1.1.4, define-properties@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.2.0.tgz#52988570670c9eacedd8064f4a990f2405849bd5" @@ -7139,6 +7133,11 @@ encodeurl@~1.0.2: resolved 
"https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" integrity sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k= +encodeurl@~2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-2.0.0.tgz#7b8ea898077d7e409d3ac45474ea38eaf0857a58" + integrity sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg== + encoding-down@^6.3.0: version "6.3.0" resolved "https://registry.yarnpkg.com/encoding-down/-/encoding-down-6.3.0.tgz#b1c4eb0e1728c146ecaef8e32963c549e76d082b" @@ -7254,6 +7253,18 @@ es-abstract@^1.19.0, es-abstract@^1.19.1, es-abstract@^1.20.4: unbox-primitive "^1.0.2" which-typed-array "^1.1.9" +es-define-property@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/es-define-property/-/es-define-property-1.0.0.tgz#c7faefbdff8b2696cf5f46921edfb77cc4ba3845" + integrity sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ== + dependencies: + get-intrinsic "^1.2.4" + +es-errors@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/es-errors/-/es-errors-1.3.0.tgz#05f75a25dab98e4fb1dcd5e1472c0546d5057c8f" + integrity sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw== + es-set-tostringtag@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/es-set-tostringtag/-/es-set-tostringtag-2.0.1.tgz#338d502f6f674301d710b80c8592de8a15f09cd8" @@ -8055,37 +8066,74 @@ expand-template@^2.0.3: resolved "https://registry.yarnpkg.com/expand-template/-/expand-template-2.0.3.tgz#6e14b3fcee0f3a6340ecb57d2e8918692052a47c" integrity sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg== -express@^4.14.0, express@^4.16.3, express@^4.19.2: - version "4.19.2" - resolved "https://registry.yarnpkg.com/express/-/express-4.19.2.tgz#e25437827a3aa7f2a827bc8171bbbb664a356465" - integrity sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q== +express@^4.14.0, express@^4.16.3: + version "4.20.0" + resolved "https://registry.yarnpkg.com/express/-/express-4.20.0.tgz#f1d08e591fcec770c07be4767af8eb9bcfd67c48" + integrity sha512-pLdae7I6QqShF5PnNTCVn4hI91Dx0Grkn2+IAsMTgMIKuQVte2dN9PeGSSAME2FR8anOhVA62QDIUaWVfEXVLw== dependencies: accepts "~1.3.8" array-flatten "1.1.1" - body-parser "1.20.2" + body-parser "1.20.3" content-disposition "0.5.4" content-type "~1.0.4" cookie "0.6.0" cookie-signature "1.0.6" debug "2.6.9" depd "2.0.0" - encodeurl "~1.0.2" + encodeurl "~2.0.0" escape-html "~1.0.3" etag "~1.8.1" finalhandler "1.2.0" fresh "0.5.2" http-errors "2.0.0" - merge-descriptors "1.0.1" + merge-descriptors "1.0.3" methods "~1.1.2" on-finished "2.4.1" parseurl "~1.3.3" - path-to-regexp "0.1.7" + path-to-regexp "0.1.10" proxy-addr "~2.0.7" qs "6.11.0" range-parser "~1.2.1" safe-buffer "5.2.1" - send "0.18.0" - serve-static "1.15.0" + send "0.19.0" + serve-static "1.16.0" + setprototypeof "1.2.0" + statuses "2.0.1" + type-is "~1.6.18" + utils-merge "1.0.1" + vary "~1.1.2" + +express@^4.19.2: + version "4.21.0" + resolved "https://registry.yarnpkg.com/express/-/express-4.21.0.tgz#d57cb706d49623d4ac27833f1cbc466b668eb915" + integrity sha512-VqcNGcj/Id5ZT1LZ/cfihi3ttTn+NJmkli2eZADigjq29qTlWi/hAQ43t/VLPq8+UX06FCEx3ByOYet6ZFblng== + dependencies: + accepts "~1.3.8" + array-flatten "1.1.1" + body-parser "1.20.3" + content-disposition "0.5.4" + content-type "~1.0.4" + cookie "0.6.0" + cookie-signature "1.0.6" + debug "2.6.9" + depd "2.0.0" + 
encodeurl "~2.0.0" + escape-html "~1.0.3" + etag "~1.8.1" + finalhandler "1.3.1" + fresh "0.5.2" + http-errors "2.0.0" + merge-descriptors "1.0.3" + methods "~1.1.2" + on-finished "2.4.1" + parseurl "~1.3.3" + path-to-regexp "0.1.10" + proxy-addr "~2.0.7" + qs "6.13.0" + range-parser "~1.2.1" + safe-buffer "5.2.1" + send "0.19.0" + serve-static "1.16.2" setprototypeof "1.2.0" statuses "2.0.1" type-is "~1.6.18" @@ -8247,6 +8295,19 @@ finalhandler@1.2.0: statuses "2.0.1" unpipe "~1.0.0" +finalhandler@1.3.1: + version "1.3.1" + resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.3.1.tgz#0c575f1d1d324ddd1da35ad7ece3df7d19088019" + integrity sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ== + dependencies: + debug "2.6.9" + encodeurl "~2.0.0" + escape-html "~1.0.3" + on-finished "2.4.1" + parseurl "~1.3.3" + statuses "2.0.1" + unpipe "~1.0.0" + find-replace@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/find-replace/-/find-replace-3.0.0.tgz#3e7e23d3b05167a76f770c9fbd5258b0def68c38" @@ -8329,10 +8390,10 @@ fn.name@1.x.x: resolved "https://registry.yarnpkg.com/fn.name/-/fn.name-1.1.0.tgz#26cad8017967aea8731bc42961d04a3d5988accc" integrity sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw== -follow-redirects@^1.12.1, follow-redirects@^1.14.0, follow-redirects@^1.14.4, follow-redirects@^1.14.9, follow-redirects@^1.15.0, follow-redirects@^1.15.4: - version "1.15.6" - resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.6.tgz#7f815c0cda4249c74ff09e95ef97c23b5fd0399b" - integrity sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA== +follow-redirects@^1.12.1, follow-redirects@^1.14.0, follow-redirects@^1.14.4, follow-redirects@^1.14.9, follow-redirects@^1.15.6: + version "1.15.9" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.9.tgz#a604fa10e443bf98ca94228d9eebcc2e8a2c8ee1" + integrity sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ== for-each@^0.3.3: version "0.3.3" @@ -8516,6 +8577,11 @@ function-bind@^1.1.1: resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== +function-bind@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c" + integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA== + function.prototype.name@^1.1.5: version "1.1.5" resolved "https://registry.yarnpkg.com/function.prototype.name/-/function.prototype.name-1.1.5.tgz#cce0505fe1ffb80503e6f9e46cc64e46a12a9621" @@ -8676,6 +8742,17 @@ get-intrinsic@^1.0.2, get-intrinsic@^1.1.1, get-intrinsic@^1.1.3, get-intrinsic@ has "^1.0.3" has-symbols "^1.0.3" +get-intrinsic@^1.2.4: + version "1.2.4" + resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.4.tgz#e385f5a4b5227d449c3eabbad05494ef0abbeadd" + integrity sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ== + dependencies: + es-errors "^1.3.0" + function-bind "^1.1.2" + has-proto "^1.0.1" + has-symbols "^1.0.3" + hasown "^2.0.0" + get-iterator@^1.0.2: version "1.0.2" resolved 
"https://registry.yarnpkg.com/get-iterator/-/get-iterator-1.0.2.tgz#cd747c02b4c084461fac14f48f6b45a80ed25c82" @@ -9294,6 +9371,13 @@ has-property-descriptors@^1.0.0: dependencies: get-intrinsic "^1.1.1" +has-property-descriptors@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz#963ed7d071dc7bf5f084c5bfbe0d1b6222586854" + integrity sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg== + dependencies: + es-define-property "^1.0.0" + has-proto@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/has-proto/-/has-proto-1.0.1.tgz#1885c1305538958aff469fef37937c22795408e0" @@ -9360,6 +9444,13 @@ hash.js@1.1.7, hash.js@^1.0.0, hash.js@^1.0.3, hash.js@^1.1.7: inherits "^2.0.3" minimalistic-assert "^1.0.1" +hasown@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/hasown/-/hasown-2.0.2.tgz#003eaf91be7adc372e84ec59dc37252cedb80003" + integrity sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ== + dependencies: + function-bind "^1.1.2" + he@1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f" @@ -11306,10 +11397,10 @@ memorystream@^0.3.1: resolved "https://registry.yarnpkg.com/memorystream/-/memorystream-0.3.1.tgz#86d7090b30ce455d63fbae12dda51a47ddcaf9b2" integrity sha1-htcJCzDORV1j+64S3aUaR93K+bI= -merge-descriptors@1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" - integrity sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E= +merge-descriptors@1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.3.tgz#d80319a65f3c7935351e5cfdac8f9318504dbed5" + integrity sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ== merge-options@^3.0.4: version "3.0.4" @@ -11371,11 +11462,11 @@ methods@^1.1.1, methods@~1.1.2: integrity sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4= micromatch@^4.0.4: - version "4.0.5" - resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" - integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA== + version "4.0.8" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.8.tgz#d66fa18f3a47076789320b9b1af32bd86d9fa202" + integrity sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA== dependencies: - braces "^3.0.2" + braces "^3.0.3" picomatch "^2.3.1" miller-rabin@^4.0.0: @@ -12453,6 +12544,11 @@ object-inspect@^1.12.3, object-inspect@^1.9.0: resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.3.tgz#ba62dffd67ee256c8c086dfae69e016cd1f198b9" integrity sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g== +object-inspect@^1.13.1: + version "1.13.2" + resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.13.2.tgz#dea0088467fb991e67af4058147a24824a3043ff" + integrity sha512-IRZSRuzJiynemAXPYtPe5BoI/RESNYR7TYm50MC5Mqbd3Jmw5y790sErYw3V6SryFJD64b74qQQs9wn5Bg/k3g== + object-keys@^1.0.11, object-keys@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" @@ -12866,10 +12962,10 @@ path-scurry@^1.10.1: lru-cache "^9.1.1 || ^10.0.0" minipass "^5.0.0 
|| ^6.0.2 || ^7.0.0" -path-to-regexp@0.1.7: - version "0.1.7" - resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" - integrity sha1-32BBeABfUi8V60SQ5yR6G/qmf4w= +path-to-regexp@0.1.10: + version "0.1.10" + resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.10.tgz#67e9108c5c0551b9e5326064387de4763c4d5f8b" + integrity sha512-7lf7qcQidTku0Gu3YDPc8DJ1q7OOucfa/BSsIwjuh56VU7katFvuM8hULfkwB3Fns/rsVF7PwPKVw1sl5KQS9w== path-to-regexp@^1.7.0: version "1.8.0" @@ -13421,13 +13517,6 @@ qrcode-terminal@^0.12.0: resolved "https://registry.yarnpkg.com/qrcode-terminal/-/qrcode-terminal-0.12.0.tgz#bb5b699ef7f9f0505092a3748be4464fe71b5819" integrity sha512-EXtzRZmC+YGmGlDFbXKxQiMZNwCLEO6BANKXG4iCtSIM0yqc/pappSx3RIKr4r0uh5JsBckOXeKrB3Iz7mdQpQ== -qs@6.10.3, qs@^6.4.0, qs@^6.7.0, qs@^6.9.4: - version "6.10.3" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.10.3.tgz#d6cde1b2ffca87b5aa57889816c5f81535e22e8e" - integrity sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ== - dependencies: - side-channel "^1.0.4" - qs@6.11.0: version "6.11.0" resolved "https://registry.yarnpkg.com/qs/-/qs-6.11.0.tgz#fd0d963446f7a65e1367e01abd85429453f0c37a" @@ -13435,6 +13524,20 @@ qs@6.11.0: dependencies: side-channel "^1.0.4" +qs@6.13.0: + version "6.13.0" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.13.0.tgz#6ca3bd58439f7e245655798997787b0d88a51906" + integrity sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg== + dependencies: + side-channel "^1.0.6" + +qs@^6.4.0, qs@^6.7.0, qs@^6.9.4: + version "6.10.3" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.10.3.tgz#d6cde1b2ffca87b5aa57889816c5f81535e22e8e" + integrity sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ== + dependencies: + side-channel "^1.0.4" + qs@~6.5.2: version "6.5.3" resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.3.tgz#3aeeffc91967ef6e35c0e488ef46fb296ab76aad" @@ -13498,20 +13601,20 @@ range-parser@~1.2.1: resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== -raw-body@2.5.1, raw-body@^2.4.1: - version "2.5.1" - resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.1.tgz#fe1b1628b181b700215e5fd42389f98b71392857" - integrity sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig== +raw-body@2.5.2: + version "2.5.2" + resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.2.tgz#99febd83b90e08975087e8f1f9419a149366b68a" + integrity sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA== dependencies: bytes "3.1.2" http-errors "2.0.0" iconv-lite "0.4.24" unpipe "1.0.0" -raw-body@2.5.2: - version "2.5.2" - resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.2.tgz#99febd83b90e08975087e8f1f9419a149366b68a" - integrity sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA== +raw-body@^2.4.1: + version "2.5.1" + resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.1.tgz#fe1b1628b181b700215e5fd42389f98b71392857" + integrity sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig== dependencies: bytes "3.1.2" http-errors "2.0.0" @@ -14214,6 +14317,25 @@ send@0.18.0: 
range-parser "~1.2.1" statuses "2.0.1" +send@0.19.0: + version "0.19.0" + resolved "https://registry.yarnpkg.com/send/-/send-0.19.0.tgz#bbc5a388c8ea6c048967049dbeac0e4a3f09d7f8" + integrity sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw== + dependencies: + debug "2.6.9" + depd "2.0.0" + destroy "1.2.0" + encodeurl "~1.0.2" + escape-html "~1.0.3" + etag "~1.8.1" + fresh "0.5.2" + http-errors "2.0.0" + mime "1.6.0" + ms "2.1.3" + on-finished "2.4.1" + range-parser "~1.2.1" + statuses "2.0.1" + sentence-case@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/sentence-case/-/sentence-case-2.1.1.tgz#1f6e2dda39c168bf92d13f86d4a918933f667ed4" @@ -14236,16 +14358,26 @@ serialize-javascript@6.0.0: dependencies: randombytes "^2.1.0" -serve-static@1.15.0: - version "1.15.0" - resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.15.0.tgz#faaef08cffe0a1a62f60cad0c4e513cff0ac9540" - integrity sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g== +serve-static@1.16.0: + version "1.16.0" + resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.16.0.tgz#2bf4ed49f8af311b519c46f272bf6ac3baf38a92" + integrity sha512-pDLK8zwl2eKaYrs8mrPZBJua4hMplRWJ1tIFksVC3FtBEBnl8dxgeHtsaMS8DhS9i4fLObaon6ABoc4/hQGdPA== dependencies: encodeurl "~1.0.2" escape-html "~1.0.3" parseurl "~1.3.3" send "0.18.0" +serve-static@1.16.2: + version "1.16.2" + resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.16.2.tgz#b6a5343da47f6bdd2673848bf45754941e803296" + integrity sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw== + dependencies: + encodeurl "~2.0.0" + escape-html "~1.0.3" + parseurl "~1.3.3" + send "0.19.0" + servify@^0.1.12: version "0.1.12" resolved "https://registry.yarnpkg.com/servify/-/servify-0.1.12.tgz#142ab7bee1f1d033b66d0707086085b17c06db95" @@ -14262,6 +14394,18 @@ set-blocking@^2.0.0, set-blocking@~2.0.0: resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc= +set-function-length@^1.2.1: + version "1.2.2" + resolved "https://registry.yarnpkg.com/set-function-length/-/set-function-length-1.2.2.tgz#aac72314198eaed975cf77b2c3b6b880695e5449" + integrity sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg== + dependencies: + define-data-property "^1.1.4" + es-errors "^1.3.0" + function-bind "^1.1.2" + get-intrinsic "^1.2.4" + gopd "^1.0.1" + has-property-descriptors "^1.0.2" + set-immediate-shim@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/set-immediate-shim/-/set-immediate-shim-1.0.1.tgz#4b2b1b27eb808a9f8dcc481a58e5e56f599f3f61" @@ -14340,6 +14484,16 @@ side-channel@^1.0.4: get-intrinsic "^1.0.2" object-inspect "^1.9.0" +side-channel@^1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.6.tgz#abd25fb7cd24baf45466406b1096b7831c9215f2" + integrity sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA== + dependencies: + call-bind "^1.0.7" + es-errors "^1.3.0" + get-intrinsic "^1.2.4" + object-inspect "^1.13.1" + signal-exit@^3.0.0, signal-exit@^3.0.2: version "3.0.7" resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9"