diff --git a/.eslintrc.js b/.eslintrc.js index dd1052aae..b5052e51d 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -36,8 +36,8 @@ module.exports = { "@typescript-eslint/no-unused-vars": ["error", { ignoreRestSiblings: true }], "chai-expect/missing-assertion": 2, "no-duplicate-imports": "error", - // "require-await": "error", "@typescript-eslint/no-floating-promises": ["error"], + "@typescript-eslint/no-misused-promises": ["error", { checksVoidReturn: false }], }, settings: { node: { diff --git a/.github/workflows/dependency-review.yml b/.github/workflows/dependency-review.yml index 085337781..1cc0c4dcd 100644 --- a/.github/workflows/dependency-review.yml +++ b/.github/workflows/dependency-review.yml @@ -11,4 +11,4 @@ jobs: - name: "Checkout Repository" uses: actions/checkout@v3 - name: "Dependency Review" - uses: actions/dependency-review-action@v3.1.2 + uses: actions/dependency-review-action@v4.3.2 diff --git a/Dockerfile b/Dockerfile index 568871721..7b3a7113c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,6 +1,6 @@ FROM node:16 -WORKDIR /relayer-v2 +WORKDIR /relayer COPY . ./ diff --git a/README.md b/README.md index 688cf4d7e..f6c5a2801 100644 --- a/README.md +++ b/README.md @@ -1,23 +1,23 @@


- Across + Across
-

Across is a secure and instant asset-transfer bridge between EVM networks.

+

Across is a secure and instant asset-transfer bridge between EVM networks.


-# Across V2 Relayer +# Across V3 Relayer -This code implements [UMIP-157](https://github.com/UMAprotocol/UMIPs/blob/master/UMIPs/umip-157.md) and interacts with these [smart contracts](https://github.com/across-protocol/contracts-v2). The contracts were audited [by OpenZeppelin](https://blog.openzeppelin.com/uma-across-v2-audit/). +This code implements [UMIP-157](https://github.com/UMAprotocol/UMIPs/blob/master/UMIPs/umip-157.md) and interacts with these [smart contracts](https://github.com/across-protocol/contracts). The contracts were audited [by OpenZeppelin](https://blog.openzeppelin.com/across-v3-incremental-audit). # How to run a Relayer -Check out [this guide](https://docs.across.to/v2/developers/running-a-relayer) for detailed bot instructions! +Check out [this guide](https://docs.across.to/relayers/running-a-relayer) for detailed bot instructions! ## Prerequisites -After installing dependencies and building the repository, be sure to to [install RedisDB](https://redis.io/docs/getting-started/installation/), an in-memory storage layer that is required to make the bots work. The bots query blockchain RPCs for a lot of smart contract events so its important that the bot +After installing dependencies and building the repository, be sure to [install RedisDB](https://redis.io/docs/getting-started/installation/), an in-memory storage layer that is required to make the bots work. The bots query blockchain RPCs for a lot of smart contract events so it's important that the bot cache some of this data in order to maintain its speed. The first time that the bot runs, it might be slower than usual as the Redis DB fills up. This slowdown should disappear on subsequent runs. 
@@ -38,7 +38,7 @@ REDIS_URL=redis://localhost:6379 ```sh # install dependencies -cd relayer-v2 +cd relayer yarn install # build relayer bot @@ -69,13 +69,13 @@ Read through [CONTRIBUTING.md](https://github.com/UMAprotocol/protocol/blob/mast ## Bug Bounty -Here's the official Across [bug bounty program](https://docs.across.to/v2/miscellaneous/bug-bounty). The bug bounty only applies to the `master` branch and is agnostic of release versions. +Here's the official Across [bug bounty program](https://docs.across.to/resources/bug-bounty). The bug bounty only applies to the `master` branch and is agnostic of release versions. ## Integration tests You can conveniently run the dataworker, relayer, and finalizer functions via the hardhat task `integration-tests` which sets safe configurations like `PROPOSER_ENABLED=false` and prevents the user from accidentally sending an on-chain transaction. The test will complete successfully if no functions throw an error, which can be used as an additional source of confidence (in addition to the unit tests) that code has not broken the production bots. -If you want to read more about the three different agents in the Across system, check out the [docs](https://docs.across.to/v2/how-does-across-work/overview/user-roles). +If you want to read more about the three different agents in the Across system, check out the [docs](https://docs.across.to/reference/actors-in-the-system). ```sh LOG_IN_TEST=true yarn hardhat integration-tests --wallet mnemonic @@ -85,9 +85,9 @@ LOG_IN_TEST=true yarn hardhat integration-tests --wallet mnemonic ### Active Branches -| Branch | Status | -| ------------------------------------------------------------------- | ---------------- | -| [master](https://github.com/across-protocol/relayer-v2/tree/master) | Accepts all PRs. 
| +| Branch | Status | +| ---------------------------------------------------------------- | ---------------- | +| [master](https://github.com/across-protocol/relayer/tree/master) | Accepts all PRs. | ### Overview diff --git a/contracts/AtomicWethDepositor.sol b/contracts/AtomicWethDepositor.sol index 2335e527d..80e948cc9 100644 --- a/contracts/AtomicWethDepositor.sol +++ b/contracts/AtomicWethDepositor.sol @@ -46,6 +46,7 @@ interface LineaL1MessageService { contract AtomicWethDepositor { Weth public immutable weth = Weth(0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2); OvmL1Bridge public immutable optimismL1Bridge = OvmL1Bridge(0x99C9fc46f92E8a1c0deC1b1747d010903E884bE1); + OvmL1Bridge public immutable modeL1Bridge = OvmL1Bridge(0x735aDBbE72226BD52e818E7181953f42E3b0FF21); OvmL1Bridge public immutable bobaL1Bridge = OvmL1Bridge(0xdc1664458d2f0B6090bEa60A8793A4E66c2F1c00); OvmL1Bridge public immutable baseL1Bridge = OvmL1Bridge(0x3154Cf16ccdb4C6d922629664174b904d80F2C35); PolygonL1Bridge public immutable polygonL1Bridge = PolygonL1Bridge(0xA0c68C638235ee32657e8f720a23ceC1bFc77C77); @@ -55,6 +56,7 @@ contract AtomicWethDepositor { event ZkSyncEthDepositInitiated(address indexed from, address indexed to, uint256 amount); event LineaEthDepositInitiated(address indexed from, address indexed to, uint256 amount); + event OvmEthDepositInitiated(uint256 indexed chainId, address indexed from, address indexed to, uint256 amount); function bridgeWethToOvm(address to, uint256 amount, uint32 l2Gas, uint256 chainId) public { weth.transferFrom(msg.sender, address(this), amount); @@ -64,11 +66,15 @@ contract AtomicWethDepositor { optimismL1Bridge.depositETHTo{ value: amount }(to, l2Gas, ""); } else if (chainId == 8453) { baseL1Bridge.depositETHTo{ value: amount }(to, l2Gas, ""); + } else if (chainId == 34443) { + modeL1Bridge.depositETHTo{ value: amount }(to, l2Gas, ""); } else if (chainId == 288) { bobaL1Bridge.depositETHTo{ value: amount }(to, l2Gas, ""); } else { revert("Invalid 
OVM chainId"); } + + emit OvmEthDepositInitiated(chainId, msg.sender, to, amount); } function bridgeWethToPolygon(address to, uint256 amount) public { diff --git a/contracts/MockLineaEvents.sol b/contracts/MockLineaEvents.sol new file mode 100644 index 000000000..ac8ccdacb --- /dev/null +++ b/contracts/MockLineaEvents.sol @@ -0,0 +1,60 @@ +/// This file contains contracts that can be used to unit test the src/clients/bridges/LineaAdapter.ts +/// code which reads events from Linea contracts facilitating cross chain transfers. + +pragma solidity ^0.8.0; + +contract LineaWethBridge { + event MessageClaimed(bytes32 indexed _messageHash); + event MessageSent( + address indexed _from, + address indexed _to, + uint256 _fee, + uint256 _value, + uint256 _nonce, + bytes _calldata, + bytes32 indexed _messageHash + ); + + function emitMessageSent(address from, address to, uint256 value) external { + emit MessageSent(from, to, 0, value, 0, new bytes(0), bytes32(0)); + } + + function emitMessageSentWithMessageHash(address from, address to, uint256 value, bytes32 messageHash) external { + emit MessageSent(from, to, 0, value, 0, new bytes(0), messageHash); + } + + function emitMessageClaimed(bytes32 messageHash) external { + emit MessageClaimed(messageHash); + } +} + +contract LineaUsdcBridge { + event Deposited(address indexed depositor, uint256 amount, address indexed to); + event ReceivedFromOtherLayer(address indexed recipient, uint256 amount); + + function emitDeposited(address depositor, address to) external { + emit Deposited(depositor, 0, to); + } + + function emitReceivedFromOtherLayer(address recipient) external { + emit ReceivedFromOtherLayer(recipient, 0); + } +} + +contract LineaERC20Bridge { + event BridgingInitiatedV2(address indexed sender, address indexed recipient, address indexed token, uint256 amount); + event BridgingFinalizedV2( + address indexed nativeToken, + address indexed bridgedToken, + uint256 amount, + address indexed recipient + ); + + function 
emitBridgingInitiated(address sender, address recipient, address token) external { + emit BridgingInitiatedV2(sender, recipient, token, 0); + } + + function emitBridgingFinalized(address l1Token, address recipient) external { + emit BridgingFinalizedV2(l1Token, address(0), 0, recipient); + } +} diff --git a/contracts/MockOpStackEvents.sol b/contracts/MockOpStackEvents.sol new file mode 100644 index 000000000..d251361c8 --- /dev/null +++ b/contracts/MockOpStackEvents.sol @@ -0,0 +1,24 @@ +/// This file contains contracts that can be used to unit test the src/clients/bridges/op-stack +/// code which reads events from OpStack contracts facilitating cross chain transfers. + +pragma solidity ^0.8.0; + +contract OpStackWethBridge { + event ETHDepositInitiated(address indexed _from, address indexed _to, uint256 _amount, bytes _data); + event DepositFinalized( + address indexed _l1Token, + address indexed _l2Token, + address indexed _from, + address _to, + uint256 _amount, + bytes _data + ); + + function emitDepositInitiated(address from, address to, uint256 amount) external { + emit ETHDepositInitiated(from, to, amount, new bytes(0)); + } + + function emitDepositFinalized(address from, address to, uint256 amount) external { + emit DepositFinalized(address(0), address(0), from, to, amount, new bytes(0)); + } +} diff --git a/contracts/MockSpokePool.sol b/contracts/MockSpokePool.sol index 9d764f27d..288bd4933 100644 --- a/contracts/MockSpokePool.sol +++ b/contracts/MockSpokePool.sol @@ -1,7 +1,7 @@ //SPDX-License-Identifier: Unlicense pragma solidity ^0.8.0; -import "@across-protocol/contracts-v2/contracts/test/MockSpokePool.sol"; +import "@across-protocol/contracts/contracts/test/MockSpokePool.sol"; /** * @title MockSpokePool diff --git a/deploy/001_deploy_atomic_depositor.ts b/deploy/001_deploy_atomic_depositor.ts index 8a90908ac..2d6231279 100644 --- a/deploy/001_deploy_atomic_depositor.ts +++ b/deploy/001_deploy_atomic_depositor.ts @@ -11,7 +11,7 @@ const func: 
DeployFunction = async function (hre: HardhatRuntimeEnvironment) { await deploy("AtomicWethDepositor", { from: deployer, log: true, - skipIfAlreadyDeployed: true, + skipIfAlreadyDeployed: false, }); }; module.exports = func; diff --git a/deployments/mainnet/AtomicWethDepositor.json b/deployments/mainnet/AtomicWethDepositor.json index c83885eeb..73f149f3b 100644 --- a/deployments/mainnet/AtomicWethDepositor.json +++ b/deployments/mainnet/AtomicWethDepositor.json @@ -1,5 +1,5 @@ { - "address": "0x6e41f79772c3CF7D6d15d17d899e129d5aAAA740", + "address": "0x24d8b91aB9c461d7c0D6fB9F5a294CEA61D11710", "abi": [ { "anonymous": false, @@ -26,6 +26,37 @@ "name": "LineaEthDepositInitiated", "type": "event" }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "uint256", + "name": "chainId", + "type": "uint256" + }, + { + "indexed": true, + "internalType": "address", + "name": "from", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "amount", + "type": "uint256" + } + ], + "name": "OvmEthDepositInitiated", + "type": "event" + }, { "anonymous": false, "inputs": [ @@ -191,6 +222,19 @@ "stateMutability": "view", "type": "function" }, + { + "inputs": [], + "name": "modeL1Bridge", + "outputs": [ + { + "internalType": "contract OvmL1Bridge", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, { "inputs": [], "name": "optimismL1Bridge", @@ -248,41 +292,26 @@ "type": "receive" } ], - "transactionHash": "0xc63bae32e6775468f9484b83a7082197a03b6cad4441daac3a412c52f7808c8f", + "transactionHash": "0xab0f6561d4e8342062dd577a9b27028f3df2adc9f17d4dbb610bd990fb714c5d", "receipt": { "to": null, "from": "0x9A8f92a830A5cB89a3816e3D267CB7791c16b04D", - "contractAddress": "0x6e41f79772c3CF7D6d15d17d899e129d5aAAA740", - "transactionIndex": 10, - "gasUsed": "985505", + "contractAddress": 
"0x24d8b91aB9c461d7c0D6fB9F5a294CEA61D11710", + "transactionIndex": 57, + "gasUsed": "1069656", "logsBloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", - "blockHash": "0x9fa0bf35f52aa30aa37e7c4da3d1e1d565cc11866066a00897ab23b1f7630826", - "transactionHash": "0xc63bae32e6775468f9484b83a7082197a03b6cad4441daac3a412c52f7808c8f", + "blockHash": "0x2e9721e7f55a1b53f00009c706b20995bfd049d73aeb6dea3fdd532776e13d1b", + "transactionHash": "0xab0f6561d4e8342062dd577a9b27028f3df2adc9f17d4dbb610bd990fb714c5d", "logs": [], - "blockNumber": 19476934, - "cumulativeGasUsed": "1931450", + "blockNumber": 19926809, + "cumulativeGasUsed": "9024031", "status": 1, "byzantium": true }, "args": [], - "numDeployments": 3, - "solcInputHash": "30dfeae412ba7ba24be6d29ee0e15ed9", - "metadata": 
"{\"compiler\":{\"version\":\"0.8.23+commit.f704f362\"},\"language\":\"Solidity\",\"output\":{\"abi\":[{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"address\",\"name\":\"from\",\"type\":\"address\"},{\"indexed\":true,\"internalType\":\"address\",\"name\":\"to\",\"type\":\"address\"},{\"indexed\":false,\"internalType\":\"uint256\",\"name\":\"amount\",\"type\":\"uint256\"}],\"name\":\"LineaEthDepositInitiated\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"address\",\"name\":\"from\",\"type\":\"address\"},{\"indexed\":true,\"internalType\":\"address\",\"name\":\"to\",\"type\":\"address\"},{\"indexed\":false,\"internalType\":\"uint256\",\"name\":\"amount\",\"type\":\"uint256\"}],\"name\":\"ZkSyncEthDepositInitiated\",\"type\":\"event\"},{\"stateMutability\":\"payable\",\"type\":\"fallback\"},{\"inputs\":[],\"name\":\"baseL1Bridge\",\"outputs\":[{\"internalType\":\"contract OvmL1Bridge\",\"name\":\"\",\"type\":\"address\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"bobaL1Bridge\",\"outputs\":[{\"internalType\":\"contract 
OvmL1Bridge\",\"name\":\"\",\"type\":\"address\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"to\",\"type\":\"address\"},{\"internalType\":\"uint256\",\"name\":\"amount\",\"type\":\"uint256\"}],\"name\":\"bridgeWethToLinea\",\"outputs\":[],\"stateMutability\":\"payable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"to\",\"type\":\"address\"},{\"internalType\":\"uint256\",\"name\":\"amount\",\"type\":\"uint256\"},{\"internalType\":\"uint32\",\"name\":\"l2Gas\",\"type\":\"uint32\"},{\"internalType\":\"uint256\",\"name\":\"chainId\",\"type\":\"uint256\"}],\"name\":\"bridgeWethToOvm\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"to\",\"type\":\"address\"},{\"internalType\":\"uint256\",\"name\":\"amount\",\"type\":\"uint256\"}],\"name\":\"bridgeWethToPolygon\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"to\",\"type\":\"address\"},{\"internalType\":\"uint256\",\"name\":\"amount\",\"type\":\"uint256\"},{\"internalType\":\"uint256\",\"name\":\"l2GasLimit\",\"type\":\"uint256\"},{\"internalType\":\"uint256\",\"name\":\"l2GasPerPubdataByteLimit\",\"type\":\"uint256\"},{\"internalType\":\"address\",\"name\":\"refundRecipient\",\"type\":\"address\"}],\"name\":\"bridgeWethToZkSync\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"lineaL1MessageService\",\"outputs\":[{\"internalType\":\"contract LineaL1MessageService\",\"name\":\"\",\"type\":\"address\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"optimismL1Bridge\",\"outputs\":[{\"internalType\":\"contract 
OvmL1Bridge\",\"name\":\"\",\"type\":\"address\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"polygonL1Bridge\",\"outputs\":[{\"internalType\":\"contract PolygonL1Bridge\",\"name\":\"\",\"type\":\"address\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"weth\",\"outputs\":[{\"internalType\":\"contract Weth\",\"name\":\"\",\"type\":\"address\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"zkSyncL1Bridge\",\"outputs\":[{\"internalType\":\"contract ZkSyncL1Bridge\",\"name\":\"\",\"type\":\"address\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"stateMutability\":\"payable\",\"type\":\"receive\"}],\"devdoc\":{\"kind\":\"dev\",\"methods\":{},\"version\":1},\"userdoc\":{\"kind\":\"user\",\"methods\":{},\"notice\":\"Contract deployed on Ethereum helps relay bots atomically unwrap and bridge WETH over the canonical chain bridges for Optimism, Base, Boba, ZkSync, Linea, and Polygon. Needed as these chains only support bridging of ETH, not WETH.\",\"version\":1}},\"settings\":{\"compilationTarget\":{\"contracts/AtomicWethDepositor.sol\":\"AtomicWethDepositor\"},\"evmVersion\":\"shanghai\",\"libraries\":{},\"metadata\":{\"bytecodeHash\":\"ipfs\",\"useLiteralContent\":true},\"optimizer\":{\"enabled\":true,\"runs\":1000000},\"remappings\":[],\"viaIR\":true},\"sources\":{\"contracts/AtomicWethDepositor.sol\":{\"content\":\"// SPDX-License-Identifier: GPL-3.0-only\\npragma solidity ^0.8.0;\\n\\ninterface Weth {\\n function withdraw(uint256 _wad) external;\\n\\n function transferFrom(address _from, address _to, uint256 _wad) external;\\n}\\n\\ninterface OvmL1Bridge {\\n function depositETHTo(address _to, uint32 _l2Gas, bytes calldata _data) external payable;\\n}\\n\\ninterface PolygonL1Bridge {\\n function depositEtherFor(address _to) external payable;\\n}\\n\\ninterface ZkSyncL1Bridge {\\n function requestL2Transaction(\\n address _contractL2,\\n uint256 _l2Value,\\n 
bytes calldata _calldata,\\n uint256 _l2GasLimit,\\n uint256 _l2GasPerPubdataByteLimit,\\n bytes[] calldata _factoryDeps,\\n address _refundRecipient\\n ) external payable;\\n\\n function l2TransactionBaseCost(\\n uint256 _gasPrice,\\n uint256 _l2GasLimit,\\n uint256 _l2GasPerPubdataByteLimit\\n ) external pure returns (uint256);\\n}\\n\\ninterface LineaL1MessageService {\\n function sendMessage(address _to, uint256 _fee, bytes calldata _calldata) external payable;\\n}\\n\\n/**\\n * @notice Contract deployed on Ethereum helps relay bots atomically unwrap and bridge WETH over the canonical chain\\n * bridges for Optimism, Base, Boba, ZkSync, Linea, and Polygon. Needed as these chains only support bridging of ETH,\\n * not WETH.\\n */\\n\\ncontract AtomicWethDepositor {\\n Weth public immutable weth = Weth(0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2);\\n OvmL1Bridge public immutable optimismL1Bridge = OvmL1Bridge(0x99C9fc46f92E8a1c0deC1b1747d010903E884bE1);\\n OvmL1Bridge public immutable bobaL1Bridge = OvmL1Bridge(0xdc1664458d2f0B6090bEa60A8793A4E66c2F1c00);\\n OvmL1Bridge public immutable baseL1Bridge = OvmL1Bridge(0x3154Cf16ccdb4C6d922629664174b904d80F2C35);\\n PolygonL1Bridge public immutable polygonL1Bridge = PolygonL1Bridge(0xA0c68C638235ee32657e8f720a23ceC1bFc77C77);\\n ZkSyncL1Bridge public immutable zkSyncL1Bridge = ZkSyncL1Bridge(0x32400084C286CF3E17e7B677ea9583e60a000324);\\n LineaL1MessageService public immutable lineaL1MessageService =\\n LineaL1MessageService(0xd19d4B5d358258f05D7B411E21A1460D11B0876F);\\n\\n event ZkSyncEthDepositInitiated(address indexed from, address indexed to, uint256 amount);\\n event LineaEthDepositInitiated(address indexed from, address indexed to, uint256 amount);\\n\\n function bridgeWethToOvm(address to, uint256 amount, uint32 l2Gas, uint256 chainId) public {\\n weth.transferFrom(msg.sender, address(this), amount);\\n weth.withdraw(amount);\\n\\n if (chainId == 10) {\\n optimismL1Bridge.depositETHTo{ value: amount }(to, l2Gas, 
\\\"\\\");\\n } else if (chainId == 8453) {\\n baseL1Bridge.depositETHTo{ value: amount }(to, l2Gas, \\\"\\\");\\n } else if (chainId == 288) {\\n bobaL1Bridge.depositETHTo{ value: amount }(to, l2Gas, \\\"\\\");\\n } else {\\n revert(\\\"Invalid OVM chainId\\\");\\n }\\n }\\n\\n function bridgeWethToPolygon(address to, uint256 amount) public {\\n weth.transferFrom(msg.sender, address(this), amount);\\n weth.withdraw(amount);\\n polygonL1Bridge.depositEtherFor{ value: amount }(to);\\n }\\n\\n function bridgeWethToLinea(address to, uint256 amount) public payable {\\n weth.transferFrom(msg.sender, address(this), amount);\\n weth.withdraw(amount);\\n lineaL1MessageService.sendMessage{ value: amount + msg.value }(to, msg.value, \\\"\\\");\\n // Emit an event that we can easily track in the Linea-related adapters/finalizers\\n emit LineaEthDepositInitiated(msg.sender, to, amount);\\n }\\n\\n function bridgeWethToZkSync(\\n address to,\\n uint256 amount,\\n uint256 l2GasLimit,\\n uint256 l2GasPerPubdataByteLimit,\\n address refundRecipient\\n ) public {\\n // The ZkSync Mailbox contract checks that the msg.value of the transaction is enough to cover the transaction base\\n // cost. The transaction base cost can be queried from the Mailbox by passing in an L1 \\\"executed\\\" gas price,\\n // which is the priority fee plus base fee. 
This is the same as calling tx.gasprice on-chain as the Mailbox\\n // contract does here:\\n // https://github.com/matter-labs/era-contracts/blob/3a4506522aaef81485d8abb96f5a6394bd2ba69e/ethereum/contracts/zksync/facets/Mailbox.sol#L287\\n uint256 l2TransactionBaseCost = zkSyncL1Bridge.l2TransactionBaseCost(\\n tx.gasprice,\\n l2GasLimit,\\n l2GasPerPubdataByteLimit\\n );\\n uint256 valueToSubmitXChainMessage = l2TransactionBaseCost + amount;\\n weth.transferFrom(msg.sender, address(this), valueToSubmitXChainMessage);\\n weth.withdraw(valueToSubmitXChainMessage);\\n zkSyncL1Bridge.requestL2Transaction{ value: valueToSubmitXChainMessage }(\\n to,\\n amount,\\n \\\"\\\",\\n l2GasLimit,\\n l2GasPerPubdataByteLimit,\\n new bytes[](0),\\n refundRecipient\\n );\\n\\n // Emit an event that we can easily track in the ZkSyncAdapter because otherwise there is no easy event to\\n // track ETH deposit initiations.\\n emit ZkSyncEthDepositInitiated(msg.sender, to, amount);\\n }\\n\\n fallback() external payable {}\\n\\n // Included to remove a compilation warning.\\n // NOTE: this should not affect behavior.\\n receive() external payable {}\\n}\\n\",\"keccak256\":\"0x5ed63dd7768a3dc7e19c6006742f1baec13a69e6f49241e83d8a4200de763ded\",\"license\":\"GPL-3.0-only\"}},\"version\":1}", - "bytecode": 
"0x61016080604052346101545773c02aaa39b223fe8d0a0e5c4f27ead9083c756cc26080527399c9fc46f92e8a1c0dec1b1747d010903e884be160a05273dc1664458d2f0b6090bea60a8793a4e66c2f1c0060c052733154cf16ccdb4c6d922629664174b904d80f2c3560e0526101009073a0c68c638235ee32657e8f720a23cec1bfc77c7782526101207332400084c286cf3e17e7b677ea9583e60a00032481526101409073d19d4b5d358258f05d7b411e21a1460d11b0876f82526110fd93846101598539608051848181610105015281816106a9015281816108da015281816109500152610c19015260a0518481816101e101526104e1015260c05184818161035c0152611005015260e0518481816102ba015261062e015251838181610550015261077a01525182818161086b01528181610bd901528181610d0f0152610dce0152518181816105bf0152610a210152f35b5f80fdfe6080806040526004908136101561001d575b5050361561001b57005b005b5f905f3560e01c908163019f8e8114610fbd57508063128d5f6814610b3457806336918a97146108fe5780633fc8cef31461088f5780635970eafa14610820578063b3d5ccc314610652578063b745c3f3146105e3578063c04b953414610574578063c80dcc3814610505578063d3cdc8f9146104965763e88650c403610011573461029a5760807ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261029a576100d5611029565b916024359060443563ffffffff811681036104925773ffffffffffffffffffffffffffffffffffffffff946064357f00000000000000000000000000000000000000000000000000000000000000008716803b1561046f57604080517f23b872dd0000000000000000000000000000000000000000000000000000000081523387820190815230602082015291820188905290889082908190606001038183865af1801561048757908891610473575b5050803b1561046f578680916024604051809481937f2e1a7d4d0000000000000000000000000000000000000000000000000000000083528b8b8401525af1801561046457908791610450575b5050600a81036102ac575084957f00000000000000000000000000000000000000000000000000000000000000001690813b156102a857610277948694604051968795869485937f9a2ac6d500000000000000000000000000000000000000000000000000000000855284019060809273ffffffffffffffffffffffffffffffffffffffff63ffffffff92168352166020820152606060408201525f60608201520190565b03925af1801561029d5761028a57505080f35b61029
39061104c565b61029a5780f35b80fd5b6040513d84823e3d90fd5b8580fd5b6121058103610350575084957f00000000000000000000000000000000000000000000000000000000000000001690813b156102a857610277948694604051968795869485937f9a2ac6d500000000000000000000000000000000000000000000000000000000855284019060809273ffffffffffffffffffffffffffffffffffffffff63ffffffff92168352166020820152606060408201525f60608201520190565b610120036103f25784957f00000000000000000000000000000000000000000000000000000000000000001690813b156102a857610277948694604051968795869485937f9a2ac6d500000000000000000000000000000000000000000000000000000000855284019060809273ffffffffffffffffffffffffffffffffffffffff63ffffffff92168352166020820152606060408201525f60608201520190565b6064836020604051917f08c379a0000000000000000000000000000000000000000000000000000000008352820152601360248201527f496e76616c6964204f564d20636861696e4964000000000000000000000000006044820152fd5b6104599061104c565b6102a857855f6101d2565b6040513d89823e3d90fd5b8680fd5b61047c9061104c565b61046f57865f610185565b6040513d8a823e3d90fd5b8380fd5b503461029a57807ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261029a57602060405173ffffffffffffffffffffffffffffffffffffffff7f0000000000000000000000000000000000000000000000000000000000000000168152f35b503461029a57807ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261029a57602060405173ffffffffffffffffffffffffffffffffffffffff7f0000000000000000000000000000000000000000000000000000000000000000168152f35b503461029a57807ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261029a57602060405173ffffffffffffffffffffffffffffffffffffffff7f0000000000000000000000000000000000000000000000000000000000000000168152f35b503461029a57807ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261029a57602060405173ffffffffffffffffffffffffffffffffffffffff7f0000000000000000000000000000000000000000000000000000000000000000168152f35b5080913461081d5760407ffffffffffffffffffffffffffffffffffffffffffffffff
ffffffffffffffffc36011261081d5761068c611029565b6024359073ffffffffffffffffffffffffffffffffffffffff90817f000000000000000000000000000000000000000000000000000000000000000016803b156102a857604080517f23b872dd0000000000000000000000000000000000000000000000000000000081523387820190815230602082015291820186905290879082908190606001038183865af1908115610464578791610809575b5050803b156102a8578580916024604051809481937f2e1a7d4d000000000000000000000000000000000000000000000000000000008352898b8401525af19081156107fe5786916107e6575b5050817f00000000000000000000000000000000000000000000000000000000000000001690813b156102a857859360249260405196879586947f4faa8a2600000000000000000000000000000000000000000000000000000000865216908401525af1801561029d5761028a5750f35b6107ef9061104c565b6107fa57845f610775565b8480fd5b6040513d88823e3d90fd5b6108129061104c565b6102a857855f610728565b50fd5b503461029a57807ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261029a57602060405173ffffffffffffffffffffffffffffffffffffffff7f0000000000000000000000000000000000000000000000000000000000000000168152f35b503461029a57807ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261029a57602060405173ffffffffffffffffffffffffffffffffffffffff7f0000000000000000000000000000000000000000000000000000000000000000168152f35b509060407ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc360112610b3057610932611029565b826024359173ffffffffffffffffffffffffffffffffffffffff90817f000000000000000000000000000000000000000000000000000000000000000016803b1561049257604080517f23b872dd0000000000000000000000000000000000000000000000000000000081523388820190815230602082015291820187905290859082908190606001038183865af1908115610b25578591610b11575b5050803b15610492578380916024604051809481937f2e1a7d4d0000000000000000000000000000000000000000000000000000000083528a8c8401525af1908115610b06578491610af2575b5050817f00000000000000000000000000000000000000000000000000000000000000001691610a4c348661108d565b833b156107fa578492608491604051958
69485937f9f3ce55a00000000000000000000000000000000000000000000000000000000855216809a840152346024840152606060448401528560648401525af1801561029d57610ada575b50506040519081527f61ed67a945fe5f4d777919629ad666c7e81d66dc5fbaf4c143edd000c15d67dd60203392a380f35b610ae39061104c565b610aee57825f610aa9565b8280fd5b610afb9061104c565b610aee57825f610a1c565b6040513d86823e3d90fd5b610b1a9061104c565b61049257835f6109cf565b6040513d87823e3d90fd5b5080fd5b5034610f515760a07ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc360112610f5157610b6c611029565b602490604435906064359460843573ffffffffffffffffffffffffffffffffffffffff93848216809203610f51576040517fb473318e0000000000000000000000000000000000000000000000000000000081523a848201528187820152886044820152602081606481897f0000000000000000000000000000000000000000000000000000000000000000165afa8015610f46575f90610f55575b610c15915087359061108d565b91857f000000000000000000000000000000000000000000000000000000000000000016803b15610f5157604080517f23b872dd00000000000000000000000000000000000000000000000000000000815233878201908152306020820152918201869052905f9082908190606001038183865af18015610f4657610f33575b50803b15610f2f5788809189604051809481937f2e1a7d4d000000000000000000000000000000000000000000000000000000008352898b8401525af18015610f2457908991610f10575b505060405191602083019280841067ffffffffffffffff851117610ee557899a8460409b98999a9b52878252897f0000000000000000000000000000000000000000000000000000000000000000163b15610ee15760409a989695949a99979951957feb6724190000000000000000000000000000000000000000000000000000000087528888169087015288358987015260e060448701528960e48701526101048601926064870152608486015261010060a48601525180915261012490818501918160051b860101999189905b828210610e43575050505082809281808b8b979560c4899701520391887f0000000000000000000000000000000000000000000000000000000000000000165af1801561029d57610e2f575b50506040519235835216907fa3e601130860a6f97b42655ad74f631ddf0c8e5adaa98402fded9c09bc35a44060203392a380f35b610e389061104c565b610492578385610dfb565b7ff
ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffedc878d969596030181528b855180518092528c5b828110610ecb575050808d0160209081018d9052601f919091017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe016909c018c019b93948401936001929092019101610daf565b808f602082818095870101519201015201610e77565b8780fd5b886041877f4e487b71000000000000000000000000000000000000000000000000000000005f52525ffd5b610f199061104c565b610ee157875f610ce0565b6040513d8b823e3d90fd5b8880fd5b610f3e91995061104c565b5f975f610c95565b6040513d5f823e3d90fd5b5f80fd5b5060203d602011610fb6575b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0601f820116820182811067ffffffffffffffff821117610ee557602091839160405281010312610f5157610c159051610c08565b503d610f61565b34610f51575f7ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc360112610f515760209073ffffffffffffffffffffffffffffffffffffffff7f0000000000000000000000000000000000000000000000000000000000000000168152f35b6004359073ffffffffffffffffffffffffffffffffffffffff82168203610f5157565b67ffffffffffffffff811161106057604052565b7f4e487b71000000000000000000000000000000000000000000000000000000005f52604160045260245ffd5b9190820180921161109a57565b7f4e487b71000000000000000000000000000000000000000000000000000000005f52601160045260245ffdfea2646970667358221220b493edd1358f39227ec9f65101f64f280c9134d4324520aa2669fc1fade687f764736f6c63430008170033", - "deployedBytecode": 
"0x6080806040526004908136101561001d575b5050361561001b57005b005b5f905f3560e01c908163019f8e8114610fbd57508063128d5f6814610b3457806336918a97146108fe5780633fc8cef31461088f5780635970eafa14610820578063b3d5ccc314610652578063b745c3f3146105e3578063c04b953414610574578063c80dcc3814610505578063d3cdc8f9146104965763e88650c403610011573461029a5760807ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261029a576100d5611029565b916024359060443563ffffffff811681036104925773ffffffffffffffffffffffffffffffffffffffff946064357f00000000000000000000000000000000000000000000000000000000000000008716803b1561046f57604080517f23b872dd0000000000000000000000000000000000000000000000000000000081523387820190815230602082015291820188905290889082908190606001038183865af1801561048757908891610473575b5050803b1561046f578680916024604051809481937f2e1a7d4d0000000000000000000000000000000000000000000000000000000083528b8b8401525af1801561046457908791610450575b5050600a81036102ac575084957f00000000000000000000000000000000000000000000000000000000000000001690813b156102a857610277948694604051968795869485937f9a2ac6d500000000000000000000000000000000000000000000000000000000855284019060809273ffffffffffffffffffffffffffffffffffffffff63ffffffff92168352166020820152606060408201525f60608201520190565b03925af1801561029d5761028a57505080f35b6102939061104c565b61029a5780f35b80fd5b6040513d84823e3d90fd5b8580fd5b6121058103610350575084957f00000000000000000000000000000000000000000000000000000000000000001690813b156102a857610277948694604051968795869485937f9a2ac6d500000000000000000000000000000000000000000000000000000000855284019060809273ffffffffffffffffffffffffffffffffffffffff63ffffffff92168352166020820152606060408201525f60608201520190565b610120036103f25784957f00000000000000000000000000000000000000000000000000000000000000001690813b156102a857610277948694604051968795869485937f9a2ac6d500000000000000000000000000000000000000000000000000000000855284019060809273ffffffffffffffffffffffffffffffffffffffff63ffffffff92168352166020820152606060408
201525f60608201520190565b6064836020604051917f08c379a0000000000000000000000000000000000000000000000000000000008352820152601360248201527f496e76616c6964204f564d20636861696e4964000000000000000000000000006044820152fd5b6104599061104c565b6102a857855f6101d2565b6040513d89823e3d90fd5b8680fd5b61047c9061104c565b61046f57865f610185565b6040513d8a823e3d90fd5b8380fd5b503461029a57807ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261029a57602060405173ffffffffffffffffffffffffffffffffffffffff7f0000000000000000000000000000000000000000000000000000000000000000168152f35b503461029a57807ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261029a57602060405173ffffffffffffffffffffffffffffffffffffffff7f0000000000000000000000000000000000000000000000000000000000000000168152f35b503461029a57807ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261029a57602060405173ffffffffffffffffffffffffffffffffffffffff7f0000000000000000000000000000000000000000000000000000000000000000168152f35b503461029a57807ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261029a57602060405173ffffffffffffffffffffffffffffffffffffffff7f0000000000000000000000000000000000000000000000000000000000000000168152f35b5080913461081d5760407ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261081d5761068c611029565b6024359073ffffffffffffffffffffffffffffffffffffffff90817f000000000000000000000000000000000000000000000000000000000000000016803b156102a857604080517f23b872dd0000000000000000000000000000000000000000000000000000000081523387820190815230602082015291820186905290879082908190606001038183865af1908115610464578791610809575b5050803b156102a8578580916024604051809481937f2e1a7d4d000000000000000000000000000000000000000000000000000000008352898b8401525af19081156107fe5786916107e6575b5050817f00000000000000000000000000000000000000000000000000000000000000001690813b156102a857859360249260405196879586947f4faa8a26000000000000000000000000000000000000000000000000000
00000865216908401525af1801561029d5761028a5750f35b6107ef9061104c565b6107fa57845f610775565b8480fd5b6040513d88823e3d90fd5b6108129061104c565b6102a857855f610728565b50fd5b503461029a57807ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261029a57602060405173ffffffffffffffffffffffffffffffffffffffff7f0000000000000000000000000000000000000000000000000000000000000000168152f35b503461029a57807ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261029a57602060405173ffffffffffffffffffffffffffffffffffffffff7f0000000000000000000000000000000000000000000000000000000000000000168152f35b509060407ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc360112610b3057610932611029565b826024359173ffffffffffffffffffffffffffffffffffffffff90817f000000000000000000000000000000000000000000000000000000000000000016803b1561049257604080517f23b872dd0000000000000000000000000000000000000000000000000000000081523388820190815230602082015291820187905290859082908190606001038183865af1908115610b25578591610b11575b5050803b15610492578380916024604051809481937f2e1a7d4d0000000000000000000000000000000000000000000000000000000083528a8c8401525af1908115610b06578491610af2575b5050817f00000000000000000000000000000000000000000000000000000000000000001691610a4c348661108d565b833b156107fa57849260849160405195869485937f9f3ce55a00000000000000000000000000000000000000000000000000000000855216809a840152346024840152606060448401528560648401525af1801561029d57610ada575b50506040519081527f61ed67a945fe5f4d777919629ad666c7e81d66dc5fbaf4c143edd000c15d67dd60203392a380f35b610ae39061104c565b610aee57825f610aa9565b8280fd5b610afb9061104c565b610aee57825f610a1c565b6040513d86823e3d90fd5b610b1a9061104c565b61049257835f6109cf565b6040513d87823e3d90fd5b5080fd5b5034610f515760a07ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc360112610f5157610b6c611029565b602490604435906064359460843573ffffffffffffffffffffffffffffffffffffffff93848216809203610f51576040517fb473318e000000000000000000000000000000000000000
0000000000000000081523a848201528187820152886044820152602081606481897f0000000000000000000000000000000000000000000000000000000000000000165afa8015610f46575f90610f55575b610c15915087359061108d565b91857f000000000000000000000000000000000000000000000000000000000000000016803b15610f5157604080517f23b872dd00000000000000000000000000000000000000000000000000000000815233878201908152306020820152918201869052905f9082908190606001038183865af18015610f4657610f33575b50803b15610f2f5788809189604051809481937f2e1a7d4d000000000000000000000000000000000000000000000000000000008352898b8401525af18015610f2457908991610f10575b505060405191602083019280841067ffffffffffffffff851117610ee557899a8460409b98999a9b52878252897f0000000000000000000000000000000000000000000000000000000000000000163b15610ee15760409a989695949a99979951957feb6724190000000000000000000000000000000000000000000000000000000087528888169087015288358987015260e060448701528960e48701526101048601926064870152608486015261010060a48601525180915261012490818501918160051b860101999189905b828210610e43575050505082809281808b8b979560c4899701520391887f0000000000000000000000000000000000000000000000000000000000000000165af1801561029d57610e2f575b50506040519235835216907fa3e601130860a6f97b42655ad74f631ddf0c8e5adaa98402fded9c09bc35a44060203392a380f35b610e389061104c565b610492578385610dfb565b7ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffedc878d969596030181528b855180518092528c5b828110610ecb575050808d0160209081018d9052601f919091017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe016909c018c019b93948401936001929092019101610daf565b808f602082818095870101519201015201610e77565b8780fd5b886041877f4e487b71000000000000000000000000000000000000000000000000000000005f52525ffd5b610f199061104c565b610ee157875f610ce0565b6040513d8b823e3d90fd5b8880fd5b610f3e91995061104c565b5f975f610c95565b6040513d5f823e3d90fd5b5f80fd5b5060203d602011610fb6575b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0601f820116820182811067ffffffffffffffff821117610ee557602
091839160405281010312610f5157610c159051610c08565b503d610f61565b34610f51575f7ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc360112610f515760209073ffffffffffffffffffffffffffffffffffffffff7f0000000000000000000000000000000000000000000000000000000000000000168152f35b6004359073ffffffffffffffffffffffffffffffffffffffff82168203610f5157565b67ffffffffffffffff811161106057604052565b7f4e487b71000000000000000000000000000000000000000000000000000000005f52604160045260245ffd5b9190820180921161109a57565b7f4e487b71000000000000000000000000000000000000000000000000000000005f52601160045260245ffdfea2646970667358221220b493edd1358f39227ec9f65101f64f280c9134d4324520aa2669fc1fade687f764736f6c63430008170033", - "devdoc": { - "kind": "dev", - "methods": {}, - "version": 1 - }, - "userdoc": { - "kind": "user", - "methods": {}, - "notice": "Contract deployed on Ethereum helps relay bots atomically unwrap and bridge WETH over the canonical chain bridges for Optimism, Base, Boba, ZkSync, Linea, and Polygon. 
Needed as these chains only support bridging of ETH, not WETH.", - "version": 1 - }, - "storageLayout": { - "storage": [], - "types": null - } + "numDeployments": 4, + "solcInputHash": "5700d0188c8ea770a99799abe93c0e83", + "metadata": "{\"compiler\":{\"version\":\"0.8.23+commit.f704f362\"},\"language\":\"Solidity\",\"output\":{\"abi\":[{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"address\",\"name\":\"from\",\"type\":\"address\"},{\"indexed\":true,\"internalType\":\"address\",\"name\":\"to\",\"type\":\"address\"},{\"indexed\":false,\"internalType\":\"uint256\",\"name\":\"amount\",\"type\":\"uint256\"}],\"name\":\"LineaEthDepositInitiated\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"uint256\",\"name\":\"chainId\",\"type\":\"uint256\"},{\"indexed\":true,\"internalType\":\"address\",\"name\":\"from\",\"type\":\"address\"},{\"indexed\":true,\"internalType\":\"address\",\"name\":\"to\",\"type\":\"address\"},{\"indexed\":false,\"internalType\":\"uint256\",\"name\":\"amount\",\"type\":\"uint256\"}],\"name\":\"OvmEthDepositInitiated\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"address\",\"name\":\"from\",\"type\":\"address\"},{\"indexed\":true,\"internalType\":\"address\",\"name\":\"to\",\"type\":\"address\"},{\"indexed\":false,\"internalType\":\"uint256\",\"name\":\"amount\",\"type\":\"uint256\"}],\"name\":\"ZkSyncEthDepositInitiated\",\"type\":\"event\"},{\"stateMutability\":\"payable\",\"type\":\"fallback\"},{\"inputs\":[],\"name\":\"baseL1Bridge\",\"outputs\":[{\"internalType\":\"contract OvmL1Bridge\",\"name\":\"\",\"type\":\"address\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"bobaL1Bridge\",\"outputs\":[{\"internalType\":\"contract 
OvmL1Bridge\",\"name\":\"\",\"type\":\"address\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"to\",\"type\":\"address\"},{\"internalType\":\"uint256\",\"name\":\"amount\",\"type\":\"uint256\"}],\"name\":\"bridgeWethToLinea\",\"outputs\":[],\"stateMutability\":\"payable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"to\",\"type\":\"address\"},{\"internalType\":\"uint256\",\"name\":\"amount\",\"type\":\"uint256\"},{\"internalType\":\"uint32\",\"name\":\"l2Gas\",\"type\":\"uint32\"},{\"internalType\":\"uint256\",\"name\":\"chainId\",\"type\":\"uint256\"}],\"name\":\"bridgeWethToOvm\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"to\",\"type\":\"address\"},{\"internalType\":\"uint256\",\"name\":\"amount\",\"type\":\"uint256\"}],\"name\":\"bridgeWethToPolygon\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"address\",\"name\":\"to\",\"type\":\"address\"},{\"internalType\":\"uint256\",\"name\":\"amount\",\"type\":\"uint256\"},{\"internalType\":\"uint256\",\"name\":\"l2GasLimit\",\"type\":\"uint256\"},{\"internalType\":\"uint256\",\"name\":\"l2GasPerPubdataByteLimit\",\"type\":\"uint256\"},{\"internalType\":\"address\",\"name\":\"refundRecipient\",\"type\":\"address\"}],\"name\":\"bridgeWethToZkSync\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"lineaL1MessageService\",\"outputs\":[{\"internalType\":\"contract LineaL1MessageService\",\"name\":\"\",\"type\":\"address\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"modeL1Bridge\",\"outputs\":[{\"internalType\":\"contract OvmL1Bridge\",\"name\":\"\",\"type\":\"address\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"optimismL1Bridge\",\"outputs\":[{\"internalType\":\"contract 
OvmL1Bridge\",\"name\":\"\",\"type\":\"address\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"polygonL1Bridge\",\"outputs\":[{\"internalType\":\"contract PolygonL1Bridge\",\"name\":\"\",\"type\":\"address\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"weth\",\"outputs\":[{\"internalType\":\"contract Weth\",\"name\":\"\",\"type\":\"address\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[],\"name\":\"zkSyncL1Bridge\",\"outputs\":[{\"internalType\":\"contract ZkSyncL1Bridge\",\"name\":\"\",\"type\":\"address\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"stateMutability\":\"payable\",\"type\":\"receive\"}],\"devdoc\":{\"kind\":\"dev\",\"methods\":{},\"version\":1},\"userdoc\":{\"kind\":\"user\",\"methods\":{},\"notice\":\"Contract deployed on Ethereum helps relay bots atomically unwrap and bridge WETH over the canonical chain bridges for Optimism, Base, Boba, ZkSync, Linea, and Polygon. Needed as these chains only support bridging of ETH, not WETH.\",\"version\":1}},\"settings\":{\"compilationTarget\":{\"contracts/AtomicWethDepositor.sol\":\"AtomicWethDepositor\"},\"evmVersion\":\"shanghai\",\"libraries\":{},\"metadata\":{\"bytecodeHash\":\"ipfs\"},\"optimizer\":{\"enabled\":true,\"runs\":1000000},\"remappings\":[],\"viaIR\":true},\"sources\":{\"contracts/AtomicWethDepositor.sol\":{\"keccak256\":\"0xb8f4ed8b52b0c2abdce9d4ee43cc3747f285c2573724abf3c81d427d025c008b\",\"license\":\"GPL-3.0-only\",\"urls\":[\"bzz-raw://a78a51bc88428b506928a4439010f5e6f79a6b55a33ab2ab2e94d6d7ab0e1313\",\"dweb:/ipfs/QmU4SASEHcmSQDvRRDnqxA8xw3T3S2mxJ4sQvm6EExdxoj\"]}},\"version\":1}", + "bytecode": 
"0x610180806040523461017c5773c02aaa39b223fe8d0a0e5c4f27ead9083c756cc26080527399c9fc46f92e8a1c0dec1b1747d010903e884be160a05273735adbbe72226bd52e818e7181953f42e3b0ff2160c05273dc1664458d2f0b6090bea60a8793a4e66c2f1c0060e05261010090733154cf16ccdb4c6d922629664174b904d80f2c35825261012073a0c68c638235ee32657e8f720a23cec1bfc77c7781526101407332400084c286cf3e17e7b677ea9583e60a00032481526101609173d19d4b5d358258f05d7b411e21a1460d11b0876f835261128494856101818639608051858181610114015281816107c401528181610a7f01528181610af40152610da0015260a0518581816101ed01526105fa015260c0518581816103c901526109a1015260e051858181610473015261118c0152518481816102fb015261074a01525183818161066c0152610895015251828181610a1001528181610d6001528181610e960152610f550152518181816106db0152610bc50152f35b5f80fdfe60808060405260048036101561001c575b5050361561001a57005b005b5f915f3560e01c908163019f8e811461114457508063128d5f6814610cba57806336918a9714610aa35780633fc8cef314610a345780635970eafa146109c5578063645b6f111461095657828163b3d5ccc31461076e57508063b745c3f3146106ff578063c04b953414610690578063c80dcc3814610621578063d3cdc8f9146105af5763e88650c40361001057346105ab5760807ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc3601126105ab576100e16111b0565b604435916024359163ffffffff841684036102dc57846064359473ffffffffffffffffffffffffffffffffffffffff93847f000000000000000000000000000000000000000000000000000000000000000016803b156102eb57604080517f23b872dd0000000000000000000000000000000000000000000000000000000081523384820190815230602082015291820189905290859082908190606001038183865af19081156105a057859161058c575b5050803b156102eb578380916024604051809481937f2e1a7d4d0000000000000000000000000000000000000000000000000000000083528c888401525af1908115610581578491610569575b5050600a87036102ef57847f00000000000000000000000000000000000000000000000000000000000000001691823b156102eb576102859285888694604051968795869485937f9a2ac6d500000000000000000000000000000000000000000000000000000000855284019060809273fffffffffffffffffffffffffffffffffffffff
f63ffffffff92168352166020820152606060408201525f60608201520190565b03925af180156102e0576102c8575b50505b60405192835216917fcde53d24289bf7d0b2baeea6140c533d8388fb574b055364d718f637bedea7a460203393a480f35b6102d1906111d3565b6102dc57845f610294565b8480fd5b6040513d84823e3d90fd5b8380fd5b61210587036103bd57847f00000000000000000000000000000000000000000000000000000000000000001691823b156102eb576103939285888694604051968795869485937f9a2ac6d500000000000000000000000000000000000000000000000000000000855284019060809273ffffffffffffffffffffffffffffffffffffffff63ffffffff92168352166020820152606060408201525f60608201520190565b03925af180156102e0576103a9575b5050610297565b6103b2906111d3565b6102dc57845f6103a2565b61868b870361046157847f00000000000000000000000000000000000000000000000000000000000000001691823b156102eb576103939285888694604051968795869485937f9a2ac6d500000000000000000000000000000000000000000000000000000000855284019060809273ffffffffffffffffffffffffffffffffffffffff63ffffffff92168352166020820152606060408201525f60608201520190565b909150610120860361050b57908691847f00000000000000000000000000000000000000000000000000000000000000001691823b156102eb576103939285888694604051968795869485937f9a2ac6d500000000000000000000000000000000000000000000000000000000855284019060809273ffffffffffffffffffffffffffffffffffffffff63ffffffff92168352166020820152606060408201525f60608201520190565b6064906020604051917f08c379a0000000000000000000000000000000000000000000000000000000008352820152601360248201527f496e76616c6964204f564d20636861696e4964000000000000000000000000006044820152fd5b610572906111d3565b61057d57825f6101e0565b8280fd5b6040513d86823e3d90fd5b610595906111d3565b6102eb57835f610193565b6040513d87823e3d90fd5b5080fd5b823461061e57807ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261061e57602060405173ffffffffffffffffffffffffffffffffffffffff7f0000000000000000000000000000000000000000000000000000000000000000168152f35b80fd5b823461061e57807ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc3601126
1061e57602060405173ffffffffffffffffffffffffffffffffffffffff7f0000000000000000000000000000000000000000000000000000000000000000168152f35b823461061e57807ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261061e57602060405173ffffffffffffffffffffffffffffffffffffffff7f0000000000000000000000000000000000000000000000000000000000000000168152f35b823461061e57807ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261061e57602060405173ffffffffffffffffffffffffffffffffffffffff7f0000000000000000000000000000000000000000000000000000000000000000168152f35b8083346109535760407ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc360112610953576107a76111b0565b6024359073ffffffffffffffffffffffffffffffffffffffff90817f000000000000000000000000000000000000000000000000000000000000000016803b1561091157604080517f23b872dd0000000000000000000000000000000000000000000000000000000081523387820190815230602082015291820186905290879082908190606001038183865af1908115610948578791610934575b5050803b15610911578580916024604051809481937f2e1a7d4d000000000000000000000000000000000000000000000000000000008352898b8401525af1908115610929578691610915575b5050817f00000000000000000000000000000000000000000000000000000000000000001690813b1561091157859360249260405196879586947f4faa8a2600000000000000000000000000000000000000000000000000000000865216908401525af180156102e0576109015750f35b61090a906111d3565b61061e5780f35b8580fd5b61091e906111d3565b6102dc578487610890565b6040513d88823e3d90fd5b61093d906111d3565b610911578588610843565b6040513d89823e3d90fd5b50fd5b823461061e57807ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261061e57602060405173ffffffffffffffffffffffffffffffffffffffff7f0000000000000000000000000000000000000000000000000000000000000000168152f35b823461061e57807ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261061e57602060405173ffffffffffffffffffffffffffffffffffffffff7f0000000000000000000000000000000000000000000000000000000000000000168152f35b8
23461061e57807ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261061e57602060405173ffffffffffffffffffffffffffffffffffffffff7f0000000000000000000000000000000000000000000000000000000000000000168152f35b5060407ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc3601126105ab57610ad66111b0565b826024359173ffffffffffffffffffffffffffffffffffffffff90817f000000000000000000000000000000000000000000000000000000000000000016803b156102eb57604080517f23b872dd0000000000000000000000000000000000000000000000000000000081523388820190815230602082015291820187905290859082908190606001038183865af19081156105a0578591610ca6575b5050803b156102eb578380916024604051809481937f2e1a7d4d0000000000000000000000000000000000000000000000000000000083528a8c8401525af1908115610581578491610c92575b5050817f00000000000000000000000000000000000000000000000000000000000000001691610bf03486611214565b833b156102dc57849260849160405195869485937f9f3ce55a00000000000000000000000000000000000000000000000000000000855216809a840152346024840152606060448401528560648401525af180156102e057610c7e575b50506040519081527f61ed67a945fe5f4d777919629ad666c7e81d66dc5fbaf4c143edd000c15d67dd60203392a380f35b610c87906111d3565b61057d57825f610c4d565b610c9b906111d3565b61057d57825f610bc0565b610caf906111d3565b6102eb57835f610b73565b5090346110d85760a07ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc3601126110d857610cf36111b0565b602490604435906064359460843573ffffffffffffffffffffffffffffffffffffffff938482168092036110d8576040517fb473318e0000000000000000000000000000000000000000000000000000000081523a848201528187820152886044820152602081606481897f0000000000000000000000000000000000000000000000000000000000000000165afa80156110cd575f906110dc575b610d9c9150873590611214565b91857f000000000000000000000000000000000000000000000000000000000000000016803b156110d857604080517f23b872dd00000000000000000000000000000000000000000000000000000000815233878201908152306020820152918201869052905f9082908190606001038183865af180156110cd576110ba575b5
0803b156110b65788809189604051809481937f2e1a7d4d000000000000000000000000000000000000000000000000000000008352898b8401525af180156110ab57908991611097575b505060405191602083019280841067ffffffffffffffff85111761106c57899a8460409b98999a9b52878252897f0000000000000000000000000000000000000000000000000000000000000000163b156110685760409a989695949a99979951957feb6724190000000000000000000000000000000000000000000000000000000087528888169087015288358987015260e060448701528960e48701526101048601926064870152608486015261010060a48601525180915261012490818501918160051b860101999189905b828210610fca575050505082809281808b8b979560c4899701520391887f0000000000000000000000000000000000000000000000000000000000000000165af180156102e057610fb6575b50506040519235835216907fa3e601130860a6f97b42655ad74f631ddf0c8e5adaa98402fded9c09bc35a44060203392a380f35b610fbf906111d3565b6102eb578385610f82565b7ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffedc878d969596030181528b855180518092528c5b828110611052575050808d0160209081018d9052601f919091017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe016909c018c019b93948401936001929092019101610f36565b808f602082818095870101519201015201610ffe565b8780fd5b886041877f4e487b71000000000000000000000000000000000000000000000000000000005f52525ffd5b6110a0906111d3565b61106857875f610e67565b6040513d8b823e3d90fd5b8880fd5b6110c59199506111d3565b5f975f610e1c565b6040513d5f823e3d90fd5b5f80fd5b5060203d60201161113d575b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0601f820116820182811067ffffffffffffffff82111761106c576020918391604052810103126110d857610d9c9051610d8f565b503d6110e8565b346110d8575f7ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc3601126110d85760209073ffffffffffffffffffffffffffffffffffffffff7f0000000000000000000000000000000000000000000000000000000000000000168152f35b6004359073ffffffffffffffffffffffffffffffffffffffff821682036110d857565b67ffffffffffffffff81116111e757604052565b7f4e487b710000000000000000000000000000000000000000000000000
00000005f52604160045260245ffd5b9190820180921161122157565b7f4e487b71000000000000000000000000000000000000000000000000000000005f52601160045260245ffdfea26469706673582212203eefe636a36d9a9139a964c37df0cfd56a9c05a343902f62aff9ebff3c3a97f664736f6c63430008170033", + "deployedBytecode": "0x60808060405260048036101561001c575b5050361561001a57005b005b5f915f3560e01c908163019f8e811461114457508063128d5f6814610cba57806336918a9714610aa35780633fc8cef314610a345780635970eafa146109c5578063645b6f111461095657828163b3d5ccc31461076e57508063b745c3f3146106ff578063c04b953414610690578063c80dcc3814610621578063d3cdc8f9146105af5763e88650c40361001057346105ab5760807ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc3601126105ab576100e16111b0565b604435916024359163ffffffff841684036102dc57846064359473ffffffffffffffffffffffffffffffffffffffff93847f000000000000000000000000000000000000000000000000000000000000000016803b156102eb57604080517f23b872dd0000000000000000000000000000000000000000000000000000000081523384820190815230602082015291820189905290859082908190606001038183865af19081156105a057859161058c575b5050803b156102eb578380916024604051809481937f2e1a7d4d0000000000000000000000000000000000000000000000000000000083528c888401525af1908115610581578491610569575b5050600a87036102ef57847f00000000000000000000000000000000000000000000000000000000000000001691823b156102eb576102859285888694604051968795869485937f9a2ac6d500000000000000000000000000000000000000000000000000000000855284019060809273ffffffffffffffffffffffffffffffffffffffff63ffffffff92168352166020820152606060408201525f60608201520190565b03925af180156102e0576102c8575b50505b60405192835216917fcde53d24289bf7d0b2baeea6140c533d8388fb574b055364d718f637bedea7a460203393a480f35b6102d1906111d3565b6102dc57845f610294565b8480fd5b6040513d84823e3d90fd5b8380fd5b61210587036103bd57847f00000000000000000000000000000000000000000000000000000000000000001691823b156102eb576103939285888694604051968795869485937f9a2ac6d500000000000000000000000000000000000000000000000000000000855284019
060809273ffffffffffffffffffffffffffffffffffffffff63ffffffff92168352166020820152606060408201525f60608201520190565b03925af180156102e0576103a9575b5050610297565b6103b2906111d3565b6102dc57845f6103a2565b61868b870361046157847f00000000000000000000000000000000000000000000000000000000000000001691823b156102eb576103939285888694604051968795869485937f9a2ac6d500000000000000000000000000000000000000000000000000000000855284019060809273ffffffffffffffffffffffffffffffffffffffff63ffffffff92168352166020820152606060408201525f60608201520190565b909150610120860361050b57908691847f00000000000000000000000000000000000000000000000000000000000000001691823b156102eb576103939285888694604051968795869485937f9a2ac6d500000000000000000000000000000000000000000000000000000000855284019060809273ffffffffffffffffffffffffffffffffffffffff63ffffffff92168352166020820152606060408201525f60608201520190565b6064906020604051917f08c379a0000000000000000000000000000000000000000000000000000000008352820152601360248201527f496e76616c6964204f564d20636861696e4964000000000000000000000000006044820152fd5b610572906111d3565b61057d57825f6101e0565b8280fd5b6040513d86823e3d90fd5b610595906111d3565b6102eb57835f610193565b6040513d87823e3d90fd5b5080fd5b823461061e57807ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261061e57602060405173ffffffffffffffffffffffffffffffffffffffff7f0000000000000000000000000000000000000000000000000000000000000000168152f35b80fd5b823461061e57807ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261061e57602060405173ffffffffffffffffffffffffffffffffffffffff7f0000000000000000000000000000000000000000000000000000000000000000168152f35b823461061e57807ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261061e57602060405173ffffffffffffffffffffffffffffffffffffffff7f0000000000000000000000000000000000000000000000000000000000000000168152f35b823461061e57807ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261061e57602060405173fffffffffffffffffffffffffffff
fffffffffff7f0000000000000000000000000000000000000000000000000000000000000000168152f35b8083346109535760407ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc360112610953576107a76111b0565b6024359073ffffffffffffffffffffffffffffffffffffffff90817f000000000000000000000000000000000000000000000000000000000000000016803b1561091157604080517f23b872dd0000000000000000000000000000000000000000000000000000000081523387820190815230602082015291820186905290879082908190606001038183865af1908115610948578791610934575b5050803b15610911578580916024604051809481937f2e1a7d4d000000000000000000000000000000000000000000000000000000008352898b8401525af1908115610929578691610915575b5050817f00000000000000000000000000000000000000000000000000000000000000001690813b1561091157859360249260405196879586947f4faa8a2600000000000000000000000000000000000000000000000000000000865216908401525af180156102e0576109015750f35b61090a906111d3565b61061e5780f35b8580fd5b61091e906111d3565b6102dc578487610890565b6040513d88823e3d90fd5b61093d906111d3565b610911578588610843565b6040513d89823e3d90fd5b50fd5b823461061e57807ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261061e57602060405173ffffffffffffffffffffffffffffffffffffffff7f0000000000000000000000000000000000000000000000000000000000000000168152f35b823461061e57807ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261061e57602060405173ffffffffffffffffffffffffffffffffffffffff7f0000000000000000000000000000000000000000000000000000000000000000168152f35b823461061e57807ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc36011261061e57602060405173ffffffffffffffffffffffffffffffffffffffff7f0000000000000000000000000000000000000000000000000000000000000000168152f35b5060407ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc3601126105ab57610ad66111b0565b826024359173ffffffffffffffffffffffffffffffffffffffff90817f000000000000000000000000000000000000000000000000000000000000000016803b156102eb57604080517f23b872dd0000000000000
000000000000000000000000000000000000000000081523388820190815230602082015291820187905290859082908190606001038183865af19081156105a0578591610ca6575b5050803b156102eb578380916024604051809481937f2e1a7d4d0000000000000000000000000000000000000000000000000000000083528a8c8401525af1908115610581578491610c92575b5050817f00000000000000000000000000000000000000000000000000000000000000001691610bf03486611214565b833b156102dc57849260849160405195869485937f9f3ce55a00000000000000000000000000000000000000000000000000000000855216809a840152346024840152606060448401528560648401525af180156102e057610c7e575b50506040519081527f61ed67a945fe5f4d777919629ad666c7e81d66dc5fbaf4c143edd000c15d67dd60203392a380f35b610c87906111d3565b61057d57825f610c4d565b610c9b906111d3565b61057d57825f610bc0565b610caf906111d3565b6102eb57835f610b73565b5090346110d85760a07ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc3601126110d857610cf36111b0565b602490604435906064359460843573ffffffffffffffffffffffffffffffffffffffff938482168092036110d8576040517fb473318e0000000000000000000000000000000000000000000000000000000081523a848201528187820152886044820152602081606481897f0000000000000000000000000000000000000000000000000000000000000000165afa80156110cd575f906110dc575b610d9c9150873590611214565b91857f000000000000000000000000000000000000000000000000000000000000000016803b156110d857604080517f23b872dd00000000000000000000000000000000000000000000000000000000815233878201908152306020820152918201869052905f9082908190606001038183865af180156110cd576110ba575b50803b156110b65788809189604051809481937f2e1a7d4d000000000000000000000000000000000000000000000000000000008352898b8401525af180156110ab57908991611097575b505060405191602083019280841067ffffffffffffffff85111761106c57899a8460409b98999a9b52878252897f0000000000000000000000000000000000000000000000000000000000000000163b156110685760409a989695949a99979951957feb6724190000000000000000000000000000000000000000000000000000000087528888169087015288358987015260e060448701528960e48701526101048601926064870152608
486015261010060a48601525180915261012490818501918160051b860101999189905b828210610fca575050505082809281808b8b979560c4899701520391887f0000000000000000000000000000000000000000000000000000000000000000165af180156102e057610fb6575b50506040519235835216907fa3e601130860a6f97b42655ad74f631ddf0c8e5adaa98402fded9c09bc35a44060203392a380f35b610fbf906111d3565b6102eb578385610f82565b7ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffedc878d969596030181528b855180518092528c5b828110611052575050808d0160209081018d9052601f919091017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe016909c018c019b93948401936001929092019101610f36565b808f602082818095870101519201015201610ffe565b8780fd5b886041877f4e487b71000000000000000000000000000000000000000000000000000000005f52525ffd5b6110a0906111d3565b61106857875f610e67565b6040513d8b823e3d90fd5b8880fd5b6110c59199506111d3565b5f975f610e1c565b6040513d5f823e3d90fd5b5f80fd5b5060203d60201161113d575b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0601f820116820182811067ffffffffffffffff82111761106c576020918391604052810103126110d857610d9c9051610d8f565b503d6110e8565b346110d8575f7ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc3601126110d85760209073ffffffffffffffffffffffffffffffffffffffff7f0000000000000000000000000000000000000000000000000000000000000000168152f35b6004359073ffffffffffffffffffffffffffffffffffffffff821682036110d857565b67ffffffffffffffff81116111e757604052565b7f4e487b71000000000000000000000000000000000000000000000000000000005f52604160045260245ffd5b9190820180921161122157565b7f4e487b71000000000000000000000000000000000000000000000000000000005f52601160045260245ffdfea26469706673582212203eefe636a36d9a9139a964c37df0cfd56a9c05a343902f62aff9ebff3c3a97f664736f6c63430008170033" } diff --git a/deployments/mainnet/solcInputs/5700d0188c8ea770a99799abe93c0e83.json b/deployments/mainnet/solcInputs/5700d0188c8ea770a99799abe93c0e83.json new file mode 100644 index 000000000..d802a7551 --- /dev/null +++ 
b/deployments/mainnet/solcInputs/5700d0188c8ea770a99799abe93c0e83.json @@ -0,0 +1,21 @@ +{ + "language": "Solidity", + "sources": { + "contracts/AtomicWethDepositor.sol": { + "content": "// SPDX-License-Identifier: GPL-3.0-only\npragma solidity ^0.8.0;\n\ninterface Weth {\n function withdraw(uint256 _wad) external;\n\n function transferFrom(address _from, address _to, uint256 _wad) external;\n}\n\ninterface OvmL1Bridge {\n function depositETHTo(address _to, uint32 _l2Gas, bytes calldata _data) external payable;\n}\n\ninterface PolygonL1Bridge {\n function depositEtherFor(address _to) external payable;\n}\n\ninterface ZkSyncL1Bridge {\n function requestL2Transaction(\n address _contractL2,\n uint256 _l2Value,\n bytes calldata _calldata,\n uint256 _l2GasLimit,\n uint256 _l2GasPerPubdataByteLimit,\n bytes[] calldata _factoryDeps,\n address _refundRecipient\n ) external payable;\n\n function l2TransactionBaseCost(\n uint256 _gasPrice,\n uint256 _l2GasLimit,\n uint256 _l2GasPerPubdataByteLimit\n ) external pure returns (uint256);\n}\n\ninterface LineaL1MessageService {\n function sendMessage(address _to, uint256 _fee, bytes calldata _calldata) external payable;\n}\n\n/**\n * @notice Contract deployed on Ethereum helps relay bots atomically unwrap and bridge WETH over the canonical chain\n * bridges for Optimism, Base, Boba, ZkSync, Linea, and Polygon. 
Needed as these chains only support bridging of ETH,\n * not WETH.\n */\n\ncontract AtomicWethDepositor {\n Weth public immutable weth = Weth(0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2);\n OvmL1Bridge public immutable optimismL1Bridge = OvmL1Bridge(0x99C9fc46f92E8a1c0deC1b1747d010903E884bE1);\n OvmL1Bridge public immutable modeL1Bridge = OvmL1Bridge(0x735aDBbE72226BD52e818E7181953f42E3b0FF21);\n OvmL1Bridge public immutable bobaL1Bridge = OvmL1Bridge(0xdc1664458d2f0B6090bEa60A8793A4E66c2F1c00);\n OvmL1Bridge public immutable baseL1Bridge = OvmL1Bridge(0x3154Cf16ccdb4C6d922629664174b904d80F2C35);\n PolygonL1Bridge public immutable polygonL1Bridge = PolygonL1Bridge(0xA0c68C638235ee32657e8f720a23ceC1bFc77C77);\n ZkSyncL1Bridge public immutable zkSyncL1Bridge = ZkSyncL1Bridge(0x32400084C286CF3E17e7B677ea9583e60a000324);\n LineaL1MessageService public immutable lineaL1MessageService =\n LineaL1MessageService(0xd19d4B5d358258f05D7B411E21A1460D11B0876F);\n\n event ZkSyncEthDepositInitiated(address indexed from, address indexed to, uint256 amount);\n event LineaEthDepositInitiated(address indexed from, address indexed to, uint256 amount);\n event OvmEthDepositInitiated(uint256 indexed chainId, address indexed from, address indexed to, uint256 amount);\n\n function bridgeWethToOvm(address to, uint256 amount, uint32 l2Gas, uint256 chainId) public {\n weth.transferFrom(msg.sender, address(this), amount);\n weth.withdraw(amount);\n\n if (chainId == 10) {\n optimismL1Bridge.depositETHTo{ value: amount }(to, l2Gas, \"\");\n } else if (chainId == 8453) {\n baseL1Bridge.depositETHTo{ value: amount }(to, l2Gas, \"\");\n } else if (chainId == 34443) {\n modeL1Bridge.depositETHTo{ value: amount }(to, l2Gas, \"\");\n } else if (chainId == 288) {\n bobaL1Bridge.depositETHTo{ value: amount }(to, l2Gas, \"\");\n } else {\n revert(\"Invalid OVM chainId\");\n }\n\n emit OvmEthDepositInitiated(chainId, msg.sender, to, amount);\n }\n\n function bridgeWethToPolygon(address to, uint256 amount) 
public {\n weth.transferFrom(msg.sender, address(this), amount);\n weth.withdraw(amount);\n polygonL1Bridge.depositEtherFor{ value: amount }(to);\n }\n\n function bridgeWethToLinea(address to, uint256 amount) public payable {\n weth.transferFrom(msg.sender, address(this), amount);\n weth.withdraw(amount);\n lineaL1MessageService.sendMessage{ value: amount + msg.value }(to, msg.value, \"\");\n // Emit an event that we can easily track in the Linea-related adapters/finalizers\n emit LineaEthDepositInitiated(msg.sender, to, amount);\n }\n\n function bridgeWethToZkSync(\n address to,\n uint256 amount,\n uint256 l2GasLimit,\n uint256 l2GasPerPubdataByteLimit,\n address refundRecipient\n ) public {\n // The ZkSync Mailbox contract checks that the msg.value of the transaction is enough to cover the transaction base\n // cost. The transaction base cost can be queried from the Mailbox by passing in an L1 \"executed\" gas price,\n // which is the priority fee plus base fee. This is the same as calling tx.gasprice on-chain as the Mailbox\n // contract does here:\n // https://github.com/matter-labs/era-contracts/blob/3a4506522aaef81485d8abb96f5a6394bd2ba69e/ethereum/contracts/zksync/facets/Mailbox.sol#L287\n uint256 l2TransactionBaseCost = zkSyncL1Bridge.l2TransactionBaseCost(\n tx.gasprice,\n l2GasLimit,\n l2GasPerPubdataByteLimit\n );\n uint256 valueToSubmitXChainMessage = l2TransactionBaseCost + amount;\n weth.transferFrom(msg.sender, address(this), valueToSubmitXChainMessage);\n weth.withdraw(valueToSubmitXChainMessage);\n zkSyncL1Bridge.requestL2Transaction{ value: valueToSubmitXChainMessage }(\n to,\n amount,\n \"\",\n l2GasLimit,\n l2GasPerPubdataByteLimit,\n new bytes[](0),\n refundRecipient\n );\n\n // Emit an event that we can easily track in the ZkSyncAdapter because otherwise there is no easy event to\n // track ETH deposit initiations.\n emit ZkSyncEthDepositInitiated(msg.sender, to, amount);\n }\n\n fallback() external payable {}\n\n // Included to remove a 
compilation warning.\n // NOTE: this should not affect behavior.\n receive() external payable {}\n}\n" + } + }, + "settings": { + "optimizer": { + "enabled": true, + "runs": 1000000 + }, + "viaIR": true, + "outputSelection": { + "*": { + "*": ["abi", "evm.bytecode", "evm.deployedBytecode", "evm.methodIdentifiers", "metadata"], + "": ["ast"] + } + } + } +} diff --git a/funding.json b/funding.json new file mode 100644 index 000000000..f86a1ffa6 --- /dev/null +++ b/funding.json @@ -0,0 +1,5 @@ +{ + "opRetro": { + "projectId": "0x72723e07fe409557489a6643b43d9493a94c10ba68230b0527f01834cb6a550f" + } +} diff --git a/hardhat.config.ts b/hardhat.config.ts index 071782b60..6014d8eea 100644 --- a/hardhat.config.ts +++ b/hardhat.config.ts @@ -23,9 +23,20 @@ dotenv.config(); const solcVersion = "0.8.23"; const mnemonic = getMnemonic(); +const LARGE_CONTRACT_COMPILER_SETTINGS = { + version: solcVersion, + settings: { + optimizer: { enabled: true, runs: 1000000 }, + viaIR: true, + }, +}; + const config: HardhatUserConfig = { solidity: { compilers: [{ version: solcVersion, settings: { optimizer: { enabled: true, runs: 1 }, viaIR: true } }], + overrides: { + "contracts/AtomicWethDepositor.sol": LARGE_CONTRACT_COMPILER_SETTINGS, + }, }, networks: { hardhat: { accounts: { accountsBalance: "1000000000000000000000000" } }, diff --git a/index.ts b/index.ts index 0473315c5..1fa60dae5 100644 --- a/index.ts +++ b/index.ts @@ -1,12 +1,12 @@ import minimist from "minimist"; -import { config, delay, exit, retrieveSignerFromCLIArgs, help, Logger, usage, winston } from "./src/utils"; +import { config, delay, exit, retrieveSignerFromCLIArgs, help, Logger, usage, waitForLogger } from "./src/utils"; import { runRelayer } from "./src/relayer"; import { runDataworker } from "./src/dataworker"; import { runMonitor } from "./src/monitor"; import { runFinalizer } from "./src/finalizer"; import { version } from "./package.json"; -let logger: winston.Logger; +let logger: typeof Logger; let cmd: string; 
export async function run(args: { [k: string]: boolean | string }): Promise { @@ -70,7 +70,10 @@ if (require.main === module) { args, notificationPath: "across-error", }); - await delay(5); // Wait for transports to flush. May or may not be necessary. }) - .finally(() => exit(exitCode)); + .finally(async () => { + await waitForLogger(logger); + await delay(5); // Wait 5s for logger to flush. + exit(exitCode); + }); } diff --git a/package.json b/package.json index 3f7dec37c..47b08d7be 100644 --- a/package.json +++ b/package.json @@ -1,8 +1,8 @@ { - "name": "relayer-v2", - "version": "0.2.0", - "description": "Across Protocol V3 Relayer Bot", - "repository": "git@github.com:across-protocol/relayer-v2.git", + "name": "@across-protocol/relayer", + "version": "3.0.0", + "description": "Across Protocol Relayer Bot", + "repository": "git@github.com:across-protocol/relayer.git", "author": "UMA Team", "license": "AGPL-3.0-only", "private": true, @@ -10,13 +10,13 @@ "node": ">=16.18.0" }, "dependencies": { - "@across-protocol/constants-v2": "1.0.19", - "@across-protocol/contracts-v2": "2.5.4", - "@across-protocol/sdk-v2": "0.23.8", + "@across-protocol/constants": "^3.0.0", + "@across-protocol/contracts": "^3.0.0", + "@across-protocol/sdk": "^3.0.0", "@arbitrum/sdk": "^3.1.3", "@consensys/linea-sdk": "^0.2.1", "@defi-wonderland/smock": "^2.3.5", - "@eth-optimism/sdk": "^3.2.2", + "@eth-optimism/sdk": "^3.3.1", "@ethersproject/abi": "^5.7.0", "@ethersproject/abstract-provider": "^5.7.0", "@ethersproject/abstract-signer": "^5.7.0", @@ -26,9 +26,9 @@ "@maticnetwork/maticjs-ethers": "^1.0.3", "@openzeppelin/hardhat-upgrades": "^1.28.0", "@uma/common": "2.33.0", - "@uma/financial-templates-lib": "^2.34.1", + "@uma/logger": "1.2.0", "async": "^3.2.4", - "axios": "^1.6.0", + "axios": "^1.6.1", "dotenv": "^16.3.1", "ethers": "^5.7.2", "hardhat": "^2.14.0", diff --git a/scripts/hubpool.ts b/scripts/hubpool.ts index f3b93e138..3c14fe311 100644 --- a/scripts/hubpool.ts +++ 
b/scripts/hubpool.ts @@ -1,6 +1,6 @@ import minimist from "minimist"; -import { WETH9__factory as WETH9 } from "@across-protocol/contracts-v2"; -import { constants as sdkConsts } from "@across-protocol/sdk-v2"; +import { WETH9__factory as WETH9 } from "@across-protocol/contracts"; +import { constants as sdkConsts } from "@across-protocol/sdk"; import { BigNumber, ethers, Signer } from "ethers"; import { config } from "dotenv"; import { getNetworkName, getSigner } from "../src/utils"; diff --git a/scripts/sendTokens.ts b/scripts/sendTokens.ts index 39220a237..a49446cd1 100644 --- a/scripts/sendTokens.ts +++ b/scripts/sendTokens.ts @@ -62,7 +62,15 @@ export async function run(): Promise { return; } console.log("sending..."); - const tx = await erc20.transfer(recipient, args.amount); + const tx = await erc20.transfer(recipient, args.amount, { + maxFeePerGas: 150000000000, + maxPriorityFeePerGas: 40000000000, + }); + console.log( + `submitted with max fee per gas ${tx.maxFeePerGas.toString()} and priority fee ${tx.maxPriorityFeePerGas.toString()} at nonce ${ + tx.nonce + }` + ); const receipt = await tx.wait(); console.log("Transaction hash:", receipt.transactionHash); } diff --git a/scripts/spokepool.ts b/scripts/spokepool.ts index 96d63016c..75d26ddea 100644 --- a/scripts/spokepool.ts +++ b/scripts/spokepool.ts @@ -4,8 +4,8 @@ import { groupBy } from "lodash"; import { config } from "dotenv"; import { Contract, ethers, Signer } from "ethers"; import { LogDescription } from "@ethersproject/abi"; -import { constants as sdkConsts, utils as sdkUtils } from "@across-protocol/sdk-v2"; -import { ExpandedERC20__factory as ERC20 } from "@across-protocol/contracts-v2"; +import { constants as sdkConsts, utils as sdkUtils } from "@across-protocol/sdk"; +import { ExpandedERC20__factory as ERC20 } from "@across-protocol/contracts"; import { BigNumber, formatFeePct, @@ -383,7 +383,7 @@ async function _fetchDeposit(spokePool: Contract, _depositId: number | string): // @note: 
Querying over such a large block range typically only works on top-tier providers. // @todo: Narrow the block range for the depositId, subject to this PR: - // https://github.com/across-protocol/sdk-v2/pull/476 + // https://github.com/across-protocol/sdk/pull/476 return await spokePool.queryFilter(filter, deploymentBlockNumber, latestBlockNumber); } diff --git a/scripts/utils.ts b/scripts/utils.ts index 9ad159eda..4efb73f3f 100644 --- a/scripts/utils.ts +++ b/scripts/utils.ts @@ -1,8 +1,8 @@ import assert from "assert"; import { Contract, ethers, utils as ethersUtils } from "ethers"; import readline from "readline"; -import * as contracts from "@across-protocol/contracts-v2"; -import { utils as sdkUtils } from "@across-protocol/sdk-v2"; +import * as contracts from "@across-protocol/contracts"; +import { utils as sdkUtils } from "@across-protocol/sdk"; import { getDeployedContract, getNodeUrlList, CHAIN_IDs } from "../src/utils"; // https://nodejs.org/api/process.html#exit-codes diff --git a/scripts/zkSyncDemo.ts b/scripts/zkSyncDemo.ts index 1c737653c..65d7b6d69 100644 --- a/scripts/zkSyncDemo.ts +++ b/scripts/zkSyncDemo.ts @@ -15,7 +15,7 @@ import { askYesNoQuestion } from "./utils"; import minimist from "minimist"; import * as zksync from "zksync-web3"; import { CONTRACT_ADDRESSES } from "../src/common"; -import { gasPriceOracle } from "@across-protocol/sdk-v2"; +import { gasPriceOracle } from "@across-protocol/sdk"; const args = minimist(process.argv.slice(2), { string: ["token", "to", "amount", "chainId", "zkSyncChainId"], }); diff --git a/src/caching/RedisCache.ts b/src/caching/RedisCache.ts index ea9341eaa..2dc9f35f7 100644 --- a/src/caching/RedisCache.ts +++ b/src/caching/RedisCache.ts @@ -1,4 +1,4 @@ -import { interfaces, constants } from "@across-protocol/sdk-v2"; +import { interfaces, constants } from "@across-protocol/sdk"; import { RedisClient, setRedisKey } from "../utils"; /** diff --git a/src/clients/AcrossAPIClient.ts 
b/src/clients/AcrossAPIClient.ts index 47c10401d..46fe5bb5d 100644 --- a/src/clients/AcrossAPIClient.ts +++ b/src/clients/AcrossAPIClient.ts @@ -5,6 +5,7 @@ import { isDefined, winston, BigNumber, + getCurrentTime, getL2TokenAddresses, CHAIN_IDs, TOKEN_SYMBOLS_MAP, @@ -17,10 +18,13 @@ export interface DepositLimits { maxDeposit: BigNumber; } +const API_UPDATE_RETENTION_TIME = 60; // seconds + export class AcrossApiClient { private endpoint = "https://app.across.to/api"; private chainIds: number[]; private limits: { [token: string]: BigNumber } = {}; + private updatedAt = 0; public updatedLimits = false; @@ -40,9 +44,11 @@ export class AcrossApiClient { } async update(ignoreLimits: boolean): Promise { + const now = getCurrentTime(); + const updateAge = now - this.updatedAt; // If no chainIds are specified, the origin chain is assumed to be the HubPool chain, so skip update. - if (ignoreLimits || this.chainIds.length === 0) { - this.logger.debug({ at: "AcrossAPIClient", message: "Skipping querying /limits" }); + if (updateAge < API_UPDATE_RETENTION_TIME || ignoreLimits || this.chainIds.length === 0) { + this.logger.debug({ at: "AcrossAPIClient", message: "Skipping querying /limits", updateAge }); return; } @@ -109,6 +115,7 @@ export class AcrossApiClient { limits: this.limits, }); this.updatedLimits = true; + this.updatedAt = now; } getLimit(originChainId: number, l1Token: string): BigNumber { diff --git a/src/clients/BundleDataClient.ts b/src/clients/BundleDataClient.ts index 3394add67..0c028f149 100644 --- a/src/clients/BundleDataClient.ts +++ b/src/clients/BundleDataClient.ts @@ -32,7 +32,7 @@ import { _buildPoolRebalanceRoot, } from "../dataworker/DataworkerUtils"; import { getWidestPossibleExpectedBlockRange, isChainDisabled } from "../dataworker/PoolRebalanceUtils"; -import { utils } from "@across-protocol/sdk-v2"; +import { utils } from "@across-protocol/sdk"; import { BundleDepositsV3, BundleExcessSlowFills, @@ -302,7 +302,7 @@ export class BundleDataClient { 
const matchingDeposit = this.spokePoolClients[fill.originChainId].getDeposit(fill.depositId); const hasMatchingDeposit = matchingDeposit !== undefined && - utils.getRelayHashFromEvent(fill) === utils.getRelayHashFromEvent(matchingDeposit); + this.getRelayHashFromEvent(fill) === this.getRelayHashFromEvent(matchingDeposit); return hasMatchingDeposit; }) .forEach((fill) => { @@ -414,7 +414,10 @@ export class BundleDataClient { // If a chain is disabled or doesn't have a spoke pool client, return a range of 0 function getBlockRangeDelta(_pendingBlockRanges: number[][]): number[][] { return widestBundleBlockRanges.map((blockRange, index) => { - const initialBlockRange = _pendingBlockRanges[index]; + // If pending block range doesn't have an entry for the widest range, which is possible when a new chain + // is added to the CHAIN_ID_INDICES list, then simply set the initial block range to the widest block range. + // This will produce a block range delta of 0 where the returned range for this chain is [widest[1], widest[1]]. + const initialBlockRange = _pendingBlockRanges[index] ?? 
blockRange; // If chain is disabled, return disabled range if (initialBlockRange[0] === initialBlockRange[1]) { return initialBlockRange; @@ -566,6 +569,7 @@ export class BundleDataClient { private async loadArweaveData(blockRangesForChains: number[][]): Promise { const arweaveKey = this.getArweaveClientKey(blockRangesForChains); + // eslint-disable-next-line @typescript-eslint/no-misused-promises if (!this.arweaveDataCache[arweaveKey]) { this.arweaveDataCache[arweaveKey] = this.loadPersistedDataFromArweave(blockRangesForChains); } @@ -582,6 +586,7 @@ export class BundleDataClient { attemptArweaveLoad = false ): Promise { const key = JSON.stringify(blockRangesForChains); + // eslint-disable-next-line @typescript-eslint/no-misused-promises if (!this.loadDataCache[key]) { let arweaveData; if (attemptArweaveLoad) { @@ -604,7 +609,7 @@ export class BundleDataClient { blockRangesForChains: number[][], spokePoolClients: SpokePoolClientsByChain ): Promise { - const start = performance.now(); + let start = performance.now(); const key = JSON.stringify(blockRangesForChains); if (!this.clients.configStoreClient.isUpdated) { @@ -744,6 +749,7 @@ export class BundleDataClient { // - olderDepositHashes: Deposits sent in a prior bundle that newly expired in this bundle const olderDepositHashes: Set = new Set(); + let depositCounter = 0; for (const originChainId of allChainIds) { const originClient = spokePoolClients[originChainId]; const originChainBlockRange = getBlockRangeForChain(blockRangesForChains, originChainId, chainIds); @@ -757,7 +763,8 @@ export class BundleDataClient { .getDepositsForDestinationChain(destinationChainId) .filter((deposit) => deposit.blockNumber <= originChainBlockRange[1]) .forEach((deposit) => { - const relayDataHash = utils.getRelayHashFromEvent(deposit); + depositCounter++; + const relayDataHash = this.getRelayHashFromEvent(deposit); if (v3RelayHashes[relayDataHash]) { // If we've seen this deposit before, then skip this deposit. 
This can happen if our RPC provider // gives us bad data. @@ -791,11 +798,17 @@ export class BundleDataClient { }); } } + this.logger.debug({ + at: "BundleDataClient#loadData", + message: `Processed ${depositCounter} deposits in ${performance.now() - start}ms.`, + }); + start = performance.now(); // Process fills now that we've populated relay hash dictionary with deposits: const validatedBundleV3Fills: (V3FillWithBlock & { quoteTimestamp: number })[] = []; const validatedBundleSlowFills: V3DepositWithBlock[] = []; const validatedBundleUnexecutableSlowFills: V3DepositWithBlock[] = []; + let fillCounter = 0; for (const originChainId of allChainIds) { const originClient = spokePoolClients[originChainId]; for (const destinationChainId of allChainIds) { @@ -814,7 +827,8 @@ export class BundleDataClient { .getFillsForOriginChain(originChainId) .filter((fill) => fill.blockNumber <= destinationChainBlockRange[1]), async (fill) => { - const relayDataHash = utils.getRelayHashFromEvent(fill); + const relayDataHash = this.getRelayHashFromEvent(fill); + fillCounter++; if (v3RelayHashes[relayDataHash]) { if (!v3RelayHashes[relayDataHash].fill) { @@ -862,7 +876,7 @@ export class BundleDataClient { // object property values against the deposit's, we // sanity check it here by comparing the full relay hashes. If there's an error here then the // historical deposit query is not working as expected. 
- assert(utils.getRelayHashFromEvent(matchedDeposit) === relayDataHash); + assert(this.getRelayHashFromEvent(matchedDeposit) === relayDataHash); validatedBundleV3Fills.push({ ...fill, quoteTimestamp: matchedDeposit.quoteTimestamp, @@ -881,7 +895,7 @@ export class BundleDataClient { .getSlowFillRequestsForOriginChain(originChainId) .filter((request) => request.blockNumber <= destinationChainBlockRange[1]), async (slowFillRequest: SlowFillRequestWithBlock) => { - const relayDataHash = utils.getRelayHashFromEvent(slowFillRequest); + const relayDataHash = this.getRelayHashFromEvent(slowFillRequest); if (v3RelayHashes[relayDataHash]) { if (!v3RelayHashes[relayDataHash].slowFillRequest) { @@ -949,7 +963,7 @@ export class BundleDataClient { // object property values against the deposit's, we // sanity check it here by comparing the full relay hashes. If there's an error here then the // historical deposit query is not working as expected. - assert(utils.getRelayHashFromEvent(matchedDeposit) === relayDataHash); + assert(this.getRelayHashFromEvent(matchedDeposit) === relayDataHash); v3RelayHashes[relayDataHash].deposit = matchedDeposit; // Note: we don't need to query for a historical fill at this point because a fill @@ -1005,6 +1019,11 @@ export class BundleDataClient { }); } } + this.logger.debug({ + at: "BundleDataClient#loadData", + message: `Processed ${fillCounter} fills in ${performance.now() - start}ms.`, + }); + start = performance.now(); // Go through expired deposits in this bundle and now prune those that we have seen a fill for to construct // the list of expired deposits we need to refund in this bundle. @@ -1087,6 +1106,7 @@ export class BundleDataClient { }); // Batch compute V3 lp fees. + start = performance.now(); const promises = [ validatedBundleV3Fills.length > 0 ? 
this.clients.hubPoolClient.batchComputeRealizedLpFeePct( @@ -1126,6 +1146,10 @@ export class BundleDataClient { : [], ]; const [v3FillLpFees, v3SlowFillLpFees, v3UnexecutableSlowFillLpFees] = await Promise.all(promises); + this.logger.debug({ + at: "BundleDataClient#loadData", + message: `Computed batch async LP fees in ${performance.now() - start}ms.`, + }); v3FillLpFees.forEach(({ realizedLpFeePct }, idx) => { const fill = validatedBundleV3Fills[idx]; const { chainToSendRefundTo, repaymentToken } = getRefundInformationFromFill( @@ -1178,6 +1202,14 @@ export class BundleDataClient { }; } + // Internal function to uniquely identify a bridge event. This is preferred over `SDK.getRelayDataHash` which returns + // keccak256 hash of the relay data, which can be used as input into the on-chain `fillStatuses()` function in the + // spoke pool contract. However, this internal function is used to uniquely identify a bridging event + // for speed since its easier to build a string from the event data than to hash it. 
+ private getRelayHashFromEvent(event: V3DepositWithBlock | V3FillWithBlock | SlowFillRequestWithBlock): string { + return `${event.depositor}-${event.recipient}-${event.exclusiveRelayer}-${event.inputToken}-${event.outputToken}-${event.inputAmount}-${event.outputAmount}-${event.originChainId}-${event.depositId}-${event.fillDeadline}-${event.exclusivityDeadline}-${event.message}-${event.destinationChainId}`; + } + async getBundleBlockTimestamps( chainIds: number[], blockRangesForChains: number[][], diff --git a/src/clients/ConfigStoreClient.ts b/src/clients/ConfigStoreClient.ts index a9d155ebe..e9c5c8c17 100644 --- a/src/clients/ConfigStoreClient.ts +++ b/src/clients/ConfigStoreClient.ts @@ -1,4 +1,4 @@ -import { clients, constants, utils } from "@across-protocol/sdk-v2"; +import { clients, constants, utils } from "@across-protocol/sdk"; import { Contract, EventSearchConfig, MakeOptional, isDefined, sortEventsDescending, winston } from "../utils"; import { CONFIG_STORE_VERSION } from "../common"; export const GLOBAL_CONFIG_STORE_KEYS = clients.GLOBAL_CONFIG_STORE_KEYS; diff --git a/src/clients/HubPoolClient.ts b/src/clients/HubPoolClient.ts index 9fb9595cb..b26d7155e 100644 --- a/src/clients/HubPoolClient.ts +++ b/src/clients/HubPoolClient.ts @@ -1,8 +1,9 @@ -import { clients, interfaces } from "@across-protocol/sdk-v2"; +import { clients, interfaces } from "@across-protocol/sdk"; import { Contract } from "ethers"; import winston from "winston"; -import { MakeOptional, EventSearchConfig } from "../utils"; +import { MakeOptional, EventSearchConfig, getTokenInfo, getL1TokenInfo, getUsdcSymbol } from "../utils"; import { IGNORED_HUB_EXECUTED_BUNDLES, IGNORED_HUB_PROPOSED_BUNDLES } from "../common"; +import { L1Token } from "../interfaces"; export type LpFeeRequest = clients.LpFeeRequest; @@ -33,6 +34,33 @@ export class HubPoolClient extends clients.HubPoolClient { ); } + /** + * @dev If tokenAddress + chain do not exist in TOKEN_SYMBOLS_MAP then this will throw. 
+ * @param tokenAddress Token address on `chain` + * @param chain Chain where the `tokenAddress` exists in TOKEN_SYMBOLS_MAP. + * @returns Token info for the given token address on the L2 chain including symbol and decimal. + */ + getTokenInfoForAddress(tokenAddress: string, chain: number): L1Token { + const tokenInfo = getTokenInfo(tokenAddress, chain); + // @dev Temporarily handle case where an L2 token for chain ID can map to more than one TOKEN_SYMBOLS_MAP + // entry. For example, L2 Bridged USDC maps to both the USDC and USDC.e/USDbC entries in TOKEN_SYMBOLS_MAP. + if (tokenInfo.symbol.toLowerCase() === "usdc" && chain !== this.chainId) { + tokenInfo.symbol = getUsdcSymbol(tokenAddress, chain) ?? "UNKNOWN"; + } + return tokenInfo; + } + + /** + * @dev If tokenAddress + chain do not exist in TOKEN_SYMBOLS_MAP then this will throw. + * @dev if the token matched in TOKEN_SYMBOLS_MAP does not have an L1 token address then this will throw. + * @param tokenAddress Token address on `chain` + * @param chain Chain where the `tokenAddress` exists in TOKEN_SYMBOLS_MAP. + * @returns Token info for the given token address on the Hub chain including symbol and decimal and L1 address. 
+ */ + getL1TokenInfoForAddress(tokenAddress: string, chain: number): L1Token { + return getL1TokenInfo(tokenAddress, chain); + } + async computeRealizedLpFeePct(deposit: LpFeeRequest): Promise { if (deposit.quoteTimestamp > this.currentTime) { throw new Error( diff --git a/src/clients/InventoryClient.ts b/src/clients/InventoryClient.ts index 84b4d9bd8..dc97bc0a9 100644 --- a/src/clients/InventoryClient.ts +++ b/src/clients/InventoryClient.ts @@ -1,4 +1,4 @@ -import { constants, utils as sdkUtils } from "@across-protocol/sdk-v2"; +import { constants, utils as sdkUtils } from "@across-protocol/sdk"; import { bnZero, BigNumber, @@ -21,19 +21,24 @@ import { MAX_UINT_VAL, toBNWei, assert, + compareAddressesSimple, + getUsdcSymbol, } from "../utils"; import { HubPoolClient, TokenClient, BundleDataClient } from "."; import { AdapterManager, CrossChainTransferClient } from "./bridges"; -import { InventoryConfig, V3Deposit } from "../interfaces"; +import { V3Deposit } from "../interfaces"; +import { InventoryConfig, isAliasConfig, TokenBalanceConfig } from "../interfaces/InventoryManagement"; import lodash from "lodash"; import { CONTRACT_ADDRESSES, SLOW_WITHDRAWAL_CHAINS } from "../common"; import { CombinedRefunds } from "../dataworker/DataworkerUtils"; -type TokenDistributionPerL1Token = { [l1Token: string]: { [chainId: number]: BigNumber } }; +type TokenDistribution = { [l2Token: string]: BigNumber }; +type TokenDistributionPerL1Token = { [l1Token: string]: { [chainId: number]: TokenDistribution } }; export type Rebalance = { chainId: number; l1Token: string; + l2Token: string; thresholdPct: BigNumber; targetPct: BigNumber; currentAllocPct: BigNumber; @@ -68,50 +73,101 @@ export class InventoryClient { this.formatWei = createFormatFunction(2, 4, false, 18); } - // Get the total balance across all chains, considering any outstanding cross chain transfers as a virtual balance on that chain. + /** + * Resolve the token balance configuration for `l1Token` on `chainId`. 
If `l1Token` maps to multiple tokens on + * `chainId` then `l2Token` must be supplied. + * @param l1Token L1 token address to query. + * @param chainId Chain ID to query on + * @param l2Token Optional L2 token address when l1Token maps to multiple l2Token addresses. + */ + getTokenConfig(l1Token: string, chainId: number, l2Token?: string): TokenBalanceConfig | undefined { + const tokenConfig = this.inventoryConfig.tokenConfig[l1Token]; + assert(isDefined(tokenConfig), `getTokenConfig: No token config found for ${l1Token}.`); + + if (isAliasConfig(tokenConfig)) { + assert(isDefined(l2Token), `Cannot resolve ambiguous ${getNetworkName(chainId)} token config for ${l1Token}`); + return tokenConfig[l2Token]?.[chainId]; + } else { + return tokenConfig[chainId]; + } + } + + /* + * Get the total balance for an L1 token across all chains, considering any outstanding cross chain transfers as a + * virtual balance on that chain. + * @param l1Token L1 token address to query. + * returns Cumulative balance of l1Token across all inventory-managed chains. + */ getCumulativeBalance(l1Token: string): BigNumber { return this.getEnabledChains() - .map((chainId) => this.getBalanceOnChainForL1Token(chainId, l1Token)) + .map((chainId) => this.getBalanceOnChain(chainId, l1Token)) .reduce((acc, curr) => acc.add(curr), bnZero); } - // Get the balance of a given l1 token on a target chain, considering any outstanding cross chain transfers as a virtual balance on that chain. - getBalanceOnChainForL1Token(chainId: number | string, l1Token: string): BigNumber { - // We want to skip any l2 token that is not present in the inventory config. - chainId = Number(chainId); - if (chainId !== this.hubPoolClient.chainId && !this._l1TokenEnabledForChain(l1Token, chainId)) { - return bnZero; + /** + * Determine the effective/virtual balance of an l1 token that has been deployed to another chain. + * Includes both the actual balance on the chain and any pending inbound transfers to the target chain. 
+ * If l2Token is supplied, return its balance on the specified chain. Otherwise, return the total allocation + * of l1Token on the specified chain. + * @param chainId Chain to query token balance on. + * @param l1Token L1 token to query on chainId (after mapping). + * @param l2Token Optional l2 token address to narrow the balance reporting. + * @returns Balance of l1Token on chainId. + */ + getBalanceOnChain(chainId: number, l1Token: string, l2Token?: string): BigNumber { + const { crossChainTransferClient, relayer, tokenClient } = this; + let balance: BigNumber; + + // Return the balance for a specific l2 token on the remote chain. + if (isDefined(l2Token)) { + balance = tokenClient.getBalance(chainId, l2Token); + return balance.add( + crossChainTransferClient.getOutstandingCrossChainTransferAmount(relayer, chainId, l1Token, l2Token) + ); } - // If the chain does not have this token (EG BOBA on Optimism) then 0. - const balance = - this.tokenClient.getBalance(chainId, this.getDestinationTokenForL1Token(l1Token, chainId)) || bnZero; + const l2Tokens = this.getRemoteTokensForL1Token(l1Token, chainId); + balance = l2Tokens + .map((l2Token) => tokenClient.getBalance(chainId, l2Token)) + .reduce((acc, curr) => acc.add(curr), bnZero); - // Consider any L1->L2 transfers that are currently pending in the canonical bridge. - return balance.add( - this.crossChainTransferClient.getOutstandingCrossChainTransferAmount(this.relayer, chainId, l1Token) - ); + return balance.add(crossChainTransferClient.getOutstandingCrossChainTransferAmount(this.relayer, chainId, l1Token)); } - // Get the fraction of funds allocated on each chain. - getChainDistribution(l1Token: string): { [chainId: number]: BigNumber } { + /** + * Determine the allocation of an l1 token across all configured remote chain IDs. + * @param l1Token L1 token to query. + * @returns Distribution of l1Token by chain ID and l2Token. 
+ */ + getChainDistribution(l1Token: string): { [chainId: number]: TokenDistribution } { const cumulativeBalance = this.getCumulativeBalance(l1Token); - const distribution: { [chainId: number]: BigNumber } = {}; + const distribution: { [chainId: number]: TokenDistribution } = {}; + this.getEnabledChains().forEach((chainId) => { // If token doesn't have entry on chain, skip creating an entry for it since we'll likely run into an error // later trying to grab the chain equivalent of the L1 token via the HubPoolClient. if (chainId === this.hubPoolClient.chainId || this._l1TokenEnabledForChain(l1Token, chainId)) { - if (cumulativeBalance.gt(bnZero)) { - distribution[chainId] = this.getBalanceOnChainForL1Token(chainId, l1Token) - .mul(this.scalar) - .div(cumulativeBalance); + if (cumulativeBalance.eq(bnZero)) { + return; } + + distribution[chainId] ??= {}; + const l2Tokens = this.getRemoteTokensForL1Token(l1Token, chainId); + l2Tokens.forEach((l2Token) => { + // The effective balance is the current balance + inbound bridge transfers. + const effectiveBalance = this.getBalanceOnChain(chainId, l1Token, l2Token); + distribution[chainId][l2Token] = effectiveBalance.mul(this.scalar).div(cumulativeBalance); + }); } }); return distribution; } - // Get the distribution of all tokens, spread over all chains. + /** + * Determine the allocation of an l1 token across all configured remote chain IDs. + * @param l1Token L1 token to query. + * @returns Distribution of l1Token by chain ID and l2Token. + */ getTokenDistributionPerL1Token(): TokenDistributionPerL1Token { const distributionPerL1Token: TokenDistributionPerL1Token = {}; this.getL1Tokens().forEach((l1Token) => (distributionPerL1Token[l1Token] = this.getChainDistribution(l1Token))); @@ -119,26 +175,54 @@ export class InventoryClient { } // Get the balance of a given token on a given chain, including shortfalls and any pending cross chain transfers. 
-  getCurrentAllocationPct(l1Token: string, chainId: number): BigNumber {
+  getCurrentAllocationPct(l1Token: string, chainId: number, l2Token: string): BigNumber {
     // If there is nothing over all chains, return early.
     const cumulativeBalance = this.getCumulativeBalance(l1Token);
     if (cumulativeBalance.eq(bnZero)) {
       return bnZero;
     }
 
-    const shortfall = this.getTokenShortFall(l1Token, chainId);
-    const currentBalance = this.getBalanceOnChainForL1Token(chainId, l1Token).sub(shortfall);
+    const shortfall = this.tokenClient.getShortfallTotalRequirement(chainId, l2Token);
+    const currentBalance = this.getBalanceOnChain(chainId, l1Token, l2Token).sub(shortfall);
+    // Multiply by scalar to avoid rounding errors.
     return currentBalance.mul(this.scalar).div(cumulativeBalance);
   }
 
-  // Find how short a given chain is for a desired L1Token.
-  getTokenShortFall(l1Token: string, chainId: number): BigNumber {
-    return this.tokenClient.getShortfallTotalRequirement(chainId, this.getDestinationTokenForL1Token(l1Token, chainId));
+  getRepaymentTokenForL1Token(l1Token: string, chainId: number | string): string | undefined {
+    // @todo: Update HubPoolClient.getL2TokenForL1TokenAtBlock() such that it returns `undefined` instead of throwing.
+    try {
+      return this.hubPoolClient.getL2TokenForL1TokenAtBlock(l1Token, Number(chainId));
+    } catch {
+      return undefined;
+    }
   }
 
-  getDestinationTokenForL1Token(l1Token: string, chainId: number | string): string {
-    return this.hubPoolClient.getL2TokenForL1TokenAtBlock(l1Token, Number(chainId));
+  /**
+   * From an L1Token and remote chain ID, resolve all supported corresponding tokens.
+   * This should include at least the relevant repayment token on the relevant chain, but may also include other
+   * "equivalent" tokens (i.e. as with Bridged & Native USDC).
+   * @param l1Token Mainnet token to query.
+   * @param chainId Remote chain to query.
+   * @returns An array of supported tokens on chainId that map back to l1Token on mainnet.
+ */ + getRemoteTokensForL1Token(l1Token: string, chainId: number | string): string[] { + if (chainId === this.hubPoolClient.chainId) { + return [l1Token]; + } + + const tokenConfig = this.inventoryConfig.tokenConfig[l1Token]; + + if (isAliasConfig(tokenConfig)) { + return Object.keys(tokenConfig).filter((k) => isDefined(tokenConfig[k][chainId])); + } + + const destinationToken = this.getRepaymentTokenForL1Token(l1Token, chainId); + if (!isDefined(destinationToken)) { + return []; + } + + return [destinationToken]; } getEnabledChains(): number[] { @@ -158,9 +242,15 @@ export class InventoryClient { } // Decrement Tokens Balance And Increment Cross Chain Transfer - trackCrossChainTransfer(l1Token: string, rebalance: BigNumber, chainId: number | string): void { + trackCrossChainTransfer(l1Token: string, l2Token: string, rebalance: BigNumber, chainId: number | string): void { this.tokenClient.decrementLocalBalance(this.hubPoolClient.chainId, l1Token, rebalance); - this.crossChainTransferClient.increaseOutstandingTransfer(this.relayer, l1Token, rebalance, Number(chainId)); + this.crossChainTransferClient.increaseOutstandingTransfer( + this.relayer, + l1Token, + l2Token, + rebalance, + Number(chainId) + ); } async getAllBundleRefunds(): Promise { @@ -203,7 +293,7 @@ export class InventoryClient { // Increase virtual balance by pending relayer refunds from the latest valid bundle and the // upcoming bundle. We can assume that all refunds from the second latest valid bundle have already // been executed. - let startTimer; + let startTimer: number; if (!isDefined(this.bundleRefundsPromise)) { startTimer = performance.now(); // @dev Save this as a promise so that other parallel calls to this function don't make the same call. 
@@ -213,9 +303,9 @@ export class InventoryClient { const totalRefundsPerChain = this.getEnabledChains().reduce( (refunds: { [chainId: string]: BigNumber }, chainId) => { if (!this.hubPoolClient.l2TokenEnabledForL1Token(l1Token, chainId)) { - refunds[chainId] = toBN(0); + refunds[chainId] = bnZero; } else { - const destinationToken = this.getDestinationTokenForL1Token(l1Token, chainId); + const destinationToken = this.getRepaymentTokenForL1Token(l1Token, chainId); refunds[chainId] = this.bundleDataClient.getTotalRefund( refundsToConsider, this.relayer, @@ -260,41 +350,78 @@ export class InventoryClient { return chainIds; } - // Work out where a relay should be refunded to optimally manage the bots inventory. If the inventory management logic - // not enabled then return funds on the chain the deposit was filled on Else, use the following algorithm for each - // of the origin and destination chain: - // a) Find the chain virtual balance (current balance + pending relays + pending refunds) minus current shortfall. - // b) Find the cumulative virtual balance, including the total refunds on all chains and excluding current shortfall. - // c) Consider the size of a and b post relay (i.e after the relay is paid and all current transfers are settled what - // will the balances be on the target chain and the overall cumulative balance). - // d) Use c to compute what the post relay post current in-flight transactions allocation would be. Compare this - // number to the target threshold and: - // If this number is less than the target for the destination chain + rebalance then select destination chain. We - // slightly prefer destination to origin chain to support relayer capital efficiency. - // Else, if this number is less than the target for the origin chain + rebalance then select origin - // chain. - // Else, take repayment on the Hub chain for ease of transferring out of L1 to any L2. 
-  async determineRefundChainId(deposit: V3Deposit, l1Token?: string): Promise<number> {
+  /**
+   * Returns true if the depositor-specified output token is supported by this inventory client.
+   * @param deposit V3 Deposit to consider
+   * @returns boolean True if output and input tokens are equivalent or if input token is USDC and output token
+   * is Bridged USDC.
+   */
+  validateOutputToken(deposit: V3Deposit): boolean {
+    const { inputToken, outputToken, originChainId, destinationChainId } = deposit;
+
+    // Return true if input and output tokens are mapped to the same L1 token via PoolRebalanceRoutes
+    const equivalentTokens = this.hubPoolClient.areTokensEquivalent(
+      inputToken,
+      originChainId,
+      outputToken,
+      destinationChainId
+    );
+    if (equivalentTokens) {
+      return true;
+    }
+
+    // Return true if input token is Native USDC token and output token is Bridged USDC or if input token
+    // is Bridged USDC and the output token is Native USDC.
+    // @dev getUsdcSymbol() returns defined if the token on the origin chain is either USDC, USDC.e or USDbC.
+    // The contracts should only allow deposits where the input token is the Across-supported USDC variant, so this
+    // check specifically handles the case where the input token is Bridged/Native and the output token Native/Bridged.
+    const isInputTokenUSDC = isDefined(getUsdcSymbol(inputToken, originChainId));
+    const isOutputTokenBridgedUSDC = compareAddressesSimple(
+      outputToken,
+      TOKEN_SYMBOLS_MAP[destinationChainId === CHAIN_IDs.BASE ? "USDbC" : "USDC.e"].addresses?.[destinationChainId]
+    );
+    return isInputTokenUSDC && isOutputTokenBridgedUSDC;
+  }
+
+  /*
+   * Return all eligible repayment chains for a deposit. If inventory management is enabled, then this function will
+   * only choose chains where the post-relay balance allocation for a potential repayment chain is under the maximum
+   * allowed allocation on that chain. 
Origin, Destination, and HubChains are always evaluated as potential
+   * repayment chains in addition to "Slow Withdrawal chains" such as Base, Optimism and Arbitrum for which
+   * taking repayment would reduce HubPool utilization. Post-relay allocation percentages take into
+   * account pending cross-chain inventory-management transfers, upcoming bundle refunds, token shortfalls
+   * needed to cover other unfilled deposits in addition to current token balances. Slow withdrawal chains are only
+   * selected if the SpokePool's running balance for that chain is over the system's desired target.
+   * @dev The HubChain is always evaluated as a fallback option if the inventory management is enabled and all other
+   * chains are over-allocated.
+   * @dev If inventory management is disabled, then destinationChain is used as a default.
+   * @param deposit Deposit to determine repayment chains for.
+   * @param l1Token L1Token linked with deposited inputToken and repayment chain refund token.
+   * @returns list of chain IDs that are possible repayment chains for the deposit, sorted from highest
+   * to lowest priority.
+   */
+  async determineRefundChainId(deposit: V3Deposit, l1Token?: string): Promise<number[]> {
     const { originChainId, destinationChainId, inputToken, outputToken, outputAmount, inputAmount } = deposit;
     const hubChainId = this.hubPoolClient.chainId;
 
     if (!this.isInventoryManagementEnabled()) {
-      return destinationChainId;
+      return [destinationChainId];
     }
 
     // The InventoryClient assumes 1:1 equivalency between input and output tokens. At the moment there is no support
-    // for disparate output tokens, so if one appears here then something is wrong. Throw hard and fast in that case.
+    // for disparate output tokens (unless the output token is USDC.e and the input token is USDC),
+    // so if one appears here then something is wrong. Throw hard and fast in that case. 
// In future, fills for disparate output tokens should probably just take refunds on the destination chain and // outsource inventory management to the operator. - if (!this.hubPoolClient.areTokensEquivalent(inputToken, originChainId, outputToken, destinationChainId)) { + if (!this.validateOutputToken(deposit)) { const [srcChain, dstChain] = [getNetworkName(originChainId), getNetworkName(destinationChainId)]; throw new Error( `Unexpected ${dstChain} output token on ${srcChain} deposit ${deposit.depositId}` + ` (${inputToken} != ${outputToken})` ); } - l1Token ??= this.hubPoolClient.getL1TokenForL2TokenAtBlock(outputToken, destinationChainId); - const tokenConfig = this.inventoryConfig?.tokenConfig?.[l1Token]; + + l1Token ??= this.hubPoolClient.getL1TokenForL2TokenAtBlock(inputToken, originChainId); // Consider any refunds from executed and to-be executed bundles. If bundle data client doesn't return in // time, return an object with zero refunds for all chains. @@ -345,21 +472,28 @@ export class InventoryClient { chainsToEvaluate.push(originChainId); } + const eligibleRefundChains: number[] = []; // At this point, all chains to evaluate have defined token configs and are sorted in order of // highest priority to take repayment on, assuming the chain is under-allocated. 
for (const _chain of chainsToEvaluate) { assert(this._l1TokenEnabledForChain(l1Token, _chain), `Token ${l1Token} not enabled for chain ${_chain}`); // Destination chain: - const chainShortfall = this.getTokenShortFall(l1Token, _chain); - const chainVirtualBalance = this.getBalanceOnChainForL1Token(_chain, l1Token); + const repaymentToken = this.getRepaymentTokenForL1Token(l1Token, _chain); + const chainShortfall = this.tokenClient.getShortfallTotalRequirement(_chain, repaymentToken); + const chainVirtualBalance = this.getBalanceOnChain(_chain, l1Token, repaymentToken); const chainVirtualBalanceWithShortfall = chainVirtualBalance.sub(chainShortfall); let cumulativeVirtualBalanceWithShortfall = cumulativeVirtualBalance.sub(chainShortfall); // @dev No need to factor in outputAmount when computing origin chain balance since funds only leave relayer // on destination chain + // @dev Do not subtract outputAmount from virtual balance if output token and input token are not equivalent. + // This is possible when the output token is USDC.e and the input token is USDC which would still cause + // validateOutputToken() to return true above. let chainVirtualBalanceWithShortfallPostRelay = - _chain === destinationChainId + _chain === destinationChainId && + this.hubPoolClient.areTokensEquivalent(inputToken, originChainId, outputToken, destinationChainId) ? chainVirtualBalanceWithShortfall.sub(outputAmount) : chainVirtualBalanceWithShortfall; + // Add upcoming refunds: chainVirtualBalanceWithShortfallPostRelay = chainVirtualBalanceWithShortfallPostRelay.add( totalRefundsPerChain[_chain] @@ -376,8 +510,13 @@ export class InventoryClient { .div(cumulativeVirtualBalanceWithShortfallPostRelay); // Consider configured buffer for target to allow relayer to support slight overages. - const thresholdPct = toBN(this.inventoryConfig.tokenConfig[l1Token][_chain].targetPct) - .mul(tokenConfig[_chain].targetOverageBuffer ?? 
toBNWei("1")) + const tokenConfig = this.getTokenConfig(l1Token, _chain, repaymentToken); + assert( + isDefined(tokenConfig), + `No ${outputToken} tokenConfig in the Inventory Config for ${l1Token} on ${_chain} with a repaymentToken of ${repaymentToken}.` + ); + const thresholdPct = toBN(tokenConfig.targetPct) + .mul(tokenConfig.targetOverageBuffer ?? toBNWei("1")) .div(fixedPointAdjustment); this.log( `Evaluated taking repayment on ${ @@ -403,15 +542,16 @@ export class InventoryClient { } ); if (expectedPostRelayAllocation.lte(thresholdPct)) { - return _chain; + eligibleRefundChains.push(_chain); } } - // None of the chain allocation percentages are lower than their target so take - // repayment on the hub chain by default. The caller has also set a token config so they are not expecting - // repayments to default to destination chain. If caller wanted repayments to default to destination - // chain, then they should not set a token config. - return hubChainId; + // Always add hubChain as a fallback option if inventory management is enabled. If none of the chainsToEvaluate + // were selected, then this function will return just the hub chain as a fallback option. + if (!eligibleRefundChains.includes(hubChainId)) { + eligibleRefundChains.push(hubChainId); + } + return eligibleRefundChains; } /** @@ -447,23 +587,23 @@ export class InventoryClient { } else { runningBalanceForToken = leaf.runningBalances[l1TokenIndex]; } + const l2Token = this.hubPoolClient.getL2TokenForL1TokenAtBlock(l1Token, Number(chainId)); + // Approximate latest running balance as last known proposed running balance... 
// - minus total deposit amount on chain since the latest end block proposed // - plus total refund amount on chain since the latest end block proposed - const upcomingDeposits = this.bundleDataClient.getUpcomingDepositAmount( - chainId, - this.getDestinationTokenForL1Token(l1Token, chainId), - blockRange[1] - ); + const upcomingDeposits = this.bundleDataClient.getUpcomingDepositAmount(chainId, l2Token, blockRange[1]); + // Grab refunds that are not included in any bundle proposed on-chain. These are refunds that have not // been accounted for in the latest running balance set in `runningBalanceForToken`. const allBundleRefunds = lodash.cloneDeep(await this.bundleRefundsPromise); const upcomingRefunds = allBundleRefunds.pop(); // @dev upcoming refunds are always pushed last into this list. // If a chain didn't exist in the last bundle or a spoke pool client isn't defined, then // one of the refund entries for a chain can be undefined. - const upcomingRefundForChain = Object.values( - upcomingRefunds?.[chainId]?.[this.getDestinationTokenForL1Token(l1Token, chainId)] ?? {} - ).reduce((acc, curr) => acc.add(curr), bnZero); + const upcomingRefundForChain = Object.values(upcomingRefunds?.[chainId]?.[l2Token] ?? {}).reduce( + (acc, curr) => acc.add(curr), + bnZero + ); // Updated running balance is last known running balance minus deposits plus upcoming refunds. 
const latestRunningBalance = runningBalanceForToken.sub(upcomingDeposits).add(upcomingRefundForChain); @@ -567,37 +707,38 @@ export class InventoryClient { } getPossibleRebalances(): Rebalance[] { - const tokenDistributionPerL1Token = this.getTokenDistributionPerL1Token(); - return this._getPossibleRebalances(tokenDistributionPerL1Token); - } - - _getPossibleRebalances(tokenDistributionPerL1Token: TokenDistributionPerL1Token): Rebalance[] { + const chainIds = this.getEnabledL2Chains(); const rebalancesRequired: Rebalance[] = []; - // First, compute the rebalances that we would do assuming we have sufficient tokens on L1. - for (const l1Token of Object.keys(tokenDistributionPerL1Token)) { + for (const l1Token of this.getL1Tokens()) { const cumulativeBalance = this.getCumulativeBalance(l1Token); if (cumulativeBalance.eq(bnZero)) { continue; } - for (const chainId of this.getEnabledL2Chains()) { - // Skip if there's no configuration for l1Token on chainId. This is the case for BOBA and BADGER - // as they're not present on all L2s. + chainIds.forEach((chainId) => { + // Skip if there's no configuration for l1Token on chainId. 
if (!this._l1TokenEnabledForChain(l1Token, chainId)) { - continue; + return; } - const currentAllocPct = this.getCurrentAllocationPct(l1Token, chainId); - const { thresholdPct, targetPct } = this.inventoryConfig.tokenConfig[l1Token][chainId]; - if (currentAllocPct.lt(thresholdPct)) { + const l2Tokens = this.getRemoteTokensForL1Token(l1Token, chainId); + l2Tokens.forEach((l2Token) => { + const currentAllocPct = this.getCurrentAllocationPct(l1Token, chainId, l2Token); + const tokenConfig = this.getTokenConfig(l1Token, chainId, l2Token); + const { thresholdPct, targetPct } = tokenConfig; + + if (currentAllocPct.gte(thresholdPct)) { + return; + } + const deltaPct = targetPct.sub(currentAllocPct); const amount = deltaPct.mul(cumulativeBalance).div(this.scalar); - const balance = this.tokenClient.getBalance(1, l1Token); - // Divide by scalar because allocation percent was multiplied by it to avoid rounding errors. + const balance = this.tokenClient.getBalance(this.hubPoolClient.chainId, l1Token); rebalancesRequired.push({ chainId, l1Token, + l2Token, currentAllocPct, thresholdPct, targetPct, @@ -605,9 +746,10 @@ export class InventoryClient { cumulativeBalance, amount, }); - } - } + }); + }); } + return rebalancesRequired; } @@ -627,16 +769,15 @@ export class InventoryClient { const tokenDistributionPerL1Token = this.getTokenDistributionPerL1Token(); this.constructConsideringRebalanceDebugLog(tokenDistributionPerL1Token); - const rebalancesRequired = this._getPossibleRebalances(tokenDistributionPerL1Token); + const rebalancesRequired = this.getPossibleRebalances(); if (rebalancesRequired.length === 0) { this.log("No rebalances required"); return; } // Next, evaluate if we have enough tokens on L1 to actually do these rebalances. 
- for (const rebalance of rebalancesRequired) { - const { balance, amount, l1Token, chainId } = rebalance; + const { balance, amount, l1Token, l2Token, chainId } = rebalance; // This is the balance left after any assumed rebalances from earlier loop iterations. const unallocatedBalance = this.tokenClient.getBalance(this.hubPoolClient.chainId, l1Token); @@ -650,27 +791,17 @@ export class InventoryClient { // RPC's returning slowly, leading to concurrent/overlapping instances of the bot running. const tokenContract = new Contract(l1Token, ERC20.abi, this.hubPoolClient.hubPool.signer); const currentBalance = await tokenContract.balanceOf(this.relayer); - if (!balance.eq(currentBalance)) { - this.logger.warn({ - at: "InventoryClient", - message: "🚧 Token balance on Ethereum changed before sending transaction, skipping rebalance", - l1Token, - l2ChainId: chainId, - balance, - currentBalance, - }); - continue; - } else { - this.logger.debug({ - at: "InventoryClient", - message: "Token balance in relayer on Ethereum is as expected, sending cross chain transfer", - l1Token, - l2ChainId: chainId, - balance, - }); + + const balanceChanged = !balance.eq(currentBalance); + const [message, log] = balanceChanged + ? ["🚧 Token balance on mainnet changed, skipping rebalance", this.logger.warn] + : ["Token balance in relayer on mainnet is as expected, sending cross chain transfer", this.logger.debug]; + log({ at: "InventoryClient", message, l1Token, l2Token, l2ChainId: chainId, balance, currentBalance }); + + if (!balanceChanged) { possibleRebalances.push(rebalance); // Decrement token balance in client for this chain and increment cross chain counter. - this.trackCrossChainTransfer(l1Token, amount, chainId); + this.trackCrossChainTransfer(l1Token, l2Token, amount, chainId); } } else { // Extract unexecutable rebalances for logging. @@ -688,8 +819,8 @@ export class InventoryClient { // sends each transaction one after the other with incrementing nonce. 
this will be left for a follow on PR as this
     // is already complex logic and most of the time we'll not be sending batches of rebalance transactions.
     for (const rebalance of possibleRebalances) {
-      const { chainId, l1Token, amount } = rebalance;
-      const { hash } = await this.sendTokenCrossChain(chainId, l1Token, amount, this.simMode);
+      const { chainId, l1Token, l2Token, amount } = rebalance;
+      const { hash } = await this.sendTokenCrossChain(chainId, l1Token, amount, this.simMode, l2Token);
       executedTransactions.push({ ...rebalance, hash });
     }
 
@@ -700,10 +831,10 @@ export class InventoryClient {
     for (const [_chainId, rebalances] of Object.entries(groupedRebalances)) {
       const chainId = Number(_chainId);
       mrkdwn += `*Rebalances sent to ${getNetworkName(chainId)}:*\n`;
-      for (const { l1Token, amount, targetPct, thresholdPct, cumulativeBalance, hash } of rebalances) {
-        const tokenInfo = this.hubPoolClient.getTokenInfoForL1Token(l1Token);
+      for (const { l2Token, amount, targetPct, thresholdPct, cumulativeBalance, hash, chainId } of rebalances) {
+        const tokenInfo = this.hubPoolClient.getTokenInfoForAddress(l2Token, chainId);
         if (!tokenInfo) {
-          throw new Error(`InventoryClient::rebalanceInventoryIfNeeded no L1 token info for token ${l1Token}`);
+          throw new Error(`InventoryClient::rebalanceInventoryIfNeeded no token info for L2 token ${l2Token} on chain ${chainId}`);
         }
         const { symbol, decimals } = tokenInfo;
         const formatter = createFormatFunction(2, 4, false, decimals);
@@ -714,7 +845,7 @@ export class InventoryClient {
         `${formatter(
            cumulativeBalance.toString()
          )} ${symbol} over all chains (ignoring hubpool repayments). 
This chain has a shortfall of ` + - `${formatter(this.getTokenShortFall(l1Token, chainId).toString())} ${symbol} ` + + `${formatter(this.tokenClient.getShortfallTotalRequirement(chainId, l2Token).toString())} ${symbol} ` + `tx: ${blockExplorerLink(hash, this.hubPoolClient.chainId)}\n`; } } @@ -723,25 +854,28 @@ export class InventoryClient { for (const [_chainId, rebalances] of Object.entries(groupedUnexecutedRebalances)) { const chainId = Number(_chainId); mrkdwn += `*Insufficient amount to rebalance to ${getNetworkName(chainId)}:*\n`; - for (const { l1Token, balance, cumulativeBalance, amount } of rebalances) { - const tokenInfo = this.hubPoolClient.getTokenInfoForL1Token(l1Token); + for (const { l1Token, l2Token, balance, cumulativeBalance, amount } of rebalances) { + const tokenInfo = this.hubPoolClient.getTokenInfoForAddress(l2Token, chainId); if (!tokenInfo) { - throw new Error(`InventoryClient::rebalanceInventoryIfNeeded no L1 token info for token ${l1Token}`); + throw new Error( + `InventoryClient::rebalanceInventoryIfNeeded no token info for L2 token ${l2Token} on chain ${chainId}` + ); } const { symbol, decimals } = tokenInfo; const formatter = createFormatFunction(2, 4, false, decimals); + const distributionPct = tokenDistributionPerL1Token[l1Token][chainId][l2Token].mul(100); mrkdwn += `- ${symbol} transfer blocked. Required to send ` + `${formatter(amount.toString())} but relayer has ` + `${formatter(balance.toString())} on L1. 
There is currently ` + - `${formatter(this.getBalanceOnChainForL1Token(chainId, l1Token).toString())} ${symbol} on ` + + `${formatter(this.getBalanceOnChain(chainId, l1Token, l2Token).toString())} ${symbol} on ` + `${getNetworkName(chainId)} which is ` + - `${this.formatWei(tokenDistributionPerL1Token[l1Token][chainId].mul(100).toString())}% of the total ` + + `${this.formatWei(distributionPct.toString())}% of the total ` + `${formatter(cumulativeBalance.toString())} ${symbol}.` + " This chain's pending L1->L2 transfer amount is " + `${formatter( this.crossChainTransferClient - .getOutstandingCrossChainTransferAmount(this.relayer, chainId, l1Token) + .getOutstandingCrossChainTransferAmount(this.relayer, chainId, l1Token, l2Token) .toString() )}.\n`; } @@ -760,9 +894,14 @@ export class InventoryClient { } async unwrapWeth(): Promise { + if (!this.isInventoryManagementEnabled()) { + return; + } + // Note: these types are just used inside this method, so they are declared in-line. type ChainInfo = { chainId: number; + weth: string; unwrapWethThreshold: BigNumber; unwrapWethTarget: BigNumber; balance: BigNumber; @@ -775,22 +914,23 @@ export class InventoryClient { const executedTransactions: ExecutedUnwrap[] = []; try { - if (!this.isInventoryManagementEnabled()) { - return; - } const l1Weth = TOKEN_SYMBOLS_MAP.WETH.addresses[this.hubPoolClient.chainId]; const chains = await Promise.all( this.getEnabledChains() .map((chainId) => { - const unwrapWethThreshold = - this.inventoryConfig.tokenConfig?.[l1Weth]?.[chainId.toString()]?.unwrapWethThreshold; - const unwrapWethTarget = this.inventoryConfig.tokenConfig?.[l1Weth]?.[chainId.toString()]?.unwrapWethTarget; + const tokenConfig = this.getTokenConfig(l1Weth, chainId); + assert(isDefined(tokenConfig)); + + const { unwrapWethThreshold, unwrapWethTarget } = tokenConfig; // Ignore chains where ETH isn't the native gas token. Returning null will result in these being filtered. 
if (chainId === CHAIN_IDs.POLYGON || unwrapWethThreshold === undefined || unwrapWethTarget === undefined) { return null; } - return { chainId, unwrapWethThreshold, unwrapWethTarget }; + const weth = TOKEN_SYMBOLS_MAP.WETH.addresses[chainId]; + assert(isDefined(weth), `No WETH definition for ${getNetworkName(chainId)}`); + + return { chainId, weth, unwrapWethThreshold, unwrapWethTarget }; }) // This filters out all nulls, which removes any chains that are meant to be ignored. .filter(isDefined) @@ -806,8 +946,8 @@ export class InventoryClient { this.log("Checking WETH unwrap thresholds for chains with thresholds set", { chains }); chains.forEach((chainInfo) => { - const { chainId, unwrapWethThreshold, unwrapWethTarget, balance } = chainInfo; - const l2WethBalance = this.tokenClient.getBalance(chainId, this.getDestinationTokenForL1Token(l1Weth, chainId)); + const { chainId, weth, unwrapWethThreshold, unwrapWethTarget, balance } = chainInfo; + const l2WethBalance = this.tokenClient.getBalance(chainId, weth); if (balance.lt(unwrapWethThreshold)) { const amountToUnwrap = unwrapWethTarget.sub(balance); @@ -835,10 +975,9 @@ export class InventoryClient { // sends each transaction one after the other with incrementing nonce. this will be left for a follow on PR as this // is already complex logic and most of the time we'll not be sending batches of rebalance transactions. 
for (const { chainInfo, amount } of unwrapsRequired) { - const { chainId } = chainInfo; - const l2Weth = this.getDestinationTokenForL1Token(l1Weth, chainId); - this.tokenClient.decrementLocalBalance(chainId, l2Weth, amount); - const receipt = await this._unwrapWeth(chainId, l2Weth, amount); + const { chainId, weth } = chainInfo; + this.tokenClient.decrementLocalBalance(chainId, weth, amount); + const receipt = await this._unwrapWeth(chainId, weth, amount); executedTransactions.push({ chainInfo, amount, hash: receipt.hash }); } @@ -858,15 +997,13 @@ export class InventoryClient { } for (const { chainInfo, amount } of unexecutedUnwraps) { - const { chainId } = chainInfo; + const { chainId, weth } = chainInfo; mrkdwn += `*Insufficient amount to unwrap WETH on ${getNetworkName(chainId)}:*\n`; const formatter = createFormatFunction(2, 4, false, 18); mrkdwn += "- WETH unwrap blocked. Required to send " + `${formatter(amount.toString())} but relayer has ` + - `${formatter( - this.tokenClient.getBalance(chainId, this.getDestinationTokenForL1Token(l1Weth, chainId)).toString() - )} WETH balance.\n`; + `${formatter(this.tokenClient.getBalance(chainId, weth).toString())} WETH balance.\n`; } if (mrkdwn) { @@ -881,15 +1018,17 @@ export class InventoryClient { } } - constructConsideringRebalanceDebugLog(distribution: { [l1Token: string]: { [chainId: number]: BigNumber } }): void { + constructConsideringRebalanceDebugLog(distribution: TokenDistributionPerL1Token): void { const logData: { [symbol: string]: { [chainId: number]: { - actualBalanceOnChain: string; - virtualBalanceOnChain: string; - outstandingTransfers: string; - tokenShortFalls: string; - proRataShare: string; + [l2TokenAddress: string]: { + actualBalanceOnChain: string; + virtualBalanceOnChain: string; + outstandingTransfers: string; + tokenShortFalls: string; + proRataShare: string; + }; }; }; } = {}; @@ -906,23 +1045,27 @@ export class InventoryClient { cumulativeBalances[symbol] = 
formatter(this.getCumulativeBalance(l1Token).toString()); logData[symbol] ??= {}; - Object.entries(distributionForToken).forEach(([_chainId, amount]) => { + Object.keys(distributionForToken).forEach((_chainId) => { const chainId = Number(_chainId); - logData[symbol][chainId] = { - actualBalanceOnChain: formatter( - this.getBalanceOnChainForL1Token(chainId, l1Token) - .sub(this.crossChainTransferClient.getOutstandingCrossChainTransferAmount(this.relayer, chainId, l1Token)) - .toString() - ), - virtualBalanceOnChain: formatter(this.getBalanceOnChainForL1Token(chainId, l1Token).toString()), - outstandingTransfers: formatter( - this.crossChainTransferClient - .getOutstandingCrossChainTransferAmount(this.relayer, chainId, l1Token) - .toString() - ), - tokenShortFalls: formatter(this.getTokenShortFall(l1Token, chainId).toString()), - proRataShare: this.formatWei(amount.mul(100).toString()) + "%", - }; + logData[symbol][chainId] ??= {}; + + Object.entries(distributionForToken[chainId]).forEach(([l2Token, amount]) => { + const balanceOnChain = this.getBalanceOnChain(chainId, l1Token, l2Token); + const transfers = this.crossChainTransferClient.getOutstandingCrossChainTransferAmount( + this.relayer, + chainId, + l1Token, + l2Token + ); + const actualBalanceOnChain = this.tokenClient.getBalance(chainId, l2Token); + logData[symbol][chainId][l2Token] = { + actualBalanceOnChain: formatter(actualBalanceOnChain.toString()), + virtualBalanceOnChain: formatter(balanceOnChain.toString()), + outstandingTransfers: formatter(transfers.toString()), + tokenShortFalls: formatter(this.tokenClient.getShortfallTotalRequirement(chainId, l2Token).toString()), + proRataShare: this.formatWei(amount.mul(100).toString()) + "%", + }; + }); }); }); @@ -933,20 +1076,21 @@ export class InventoryClient { }); } - async sendTokenCrossChain( + sendTokenCrossChain( chainId: number | string, l1Token: string, amount: BigNumber, - simMode = false + simMode = false, + l2Token?: string ): Promise { - return 
await this.adapterManager.sendTokenCrossChain(this.relayer, Number(chainId), l1Token, amount, simMode); + return this.adapterManager.sendTokenCrossChain(this.relayer, Number(chainId), l1Token, amount, simMode, l2Token); } - async _unwrapWeth(chainId: number, _l2Weth: string, amount: BigNumber): Promise { + _unwrapWeth(chainId: number, _l2Weth: string, amount: BigNumber): Promise { const l2Signer = this.tokenClient.spokePoolClients[chainId].spokePool.signer; const l2Weth = new Contract(_l2Weth, CONTRACT_ADDRESSES[1].weth.abi, l2Signer); this.log("Unwrapping WETH", { amount: amount.toString() }); - return await runTransaction(this.logger, l2Weth, "withdraw", [amount]); + return runTransaction(this.logger, l2Weth, "withdraw", [amount]); } async setL1TokenApprovals(): Promise { @@ -968,11 +1112,12 @@ export class InventoryClient { await this.adapterManager.wrapEthIfAboveThreshold(this.inventoryConfig, this.simMode); } - async update(): Promise { + update(chainIds?: number[]): Promise { if (!this.isInventoryManagementEnabled()) { return; } - await this.crossChainTransferClient.update(this.getL1Tokens()); + + return this.crossChainTransferClient.update(this.getL1Tokens(), chainIds); } isInventoryManagementEnabled(): boolean { @@ -988,7 +1133,18 @@ export class InventoryClient { } _l1TokenEnabledForChain(l1Token: string, chainId: number): boolean { - return this.inventoryConfig.tokenConfig?.[l1Token]?.[String(chainId)] !== undefined; + const tokenConfig = this.inventoryConfig?.tokenConfig?.[l1Token]; + if (!isDefined(tokenConfig)) { + return false; + } + + // If tokenConfig directly references chainId, token is enabled. + if (!isAliasConfig(tokenConfig) && isDefined(tokenConfig[chainId])) { + return true; + } + + // If any of the mapped symbols reference chainId, token is enabled. 
+ return Object.keys(tokenConfig).some((symbol) => isDefined(tokenConfig[symbol][chainId])); } /** diff --git a/src/clients/MultiCallerClient.ts b/src/clients/MultiCallerClient.ts index 045fc87df..10b3442dd 100644 --- a/src/clients/MultiCallerClient.ts +++ b/src/clients/MultiCallerClient.ts @@ -1,4 +1,4 @@ -import { utils as sdkUtils } from "@across-protocol/sdk-v2"; +import { utils as sdkUtils } from "@across-protocol/sdk"; import { BigNumber } from "ethers"; import { DEFAULT_MULTICALL_CHUNK_SIZE, DEFAULT_CHAIN_MULTICALL_CHUNK_SIZE, Multicall2Call } from "../common"; import { @@ -113,13 +113,6 @@ export class MultiCallerClient { } } - async executeTransactionQueue(simulate = false): Promise { - // For compatibility with the existing implementation, flatten all txn hashes into a single array. - // To be resolved once the legacy implementation is removed and the callers have been updated. - const txnHashes: { [chainId: number]: string[] } = await this.executeTxnQueues(simulate); - return Object.values(txnHashes).flat(); - } - // For each chain, collate the enqueued transactions and process them in parallel. async executeTxnQueues(simulate = false, chainIds: number[] = []): Promise> { if (chainIds.length === 0) { @@ -129,25 +122,16 @@ export class MultiCallerClient { ]); } - // One promise per chain for parallel execution. - const resultsByChain = await Promise.allSettled( - chainIds.map((chainId) => { - const txns: AugmentedTransaction[] | undefined = this.txns[chainId]; - const valueTxns: AugmentedTransaction[] | undefined = this.valueTxns[chainId]; - - this.clearTransactionQueue(chainId); - return this.executeChainTxnQueue(chainId, txns, valueTxns, simulate); - }) - ); + const results = await Promise.allSettled(chainIds.map((chainId) => this.executeTxnQueue(chainId, simulate))); // Collate the results for each chain. 
const txnHashes: Record = Object.fromEntries( - resultsByChain.map((chainResult, idx) => { + results.map((result, idx) => { const chainId = chainIds[idx]; - if (isPromiseFulfilled(chainResult)) { - return [chainId, { result: chainResult.value.map((txnResponse) => txnResponse.hash), isError: false }]; + if (isPromiseFulfilled(result)) { + return [chainId, { result: result.value.map((txnResponse) => txnResponse.hash), isError: false }]; } else { - return [chainId, { result: chainResult.reason, isError: true }]; + return [chainId, { result: result.reason, isError: true }]; } }) ); @@ -175,10 +159,18 @@ export class MultiCallerClient { return Object.fromEntries(Object.entries(txnHashes).map(([chainId, { result }]) => [chainId, result])); } + // For a single chain, take any enqueued transactions and attempt to execute them. + async executeTxnQueue(chainId: number, simulate = false): Promise { + const txns: AugmentedTransaction[] | undefined = this.txns[chainId]; + const valueTxns: AugmentedTransaction[] | undefined = this.valueTxns[chainId]; + this.clearTransactionQueue(chainId); + return this._executeTxnQueue(chainId, txns, valueTxns, simulate); + } + // For a single chain, simulate all potential multicall txns and group the ones that pass into multicall bundles. // Then, submit a concatenated list of value txns + multicall bundles. If simulation was requested, log the results // and return early. 
- async executeChainTxnQueue( + protected async _executeTxnQueue( chainId: number, txns: AugmentedTransaction[] = [], valueTxns: AugmentedTransaction[] = [], diff --git a/src/clients/ProfitClient.ts b/src/clients/ProfitClient.ts index 4854f87ec..61c357601 100644 --- a/src/clients/ProfitClient.ts +++ b/src/clients/ProfitClient.ts @@ -6,7 +6,7 @@ import { relayFeeCalculator, typeguards, utils as sdkUtils, -} from "@across-protocol/sdk-v2"; +} from "@across-protocol/sdk"; import * as constants from "../common/Constants"; import { assert, @@ -25,6 +25,7 @@ import { assign, CHAIN_IDs, TOKEN_SYMBOLS_MAP, + TOKEN_EQUIVALENCE_REMAPPING, ZERO_ADDRESS, } from "../utils"; import { @@ -83,35 +84,6 @@ type UnprofitableFill = { // relayer's own address. The specified address is deliberately setup by RL to have a 0 token balance. const TEST_RECIPIENT = "0xBb23Cd0210F878Ea4CcA50e9dC307fb0Ed65Cf6B"; -// These are used to simulate fills on L2s to return estimated gas costs. -// Note: the type here assumes that all of these classes take the same constructor parameters. 
-const QUERY_HANDLERS: { - [chainId: number]: new ( - ...args: ConstructorParameters - ) => relayFeeCalculator.QueryInterface; -} = { - 1: relayFeeCalculator.EthereumQueries, - 10: relayFeeCalculator.OptimismQueries, - 137: relayFeeCalculator.PolygonQueries, - 288: relayFeeCalculator.BobaQueries, - 324: relayFeeCalculator.ZkSyncQueries, - 8453: relayFeeCalculator.BaseQueries, - 42161: relayFeeCalculator.ArbitrumQueries, - 59144: relayFeeCalculator.LineaQueries, - // Testnets: - 5: relayFeeCalculator.EthereumGoerliQueries, - 280: relayFeeCalculator.zkSyncGoerliQueries, - 420: relayFeeCalculator.OptimismGoerliQueries, - 59140: relayFeeCalculator.LineaGoerliQueries, - 80001: relayFeeCalculator.PolygonMumbaiQueries, - 84531: relayFeeCalculator.BaseGoerliQueries, - 84532: relayFeeCalculator.BaseSepoliaQueries, - 421613: relayFeeCalculator.ArbitrumGoerliQueries, - 421614: relayFeeCalculator.ArbitrumSepoliaQueries, - 11155111: relayFeeCalculator.EthereumSepoliaQueries, - 11155420: relayFeeCalculator.OptimismSepoliaQueries, -}; - const { PriceClient } = priceClient; const { acrossApi, coingecko, defiLlama } = priceClient.adapters; @@ -204,8 +176,15 @@ export class ProfitClient { * @returns Address corresponding to token. */ resolveTokenAddress(token: string): string { - const address = ethersUtils.isAddress(token) ? token : this.tokenSymbolMap[token]; - assert(isDefined(address), `Unable to resolve address for token ${token}`); + if (ethersUtils.isAddress(token)) { + return token; + } + const remappedTokenSymbol = TOKEN_EQUIVALENCE_REMAPPING[token] ?? 
token; + const address = this.tokenSymbolMap[remappedTokenSymbol]; + assert( + isDefined(address), + `ProfitClient#resolveTokenAddress: Unable to resolve address for token ${token} (using remapped symbol ${remappedTokenSymbol})` + ); return address; } @@ -338,9 +317,28 @@ export class ProfitClient { const scaledInputAmount = deposit.inputAmount.mul(inputTokenScalar); const inputAmountUsd = scaledInputAmount.mul(inputTokenPriceUsd).div(fixedPoint); - const outputTokenInfo = hubPoolClient.getL1TokenInfoForL2Token(deposit.outputToken, deposit.destinationChainId); - const outputTokenPriceUsd = this.getPriceOfToken(outputTokenInfo.symbol); - const outputTokenScalar = toBNWei(1, 18 - outputTokenInfo.decimals); + // Unlike the input token, output token is not always resolvable via HubPoolClient since outputToken + // can be any arbitrary token. + let outputTokenSymbol: string, outputTokenDecimals: number; + // If the output token and the input token are equivalent, then we can look up the token info + // via the HubPoolClient since the output token is mapped via PoolRebalanceRoute to the HubPool. + // If not, then we should look up outputToken in the TOKEN_SYMBOLS_MAP for the destination chain. + const matchingTokens = + TOKEN_SYMBOLS_MAP[inputTokenInfo.symbol]?.addresses[deposit.destinationChainId] === deposit.outputToken; + if (matchingTokens) { + ({ symbol: outputTokenSymbol, decimals: outputTokenDecimals } = hubPoolClient.getL1TokenInfoForL2Token( + deposit.outputToken, + deposit.destinationChainId + )); + } else { + // This function will throw if the token is not found in the TOKEN_SYMBOLS_MAP for the destination chain. 
+ ({ symbol: outputTokenSymbol, decimals: outputTokenDecimals } = hubPoolClient.getTokenInfoForAddress( + deposit.outputToken, + deposit.destinationChainId + )); + } + const outputTokenPriceUsd = this.getPriceOfToken(outputTokenSymbol); + const outputTokenScalar = toBNWei(1, 18 - outputTokenDecimals); const effectiveOutputAmount = min(deposit.outputAmount, deposit.updatedOutputAmount ?? deposit.outputAmount); const scaledOutputAmount = effectiveOutputAmount.mul(outputTokenScalar); const outputAmountUsd = scaledOutputAmount.mul(outputTokenPriceUsd).div(fixedPoint); @@ -367,14 +365,9 @@ export class ProfitClient { ? netRelayerFeeUsd.mul(fixedPoint).div(outputAmountUsd) : bnZero; - // If either token prices are unknown, assume the relay is unprofitable. Force non-equivalent tokens - // to be unprofitable for now. The relayer may be updated in future to support in-protocol swaps. - const equivalentTokens = outputTokenInfo.address === inputTokenInfo.address; + // If either token prices are unknown, assume the relay is unprofitable. const profitable = - equivalentTokens && - inputTokenPriceUsd.gt(bnZero) && - outputTokenPriceUsd.gt(bnZero) && - netRelayerFeePct.gte(minRelayerFeePct); + inputTokenPriceUsd.gt(bnZero) && outputTokenPriceUsd.gt(bnZero) && netRelayerFeePct.gte(minRelayerFeePct); return { totalFeePct, @@ -397,7 +390,7 @@ export class ProfitClient { } // Return USD amount of fill amount for deposited token, should always return in wei as the units. 
- getFillAmountInUsd(deposit: Deposit, fillAmount: BigNumber): BigNumber { + getFillAmountInUsd(deposit: Deposit, fillAmount = deposit.outputAmount): BigNumber { const l1TokenInfo = this.hubPoolClient.getTokenInfoForDeposit(deposit); if (!l1TokenInfo) { const { inputToken } = deposit; @@ -409,17 +402,22 @@ export class ProfitClient { return fillAmount.mul(tokenPriceInUsd).div(bn10.pow(l1TokenInfo.decimals)); } - async getFillProfitability(deposit: V3Deposit, lpFeePct: BigNumber, l1Token: L1Token): Promise { + async getFillProfitability( + deposit: V3Deposit, + lpFeePct: BigNumber, + l1Token: L1Token, + repaymentChainId: number + ): Promise { const minRelayerFeePct = this.minRelayerFeePct(l1Token.symbol, deposit.originChainId, deposit.destinationChainId); const fill = await this.calculateFillProfitability(deposit, lpFeePct, minRelayerFeePct); if (!fill.profitable || this.debugProfitability) { - const { depositId, originChainId } = deposit; + const { depositId } = deposit; const profitable = fill.profitable ? 
"profitable" : "unprofitable"; this.logger.debug({ at: "ProfitClient#getFillProfitability", - message: `${l1Token.symbol} v3 deposit ${depositId} on chain ${originChainId} is ${profitable}`, + message: `${l1Token.symbol} v3 deposit ${depositId} with repayment on ${repaymentChainId} is ${profitable}`, deposit, inputTokenPriceUsd: formatEther(fill.inputTokenPriceUsd), inputTokenAmountUsd: formatEther(fill.inputAmountUsd), @@ -448,17 +446,19 @@ export class ProfitClient { async isFillProfitable( deposit: V3Deposit, lpFeePct: BigNumber, - l1Token: L1Token - ): Promise> { + l1Token: L1Token, + repaymentChainId: number + ): Promise> { let profitable = false; - let grossRelayerFeePct = bnZero; + let netRelayerFeePct = bnZero; let nativeGasCost = uint256Max; let tokenGasCost = uint256Max; try { - ({ profitable, grossRelayerFeePct, nativeGasCost, tokenGasCost } = await this.getFillProfitability( + ({ profitable, netRelayerFeePct, nativeGasCost, tokenGasCost } = await this.getFillProfitability( deposit, lpFeePct, - l1Token + l1Token, + repaymentChainId )); } catch (err) { this.logger.debug({ @@ -473,7 +473,7 @@ export class ProfitClient { profitable: profitable || (this.isTestnet && nativeGasCost.lt(uint256Max)), nativeGasCost, tokenGasCost, - grossRelayerFeePct, + netRelayerFeePct, }; } @@ -619,7 +619,9 @@ export class ProfitClient { const coingeckoProApiKey = undefined; // TODO: Set this once we figure out gas markup on the API side. const gasMarkup = 0; - return new QUERY_HANDLERS[chainId]( + // Call the factory to create a new QueryBase instance. 
+ return relayFeeCalculator.QueryBase__factory.create( + chainId, provider, undefined, // symbolMapping undefined, // spokePoolAddress diff --git a/src/clients/SpokePoolClient.ts b/src/clients/SpokePoolClient.ts index b55a06dab..e269f186b 100644 --- a/src/clients/SpokePoolClient.ts +++ b/src/clients/SpokePoolClient.ts @@ -1,12 +1,18 @@ import assert from "assert"; -import { ChildProcess } from "child_process"; +import { ChildProcess, spawn } from "child_process"; import { Contract, Event } from "ethers"; -import { clients, utils as sdkUtils } from "@across-protocol/sdk-v2"; -import { getNetworkName, isDefined, winston } from "../utils"; +import { clients, utils as sdkUtils } from "@across-protocol/sdk"; +import { CHAIN_MAX_BLOCK_LOOKBACK, RELAYER_DEFAULT_SPOKEPOOL_INDEXER } from "../common/Constants"; +import { EventSearchConfig, getNetworkName, isDefined, MakeOptional, winston } from "../utils"; import { EventsAddedMessage, EventRemovedMessage } from "../utils/SuperstructUtils"; export type SpokePoolClient = clients.SpokePoolClient; +export type IndexerOpts = { + finality: number; + path?: string; +}; + type SpokePoolEventRemoved = { event: string; }; @@ -30,8 +36,11 @@ export function isSpokePoolEventRemoved(message: unknown): message is SpokePoolE } export class IndexedSpokePoolClient extends clients.SpokePoolClient { - public chain: string; + public readonly chain: string; + public readonly finality: number; + public readonly indexerPath: string; + private worker: ChildProcess; private pendingBlockNumber: number; private pendingCurrentTime: number; private pendingOldestTime: number; @@ -45,28 +54,50 @@ export class IndexedSpokePoolClient extends clients.SpokePoolClient { readonly hubPoolClient: clients.HubPoolClient | null, readonly chainId: number, public deploymentBlock: number, - readonly worker?: ChildProcess + eventSearchConfig: MakeOptional = { + fromBlock: deploymentBlock, + maxBlockLookBack: CHAIN_MAX_BLOCK_LOOKBACK[chainId], + }, + readonly opts: 
IndexerOpts ) { - // EventSearchConfig isn't required for this SpokePoolClient specialisation, so sub in safe defaults. - const eventSearchConfig = { fromBlock: deploymentBlock, maxBlockLookBack: 5_000 }; super(logger, spokePool, hubPoolClient, chainId, deploymentBlock, eventSearchConfig); this.chain = getNetworkName(chainId); + this.finality = opts.finality; + this.indexerPath = opts.path ?? RELAYER_DEFAULT_SPOKEPOOL_INDEXER; + this.pendingBlockNumber = deploymentBlock; this.pendingCurrentTime = 0; this.pendingEvents = this.queryableEventNames.map(() => []); this.pendingEventsRemoved = []; + + this.startWorker(); } /** - * Listen for indexer updates. + * Fork a child process to independently scrape events. * @returns void */ - init(): void { - if (isDefined(this.worker)) { - this.worker.on("message", (message) => this.indexerUpdate(message)); - this.logger.debug({ at: "SpokePoolClient#init", message: `Listening for ${this.chain} events.` }); - } + protected startWorker(): void { + const { + finality, + eventSearchConfig: { fromBlock, maxBlockLookBack: blockRange }, + } = this; + const opts = { finality, blockRange, lookback: `@${fromBlock}` }; + + const args = Object.entries(opts) + .map(([k, v]) => [`--${k}`, `${v}`]) + .flat(); + this.worker = spawn("node", [this.indexerPath, "--chainId", this.chainId.toString(), ...args], { + stdio: ["ignore", "inherit", "inherit", "ipc"], + }); + + this.worker.on("message", (message) => this.indexerUpdate(message)); + this.logger.debug({ + at: "SpokePoolClient#startWorker", + message: `Spawned ${this.chain} SpokePool indexer.`, + args: this.worker.spawnargs, + }); } /** @@ -219,6 +250,7 @@ export class IndexedSpokePoolClient extends clients.SpokePoolClient { latestDepositId, searchEndBlock: this.pendingBlockNumber, events, + hasCCTPBridgingEnabled: false, // @todo: Update indexer to query this. 
}; } } diff --git a/src/clients/TokenClient.ts b/src/clients/TokenClient.ts index f8d513470..6b7fd8860 100644 --- a/src/clients/TokenClient.ts +++ b/src/clients/TokenClient.ts @@ -1,24 +1,27 @@ -import { Signer } from "ethers"; +import { utils as sdkUtils } from "@across-protocol/sdk"; import { HubPoolClient, SpokePoolClient } from "."; -import { CachingMechanismInterface, V3Deposit } from "../interfaces"; +import { CachingMechanismInterface, L1Token, V3Deposit } from "../interfaces"; import { BigNumber, bnZero, Contract, + dedupArray, ERC20, isDefined, MAX_SAFE_ALLOWANCE, MAX_UINT_VAL, assign, blockExplorerLink, + getCurrentTime, getNetworkName, runTransaction, toBN, winston, getRedisCache, + TOKEN_SYMBOLS_MAP, } from "../utils"; -type TokenDataType = { [chainId: number]: { [token: string]: { balance: BigNumber; allowance: BigNumber } } }; +export type TokenDataType = { [chainId: number]: { [token: string]: { balance: BigNumber; allowance: BigNumber } } }; type TokenShortfallType = { [chainId: number]: { [token: string]: { deposits: number[]; totalRequirement: BigNumber } }; }; @@ -26,7 +29,6 @@ type TokenShortfallType = { export class TokenClient { tokenData: TokenDataType = {}; tokenShortfall: TokenShortfallType = {}; - bondToken: Contract | undefined; constructor( readonly logger: winston.Logger, @@ -150,41 +152,105 @@ export class TokenClient { } async setBondTokenAllowance(): Promise { - if (!this.bondToken) { - throw new Error("TokenClient::setBondTokenAllowance bond token not initialized"); - } - const ownerAddress = await this.hubPoolClient.hubPool.signer.getAddress(); - const currentCollateralAllowance: BigNumber = await this.bondToken.allowance( - ownerAddress, - this.hubPoolClient.hubPool.address - ); + const { hubPool } = this.hubPoolClient; + const { signer } = hubPool; + const [_bondToken, ownerAddress] = await Promise.all([this._getBondToken(), signer.getAddress()]); + const bondToken = new Contract(_bondToken, ERC20.abi, signer); + + const 
currentCollateralAllowance: BigNumber = await bondToken.allowance(ownerAddress, hubPool.address); if (currentCollateralAllowance.lt(toBN(MAX_SAFE_ALLOWANCE))) { - const tx = await runTransaction(this.logger, this.bondToken, "approve", [ - this.hubPoolClient.hubPool.address, - MAX_UINT_VAL, - ]); + const tx = await runTransaction(this.logger, bondToken, "approve", [hubPool.address, MAX_UINT_VAL]); + const { chainId } = this.hubPoolClient; const mrkdwn = - ` - Approved HubPool ${blockExplorerLink(this.hubPoolClient.hubPool.address, 1)} ` + - `to spend ${await this.bondToken.symbol()} ${blockExplorerLink(this.bondToken.address, 1)}. ` + - `tx ${blockExplorerLink(tx.hash, 1)}\n`; + ` - Approved HubPool ${blockExplorerLink(hubPool.address, chainId)} ` + + `to spend ${await bondToken.symbol()} ${blockExplorerLink(bondToken.address, chainId)}. ` + + `tx ${blockExplorerLink(tx.hash, chainId)}\n`; this.logger.info({ at: "hubPoolClient", message: "Approved bond tokens! 💰", mrkdwn }); } else { this.logger.debug({ at: "hubPoolClient", message: "Bond token approval set" }); } } + resolveRemoteTokens(chainId: number, hubPoolTokens: L1Token[]): Contract[] { + const { signer } = this.spokePoolClients[chainId].spokePool; + + if (chainId === this.hubPoolClient.chainId) { + return hubPoolTokens.map(({ address }) => new Contract(address, ERC20.abi, signer)); + } + + const tokens = hubPoolTokens + .map(({ symbol, address }) => { + let tokenAddrs: string[] = []; + try { + const spokePoolToken = this.hubPoolClient.getL2TokenForL1TokenAtBlock(address, chainId); + tokenAddrs.push(spokePoolToken); + } catch { + // No known deployment for this token on the SpokePool. + // note: To be overhauled subject to https://github.com/across-protocol/sdk/pull/643 + } + + // If the HubPool token is USDC then it might map to multiple tokens on the destination chain. 
+ if (symbol === "USDC") { + const usdcAliases = ["USDC", "USDC.e", "USDbC"]; + usdcAliases + .map((symbol) => TOKEN_SYMBOLS_MAP[symbol]?.addresses[chainId]) + .filter(isDefined) + .forEach((address) => tokenAddrs.push(address)); + tokenAddrs = dedupArray(tokenAddrs); + } + + return tokenAddrs.filter(isDefined).map((address) => new Contract(address, ERC20.abi, signer)); + }) + .flat(); + + return tokens; + } + + async updateChain( + chainId: number, + hubPoolTokens: L1Token[] + ): Promise> { + const { spokePool } = this.spokePoolClients[chainId]; + + const multicall3 = await sdkUtils.getMulticall3(chainId, spokePool.provider); + if (!isDefined(multicall3)) { + return this.fetchTokenData(chainId, hubPoolTokens); + } + + const { relayerAddress } = this; + const balances: sdkUtils.Call3[] = []; + const allowances: sdkUtils.Call3[] = []; + this.resolveRemoteTokens(chainId, hubPoolTokens).forEach((token) => { + balances.push({ contract: token, method: "balanceOf", args: [relayerAddress] }); + allowances.push({ contract: token, method: "allowance", args: [relayerAddress, spokePool.address] }); + }); + + const calls = [...balances, ...allowances]; + const results = await sdkUtils.aggregate(multicall3, calls); + + const allowanceOffset = balances.length; + const balanceInfo = Object.fromEntries( + balances.map(({ contract: { address } }, idx) => { + return [address, { balance: results[idx][0], allowance: results[allowanceOffset + idx][0] }]; + }) + ); + + return balanceInfo; + } + async update(): Promise { + const start = getCurrentTime(); this.logger.debug({ at: "TokenBalanceClient", message: "Updating TokenBalance client" }); const { hubPoolClient } = this; - const hubPoolTokens = hubPoolClient.getL1Tokens().map(({ address }) => address); + const hubPoolTokens = hubPoolClient.getL1Tokens(); const chainIds = Object.values(this.spokePoolClients).map(({ chainId }) => chainId); - const balanceQueries = chainIds.map((chainId) => - this.fetchTokenData(chainId, hubPoolTokens, 
this.spokePoolClients[chainId].spokePool.signer) - ); - const [bondToken, ...balanceInfo] = await Promise.all([this._getBondToken(), ...balanceQueries]); - this.bondToken = new Contract(bondToken, ERC20.abi, this.hubPoolClient.hubPool.signer); + const balanceInfo = await Promise.all( + chainIds + .filter((chainId) => isDefined(this.spokePoolClients[chainId])) + .map((chainId) => this.updateChain(chainId, hubPoolTokens)) + ); balanceInfo.forEach((tokenData, idx) => { const chainId = chainIds[idx]; @@ -206,33 +272,27 @@ export class TokenClient { ]; }) ); - this.logger.debug({ at: "TokenBalanceClient", message: "TokenBalance client updated!", balanceData }); + + this.logger.debug({ + at: "TokenBalanceClient", + message: `Updated TokenBalance client in ${getCurrentTime() - start} seconds.`, + balanceData, + }); } async fetchTokenData( chainId: number, - hubPoolTokens: string[], - signer: Signer + hubPoolTokens: L1Token[] ): Promise> { - const tokens = hubPoolTokens - .map((address) => { - try { - const spokePoolToken = this.hubPoolClient.getL2TokenForL1TokenAtBlock(address, chainId); - return new Contract(spokePoolToken, ERC20.abi, signer); - } catch { - return undefined; - } - }) - .filter(isDefined); + const spokePoolClient = this.spokePoolClients[chainId]; + const { relayerAddress } = this; const tokenData = Object.fromEntries( - await Promise.all( - tokens.map(async (token) => { - const balance: BigNumber = await token.balanceOf(this.relayerAddress); - const allowance = await this._getAllowance(this.spokePoolClients[chainId], token); - return [token.address, { balance, allowance }]; - }) - ) + await sdkUtils.mapAsync(this.resolveRemoteTokens(chainId, hubPoolTokens), async (token: Contract) => { + const balance: BigNumber = await token.balanceOf(relayerAddress); + const allowance = await this._getAllowance(spokePoolClient, token); + return [token.address, { balance, allowance }]; + }) ); return tokenData; diff --git a/src/clients/TransactionClient.ts 
b/src/clients/TransactionClient.ts index 0b6b7be6a..ffb38b1f5 100644 --- a/src/clients/TransactionClient.ts +++ b/src/clients/TransactionClient.ts @@ -1,5 +1,5 @@ /* eslint-disable @typescript-eslint/no-explicit-any */ -import { utils as sdkUtils, typeguards } from "@across-protocol/sdk-v2"; +import { utils as sdkUtils, typeguards } from "@across-protocol/sdk"; import { winston, getNetworkName, diff --git a/src/clients/bridges/AdapterManager.ts b/src/clients/bridges/AdapterManager.ts index 91f682b38..1e228bb34 100644 --- a/src/clients/bridges/AdapterManager.ts +++ b/src/clients/bridges/AdapterManager.ts @@ -1,12 +1,20 @@ import { BigNumber, isDefined, winston, Signer, getL2TokenAddresses, TransactionResponse, assert } from "../../utils"; import { SpokePoolClient, HubPoolClient } from "../"; -import { OptimismAdapter, ArbitrumAdapter, PolygonAdapter, BaseAdapter, ZKSyncAdapter } from "./"; +import { + OptimismAdapter, + ArbitrumAdapter, + PolygonAdapter, + BaseAdapter, + ZKSyncAdapter, + BaseChainAdapter, + LineaAdapter, + ModeAdapter, +} from "./"; import { InventoryConfig, OutstandingTransfers } from "../../interfaces"; -import { utils } from "@across-protocol/sdk-v2"; -import { CHAIN_IDs } from "@across-protocol/constants-v2"; -import { BaseChainAdapter } from "./op-stack/base/BaseChainAdapter"; +import { utils } from "@across-protocol/sdk"; +import { CHAIN_IDs } from "@across-protocol/constants"; import { spokesThatHoldEthAndWeth } from "../../common/Constants"; -import { LineaAdapter } from "./LineaAdapter"; + export class AdapterManager { public adapters: { [chainId: number]: BaseAdapter } = {}; @@ -25,23 +33,38 @@ export class AdapterManager { if (!spokePoolClients) { return; } + const spokePoolAddresses = Object.values(spokePoolClients).map((client) => client.spokePool.address); + + // The adapters are only set up to monitor EOA's and the HubPool and SpokePool address, so remove + // spoke pool addresses from other chains. 
+ const filterMonitoredAddresses = (chainId: number) => { + return monitoredAddresses.filter( + (address) => + this.hubPoolClient.hubPool.address === address || + this.spokePoolClients[chainId].spokePool.address === address || + !spokePoolAddresses.includes(address) + ); + }; if (this.spokePoolClients[10] !== undefined) { - this.adapters[10] = new OptimismAdapter(logger, spokePoolClients, monitoredAddresses); + this.adapters[10] = new OptimismAdapter(logger, spokePoolClients, filterMonitoredAddresses(10)); } if (this.spokePoolClients[137] !== undefined) { - this.adapters[137] = new PolygonAdapter(logger, spokePoolClients, monitoredAddresses); + this.adapters[137] = new PolygonAdapter(logger, spokePoolClients, filterMonitoredAddresses(137)); } if (this.spokePoolClients[42161] !== undefined) { - this.adapters[42161] = new ArbitrumAdapter(logger, spokePoolClients, monitoredAddresses); + this.adapters[42161] = new ArbitrumAdapter(logger, spokePoolClients, filterMonitoredAddresses(42161)); } if (this.spokePoolClients[324] !== undefined) { - this.adapters[324] = new ZKSyncAdapter(logger, spokePoolClients, monitoredAddresses); + this.adapters[324] = new ZKSyncAdapter(logger, spokePoolClients, filterMonitoredAddresses(324)); } if (this.spokePoolClients[8453] !== undefined) { - this.adapters[8453] = new BaseChainAdapter(logger, spokePoolClients, monitoredAddresses); + this.adapters[8453] = new BaseChainAdapter(logger, spokePoolClients, filterMonitoredAddresses(8453)); } if (this.spokePoolClients[59144] !== undefined) { - this.adapters[59144] = new LineaAdapter(logger, spokePoolClients, monitoredAddresses); + this.adapters[59144] = new LineaAdapter(logger, spokePoolClients, filterMonitoredAddresses(59144)); + } + if (this.spokePoolClients[34443] !== undefined) { + this.adapters[34443] = new ModeAdapter(logger, spokePoolClients, filterMonitoredAddresses(34443)); } logger.debug({ @@ -59,32 +82,33 @@ export class AdapterManager { return Object.keys(this.adapters).map((chainId) 
=> Number(chainId)); } - async getOutstandingCrossChainTokenTransferAmount( - chainId: number, - l1Tokens: string[] - ): Promise { + getOutstandingCrossChainTokenTransferAmount(chainId: number, l1Tokens: string[]): Promise { const adapter = this.adapters[chainId]; + // @dev The adapter should filter out tokens that are not supported by the adapter, but we do it here as well. + const adapterSupportedL1Tokens = l1Tokens.filter((token) => + adapter.supportedTokens.includes(this.hubPoolClient.getTokenInfo(CHAIN_IDs.MAINNET, token).symbol) + ); this.logger.debug({ at: "AdapterManager", - message: "Getting outstandingCrossChainTransfers", - chainId, - l1Tokens, + message: `Getting outstandingCrossChainTransfers for ${chainId}`, + adapterSupportedL1Tokens, searchConfigs: adapter.getUpdatedSearchConfigs(), }); - return await this.adapters[chainId].getOutstandingCrossChainTransfers(l1Tokens); + return this.adapters[chainId].getOutstandingCrossChainTransfers(adapterSupportedL1Tokens); } - async sendTokenCrossChain( + sendTokenCrossChain( address: string, chainId: number | string, l1Token: string, amount: BigNumber, - simMode = false + simMode = false, + l2Token?: string ): Promise { chainId = Number(chainId); // Ensure chainId is a number before using. this.logger.debug({ at: "AdapterManager", message: "Sending token cross-chain", chainId, l1Token, amount }); - const l2Token = this.l2TokenForL1Token(l1Token, Number(chainId)); - return await this.adapters[chainId].sendTokenToTargetChain(address, l1Token, l2Token, amount, simMode); + l2Token ??= this.l2TokenForL1Token(l1Token, Number(chainId)); + return this.adapters[chainId].sendTokenToTargetChain(address, l1Token, l2Token, amount, simMode); } // Check how much ETH is on the target chain and if it is above the threshold the wrap it to WETH. 
Note that this only diff --git a/src/clients/bridges/ArbitrumAdapter.ts b/src/clients/bridges/ArbitrumAdapter.ts index 8127a4e3b..d5fdac167 100644 --- a/src/clients/bridges/ArbitrumAdapter.ts +++ b/src/clients/bridges/ArbitrumAdapter.ts @@ -8,7 +8,6 @@ import { BigNumberish, isDefined, TransactionResponse, - resolveTokenSymbols, toBN, toWei, paginatedEventQuery, @@ -18,9 +17,9 @@ import { TOKEN_SYMBOLS_MAP, } from "../../utils"; import { SpokePoolClient } from "../../clients"; -import { BaseAdapter } from "./BaseAdapter"; import { SortableEvent, OutstandingTransfers } from "../../interfaces"; import { CONTRACT_ADDRESSES } from "../../common"; +import { CCTPAdapter } from "./CCTPAdapter"; // TODO: Move to ../../common/ContractAddresses.ts // These values are obtained from Arbitrum's gateway router contract. @@ -34,6 +33,7 @@ const l1Gateways = { [TOKEN_SYMBOLS_MAP.BADGER.addresses[CHAIN_IDs.MAINNET]]: "0xa3A7B6F88361F48403514059F1F16C8E78d60EeC", // BADGER [TOKEN_SYMBOLS_MAP.BAL.addresses[CHAIN_IDs.MAINNET]]: "0xa3A7B6F88361F48403514059F1F16C8E78d60EeC", // BAL [TOKEN_SYMBOLS_MAP.ACX.addresses[CHAIN_IDs.MAINNET]]: "0xa3A7B6F88361F48403514059F1F16C8E78d60EeC", // ACX + [TOKEN_SYMBOLS_MAP.POOL.addresses[CHAIN_IDs.MAINNET]]: "0xa3A7B6F88361F48403514059F1F16C8E78d60EeC", // POOL } as const; const l2Gateways = { @@ -46,6 +46,7 @@ const l2Gateways = { [TOKEN_SYMBOLS_MAP.BADGER.addresses[CHAIN_IDs.MAINNET]]: "0x09e9222E96E7B4AE2a407B98d48e330053351EEe", // BADGER [TOKEN_SYMBOLS_MAP.BAL.addresses[CHAIN_IDs.MAINNET]]: "0x09e9222E96E7B4AE2a407B98d48e330053351EEe", // BAL [TOKEN_SYMBOLS_MAP.ACX.addresses[CHAIN_IDs.MAINNET]]: "0x09e9222E96E7B4AE2a407B98d48e330053351EEe", // ACX + [TOKEN_SYMBOLS_MAP.POOL.addresses[CHAIN_IDs.MAINNET]]: "0x09e9222E96E7B4AE2a407B98d48e330053351EEe", // POOL } as const; type SupportedL1Token = string; @@ -53,7 +54,7 @@ type SupportedL1Token = string; // TODO: replace these numbers using the arbitrum SDK. 
these are bad values that mean we will over pay but transactions // wont get stuck. -export class ArbitrumAdapter extends BaseAdapter { +export class ArbitrumAdapter extends CCTPAdapter { l2GasPrice: BigNumber = toBN(20e9); l2GasLimit: BigNumber = toBN(150000); // abi.encoding of the maxL2Submission cost. of 0.01e18 @@ -66,16 +67,18 @@ export class ArbitrumAdapter extends BaseAdapter { readonly spokePoolClients: { [chainId: number]: SpokePoolClient }, monitoredAddresses: string[] ) { - super( - spokePoolClients, - 42161, - monitoredAddresses, - logger, - resolveTokenSymbols( - Array.from(new Set([...Object.keys(l1Gateways), ...Object.keys(l2Gateways)])), - BaseAdapter.HUB_CHAIN_ID - ) - ); + super(spokePoolClients, 42161, monitoredAddresses, logger, [ + "USDC", + "USDT", + "WETH", + "DAI", + "WBTC", + "UMA", + "BADGER", + "BAL", + "ACX", + "POOL", + ]); } async getOutstandingCrossChainTransfers(l1Tokens: string[]): Promise { @@ -84,13 +87,17 @@ export class ArbitrumAdapter extends BaseAdapter { // Skip the token if we can't find the corresponding bridge. // This is a valid use case as it's more convenient to check cross chain transfers for all tokens // rather than maintaining a list of native bridge-supported tokens. - const availableL1Tokens = l1Tokens.filter(this.isSupportedToken.bind(this)); + const availableL1Tokens = this.filterSupportedTokens(l1Tokens); const promises: Promise[] = []; - const validTokens: string[] = []; + const cctpOutstandingTransfersPromise: Record> = {}; // Fetch bridge events for all monitored addresses. 
for (const monitoredAddress of this.monitoredAddresses) { for (const l1Token of availableL1Tokens) { + if (this.isL1TokenUsdc(l1Token)) { + cctpOutstandingTransfersPromise[monitoredAddress] = this.getOutstandingCctpTransfers(monitoredAddress); + } + const l1Bridge = this.getL1Bridge(l1Token); const l2Bridge = this.getL2Bridge(l1Token); @@ -105,20 +112,25 @@ export class ArbitrumAdapter extends BaseAdapter { paginatedEventQuery(l1Bridge, l1Bridge.filters.DepositInitiated(...l1SearchFilter), l1SearchConfig), paginatedEventQuery(l2Bridge, l2Bridge.filters.DepositFinalized(...l2SearchFilter), l2SearchConfig) ); - validTokens.push(l1Token); } } - const results = await Promise.all(promises); + const [results, resolvedCCTPEvents] = await Promise.all([ + Promise.all(promises), + Promise.all(this.monitoredAddresses.map((monitoredAddress) => cctpOutstandingTransfersPromise[monitoredAddress])), + ]); + const resultingCCTPEvents: Record = Object.fromEntries( + this.monitoredAddresses.map((monitoredAddress, idx) => [monitoredAddress, resolvedCCTPEvents[idx]]) + ); // 2 events per token. - const numEventsPerMonitoredAddress = 2 * validTokens.length; + const numEventsPerMonitoredAddress = 2 * availableL1Tokens.length; // Segregate the events list by monitored address. const resultsByMonitoredAddress = Object.fromEntries( this.monitoredAddresses.map((monitoredAddress, index) => { const start = index * numEventsPerMonitoredAddress; - return [monitoredAddress, results.slice(start, start + numEventsPerMonitoredAddress + 1)]; + return [monitoredAddress, results.slice(start, start + numEventsPerMonitoredAddress)]; }) ); @@ -128,7 +140,11 @@ export class ArbitrumAdapter extends BaseAdapter { // The logic below takes the results from the promises and spreads them into the l1DepositInitiatedEvents and // l2DepositFinalizedEvents state from the BaseAdapter. 
eventsToProcess.forEach((result, index) => { - const l1Token = validTokens[Math.floor(index / 2)]; + if (eventsToProcess.length === 0) { + return; + } + assert(eventsToProcess.length % 2 === 0, "Events list length should be even"); + const l1Token = availableL1Tokens[Math.floor(index / 2)]; // l1Token is not an indexed field on Aribtrum gateway's deposit events, so these events are for all tokens. // Therefore, we need to filter unrelated deposits of other tokens. const filteredEvents = result.filter((event) => spreadEvent(event.args)["l1Token"] === l1Token); @@ -145,57 +161,84 @@ export class ArbitrumAdapter extends BaseAdapter { }; }); const eventsStorage = index % 2 === 0 ? this.l1DepositInitiatedEvents : this.l2DepositFinalizedEvents; - assign(eventsStorage, [monitoredAddress, l1Token], events); + const l2Token = this.resolveL2TokenAddress(l1Token, false); // This codepath will never have native USDC - therefore we should pass `false`. + assign(eventsStorage, [monitoredAddress, l1Token, l2Token], events); }); + if (isDefined(resultingCCTPEvents[monitoredAddress])) { + const usdcL1Token = TOKEN_SYMBOLS_MAP.USDC.addresses[this.hubChainId]; + const usdcL2Token = this.resolveL2TokenAddress(usdcL1Token, true); // Must specifically be native USDC + assign( + this.l1DepositInitiatedEvents, + [monitoredAddress, usdcL1Token, usdcL2Token], + resultingCCTPEvents[monitoredAddress] + ); + } } - return this.computeOutstandingCrossChainTransfers(validTokens); + return this.computeOutstandingCrossChainTransfers(availableL1Tokens); } async checkTokenApprovals(address: string, l1Tokens: string[]): Promise { + const l1TokenListToApprove = []; + // Note we send the approvals to the L1 Bridge but actually send outbound transfers to the L1 Gateway Router. // Note that if the token trying to be approved is not configured in this client (i.e. not in the l1Gateways object) // then this will pass null into the checkAndSendTokenApprovals. This method gracefully deals with this case. 
const associatedL1Bridges = l1Tokens - .map((l1Token) => { + .flatMap((l1Token) => { if (!this.isSupportedToken(l1Token)) { - return null; + return []; + } + const bridgeAddresses: string[] = []; + if (this.isL1TokenUsdc(l1Token)) { + bridgeAddresses.push(this.getL1CCTPTokenMessengerBridge().address); } - return this.getL1Bridge(l1Token).address; + bridgeAddresses.push(this.getL1Bridge(l1Token).address); + + // Push the l1 token to the list of tokens to approve N times, where N is the number of bridges. + // I.e. the arrays have to be parallel. + l1TokenListToApprove.push(...Array(bridgeAddresses.length).fill(l1Token)); + + return bridgeAddresses; }) .filter(isDefined); - await this.checkAndSendTokenApprovals(address, l1Tokens, associatedL1Bridges); + await this.checkAndSendTokenApprovals(address, l1TokenListToApprove, associatedL1Bridges); } - async sendTokenToTargetChain( + sendTokenToTargetChain( address: string, l1Token: string, l2Token: string, amount: BigNumber, simMode = false ): Promise { - const args = [ - l1Token, // token - address, // to - amount, // amount - this.l2GasLimit, // maxGas - this.l2GasPrice, // gasPriceBid - this.transactionSubmissionData, // data - ]; - // Pad gas for deposits to Arbitrum to account for under-estimation in Geth. Offchain Labs confirm that this is - // due to their use of BASEFEE to trigger conditional logic. https://github.com/ethereum/go-ethereum/pull/28470. - const gasMultiplier = 1.2; - return await this._sendTokenToTargetChain( - l1Token, - l2Token, - amount, - this.getL1GatewayRouter(), - "outboundTransfer", - args, - gasMultiplier, - this.l1SubmitValue, - simMode - ); + // If both the L1 & L2 tokens are native USDC, we use the CCTP bridge. 
+ if (this.isL1TokenUsdc(l1Token) && this.isL2TokenUsdc(l2Token)) { + return this.sendCctpTokenToTargetChain(address, l1Token, l2Token, amount, simMode); + } else { + const args = [ + l1Token, // token + address, // to + amount, // amount + this.l2GasLimit, // maxGas + this.l2GasPrice, // gasPriceBid + this.transactionSubmissionData, // data + ]; + // Pad gas for deposits to Arbitrum to account for under-estimation in Geth. Offchain Labs confirm that this is + // due to their use of BASEFEE to trigger conditional logic. https://github.com/ethereum/go-ethereum/pull/28470. + const gasMultiplier = 1.2; + return this._sendTokenToTargetChain( + l1Token, + l2Token, + amount, + this.getL1GatewayRouter(), + "outboundTransfer", + args, + gasMultiplier, + this.l1SubmitValue, + simMode + ); + } } // The arbitrum relayer expects to receive ETH steadily per HubPool bundle processed, since it is the L2 refund diff --git a/src/clients/bridges/BaseAdapter.ts b/src/clients/bridges/BaseAdapter.ts index 7a6cf130f..174109f7a 100644 --- a/src/clients/bridges/BaseAdapter.ts +++ b/src/clients/bridges/BaseAdapter.ts @@ -28,19 +28,20 @@ import { BigNumberish, TOKEN_SYMBOLS_MAP, getRedisCache, + getTokenAddressWithCCTP, } from "../../utils"; -import { utils } from "@across-protocol/sdk-v2"; +import { utils } from "@across-protocol/sdk"; import { CONTRACT_ADDRESSES, TOKEN_APPROVALS_TO_FIRST_ZERO } from "../../common"; import { OutstandingTransfers, SortableEvent } from "../../interfaces"; export interface DepositEvent extends SortableEvent { amount: BigNumber; - to: string; + transactionHash: string; } interface Events { [address: string]: { - [l1Token: string]: DepositEvent[]; + [l1Token: string]: { [l2Token: string]: DepositEvent[] }; }; } @@ -53,11 +54,10 @@ export abstract class BaseAdapter { readonly hubChainId = BaseAdapter.HUB_CHAIN_ID; - chainId: number; baseL1SearchConfig: MakeOptional; baseL2SearchConfig: MakeOptional; readonly wethAddress = 
TOKEN_SYMBOLS_MAP.WETH.addresses[this.hubChainId]; - readonly atomicDepositorAddress = CONTRACT_ADDRESSES[this.hubChainId].atomicDepositor.address; + readonly atomicDepositorAddress = CONTRACT_ADDRESSES[this.hubChainId]?.atomicDepositor.address; l1DepositInitiatedEvents: Events = {}; l2DepositFinalizedEvents: Events = {}; @@ -66,12 +66,11 @@ export abstract class BaseAdapter { constructor( readonly spokePoolClients: { [chainId: number]: SpokePoolClient }, - _chainId: number, + readonly chainId: number, readonly monitoredAddresses: string[], readonly logger: winston.Logger, readonly supportedTokens: SupportedTokenSymbol[] ) { - this.chainId = _chainId; this.baseL1SearchConfig = { ...this.getSearchConfig(this.hubChainId) }; this.baseL2SearchConfig = { ...this.getSearchConfig(this.chainId) }; this.txnClient = new TransactionClient(logger); @@ -85,6 +84,10 @@ export abstract class BaseAdapter { return this.spokePoolClients[chainId].spokePool.provider; } + filterSupportedTokens(l1Tokens: string[]): string[] { + return l1Tokens.filter((l1Token) => this.isSupportedToken(l1Token)); + } + // Note: this must be called after the SpokePoolClients are updated. 
getUpdatedSearchConfigs(): { l1SearchConfig: EventSearchConfig; l2SearchConfig: EventSearchConfig } { const l1LatestBlock = this.spokePoolClients[this.hubChainId].latestBlockSearched; @@ -115,8 +118,15 @@ export abstract class BaseAdapter { ).getAddress()}_targetContract:${targetContract}`; } + resolveL2TokenAddress(l1Token: string, isNativeUsdc = false): string { + return getTokenAddressWithCCTP(l1Token, this.hubChainId, this.chainId, isNativeUsdc); + } + async checkAndSendTokenApprovals(address: string, l1Tokens: string[], associatedL1Bridges: string[]): Promise { this.log("Checking and sending token approvals", { l1Tokens, associatedL1Bridges }); + + assert(l1Tokens.length === associatedL1Bridges.length, "Token and bridge arrays are not the same length"); + const tokensToApprove: { l1Token: Contract; targetContract: string }[] = []; const l1TokenContracts = l1Tokens.map( (l1Token) => new Contract(l1Token, ERC20.abi, this.getSigner(this.hubChainId)) @@ -197,12 +207,9 @@ export abstract class BaseAdapter { continue; } - if (outstandingTransfers[monitoredAddress] === undefined) { - outstandingTransfers[monitoredAddress] = {}; - } - if (this.l2DepositFinalizedEvents[monitoredAddress] === undefined) { - this.l2DepositFinalizedEvents[monitoredAddress] = {}; - } + outstandingTransfers[monitoredAddress] ??= {}; + + this.l2DepositFinalizedEvents[monitoredAddress] ??= {}; for (const l1Token of l1Tokens) { // Skip if there has been no deposits for this token. @@ -211,35 +218,43 @@ export abstract class BaseAdapter { } // It's okay to not have any finalization events. In that case, all deposits are outstanding. - if (this.l2DepositFinalizedEvents[monitoredAddress][l1Token] === undefined) { - this.l2DepositFinalizedEvents[monitoredAddress][l1Token] = []; - } - const l2FinalizationSet = this.l2DepositFinalizedEvents[monitoredAddress][l1Token]; - - // Match deposits and finalizations by amount. 
We're only doing a limited lookback of events so collisions - // should be unlikely. - const finalizedAmounts = l2FinalizationSet.map((finalization) => finalization.amount.toString()); - const pendingDeposits = this.l1DepositInitiatedEvents[monitoredAddress][l1Token].filter((deposit) => { - // Remove the first match. This handles scenarios where are collisions by amount. - const index = finalizedAmounts.indexOf(deposit.amount.toString()); - if (index > -1) { - finalizedAmounts.splice(index, 1); - return false; + this.l2DepositFinalizedEvents[monitoredAddress][l1Token] ??= {}; + + // We want to iterate over the deposit events that have been initiated. We'll then match them with the + // finalization events to determine which deposits are still outstanding. + for (const l2Token of Object.keys(this.l1DepositInitiatedEvents[monitoredAddress][l1Token])) { + this.l2DepositFinalizedEvents[monitoredAddress][l1Token][l2Token] ??= []; + const l2FinalizationSet = this.l2DepositFinalizedEvents[monitoredAddress][l1Token][l2Token]; + + // Match deposits and finalizations by amount. We're only doing a limited lookback of events so collisions + // should be unlikely. + const finalizedAmounts = l2FinalizationSet.map((finalization) => finalization.amount.toString()); + const pendingDeposits = this.l1DepositInitiatedEvents[monitoredAddress][l1Token][l2Token].filter( + (deposit) => { + // Remove the first match. This handles scenarios where are collisions by amount. + const index = finalizedAmounts.indexOf(deposit.amount.toString()); + if (index > -1) { + finalizedAmounts.splice(index, 1); + return false; + } + return true; + } + ); + + // Short circuit early if there are no pending deposits. + if (pendingDeposits.length === 0) { + continue; } - return true; - }); - // Short circuit early if there are no pending deposits. 
- if (pendingDeposits.length === 0) { - continue; - } + outstandingTransfers[monitoredAddress][l1Token] ??= {}; - const totalAmount = pendingDeposits.reduce((acc, curr) => acc.add(curr.amount), bnZero); - const depositTxHashes = pendingDeposits.map((deposit) => deposit.transactionHash); - outstandingTransfers[monitoredAddress][l1Token] = { - totalAmount, - depositTxHashes, - }; + const totalAmount = pendingDeposits.reduce((acc, curr) => acc.add(curr.amount), bnZero); + const depositTxHashes = pendingDeposits.map((deposit) => deposit.transactionHash); + outstandingTransfers[monitoredAddress][l1Token][l2Token] = { + totalAmount, + depositTxHashes, + }; + } } } @@ -263,6 +278,14 @@ export abstract class BaseAdapter { return compareAddressesSimple(l1Token, this.wethAddress); } + isHubChainContract(address: string): Promise { + return utils.isContractDeployedToAddress(address, this.getProvider(this.hubChainId)); + } + + isL2ChainContract(address: string): Promise { + return utils.isContractDeployedToAddress(address, this.getProvider(this.chainId)); + } + /** * Get L1 Atomic WETH depositor contract * @returns L1 Atomic WETH depositor contract @@ -276,6 +299,14 @@ export abstract class BaseAdapter { ); } + getHubPool(): Contract { + const hubPoolContractData = CONTRACT_ADDRESSES[this.hubChainId]?.hubPool; + if (!hubPoolContractData) { + throw new Error(`hubPoolContractData not found for chain ${this.hubChainId}`); + } + return new Contract(hubPoolContractData.address, hubPoolContractData.abi, this.getSigner(this.hubChainId)); + } + /** * Determine whether this adapter supports an l1 token address * @param l1Token an address @@ -334,7 +365,6 @@ export abstract class BaseAdapter { at: `${this.getName()}#_sendTokenToTargetChain`, message: "Simulation result", succeed, - ...txnRequest, }); return { hash: ZERO_ADDRESS } as TransactionResponse; } diff --git a/src/clients/bridges/CCTPAdapter.ts b/src/clients/bridges/CCTPAdapter.ts new file mode 100644 index 000000000..ee7c1f0fe 
--- /dev/null +++ b/src/clients/bridges/CCTPAdapter.ts @@ -0,0 +1,146 @@ +import { CONTRACT_ADDRESSES, chainIdsToCctpDomains } from "../../common"; +import { SortableEvent } from "../../interfaces"; +import { + BigNumber, + Contract, + TOKEN_SYMBOLS_MAP, + TransactionResponse, + assert, + bnZero, + compareAddressesSimple, + spreadEventWithBlockNumber, +} from "../../utils"; +import { + cctpAddressToBytes32, + cctpBytes32ToAddress, + retrieveOutstandingCCTPBridgeUSDCTransfers, +} from "../../utils/CCTPUtils"; +import { BaseAdapter } from "./BaseAdapter"; + +/** + * An extension of the BaseAdapter class that is meant to be an intermediary + * between adapters and the BaseAdapter class. Holds additional functions meant + * to be used to bridge USDC via CCTP. + */ +export abstract class CCTPAdapter extends BaseAdapter { + /** + * Get the CCTP domain of the target chain. This is used to determine the destination + * domain of a CCTP message. + * @returns The CCTP domain of the target chain + */ + private get l2DestinationDomain(): number { + return chainIdsToCctpDomains[this.chainId]; + } + + /** + * Check if an L1 token is USDC - this is a requirement to be transferred via CCTP + * @param l1Token A Web3 address of a token + * @returns Whether or not this token is USDC on Mainnet + */ + protected isL1TokenUsdc(l1Token: string): boolean { + return compareAddressesSimple(l1Token, TOKEN_SYMBOLS_MAP.USDC.addresses[this.hubChainId]); + } + + /** + * Check if an L2 token is USDC - this is a requirement to be transferred via CCTP + * @param l2Token A Web3 address of a token + * @returns Whether or not this token is USDC on the target chain + */ + protected isL2TokenUsdc(l2Token: string): boolean { + return compareAddressesSimple(l2Token, TOKEN_SYMBOLS_MAP.USDC.addresses[this.chainId]); + } + + /** + * Get the CCTP token messenger bridge contract. Used to interface with the CCTP bridge. 
+ * @returns The CCTP token messenger bridge contract on the hub chain + */ + protected getL1CCTPTokenMessengerBridge(): Contract { + const { hubChainId } = this; + return new Contract( + CONTRACT_ADDRESSES[hubChainId].cctpTokenMessenger.address, + CONTRACT_ADDRESSES[hubChainId].cctpTokenMessenger.abi, + this.getSigner(hubChainId) + ); + } + + protected getL2CCTPTokenMessengerBridge(): Contract { + return new Contract( + CONTRACT_ADDRESSES[this.chainId].cctpTokenMessenger.address, + CONTRACT_ADDRESSES[this.chainId].cctpTokenMessenger.abi, + this.getSigner(this.chainId) + ); + } + + protected getL2CCTPMessageTransmitter(): Contract { + return new Contract( + CONTRACT_ADDRESSES[this.chainId].cctpMessageTransmitter.address, + CONTRACT_ADDRESSES[this.chainId].cctpMessageTransmitter.abi, + this.getSigner(this.chainId) + ); + } + + /** + * Retrieves the outstanding transfers for USDC from the hub chain to + * the destination chain. + * @param address The address to check for outstanding transfers + * @returns The outstanding transfers for the given address + */ + protected async getOutstandingCctpTransfers(address: string): Promise { + const { l1SearchConfig } = this.getUpdatedSearchConfigs(); + + const l1TokenMessenger = this.getL1CCTPTokenMessengerBridge(); + const l2MessageTransmitter = this.getL2CCTPMessageTransmitter(); + + const events = await retrieveOutstandingCCTPBridgeUSDCTransfers( + l1TokenMessenger, + l2MessageTransmitter, + l1SearchConfig, + TOKEN_SYMBOLS_MAP.USDC.addresses[this.hubChainId], + this.hubChainId, + this.chainId, + address + ); + + return events.map((event) => ({ + ...spreadEventWithBlockNumber(event), + to: cctpBytes32ToAddress(event.args.mintRecipient), + })); + } + + /** + * A helper function to send USDC via CCTP to the target chain + * @param address The recipient address on the target chain + * @param l1Token The token on the hub chain - must be USDC + * @param l2Token The token on the target chain - must be USDC + * @param amount The 
amount of funds to send via CCTP + * @param simMode Whether or not to simulate the transaction + * @returns The transaction response of the CCTP message + */ + protected sendCctpTokenToTargetChain( + address: string, + l1Token: string, + l2Token: string, + amount: BigNumber, + simMode: boolean + ): Promise { + // Sanity check to ensure that the token is USDC as this is the only token + // configured to be bridged via CCTP + assert(this.isL1TokenUsdc(l1Token), "Token must be native USDC from the hub chain"); + assert(this.isL2TokenUsdc(l2Token), "Token must be native USDC on the target chain"); + + const l1Bridge = this.getL1CCTPTokenMessengerBridge(); + const l1BridgeMethod = "depositForBurn"; + const l1BridgeArgs = [amount, this.l2DestinationDomain, cctpAddressToBytes32(address), l1Token]; + return this._sendTokenToTargetChain( + l1Token, + l2Token, + amount, + l1Bridge, + l1BridgeMethod, + l1BridgeArgs, + 2, // Gas multiplier + bnZero, + simMode + ); + } +} diff --git a/src/clients/bridges/CrossChainTransferClient.ts b/src/clients/bridges/CrossChainTransferClient.ts index d69f0600f..f621c4288 100644 --- a/src/clients/bridges/CrossChainTransferClient.ts +++ b/src/clients/bridges/CrossChainTransferClient.ts @@ -1,4 +1,4 @@ -import { BigNumber, bnZero, winston, assign, toBN, DefaultLogLevels, AnyObject } from "../../utils"; +import { BigNumber, bnZero, winston, DefaultLogLevels, AnyObject } from "../../utils"; import { AdapterManager } from "./AdapterManager"; import { OutstandingTransfers } from "../../interfaces"; @@ -12,14 +12,42 @@ export class CrossChainTransferClient { ) {} // Get any funds currently in the canonical bridge. - getOutstandingCrossChainTransferAmount(address: string, chainId: number | string, l1Token: string): BigNumber { - const amount = this.outstandingCrossChainTransfers[Number(chainId)]?.[address]?.[l1Token]?.totalAmount; - return amount ? 
toBN(amount) : bnZero; + getOutstandingCrossChainTransferAmount( + address: string, + chainId: number | string, + l1Token: string, + l2Token?: string + ): BigNumber { + const transfers = this.outstandingCrossChainTransfers[Number(chainId)]?.[address]?.[l1Token]; + if (!transfers) { + return bnZero; + } + + if (l2Token) { + return transfers[l2Token]?.totalAmount ?? bnZero; + } + + // No specific l2Token specified; return the sum of all l1Token transfers to chainId. + return Object.values(transfers).reduce((acc, { totalAmount }) => acc.add(totalAmount), bnZero); } - getOutstandingCrossChainTransferTxs(address: string, chainId: number | string, l1Token: string): string[] { - const txHashes = this.outstandingCrossChainTransfers[Number(chainId)]?.[address]?.[l1Token]?.depositTxHashes; - return txHashes ? txHashes : []; + getOutstandingCrossChainTransferTxs( + address: string, + chainId: number | string, + l1Token: string, + l2Token?: string + ): string[] { + const transfers = this.outstandingCrossChainTransfers[Number(chainId)]?.[address]?.[l1Token]; + if (!transfers) { + return []; + } + + if (l2Token) { + return transfers[l2Token]?.depositTxHashes ?? []; + } + + // No specific l2Token specified; return the set of all l1Token transfers to chainId. 
+ return Object.values(transfers).flatMap(({ depositTxHashes }) => depositTxHashes); } getEnabledChains(): number[] { @@ -30,42 +58,39 @@ export class CrossChainTransferClient { return this.getEnabledChains().filter((chainId) => chainId !== 1); } - increaseOutstandingTransfer(address: string, l1Token: string, rebalance: BigNumber, chainId: number): void { - if (!this.outstandingCrossChainTransfers[chainId]) { - this.outstandingCrossChainTransfers[chainId] = {}; - } - const transfers = this.outstandingCrossChainTransfers[chainId]; - if (transfers[address] === undefined) { - transfers[address] = {}; - } - if (transfers[address][l1Token] === undefined) { - transfers[address][l1Token] = { - totalAmount: bnZero, - depositTxHashes: [], - }; - } + increaseOutstandingTransfer( + address: string, + l1Token: string, + l2Token: string, + rebalance: BigNumber, + chainId: number + ): void { + const transfers = (this.outstandingCrossChainTransfers[chainId] ??= {}); + transfers[address] ??= {}; + transfers[address][l1Token] ??= {}; + transfers[address][l1Token][l2Token] ??= { totalAmount: bnZero, depositTxHashes: [] }; // TODO: Require a tx hash here so we can track it as well. - transfers[address][l1Token].totalAmount = this.getOutstandingCrossChainTransferAmount( + transfers[address][l1Token][l2Token].totalAmount = this.getOutstandingCrossChainTransferAmount( address, chainId, - l1Token + l1Token, + l2Token ).add(rebalance); } - async update(l1Tokens: string[]): Promise { - const monitoredChains = this.getEnabledL2Chains(); // Use all chainIds except L1. 
- this.log("Updating cross chain transfers", { monitoredChains }); + async update(l1Tokens: string[], chainIds = this.getEnabledL2Chains()): Promise { + const enabledChainIds = this.getEnabledL2Chains(); + chainIds = chainIds.filter((chainId) => enabledChainIds.includes(chainId)); + this.log("Updating cross chain transfers", { chainIds }); const outstandingTransfersPerChain = await Promise.all( - monitoredChains.map((chainId) => - this.adapterManager.getOutstandingCrossChainTokenTransferAmount(chainId, l1Tokens) - ) + chainIds.map(async (chainId) => [ + chainId, + await this.adapterManager.getOutstandingCrossChainTokenTransferAmount(chainId, l1Tokens), + ]) ); - outstandingTransfersPerChain.forEach((outstandingTransfers, index) => { - assign(this.outstandingCrossChainTransfers, [monitoredChains[index]], outstandingTransfers); - }); - + this.outstandingCrossChainTransfers = Object.fromEntries(outstandingTransfersPerChain); this.log("Updated cross chain transfers", { outstandingCrossChainTransfers: this.outstandingCrossChainTransfers }); } diff --git a/src/clients/bridges/LineaAdapter.ts b/src/clients/bridges/LineaAdapter.ts index d5b541851..c81b02e6b 100644 --- a/src/clients/bridges/LineaAdapter.ts +++ b/src/clients/bridges/LineaAdapter.ts @@ -1,10 +1,12 @@ -import * as sdk from "@across-protocol/sdk-v2"; +import * as sdk from "@across-protocol/sdk"; import { CONTRACT_ADDRESSES } from "../../common"; import { OutstandingTransfers } from "../../interfaces"; import { BigNumber, CHAIN_IDs, Contract, + EventSearchConfig, + Event, TOKEN_SYMBOLS_MAP, TransactionResponse, assert, @@ -125,19 +127,6 @@ export class LineaAdapter extends BaseAdapter { return this.isUsdc(l1Token) ? 
this.getL2UsdcBridge() : this.getL2TokenBridge(); } - /** - * Get L1 Atomic WETH depositor contract - * @returns L1 Atomic WETH depositor contract - */ - getAtomicDepositor(): Contract { - const { hubChainId } = this; - return new Contract( - this.atomicDepositorAddress, - CONTRACT_ADDRESSES[hubChainId].atomicDepositor.abi, - this.getSigner(hubChainId) - ); - } - isUsdc(l1Token: string): boolean { return compareAddressesSimple(l1Token, TOKEN_SYMBOLS_MAP.USDC.addresses[this.hubChainId]); } @@ -150,96 +139,248 @@ export class LineaAdapter extends BaseAdapter { : this.getL1TokenBridge(); } - async getOutstandingCrossChainTransfers(l1Tokens: string[]): Promise { + async getOutstandingCrossChainTransfers(l1Tokens: string[]): Promise { const outstandingTransfers: OutstandingTransfers = {}; const { l1SearchConfig, l2SearchConfig } = this.getUpdatedSearchConfigs(); - const supportedL1Tokens = l1Tokens.filter(this.isSupportedToken.bind(this)); + const supportedL1Tokens = this.filterSupportedTokens(l1Tokens); await sdk.utils.mapAsync(this.monitoredAddresses, async (address) => { + // We can only support monitoring the spoke pool contract, not the hub pool. + if (address === CONTRACT_ADDRESSES[this.hubChainId]?.hubPool?.address) { + return; + } await sdk.utils.mapAsync(supportedL1Tokens, async (l1Token) => { if (this.isWeth(l1Token)) { - const atomicDepositor = this.getAtomicDepositor(); const l1MessageService = this.getL1MessageService(); const l2MessageService = this.getL2MessageService(); // We need to do the following sequential steps. - // 1. Get all initiated MessageSent events from the L1MessageService where the 'from' address is - // the AtomicDepositor and the 'to' address is the user's address. + // 1. Get all initiated MessageSent events from the L1MessageService where the 'to' address is the + // user's address. // 2. Pipe the resulting _messageHash argument from step 1 into the MessageClaimed event filter // 3. 
For each MessageSent, match the _messageHash to the _messageHash in the MessageClaimed event // any unmatched MessageSent events are considered outstanding transfers. - const initiatedQueryResult = await paginatedEventQuery( + const initiatedQueryResult = await this.getWethDepositInitiatedEvents( l1MessageService, - l1MessageService.filters.MessageSent(atomicDepositor.address, address), + address, l1SearchConfig ); const internalMessageHashes = initiatedQueryResult.map(({ args }) => args._messageHash); - const finalizedQueryResult = await paginatedEventQuery( + const finalizedQueryResult = await this.getWethDepositFinalizedEvents( l2MessageService, - // Passing in an array of message hashes results in an OR filter - l2MessageService.filters.MessageClaimed(internalMessageHashes), + internalMessageHashes, l2SearchConfig ); - initiatedQueryResult - .filter( - ({ args }) => - !finalizedQueryResult.some( - (finalizedEvent) => args._messageHash.toLowerCase() === finalizedEvent.args._messageHash.toLowerCase() - ) - ) - .forEach((event) => { - const txHash = event.transactionHash; - const amount = event.args._value; - outstandingTransfers[address] ??= {}; - outstandingTransfers[address][l1Token] ??= { totalAmount: bnZero, depositTxHashes: [] }; - outstandingTransfers[address][l1Token] = { - totalAmount: outstandingTransfers[address][l1Token].totalAmount.add(amount), - depositTxHashes: [...outstandingTransfers[address][l1Token].depositTxHashes, txHash], - }; - }); + this.matchWethDepositEvents( + initiatedQueryResult, + finalizedQueryResult, + outstandingTransfers, + address, + l1Token + ); } else { const isUsdc = this.isUsdc(l1Token); const l1Bridge = this.getL1Bridge(l1Token); const l2Bridge = this.getL2Bridge(l1Token); - // Define the initialized and finalized event filters for the L1 and L2 bridges - const [filterL1, filterL2] = isUsdc - ? 
[l1Bridge.filters.Deposited(address, null, address), l2Bridge.filters.ReceivedFromOtherLayer(address)] - : [l1Bridge.filters.BridgingInitiated(address, null, l1Token), l2Bridge.filters.BridgingFinalized(l1Token)]; - + // Define the initialized and finalized event filters for the L1 and L2 bridges. We only filter + // on the recipient so that the filters work both to track Hub-->Spoke transfers and EOA transfers, and + // because some filters like ReceivedFromOtherLayer only index the recipient. const [initiatedQueryResult, finalizedQueryResult] = await Promise.all([ - paginatedEventQuery(l1Bridge, filterL1, l1SearchConfig), - paginatedEventQuery(l2Bridge, filterL2, l2SearchConfig), + isUsdc + ? this.getUsdcDepositInitiatedEvents(l1Bridge, address, l1SearchConfig) + : this.getErc20DepositInitiatedEvents(l1Bridge, address, l1Token, l1SearchConfig), + isUsdc + ? this.getUsdcDepositFinalizedEvents(l2Bridge, address, l2SearchConfig) + : this.getErc20DepositFinalizedEvents(l2Bridge, address, l1Token, l2SearchConfig), ]); - initiatedQueryResult - .filter( - (initialEvent) => - !isDefined( - finalizedQueryResult.find((finalEvent) => - isUsdc - ? 
finalEvent.args.amount.eq(initialEvent.args.amount) && - compareAddressesSimple(initialEvent.args.to, finalEvent.args.recipient) - : finalEvent.args.amount.eq(initialEvent.args.amount) && - compareAddressesSimple(initialEvent.args.recipient, finalEvent.args.recipient) && - compareAddressesSimple(finalEvent.args.nativeToken, initialEvent.args.token) - ) - ) - ) - .forEach((initialEvent) => { - const txHash = initialEvent.transactionHash; - const amount = initialEvent.args.amount; - outstandingTransfers[address] ??= {}; - outstandingTransfers[address][l1Token] ??= { totalAmount: bnZero, depositTxHashes: [] }; - outstandingTransfers[address][l1Token] = { - totalAmount: outstandingTransfers[address][l1Token].totalAmount.add(amount), - depositTxHashes: [...outstandingTransfers[address][l1Token].depositTxHashes, txHash], - }; - }); + if (isUsdc) { + this.matchUsdcDepositEvents( + initiatedQueryResult, + finalizedQueryResult, + outstandingTransfers, + address, + l1Token + ); + } else { + this.matchErc20DepositEvents( + initiatedQueryResult, + finalizedQueryResult, + outstandingTransfers, + address, + l1Token + ); + } } }); }); return outstandingTransfers; } + async getWethDepositInitiatedEvents( + l1MessageService: Contract, + l2RecipientAddress: string, + l1SearchConfig: EventSearchConfig + ): Promise { + const _initiatedQueryResult = await paginatedEventQuery( + l1MessageService, + l1MessageService.filters.MessageSent(null, l2RecipientAddress), + l1SearchConfig + ); + // @dev There will be a MessageSent to the SpokePool address for each RelayedRootBundle so remove + // those with 0 value. 
+ return _initiatedQueryResult.filter(({ args }) => args._value.gt(0)); + } + + async getWethDepositFinalizedEvents( + l2MessageService: Contract, + internalMessageHashes: string[], + l2SearchConfig: EventSearchConfig + ): Promise { + return await paginatedEventQuery( + l2MessageService, + // Passing in an array of message hashes results in an OR filter + l2MessageService.filters.MessageClaimed(internalMessageHashes), + l2SearchConfig + ); + } + + matchWethDepositEvents( + initiatedQueryResult: Event[], + finalizedQueryResult: Event[], + outstandingTransfers: OutstandingTransfers, + monitoredAddress: string, + l1Token: string + ): void { + const transferEvents = initiatedQueryResult.filter( + ({ args }) => + !finalizedQueryResult.some( + (finalizedEvent) => args._messageHash.toLowerCase() === finalizedEvent.args._messageHash.toLowerCase() + ) + ); + this.computeOutstandingTransfers(outstandingTransfers, monitoredAddress, l1Token, transferEvents); + } + + computeOutstandingTransfers( + outstandingTransfers: OutstandingTransfers, + monitoredAddress: string, + l1Token: string, + transferEvents: Event[] + ): void { + const l2Token = this.resolveL2TokenAddress(l1Token, false); // There's no native USDC on Linea + assert(!isDefined(TOKEN_SYMBOLS_MAP.USDC.addresses[this.chainId])); // We can blow up if this eventually stops being true + transferEvents.forEach((event) => { + const txHash = event.transactionHash; + // @dev WETH events have a _value field, while ERC20 events have an amount field. + const amount = event.args._value ?? 
event.args.amount; + outstandingTransfers[monitoredAddress] ??= {}; + outstandingTransfers[monitoredAddress][l1Token] ??= {}; + outstandingTransfers[monitoredAddress][l1Token][l2Token] ??= { totalAmount: bnZero, depositTxHashes: [] }; + outstandingTransfers[monitoredAddress][l1Token][l2Token] = { + totalAmount: outstandingTransfers[monitoredAddress][l1Token][l2Token].totalAmount.add(amount), + depositTxHashes: [...outstandingTransfers[monitoredAddress][l1Token][l2Token].depositTxHashes, txHash], + }; + }); + } + + async getErc20DepositInitiatedEvents( + l1Bridge: Contract, + monitoredAddress: string, + l1Token: string, + l1SearchConfig: EventSearchConfig + ): Promise { + const initiatedQueryResult = await paginatedEventQuery( + l1Bridge, + l1Bridge.filters.BridgingInitiatedV2(null /* sender */, monitoredAddress /* recipient */, l1Token), + l1SearchConfig + ); + return initiatedQueryResult; + } + + async getErc20DepositFinalizedEvents( + l2Bridge: Contract, + monitoredAddress: string, + l1Token: string, + l2SearchConfig: EventSearchConfig + ): Promise { + const finalizedQueryResult = await paginatedEventQuery( + l2Bridge, + l2Bridge.filters.BridgingFinalizedV2( + l1Token, + null /* bridgedToken */, + null /* bridgedToken */, + monitoredAddress /* recipient */ + ), + l2SearchConfig + ); + return finalizedQueryResult; + } + + matchErc20DepositEvents( + initiatedQueryResult: Event[], + finalizedQueryResult: Event[], + outstandingTransfers: OutstandingTransfers, + monitoredAddress: string, + l1Token: string + ): void { + const transferEvents = initiatedQueryResult.filter( + (initialEvent) => + !isDefined( + finalizedQueryResult.find( + (finalEvent) => + finalEvent.args.amount.eq(initialEvent.args.amount) && + compareAddressesSimple(initialEvent.args.recipient, finalEvent.args.recipient) && + compareAddressesSimple(finalEvent.args.nativeToken, initialEvent.args.token) + ) + ) + ); + + this.computeOutstandingTransfers(outstandingTransfers, monitoredAddress, l1Token, 
transferEvents); + } + + getUsdcDepositInitiatedEvents( + l1Bridge: Contract, + monitoredAddress: string, + l1SearchConfig: EventSearchConfig + ): Promise { + return paginatedEventQuery( + l1Bridge, + l1Bridge.filters.Deposited(null /* depositor */, null /* amount */, monitoredAddress /* to */), + l1SearchConfig + ); + } + + getUsdcDepositFinalizedEvents( + l2Bridge: Contract, + monitoredAddress: string, + l2SearchConfig: EventSearchConfig + ): Promise { + return paginatedEventQuery( + l2Bridge, + l2Bridge.filters.ReceivedFromOtherLayer(monitoredAddress /* recipient */), + l2SearchConfig + ); + } + + matchUsdcDepositEvents( + initiatedQueryResult: Event[], + finalizedQueryResult: Event[], + outstandingTransfers: OutstandingTransfers, + monitoredAddress: string, + l1Token: string + ): void { + const transferEvents = initiatedQueryResult.filter( + (initialEvent) => + !isDefined( + finalizedQueryResult.find( + (finalEvent) => + finalEvent.args.amount.eq(initialEvent.args.amount) && + compareAddressesSimple(initialEvent.args.to, finalEvent.args.recipient) + ) + ) + ); + this.computeOutstandingTransfers(outstandingTransfers, monitoredAddress, l1Token, transferEvents); + } + sendTokenToTargetChain( address: string, l1Token: string, diff --git a/src/clients/bridges/PolygonAdapter.ts b/src/clients/bridges/PolygonAdapter.ts index 31be5dc7c..0d0941a5c 100644 --- a/src/clients/bridges/PolygonAdapter.ts +++ b/src/clients/bridges/PolygonAdapter.ts @@ -8,18 +8,18 @@ import { isDefined, BigNumberish, TransactionResponse, - resolveTokenSymbols, ZERO_ADDRESS, spreadEventWithBlockNumber, paginatedEventQuery, CHAIN_IDs, TOKEN_SYMBOLS_MAP, bnZero, + assert, } from "../../utils"; import { SpokePoolClient } from "../../clients"; -import { BaseAdapter } from "./"; import { SortableEvent, OutstandingTransfers } from "../../interfaces"; import { CONTRACT_ADDRESSES } from "../../common"; +import { CCTPAdapter } from "./CCTPAdapter"; // ether bridge = 
0x8484Ef722627bf18ca5Ae6BcF031c23E6e922B30 // erc20 bridge = 0x40ec5B33f54e0E8A33A975908C5BA1c14e5BbbDf @@ -111,19 +111,25 @@ const tokenToBridge = { type SupportedL1Token = string; -export class PolygonAdapter extends BaseAdapter { +export class PolygonAdapter extends CCTPAdapter { constructor( logger: winston.Logger, readonly spokePoolClients: { [chainId: number]: SpokePoolClient }, monitoredAddresses: string[] ) { - super( - spokePoolClients, - 137, - monitoredAddresses, - logger, - resolveTokenSymbols(Object.keys(tokenToBridge), BaseAdapter.HUB_CHAIN_ID) - ); + super(spokePoolClients, 137, monitoredAddresses, logger, [ + "USDC", + "USDT", + "WETH", + "DAI", + "WBTC", + "UMA", + "BAL", + "ACX", + "BADGER", + "POOL", + "MATIC", + ]); } // On polygon a bridge transaction looks like a transfer from address(0) to the target. @@ -133,26 +139,34 @@ export class PolygonAdapter extends BaseAdapter { // Skip the tokens if we can't find the corresponding bridge. // This is a valid use case as it's more convenient to check cross chain transfers for all tokens // rather than maintaining a list of native bridge-supported tokens. - const availableTokens = l1Tokens.filter(this.isSupportedToken.bind(this)); + const availableTokens = this.filterSupportedTokens(l1Tokens); const promises: Promise[] = []; - const validTokens: SupportedL1Token[] = []; - // Fetch bridge events for all monitored addresses. - for (const monitoredAddress of this.monitoredAddresses) { + const cctpOutstandingTransfersPromise: Record> = {}; + // Fetch bridge events for all monitored addresses. This function will not work to monitor the hub pool contract, + // only the spoke pool address and EOA's. 
+ const monitoredAddresses = this.monitoredAddresses.filter((address) => address !== this.getHubPool().address); + for (const monitoredAddress of monitoredAddresses) { for (const l1Token of availableTokens) { + if (this.isL1TokenUsdc(l1Token)) { + cctpOutstandingTransfersPromise[monitoredAddress] = this.getOutstandingCctpTransfers(monitoredAddress); + } + const l1Bridge = this.getL1Bridge(l1Token); const l2Token = this.getL2Token(l1Token); const l1Method = tokenToBridge[l1Token].l1Method; let l1SearchFilter: (string | undefined)[] = []; if (l1Method === "LockedERC20") { - l1SearchFilter = [monitoredAddress, undefined, l1Token]; + l1SearchFilter = [undefined /* depositor */, monitoredAddress /* depositReceiver */, l1Token]; } if (l1Method === "LockedEther") { - l1SearchFilter = [undefined, monitoredAddress]; + l1SearchFilter = [undefined /* depositor */, monitoredAddress /* depositReceiver */]; } if (l1Method === "NewDepositBlock") { - l1SearchFilter = [monitoredAddress, TOKEN_SYMBOLS_MAP.MATIC.addresses[CHAIN_IDs.MAINNET]]; + // @dev This won't work for tracking Hub to Spoke transfers since the l1 "owner" will be different + // from the L2 "user". We leave it in here for future EOA relayer rebalancing of Matic. 
+ l1SearchFilter = [monitoredAddress /* owner */, TOKEN_SYMBOLS_MAP.MATIC.addresses[CHAIN_IDs.MAINNET]]; } const l2Method = @@ -162,35 +176,48 @@ export class PolygonAdapter extends BaseAdapter { l2SearchFilter = [ZERO_ADDRESS, monitoredAddress]; } if (l2Method === "TokenDeposited") { - l2SearchFilter = [TOKEN_SYMBOLS_MAP.MATIC.addresses[CHAIN_IDs.MAINNET], ZERO_ADDRESS, monitoredAddress]; + l2SearchFilter = [ + TOKEN_SYMBOLS_MAP.MATIC.addresses[CHAIN_IDs.MAINNET], + ZERO_ADDRESS, + monitoredAddress /* user */, + ]; } promises.push( paginatedEventQuery(l1Bridge, l1Bridge.filters[l1Method](...l1SearchFilter), l1SearchConfig), paginatedEventQuery(l2Token, l2Token.filters[l2Method](...l2SearchFilter), l2SearchConfig) ); - validTokens.push(l1Token); } } - const results = await Promise.all(promises); + const [results, resolvedCCTPEvents] = await Promise.all([ + Promise.all(promises), + Promise.all(monitoredAddresses.map((monitoredAddress) => cctpOutstandingTransfersPromise[monitoredAddress])), + ]); + const resultingCCTPEvents: Record = Object.fromEntries( + monitoredAddresses.map((monitoredAddress, idx) => [monitoredAddress, resolvedCCTPEvents[idx]]) + ); // 2 events per token. - const numEventsPerMonitoredAddress = 2 * validTokens.length; + const numEventsPerMonitoredAddress = 2 * availableTokens.length; // Segregate the events list by monitored address. const resultsByMonitoredAddress = Object.fromEntries( - this.monitoredAddresses.map((monitoredAddress, index) => { + monitoredAddresses.map((monitoredAddress, index) => { const start = index * numEventsPerMonitoredAddress; - return [monitoredAddress, results.slice(start, start + numEventsPerMonitoredAddress + 1)]; + return [monitoredAddress, results.slice(start, start + numEventsPerMonitoredAddress)]; }) ); // Process events for each monitored address. 
- for (const monitoredAddress of this.monitoredAddresses) { + for (const monitoredAddress of monitoredAddresses) { const eventsToProcess = resultsByMonitoredAddress[monitoredAddress]; eventsToProcess.forEach((result, index) => { - const l1Token = validTokens[Math.floor(index / 2)]; + if (eventsToProcess.length === 0) { + return; + } + assert(eventsToProcess.length % 2 === 0, "Events list length should be even"); + const l1Token = availableTokens[Math.floor(index / 2)]; const amountProp = index % 2 === 0 ? tokenToBridge[l1Token].l1AmountProp : tokenToBridge[l1Token].l2AmountProp; const events = result.map((event) => { // Hacky typing here. We should probably rework the structure of this function to improve. @@ -205,58 +232,88 @@ export class PolygonAdapter extends BaseAdapter { }; }); const eventsStorage = index % 2 === 0 ? this.l1DepositInitiatedEvents : this.l2DepositFinalizedEvents; - assign(eventsStorage, [monitoredAddress, l1Token], events); + const l2Token = this.resolveL2TokenAddress(l1Token, false); // these are all either normal L2 tokens or bridged USDC + assign(eventsStorage, [monitoredAddress, l1Token, l2Token], events); }); + if (isDefined(resultingCCTPEvents[monitoredAddress])) { + assign( + this.l1DepositInitiatedEvents, + [ + monitoredAddress, + TOKEN_SYMBOLS_MAP.USDC.addresses[this.hubChainId], + TOKEN_SYMBOLS_MAP.USDC.addresses[this.chainId], // Must map to the USDC Native L2 token address + ], + resultingCCTPEvents[monitoredAddress] + ); + } } this.baseL1SearchConfig.fromBlock = l1SearchConfig.toBlock + 1; this.baseL2SearchConfig.fromBlock = l2SearchConfig.toBlock + 1; - return this.computeOutstandingCrossChainTransfers(validTokens); + return this.computeOutstandingCrossChainTransfers(availableTokens); } - async sendTokenToTargetChain( + sendTokenToTargetChain( address: string, l1Token: string, l2Token: string, amount: BigNumber, simMode = false ): Promise { - let method = "depositFor"; - // note that the amount is the bytes 32 encoding of the 
amount. - let args = [address, l1Token, bnToHex(amount)]; + // If both the L1 & L2 tokens are native USDC, we use the CCTP bridge. + if (this.isL1TokenUsdc(l1Token) && this.isL2TokenUsdc(l2Token)) { + return this.sendCctpTokenToTargetChain(address, l1Token, l2Token, amount, simMode); + } else { + let method = "depositFor"; + // note that the amount is the bytes 32 encoding of the amount. + let args = [address, l1Token, bnToHex(amount)]; - // If this token is WETH (the tokenToEvent maps to the ETH method) then we modify the params to deposit ETH. - if (this.isWeth(l1Token)) { - method = "bridgeWethToPolygon"; - args = [address, amount.toString()]; + // If this token is WETH (the tokenToEvent maps to the ETH method) then we modify the params to deposit ETH. + if (this.isWeth(l1Token)) { + method = "bridgeWethToPolygon"; + args = [address, amount.toString()]; + } + return this._sendTokenToTargetChain( + l1Token, + l2Token, + amount, + this.getL1TokenGateway(l1Token), + method, + args, + 1, + bnZero, + simMode + ); } - return await this._sendTokenToTargetChain( - l1Token, - l2Token, - amount, - this.getL1TokenGateway(l1Token), - method, - args, - 1, - bnZero, - simMode - ); } async checkTokenApprovals(address: string, l1Tokens: string[]): Promise { + const l1TokenListToApprove = []; + const associatedL1Bridges = l1Tokens - .map((l1Token) => { + .flatMap((l1Token) => { + if (!this.isSupportedToken(l1Token)) { + return []; + } if (this.isWeth(l1Token)) { - return this.getL1TokenGateway(l1Token)?.address; + l1TokenListToApprove.push(l1Token); + return [this.getL1TokenGateway(l1Token)?.address]; } - if (!this.isSupportedToken(l1Token)) { - return null; + const bridgeAddresses: string[] = []; + if (this.isL1TokenUsdc(l1Token)) { + bridgeAddresses.push(this.getL1CCTPTokenMessengerBridge().address); } - return this.getL1Bridge(l1Token).address; + bridgeAddresses.push(this.getL1Bridge(l1Token).address); + + // Push the l1 token to the list of tokens to approve N times, where N 
is the number of bridges. + // I.e. the arrays have to be parallel. + l1TokenListToApprove.push(...Array(bridgeAddresses.length).fill(l1Token)); + + return bridgeAddresses; }) .filter(isDefined); - await this.checkAndSendTokenApprovals(address, l1Tokens, associatedL1Bridges); + await this.checkAndSendTokenApprovals(address, l1TokenListToApprove, associatedL1Bridges); } getL1Bridge(l1Token: SupportedL1Token): Contract { diff --git a/src/clients/bridges/ZKSyncAdapter.ts b/src/clients/bridges/ZKSyncAdapter.ts index 31ae0d4e3..4ce2e26ee 100644 --- a/src/clients/bridges/ZKSyncAdapter.ts +++ b/src/clients/bridges/ZKSyncAdapter.ts @@ -9,7 +9,6 @@ import { assign, Event, ZERO_ADDRESS, - getTokenAddress, TOKEN_SYMBOLS_MAP, bnZero, } from "../../utils"; @@ -18,8 +17,9 @@ import assert from "assert"; import * as zksync from "zksync-web3"; import { CONTRACT_ADDRESSES } from "../../common"; import { isDefined } from "../../utils/TypeGuards"; -import { gasPriceOracle, utils } from "@across-protocol/sdk-v2"; +import { gasPriceOracle, utils } from "@across-protocol/sdk"; import { zkSync as zkSyncUtils } from "../../utils/chains"; +import { matchL2EthDepositAndWrapEvents } from "./utils"; /** * Responsible for providing a common interface for interacting with the ZKSync Era @@ -39,11 +39,12 @@ export class ZKSyncAdapter extends BaseAdapter { // Resolve the mailbox and bridge contracts for L1 and L2. const l2EthContract = this.getL2Eth(); + const l2WethContract = this.getL2Weth(); const atomicWethDepositor = this.getAtomicDepositor(); - const aliasedAtomicWethDepositor = zksync.utils.applyL1ToL2Alias(atomicWethDepositor.address); + const hubPool = this.getHubPool(); const l1ERC20Bridge = this.getL1ERC20BridgeContract(); const l2ERC20Bridge = this.getL2ERC20BridgeContract(); - const supportedL1Tokens = l1Tokens.filter(this.isSupportedToken.bind(this)); + const supportedL1Tokens = this.filterSupportedTokens(l1Tokens); // Predeclare this function for use below. 
It is used to process all events that are saved. const processEvent = (event: Event) => { @@ -51,6 +52,7 @@ export class ZKSyncAdapter extends BaseAdapter { const eventSpread = spreadEventWithBlockNumber(event) as SortableEvent & { _amount: BigNumberish; _to: string; + // WETH deposit events `ZkSyncEthDepositInitiated` (emitted by AtomicWethDepositor) don't have an l1Token param. l1Token?: string; }; return { @@ -63,30 +65,62 @@ export class ZKSyncAdapter extends BaseAdapter { await utils.mapAsync(this.monitoredAddresses, async (address) => { return await utils.mapAsync(supportedL1Tokens, async (l1TokenAddress) => { + const isL2Contract = await this.isL2ChainContract(address); + // This adapter will only work to track EOA's or the SpokePool's transfers, so exclude the hub pool + // and any L2 contracts that are not the SpokePool. + if (address === this.getHubPool().address) { + return; + } + const isSpokePoolContract = isL2Contract; + + let initiatedQueryResult: Event[], finalizedQueryResult: Event[], wrapQueryResult: Event[]; + // Resolve whether the token is WETH or not. const isWeth = this.isWeth(l1TokenAddress); - - let initiatedQueryResult: Event[], finalizedQueryResult: Event[]; + const l2Token = this.resolveL2TokenAddress(l1TokenAddress, false); // CCTP doesn't exist on ZkSync. if (isWeth) { - // If WETH, then the deposit initiated event will appear on AtomicDepositor and withdrawal finalized - // will appear in mailbox. - [initiatedQueryResult, finalizedQueryResult] = await Promise.all([ - // Filter on 'from' address and 'to' address + [initiatedQueryResult, finalizedQueryResult, wrapQueryResult] = await Promise.all([ + // If sending WETH from EOA, we can assume the EOA is unwrapping ETH and sending it through the + // AtomicDepositor. If sending WETH from a contract, then the only event we can track from a ZkSync contract + // is the NewPriorityRequest event which doesn't have any parameters about the 'to' or 'amount' sent. 
+ // Therefore, we must track the HubPool and assume any transfers we are tracking from contracts are + // being sent by the HubPool. paginatedEventQuery( - atomicWethDepositor, - atomicWethDepositor.filters.ZkSyncEthDepositInitiated(address, address), + isSpokePoolContract ? hubPool : atomicWethDepositor, + isSpokePoolContract + ? hubPool.filters.TokensRelayed() + : atomicWethDepositor.filters.ZkSyncEthDepositInitiated(address, address), l1SearchConfig ), - - // Filter on transfers between aliased AtomicDepositor address and l2Receiver + // L2 WETH transfer will come from aliased L1 contract that initiated the deposit. paginatedEventQuery( l2EthContract, - l2EthContract.filters.Transfer(aliasedAtomicWethDepositor, address), + l2EthContract.filters.Transfer( + zksync.utils.applyL1ToL2Alias(isSpokePoolContract ? hubPool.address : atomicWethDepositor.address), + address + ), l2SearchConfig ), + // For WETH transfers involving an EOA, only count them if a wrap txn followed the L2 deposit finalization. + isSpokePoolContract + ? Promise.resolve([]) + : paginatedEventQuery( + l2WethContract, + l2WethContract.filters.Transfer(ZERO_ADDRESS, address), + l2SearchConfig + ), ]); + + if (isSpokePoolContract) { + // Filter here if monitoring SpokePool address since TokensRelayed does not have any indexed params. + initiatedQueryResult = initiatedQueryResult.filter( + (e) => e.args.to === address && e.args.l1Token === l1TokenAddress + ); + } else { + // If EOA, additionally verify that the ETH deposit was followed by a WETH wrap event. 
+ finalizedQueryResult = matchL2EthDepositAndWrapEvents(finalizedQueryResult, wrapQueryResult); + } } else { - const l2Token = getTokenAddress(l1TokenAddress, this.hubChainId, this.chainId); [initiatedQueryResult, finalizedQueryResult] = await Promise.all([ // Filter on 'from' and 'to' address paginatedEventQuery( @@ -106,15 +140,22 @@ export class ZKSyncAdapter extends BaseAdapter { assign( this.l1DepositInitiatedEvents, - [address, l1TokenAddress], - initiatedQueryResult.map(processEvent).filter((e) => e?.l1Token && e.l1Token === l1TokenAddress) + [address, l1TokenAddress, l2Token], + // An initiatedQueryResult could be a zkSync DepositInitiated or an AtomicDepositor + // ZkSyncEthDepositInitiated event, subject to whether the deposit token was WETH or not. + // A ZkSyncEthDepositInitiated event doesn't have a token or l1Token param. + initiatedQueryResult.map(processEvent).filter((e) => isWeth || e.l1Token === l1TokenAddress) + ); + assign( + this.l2DepositFinalizedEvents, + [address, l1TokenAddress, l2Token], + finalizedQueryResult.map(processEvent) ); - assign(this.l2DepositFinalizedEvents, [address, l1TokenAddress], finalizedQueryResult.map(processEvent)); }); }); this.baseL1SearchConfig.fromBlock = l1SearchConfig.toBlock + 1; - this.baseL1SearchConfig.fromBlock = l2SearchConfig.toBlock + 1; + this.baseL2SearchConfig.fromBlock = l2SearchConfig.toBlock + 1; return this.computeOutstandingCrossChainTransfers(l1Tokens); } @@ -277,11 +318,20 @@ export class ZKSyncAdapter extends BaseAdapter { const { chainId } = this; const ethContractData = CONTRACT_ADDRESSES[chainId]?.eth; if (!ethContractData) { - throw new Error(`contractData not found for chain ${chainId}`); + throw new Error(`ethContractData not found for chain ${chainId}`); } return new Contract(ethContractData.address, ethContractData.abi, this.getSigner(chainId)); } + private getL2Weth(): Contract { + const { chainId } = this; + const wethContractData = CONTRACT_ADDRESSES[chainId]?.weth; + if 
(!wethContractData) { + throw new Error(`wethContractData not found for chain ${chainId}`); + } + return new Contract(wethContractData.address, wethContractData.abi, this.getSigner(chainId)); + } + private getL1ERC20BridgeContract(): Contract { const { hubChainId } = this; const l1Erc20BridgeContractData = CONTRACT_ADDRESSES[hubChainId]?.zkSyncDefaultErc20Bridge; diff --git a/src/clients/bridges/index.ts b/src/clients/bridges/index.ts index d2bb55e17..20a62b046 100644 --- a/src/clients/bridges/index.ts +++ b/src/clients/bridges/index.ts @@ -1,7 +1,10 @@ export * from "./BaseAdapter"; export * from "./AdapterManager"; export * from "./op-stack/optimism/OptimismAdapter"; +export * from "./op-stack/base/BaseChainAdapter"; +export * from "./op-stack/mode/ModeAdapter"; export * from "./ArbitrumAdapter"; export * from "./PolygonAdapter"; export * from "./CrossChainTransferClient"; export * from "./ZKSyncAdapter"; +export * from "./LineaAdapter"; diff --git a/src/clients/bridges/op-stack/DefaultErc20Bridge.ts b/src/clients/bridges/op-stack/DefaultErc20Bridge.ts index fb7d66e5d..a80bb85a3 100644 --- a/src/clients/bridges/op-stack/DefaultErc20Bridge.ts +++ b/src/clients/bridges/op-stack/DefaultErc20Bridge.ts @@ -1,12 +1,16 @@ -import { Contract, BigNumber, paginatedEventQuery, Event, Signer, EventSearchConfig, Provider } from "../../../utils"; +import { Contract, BigNumber, paginatedEventQuery, Signer, EventSearchConfig, Provider } from "../../../utils"; import { CONTRACT_ADDRESSES } from "../../../common"; -import { BridgeTransactionDetails, OpStackBridge } from "./OpStackBridgeInterface"; +import { BridgeTransactionDetails, OpStackBridge, OpStackEvents } from "./OpStackBridgeInterface"; -export class DefaultERC20Bridge implements OpStackBridge { +export class DefaultERC20Bridge extends OpStackBridge { private readonly l1Bridge: Contract; private readonly l2Bridge: Contract; - constructor(private l2chainId: number, hubChainId: number, l1Signer: Signer, l2SignerOrProvider: 
Signer | Provider) { + constructor(l2chainId: number, hubChainId: number, l1Signer: Signer, l2SignerOrProvider: Signer | Provider) { + super(l2chainId, hubChainId, l1Signer, l2SignerOrProvider, [ + CONTRACT_ADDRESSES[hubChainId][`ovmStandardBridge_${l2chainId}`].address, + ]); + const { address: l1Address, abi: l1Abi } = CONTRACT_ADDRESSES[hubChainId][`ovmStandardBridge_${l2chainId}`]; this.l1Bridge = new Contract(l1Address, l1Abi, l1Signer); @@ -14,10 +18,6 @@ export class DefaultERC20Bridge implements OpStackBridge { this.l2Bridge = new Contract(l2Address, l2Abi, l2SignerOrProvider); } - get l1Gateway(): string { - return this.l1Bridge.address; - } - constructL1ToL2Txn( toAddress: string, l1Token: string, @@ -32,27 +32,31 @@ export class DefaultERC20Bridge implements OpStackBridge { }; } - queryL1BridgeInitiationEvents( + async queryL1BridgeInitiationEvents( l1Token: string, fromAddress: string, eventConfig: EventSearchConfig - ): Promise { - return paginatedEventQuery( - this.l1Bridge, - this.l1Bridge.filters.ERC20DepositInitiated(l1Token, undefined, fromAddress), - eventConfig - ); + ): Promise { + return { + [this.resolveL2TokenAddress(l1Token)]: await paginatedEventQuery( + this.l1Bridge, + this.l1Bridge.filters.ERC20DepositInitiated(l1Token, undefined, fromAddress), + eventConfig + ), + }; } - queryL2BridgeFinalizationEvents( + async queryL2BridgeFinalizationEvents( l1Token: string, fromAddress: string, eventConfig: EventSearchConfig - ): Promise { - return paginatedEventQuery( - this.l2Bridge, - this.l2Bridge.filters.DepositFinalized(l1Token, undefined, fromAddress), - eventConfig - ); + ): Promise { + return { + [this.resolveL2TokenAddress(l1Token)]: await paginatedEventQuery( + this.l2Bridge, + this.l2Bridge.filters.DepositFinalized(l1Token, undefined, fromAddress), + eventConfig + ), + }; } } diff --git a/src/clients/bridges/op-stack/OpStackAdapter.ts b/src/clients/bridges/op-stack/OpStackAdapter.ts index 5c10983a4..f32caf7ad 100644 --- 
a/src/clients/bridges/op-stack/OpStackAdapter.ts +++ b/src/clients/bridges/op-stack/OpStackAdapter.ts @@ -19,6 +19,7 @@ import { CONTRACT_ADDRESSES } from "../../../common"; import { OpStackBridge } from "./OpStackBridgeInterface"; import { WethBridge } from "./WethBridge"; import { DefaultERC20Bridge } from "./DefaultErc20Bridge"; +import { UsdcTokenSplitterBridge } from "./UsdcTokenSplitterBridge"; export class OpStackAdapter extends BaseAdapter { public l2Gas: number; @@ -36,7 +37,7 @@ export class OpStackAdapter extends BaseAdapter { this.l2Gas = 200000; // Typically, a custom WETH bridge is not provided, so use the standard one. - const wethAddress = TOKEN_SYMBOLS_MAP.WETH.addresses[this.hubChainId]; + const wethAddress = this.wethAddress; if (wethAddress && !this.customBridges[wethAddress]) { this.customBridges[wethAddress] = new WethBridge( this.chainId, @@ -46,6 +47,21 @@ export class OpStackAdapter extends BaseAdapter { ); } + // We should manually override the bridge for USDC to use CCTP if this chain has a Native USDC entry. We can + // assume that all Op Stack chains will have a bridged USDC.e variant that uses the OVM standard bridge, so we + // only need to check if a native USDC exists for this chain. If so, then we'll use the TokenSplitter bridge + // which maps to either the CCTP or OVM Standard bridge depending on the request. 
+ const usdcAddress = TOKEN_SYMBOLS_MAP.USDC.addresses[this.hubChainId]; + const l2NativeUsdcAddress = TOKEN_SYMBOLS_MAP.USDC.addresses[this.chainId]; + if (usdcAddress && l2NativeUsdcAddress) { + this.customBridges[usdcAddress] = new UsdcTokenSplitterBridge( + this.chainId, + this.hubChainId, + this.getSigner(this.hubChainId), + this.getSigner(chainId) + ); + } + this.defaultBridge = new DefaultERC20Bridge( this.chainId, this.hubChainId, @@ -62,6 +78,7 @@ export class OpStackAdapter extends BaseAdapter { async getOutstandingCrossChainTransfers(l1Tokens: string[]): Promise { const { l1SearchConfig, l2SearchConfig } = this.getUpdatedSearchConfigs(); + const availableL1Tokens = this.filterSupportedTokens(l1Tokens); const processEvent = (event: Event) => { const eventSpread = spreadEventWithBlockNumber(event) as SortableEvent & { @@ -78,7 +95,7 @@ export class OpStackAdapter extends BaseAdapter { await Promise.all( this.monitoredAddresses.map((monitoredAddress) => Promise.all( - l1Tokens.map(async (l1Token) => { + availableL1Tokens.map(async (l1Token) => { const bridge = this.getBridge(l1Token); const [depositInitiatedResults, depositFinalizedResults] = await Promise.all([ @@ -86,16 +103,13 @@ export class OpStackAdapter extends BaseAdapter { bridge.queryL2BridgeFinalizationEvents(l1Token, monitoredAddress, l2SearchConfig), ]); - assign( - this.l1DepositInitiatedEvents, - [monitoredAddress, l1Token], - depositInitiatedResults.map(processEvent) - ); - assign( - this.l2DepositFinalizedEvents, - [monitoredAddress, l1Token], - depositFinalizedResults.map(processEvent) - ); + Object.entries(depositInitiatedResults).forEach(([l2Token, events]) => { + assign(this.l1DepositInitiatedEvents, [monitoredAddress, l1Token, l2Token], events.map(processEvent)); + }); + + Object.entries(depositFinalizedResults).forEach(([l2Token, events]) => { + assign(this.l2DepositFinalizedEvents, [monitoredAddress, l1Token, l2Token], events.map(processEvent)); + }); }) ) ) @@ -104,7 +118,7 @@ 
export class OpStackAdapter extends BaseAdapter { this.baseL1SearchConfig.fromBlock = l1SearchConfig.toBlock + 1; this.baseL1SearchConfig.fromBlock = l2SearchConfig.toBlock + 1; - return this.computeOutstandingCrossChainTransfers(l1Tokens); + return this.computeOutstandingCrossChainTransfers(availableL1Tokens); } async sendTokenToTargetChain( @@ -141,7 +155,6 @@ export class OpStackAdapter extends BaseAdapter { simMode = false ): Promise { const { chainId } = this; - assert([10, 8453].includes(chainId), `chainId ${chainId} is not supported`); const ovmWeth = CONTRACT_ADDRESSES[this.chainId].weth; const ethBalance = await this.getSigner(chainId).getBalance(); @@ -163,9 +176,16 @@ export class OpStackAdapter extends BaseAdapter { } async checkTokenApprovals(address: string, l1Tokens: string[]): Promise { + const l1TokenListToApprove = []; // We need to approve the Atomic depositor to bridge WETH to optimism via the ETH route. - const associatedL1Bridges = l1Tokens.map((l1Token) => this.getBridge(l1Token).l1Gateway); - await this.checkAndSendTokenApprovals(address, l1Tokens, associatedL1Bridges); + const associatedL1Bridges = l1Tokens.flatMap((l1Token) => { + const bridges = this.getBridge(l1Token).l1Gateways; + // Push the l1 token to the list of tokens to approve N times, where N is the number of bridges. + // I.e. the arrays have to be parallel. 
+ l1TokenListToApprove.push(...Array(bridges.length).fill(l1Token)); + return bridges; + }); + await this.checkAndSendTokenApprovals(address, l1TokenListToApprove, associatedL1Bridges); } getBridge(l1Token: string): OpStackBridge { diff --git a/src/clients/bridges/op-stack/OpStackBridgeInterface.ts b/src/clients/bridges/op-stack/OpStackBridgeInterface.ts index bd4b7bc38..407fe1e27 100644 --- a/src/clients/bridges/op-stack/OpStackBridgeInterface.ts +++ b/src/clients/bridges/op-stack/OpStackBridgeInterface.ts @@ -1,4 +1,12 @@ -import { Contract, BigNumber, Event, EventSearchConfig } from "../../../utils"; +import { + Contract, + BigNumber, + Event, + EventSearchConfig, + Signer, + Provider, + getTokenAddressWithCCTP, +} from "../../../utils"; export interface BridgeTransactionDetails { readonly contract: Contract; @@ -6,19 +14,38 @@ export interface BridgeTransactionDetails { readonly args: unknown[]; } -export interface OpStackBridge { - readonly l1Gateway: string; - constructL1ToL2Txn( +export type OpStackEvents = { [l2Token: string]: Event[] }; + +export abstract class OpStackBridge { + constructor( + protected l2chainId: number, + protected hubChainId: number, + protected l1Signer: Signer, + protected l2SignerOrProvider: Signer | Provider, + readonly l1Gateways: string[] + ) {} + + abstract constructL1ToL2Txn( toAddress: string, l1Token: string, l2Token: string, amount: BigNumber, l2Gas: number ): BridgeTransactionDetails; - queryL1BridgeInitiationEvents(l1Token: string, fromAddress: string, eventConfig: EventSearchConfig): Promise; - queryL2BridgeFinalizationEvents( + + abstract queryL1BridgeInitiationEvents( + l1Token: string, + fromAddress: string, + eventConfig: EventSearchConfig + ): Promise; + + abstract queryL2BridgeFinalizationEvents( l1Token: string, fromAddress: string, eventConfig: EventSearchConfig - ): Promise; + ): Promise; + + protected resolveL2TokenAddress(l1Token: string): string { + return getTokenAddressWithCCTP(l1Token, this.hubChainId, 
this.l2chainId, false); + } } diff --git a/src/clients/bridges/op-stack/UsdcCCTPBridge.ts b/src/clients/bridges/op-stack/UsdcCCTPBridge.ts new file mode 100644 index 000000000..2a445f2a1 --- /dev/null +++ b/src/clients/bridges/op-stack/UsdcCCTPBridge.ts @@ -0,0 +1,83 @@ +import { BigNumber, Contract, Signer } from "ethers"; +import { CONTRACT_ADDRESSES, chainIdsToCctpDomains } from "../../../common"; +import { BridgeTransactionDetails, OpStackBridge, OpStackEvents } from "./OpStackBridgeInterface"; +import { EventSearchConfig, Provider, TOKEN_SYMBOLS_MAP } from "../../../utils"; +import { cctpAddressToBytes32, retrieveOutstandingCCTPBridgeUSDCTransfers } from "../../../utils/CCTPUtils"; + +export class UsdcCCTPBridge extends OpStackBridge { + private readonly l1CctpTokenBridge: Contract; + private readonly l2CctpMessageTransmitter: Contract; + + constructor(l2chainId: number, hubChainId: number, l1Signer: Signer, l2SignerOrProvider: Signer | Provider) { + super(l2chainId, hubChainId, l1Signer, l2SignerOrProvider, [ + CONTRACT_ADDRESSES[hubChainId].cctpTokenMessenger.address, + ]); + + const { address: l1Address, abi: l1Abi } = CONTRACT_ADDRESSES[hubChainId].cctpTokenMessenger; + this.l1CctpTokenBridge = new Contract(l1Address, l1Abi, l1Signer); + + const { address: l2Address, abi: l2Abi } = CONTRACT_ADDRESSES[l2chainId].cctpMessageTransmitter; + this.l2CctpMessageTransmitter = new Contract(l2Address, l2Abi, l2SignerOrProvider); + } + + private get l2DestinationDomain(): number { + return chainIdsToCctpDomains[this.l2chainId]; + } + + private get l1UsdcTokenAddress(): string { + return TOKEN_SYMBOLS_MAP.USDC.addresses[this.hubChainId]; + } + + protected resolveL2TokenAddress(l1Token: string): string { + l1Token; + return TOKEN_SYMBOLS_MAP.USDC.addresses[this.l2chainId]; + } + + constructL1ToL2Txn( + toAddress: string, + _l1Token: string, + _l2Token: string, + amount: BigNumber, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + _l2Gas: number + ): 
BridgeTransactionDetails { + return { + contract: this.l1CctpTokenBridge, + method: "depositForBurn", + args: [amount, this.l2DestinationDomain, cctpAddressToBytes32(toAddress), this.l1UsdcTokenAddress], + }; + } + + async queryL1BridgeInitiationEvents( + l1Token: string, + fromAddress: string, + eventConfig: EventSearchConfig + ): Promise { + return { + [this.resolveL2TokenAddress(l1Token)]: await retrieveOutstandingCCTPBridgeUSDCTransfers( + this.l1CctpTokenBridge, + this.l2CctpMessageTransmitter, + eventConfig, + this.l1UsdcTokenAddress, + this.hubChainId, + this.l2chainId, + fromAddress + ), + }; + } + queryL2BridgeFinalizationEvents( + l1Token: string, + fromAddress: string, + eventConfig: EventSearchConfig + ): Promise { + // Lint Appeasement + l1Token; + fromAddress; + eventConfig; + + // Per the documentation of the BaseAdapter's computeOutstandingCrossChainTransfers method, we can return an empty array here + // and only return the relevant outstanding events from queryL1BridgeInitiationEvents. 
+ // Relevant link: https://github.com/across-protocol/relayer/blob/master/src/clients/bridges/BaseAdapter.ts#L189 + return Promise.resolve({}); + } +} diff --git a/src/clients/bridges/op-stack/UsdcTokenSplitterBridge.ts b/src/clients/bridges/op-stack/UsdcTokenSplitterBridge.ts new file mode 100644 index 000000000..31ad4cafa --- /dev/null +++ b/src/clients/bridges/op-stack/UsdcTokenSplitterBridge.ts @@ -0,0 +1,91 @@ +import { BigNumber, Signer } from "ethers"; +import { DefaultERC20Bridge } from "./DefaultErc20Bridge"; +import { UsdcCCTPBridge } from "./UsdcCCTPBridge"; +import { EventSearchConfig, Provider, TOKEN_SYMBOLS_MAP, assert, compareAddressesSimple } from "../../../utils"; +import { BridgeTransactionDetails, OpStackBridge, OpStackEvents } from "./OpStackBridgeInterface"; +import { CONTRACT_ADDRESSES } from "../../../common"; + +export class UsdcTokenSplitterBridge extends OpStackBridge { + private readonly cctpBridge: UsdcCCTPBridge; + private readonly canonicalBridge: DefaultERC20Bridge; + + constructor(l2chainId: number, hubChainId: number, l1Signer: Signer, l2SignerOrProvider: Signer | Provider) { + super(l2chainId, hubChainId, l1Signer, l2SignerOrProvider, [ + CONTRACT_ADDRESSES[hubChainId].cctpTokenMessenger.address, + CONTRACT_ADDRESSES[hubChainId][`ovmStandardBridge_${l2chainId}`].address, + ]); + this.cctpBridge = new UsdcCCTPBridge(l2chainId, hubChainId, l1Signer, l2SignerOrProvider); + this.canonicalBridge = new DefaultERC20Bridge(l2chainId, hubChainId, l1Signer, l2SignerOrProvider); + } + + /** + * Get the correct bridge for the given L2 token address. + * @param l2Token The L2 token address to get the bridge for. + * @returns If the L2 token is native USDC, returns the CCTP bridge. Otherwise, returns the canonical bridge. + */ + private getL1Bridge(l2Token: string): OpStackBridge { + return compareAddressesSimple(l2Token, TOKEN_SYMBOLS_MAP.USDC.addresses[this.l2chainId]) + ? 
this.cctpBridge + : this.canonicalBridge; + } + + constructL1ToL2Txn( + toAddress: string, + l1Token: string, + l2Token: string, + amount: BigNumber, + l2Gas: number + ): BridgeTransactionDetails { + // We should *only* be calling this class for USDC tokens + assert(compareAddressesSimple(l1Token, TOKEN_SYMBOLS_MAP.USDC.addresses[this.hubChainId])); + return this.getL1Bridge(l2Token).constructL1ToL2Txn(toAddress, l1Token, l2Token, amount, l2Gas); + } + + async queryL1BridgeInitiationEvents( + l1Token: string, + fromAddress: string, + eventConfig: EventSearchConfig + ): Promise { + // We should *only* be calling this class for USDC tokens + assert(compareAddressesSimple(l1Token, TOKEN_SYMBOLS_MAP.USDC.addresses[this.hubChainId])); + const events = await Promise.all([ + this.cctpBridge.queryL1BridgeInitiationEvents(l1Token, fromAddress, eventConfig), + this.canonicalBridge.queryL1BridgeInitiationEvents(l1Token, fromAddress, eventConfig), + ]); + // Reduce the events to a single Object. If there are any duplicate keys, merge the events. + return events.reduce((acc, event) => { + Object.entries(event).forEach(([l2Token, events]) => { + if (l2Token in acc) { + acc[l2Token] = acc[l2Token].concat(events); + } else { + acc[l2Token] = events; + } + }); + return acc; + }, {}); + } + + async queryL2BridgeFinalizationEvents( + l1Token: string, + fromAddress: string, + eventConfig: EventSearchConfig + ): Promise { + // We should *only* be calling this class for USDC tokens + assert(compareAddressesSimple(l1Token, TOKEN_SYMBOLS_MAP.USDC.addresses[this.hubChainId])); + const events = await Promise.all([ + this.cctpBridge.queryL2BridgeFinalizationEvents(l1Token, fromAddress, eventConfig), + this.canonicalBridge.queryL2BridgeFinalizationEvents(l1Token, fromAddress, eventConfig), + ]); + // Reduce the events to a single object. If there are any duplicate keys, merge the events. 
+    return events.reduce((acc, event) => {
+      Object.entries(event).forEach(([l2Token, events]) => {
+        if (l2Token in acc) {
+          acc[l2Token] = acc[l2Token].concat(events);
+        } else {
+          acc[l2Token] = events;
+        }
+      });
+      return acc;
+    }, {});
+  }
+}
diff --git a/src/clients/bridges/op-stack/WethBridge.ts b/src/clients/bridges/op-stack/WethBridge.ts
index b551731a1..0d91542db 100644
--- a/src/clients/bridges/op-stack/WethBridge.ts
+++ b/src/clients/bridges/op-stack/WethBridge.ts
@@ -1,22 +1,36 @@
 import {
   Contract,
   BigNumber,
-  Event,
   EventSearchConfig,
   paginatedEventQuery,
   Signer,
   Provider,
   ZERO_ADDRESS,
+  Event,
+  TOKEN_SYMBOLS_MAP,
 } from "../../../utils";
 import { CONTRACT_ADDRESSES } from "../../../common";
-import { BridgeTransactionDetails, OpStackBridge } from "./OpStackBridgeInterface";
+import { matchL2EthDepositAndWrapEvents } from "../utils";
+import { utils } from "@across-protocol/sdk";
+import { BridgeTransactionDetails, OpStackBridge, OpStackEvents } from "./OpStackBridgeInterface";
 
-export class WethBridge implements OpStackBridge {
+export class WethBridge extends OpStackBridge {
   private readonly l1Bridge: Contract;
   private readonly l2Bridge: Contract;
   private readonly atomicDepositor: Contract;
+  private readonly l2Weth: Contract;
+  private readonly hubPoolAddress: string;
+
+  constructor(l2chainId: number, hubChainId: number, l1Signer: Signer, l2SignerOrProvider: Signer | Provider) {
+    super(
+      l2chainId,
+      hubChainId,
+      l1Signer,
+      l2SignerOrProvider,
+      // To keep existing logic, we should use the atomic depositor as the l1 bridge
+      [CONTRACT_ADDRESSES[hubChainId].atomicDepositor.address]
+    );
 
-  constructor(private l2chainId: number, hubChainId: number, l1Signer: Signer, l2SignerOrProvider: Signer | Provider) {
     const { address: l1Address, abi: l1Abi } = CONTRACT_ADDRESSES[hubChainId][`ovmStandardBridge_${l2chainId}`];
     this.l1Bridge = new Contract(l1Address, l1Abi, l1Signer);
 
@@ -25,10 +39,11 @@ export class WethBridge implements OpStackBridge {
     const {
address: atomicDepositorAddress, abi: atomicDepositorAbi } = CONTRACT_ADDRESSES[hubChainId].atomicDepositor; this.atomicDepositor = new Contract(atomicDepositorAddress, atomicDepositorAbi, l1Signer); - } - get l1Gateway(): string { - return this.atomicDepositor.address; + const { address: l2WethAddress, abi: l2WethAbi } = CONTRACT_ADDRESSES[l2chainId].weth; + this.l2Weth = new Contract(l2WethAddress, l2WethAbi, l2SignerOrProvider); + + this.hubPoolAddress = CONTRACT_ADDRESSES[this.hubChainId]?.hubPool?.address; } constructL1ToL2Txn( @@ -45,23 +60,115 @@ export class WethBridge implements OpStackBridge { }; } - queryL1BridgeInitiationEvents( + private convertEventListToOpStackEvents(events: Event[]): OpStackEvents { + return { + [this.resolveL2TokenAddress(TOKEN_SYMBOLS_MAP.WETH.addresses[this.hubChainId])]: events, + }; + } + + async queryL1BridgeInitiationEvents( l1Token: string, fromAddress: string, - eventConfig: EventSearchConfig - ): Promise { - return paginatedEventQuery(this.l1Bridge, this.l1Bridge.filters.ETHDepositInitiated(fromAddress), eventConfig); + eventConfig: EventSearchConfig, + l1Bridge = this.l1Bridge + ): Promise { + // We need to be smart about the filtering here because the ETHDepositInitiated event does not + // index on the `toAddress` which is the `fromAddress` that we pass in here and the address we want + // to actually filter on. So we make some simplifying assumptions: + // - For our tracking purposes, the ETHDepositInitiated `fromAddress` will be the + // AtomicDepositor if the fromAddress is an EOA. 
+ const isContract = await this.isHubChainContract(fromAddress); + const isL2ChainContract = await this.isL2ChainContract(fromAddress); + + // Since we can only index on the `fromAddress` for the ETHDepositInitiated event, we can't support + // monitoring the spoke pool address + if (isL2ChainContract || (isContract && fromAddress !== this.hubPoolAddress)) { + return this.convertEventListToOpStackEvents([]); + } + + const events = await paginatedEventQuery( + l1Bridge, + l1Bridge.filters.ETHDepositInitiated(isContract ? fromAddress : this.atomicDepositor.address), + eventConfig + ); + // If EOA sent the ETH via the AtomicDepositor, then remove any events where the + // toAddress is not the EOA so we don't get confused with other users using the AtomicDepositor + if (!isContract) { + return this.convertEventListToOpStackEvents(events.filter((event) => event.args._to === fromAddress)); + } + return this.convertEventListToOpStackEvents(events); } - queryL2BridgeFinalizationEvents( + async queryL2BridgeFinalizationEvents( l1Token: string, fromAddress: string, - eventConfig: EventSearchConfig + eventConfig: EventSearchConfig, + l2Bridge = this.l2Bridge, + l2Weth = this.l2Weth + ): Promise { + // Check if the sender is a contract on the L1 network. + const isContract = await this.isHubChainContract(fromAddress); + + // See above for why we don't want to monitor the spoke pool contract. + const isL2ChainContract = await this.isL2ChainContract(fromAddress); + if (isL2ChainContract || (isContract && fromAddress !== this.hubPoolAddress)) { + return this.convertEventListToOpStackEvents([]); + } + + if (!isContract) { + // When bridging WETH to OP stack chains from an EOA, ETH is bridged via the AtomicDepositor contract + // and received as ETH on L2. The InventoryClient is built to abstract this subtlety and + // assumes that WETH is being rebalanced from L1 to L2. 
Therefore, L1 to L2 ETH transfers sent from an EOA + // should only be considered finalized if they are followed by an L2 Wrapped Ether "Deposit" event, + // signifying that the relayer has received WETH into their inventory. + const l2EthDepositEvents = ( + await paginatedEventQuery( + l2Bridge, + l2Bridge.filters.DepositFinalized(ZERO_ADDRESS, undefined, this.atomicDepositor.address), + eventConfig + ) + ) + // If EOA sent the ETH via the AtomicDepositor, then remove any events where the + // toAddress is not the EOA so we don't get confused with other users using the AtomicDepositor + .filter((event) => event.args._to === fromAddress); + + // We only care about WETH finalization events initiated by the relayer running this rebalancer logic, so only + // filter on Deposit events sent from the provided signer. We can't simply filter on `fromAddress` because + // this would require that the AtomicWethDepositor address wrapped the ETH into WETH, which is not the case for + // ETH transfers initiated by the AtomicWethDepositor. ETH is sent from the AtomicWethDepositor contract + // on L1 and received as ETH on L2 by the recipient, which is finally wrapped into WETH on the L2 by the + // recipient--the L2 signer in this class. 
+ const l2EthWrapEvents = await this.queryL2WrapEthEvents(fromAddress, eventConfig, l2Weth); + return this.convertEventListToOpStackEvents(matchL2EthDepositAndWrapEvents(l2EthDepositEvents, l2EthWrapEvents)); + } else { + // Since we can only index on the `fromAddress` for the DepositFinalized event, we can't support + // monitoring the spoke pool address + if (fromAddress !== this.hubPoolAddress) { + return this.convertEventListToOpStackEvents([]); + } + + return this.convertEventListToOpStackEvents( + await paginatedEventQuery( + l2Bridge, + l2Bridge.filters.DepositFinalized(ZERO_ADDRESS, undefined, fromAddress), + eventConfig + ) + ); + } + } + + async isHubChainContract(address: string): Promise { + return utils.isContractDeployedToAddress(address, this.l1Bridge.provider); + } + async isL2ChainContract(address: string): Promise { + return utils.isContractDeployedToAddress(address, this.l2Bridge.provider); + } + + private queryL2WrapEthEvents( + fromAddress: string, + eventConfig: EventSearchConfig, + l2Weth = this.l2Weth ): Promise { - return paginatedEventQuery( - this.l2Bridge, - this.l2Bridge.filters.DepositFinalized(ZERO_ADDRESS, undefined, fromAddress), - eventConfig - ); + return paginatedEventQuery(l2Weth, l2Weth.filters.Deposit(fromAddress), eventConfig); } } diff --git a/src/clients/bridges/op-stack/base/BaseChainAdapter.ts b/src/clients/bridges/op-stack/base/BaseChainAdapter.ts index 88a0c3ac7..eee891d19 100644 --- a/src/clients/bridges/op-stack/base/BaseChainAdapter.ts +++ b/src/clients/bridges/op-stack/base/BaseChainAdapter.ts @@ -14,7 +14,7 @@ export class BaseChainAdapter extends OpStackAdapter { // Custom Bridges {}, logger, - ["BAL", "DAI", "ETH", "WETH", "USDC"], + ["BAL", "DAI", "ETH", "WETH", "USDC", "POOL"], spokePoolClients, monitoredAddresses ); diff --git a/src/clients/bridges/op-stack/index.ts b/src/clients/bridges/op-stack/index.ts index dcf278562..5c612deec 100644 --- a/src/clients/bridges/op-stack/index.ts +++ 
b/src/clients/bridges/op-stack/index.ts @@ -2,3 +2,4 @@ export * from "./OpStackAdapter"; export * from "./optimism"; export * from "./base"; +export * from "./mode"; diff --git a/src/clients/bridges/op-stack/mode/ModeAdapter.ts b/src/clients/bridges/op-stack/mode/ModeAdapter.ts new file mode 100644 index 000000000..4e29d9851 --- /dev/null +++ b/src/clients/bridges/op-stack/mode/ModeAdapter.ts @@ -0,0 +1,21 @@ +import { winston } from "../../../../utils"; +import { SpokePoolClient } from "../../.."; +import { OpStackAdapter } from "../OpStackAdapter"; + +export class ModeAdapter extends OpStackAdapter { + constructor( + logger: winston.Logger, + readonly spokePoolClients: { [chainId: number]: SpokePoolClient }, + monitoredAddresses: string[] + ) { + super( + 34443, + // Custom Bridges + {}, + logger, + ["ETH", "WETH", "USDC", "USDT", "WBTC"], + spokePoolClients, + monitoredAddresses + ); + } +} diff --git a/src/clients/bridges/op-stack/mode/index.ts b/src/clients/bridges/op-stack/mode/index.ts new file mode 100644 index 000000000..084565978 --- /dev/null +++ b/src/clients/bridges/op-stack/mode/index.ts @@ -0,0 +1 @@ +export * from "./ModeAdapter"; diff --git a/src/clients/bridges/op-stack/optimism/DaiOptimismBridge.ts b/src/clients/bridges/op-stack/optimism/DaiOptimismBridge.ts index ead7b6fdd..6d84a374d 100644 --- a/src/clients/bridges/op-stack/optimism/DaiOptimismBridge.ts +++ b/src/clients/bridges/op-stack/optimism/DaiOptimismBridge.ts @@ -1,20 +1,16 @@ -import { - Contract, - BigNumber, - paginatedEventQuery, - Event, - EventSearchConfig, - Signer, - Provider, -} from "../../../../utils"; +import { Contract, BigNumber, paginatedEventQuery, EventSearchConfig, Signer, Provider } from "../../../../utils"; import { CONTRACT_ADDRESSES } from "../../../../common"; -import { OpStackBridge, BridgeTransactionDetails } from "../OpStackBridgeInterface"; +import { OpStackBridge, BridgeTransactionDetails, OpStackEvents } from "../OpStackBridgeInterface"; -export class 
DaiOptimismBridge implements OpStackBridge { +export class DaiOptimismBridge extends OpStackBridge { private readonly l1Bridge: Contract; private readonly l2Bridge: Contract; - constructor(private l2chainId: number, hubChainId: number, l1Signer: Signer, l2SignerOrProvider: Signer | Provider) { + constructor(l2chainId: number, hubChainId: number, l1Signer: Signer, l2SignerOrProvider: Signer | Provider) { + super(l2chainId, hubChainId, l1Signer, l2SignerOrProvider, [ + CONTRACT_ADDRESSES[hubChainId].daiOptimismBridge.address, + ]); + const { address: l1Address, abi: l1Abi } = CONTRACT_ADDRESSES[hubChainId].daiOptimismBridge; this.l1Bridge = new Contract(l1Address, l1Abi, l1Signer); @@ -22,10 +18,6 @@ export class DaiOptimismBridge implements OpStackBridge { this.l2Bridge = new Contract(l2Address, l2Abi, l2SignerOrProvider); } - get l1Gateway(): string { - return this.l1Bridge.address; - } - constructL1ToL2Txn( toAddress: string, l1Token: string, @@ -40,27 +32,31 @@ export class DaiOptimismBridge implements OpStackBridge { }; } - queryL1BridgeInitiationEvents( + async queryL1BridgeInitiationEvents( l1Token: string, fromAddress: string, eventConfig: EventSearchConfig - ): Promise { - return paginatedEventQuery( - this.l1Bridge, - this.l1Bridge.filters.ERC20DepositInitiated(l1Token, undefined, fromAddress), - eventConfig - ); + ): Promise { + return { + [this.resolveL2TokenAddress(l1Token)]: await paginatedEventQuery( + this.l1Bridge, + this.l1Bridge.filters.ERC20DepositInitiated(l1Token, undefined, fromAddress), + eventConfig + ), + }; } - queryL2BridgeFinalizationEvents( + async queryL2BridgeFinalizationEvents( l1Token: string, fromAddress: string, eventConfig: EventSearchConfig - ): Promise { - return paginatedEventQuery( - this.l2Bridge, - this.l2Bridge.filters.DepositFinalized(l1Token, undefined, fromAddress), - eventConfig - ); + ): Promise { + return { + [this.resolveL2TokenAddress(l1Token)]: await paginatedEventQuery( + this.l2Bridge, + 
this.l2Bridge.filters.DepositFinalized(l1Token, undefined, fromAddress), + eventConfig + ), + }; } } diff --git a/src/clients/bridges/op-stack/optimism/SnxOptimismBridge.ts b/src/clients/bridges/op-stack/optimism/SnxOptimismBridge.ts index 2452953b6..fdfc542c8 100644 --- a/src/clients/bridges/op-stack/optimism/SnxOptimismBridge.ts +++ b/src/clients/bridges/op-stack/optimism/SnxOptimismBridge.ts @@ -1,20 +1,16 @@ -import { - Contract, - BigNumber, - paginatedEventQuery, - Event, - EventSearchConfig, - Signer, - Provider, -} from "../../../../utils"; +import { Contract, BigNumber, paginatedEventQuery, EventSearchConfig, Signer, Provider } from "../../../../utils"; import { CONTRACT_ADDRESSES } from "../../../../common"; -import { OpStackBridge, BridgeTransactionDetails } from "../OpStackBridgeInterface"; +import { OpStackBridge, BridgeTransactionDetails, OpStackEvents } from "../OpStackBridgeInterface"; -export class SnxOptimismBridge implements OpStackBridge { +export class SnxOptimismBridge extends OpStackBridge { private readonly l1Bridge: Contract; private readonly l2Bridge: Contract; - constructor(private l2chainId: number, hubChainId: number, l1Signer: Signer, l2SignerOrProvider: Signer | Provider) { + constructor(l2chainId: number, hubChainId: number, l1Signer: Signer, l2SignerOrProvider: Signer | Provider) { + super(l2chainId, hubChainId, l1Signer, l2SignerOrProvider, [ + CONTRACT_ADDRESSES[hubChainId].snxOptimismBridge.address, + ]); + const { address: l1Address, abi: l1Abi } = CONTRACT_ADDRESSES[hubChainId].snxOptimismBridge; this.l1Bridge = new Contract(l1Address, l1Abi, l1Signer); @@ -22,10 +18,6 @@ export class SnxOptimismBridge implements OpStackBridge { this.l2Bridge = new Contract(l2Address, l2Abi, l2SignerOrProvider); } - get l1Gateway(): string { - return this.l1Bridge.address; - } - constructL1ToL2Txn( toAddress: string, l1Token: string, @@ -41,21 +33,33 @@ export class SnxOptimismBridge implements OpStackBridge { }; } - 
queryL1BridgeInitiationEvents(l1Token: string, toAddress: string, eventConfig: EventSearchConfig): Promise { + async queryL1BridgeInitiationEvents( + l1Token: string, + toAddress: string, + eventConfig: EventSearchConfig + ): Promise { // @dev For the SnxBridge, only the `toAddress` is indexed on the L2 event so we treat the `fromAddress` as the // toAddress when fetching the L1 event. - return paginatedEventQuery( - this.l1Bridge, - this.l1Bridge.filters.DepositInitiated(undefined, toAddress), - eventConfig - ); + return { + [this.resolveL2TokenAddress(l1Token)]: await paginatedEventQuery( + this.l1Bridge, + this.l1Bridge.filters.DepositInitiated(undefined, toAddress), + eventConfig + ), + }; } - queryL2BridgeFinalizationEvents( + async queryL2BridgeFinalizationEvents( l1Token: string, toAddress: string, eventConfig: EventSearchConfig - ): Promise { - return paginatedEventQuery(this.l2Bridge, this.l2Bridge.filters.DepositFinalized(toAddress), eventConfig); + ): Promise { + return { + [this.resolveL2TokenAddress(l1Token)]: await paginatedEventQuery( + this.l2Bridge, + this.l2Bridge.filters.DepositFinalized(toAddress), + eventConfig + ), + }; } } diff --git a/src/clients/bridges/utils.ts b/src/clients/bridges/utils.ts new file mode 100644 index 000000000..08be9161a --- /dev/null +++ b/src/clients/bridges/utils.ts @@ -0,0 +1,35 @@ +import { Event } from "ethers"; +/** + * @notice This function is designed to be used in L2 chain adapters when identifying "finalized" cross + * chain transfers. For certain L2 chains, sending WETH from L1 to L2 is impossible so the EOA is forced to + * first unwrap the WETH into ETH via the AtomicWethDepositor contract before receiving ETH to their L2 EOA. As + * a final step, the EOA must wrap the ETH back into WETH. This function is designed to be used to match the + * receipt of ETH on L2 with the wrapping of ETH into WETH on L2 to produce a single stream of "finalized" cross + * chain transfers. 
+ * @dev Matching wrap and deposit finalized events is inexact because the wrapped amount usually differs + * slightly from the deposited amount due to fees and inventory management logic. This function therefore + * coarsely matches L2 deposit events with L2 wrap events by finding the first wrap event following the deposit + * event. This wrap event is then removed from a list and cannot be matched with any other deposit event. Since + * wrapping ETH is only expected to be done by this relayer, then this is a very accurate proxy for deciding + * when WETH cross chain transfers have finalized into the relayer's L2 WETH inventory. + * @dev This function is used in the WethBridge class in the OP stack and the ZkSyncAdapter. + * @param l2EthDepositEvents List of L2 DepositFinalized events emitted when the EOA receives ETH on L2. + * @param _l2WrapEvents List of L2 Wrap events emitted when the EOA wraps ETH into WETH on L2. + * @returns List of l2EthDepositEvents followed by a l2WrapEvent. None of the + * l2EthDepositEvents will match with the same l2WrapEvent. + */ +export function matchL2EthDepositAndWrapEvents(l2EthDepositEvents: Event[], _l2WrapEvents: Event[]): Event[] { + const l2WrapEvents = [..._l2WrapEvents]; // deep-copy because we're going to modify this in-place. + return l2EthDepositEvents.filter((l2EthDepositEvent: Event) => { + // Search from left to right to find the first following wrap event. + const followingWrapEventIndex = l2WrapEvents.findIndex( + (wrapEvent) => wrapEvent.blockNumber >= l2EthDepositEvent.blockNumber + ); + // Delete the wrap event from the l2 wrap events array to avoid duplicate processing. 
+ if (followingWrapEventIndex >= 0) { + l2WrapEvents.splice(followingWrapEventIndex, 1); + return true; + } + return false; + }); +} diff --git a/src/clients/index.ts b/src/clients/index.ts index 782c72472..bcfefbb3e 100644 --- a/src/clients/index.ts +++ b/src/clients/index.ts @@ -1,4 +1,4 @@ -import { clients } from "@across-protocol/sdk-v2"; +import { clients } from "@across-protocol/sdk"; export type SpokePoolClient = clients.SpokePoolClient; export type SpokePoolUpdate = clients.SpokePoolUpdate; diff --git a/src/common/ClientHelper.ts b/src/common/ClientHelper.ts index 2f3f7ff3e..becf3c461 100644 --- a/src/common/ClientHelper.ts +++ b/src/common/ClientHelper.ts @@ -17,7 +17,7 @@ import { import { HubPoolClient, MultiCallerClient, ConfigStoreClient, SpokePoolClient } from "../clients"; import { CommonConfig } from "./Config"; import { SpokePoolClientsByChain } from "../interfaces"; -import { caching, clients, utils as sdkUtils } from "@across-protocol/sdk-v2"; +import { caching, clients, utils as sdkUtils } from "@across-protocol/sdk"; export interface Clients { hubPoolClient: HubPoolClient; @@ -40,6 +40,46 @@ async function getSpokePoolSigners( ); } +/** + * Resolve the spoke chain activation block for a SpokePool deployment. Prefer sourcing the + * block number from cache, but fall back to resolution via RPC queries and cache the result. + * @param chainId Chain ID for the SpokePool deployment. + * @param hubPoolClient HubPoolClient instance. + * @returns SpokePool activation block number on chainId. 
+ */ +export async function resolveSpokePoolActivationBlock( + chainId: number, + hubPoolClient: HubPoolClient, + blockNumber?: number +): Promise { + const spokePoolAddr = hubPoolClient.getSpokePoolForBlock(chainId, blockNumber); + const key = `relayer_${chainId}_spokepool_${spokePoolAddr}_activation_block`; + + const redis = await getRedisCache(hubPoolClient.logger); + if (isDefined(redis)) { + const activationBlock = await redis.get(key); + const numericActivationBlock = Number(activationBlock); + if (Number.isInteger(numericActivationBlock) && numericActivationBlock > 0) { + return numericActivationBlock; + } + } + + // Get the timestamp of the block where the SpokePool was activated on mainnet, and resolve that + // to a block number on the SpokePool chain. Use this block as the lower bound for the search. + const blockFinder = undefined; + const mainnetActivationBlock = hubPoolClient.getSpokePoolActivationBlock(chainId, spokePoolAddr); + const { timestamp } = await hubPoolClient.hubPool.provider.getBlock(mainnetActivationBlock); + const hints = { lowBlock: getDeploymentBlockNumber("SpokePool", chainId) }; + const activationBlock = await getBlockForTimestamp(chainId, timestamp, blockFinder, redis, hints); + + const cacheAfter = 5 * 24 * 3600; // 5 days + if (isDefined(redis) && getCurrentTime() - timestamp > cacheAfter) { + await redis.set(key, activationBlock.toString()); + } + + return activationBlock; +} + /** * Construct spoke pool clients that query from [latest-lookback, latest]. Clients on chains that are disabled at * latest-lookback will be set to undefined. @@ -163,16 +203,9 @@ export async function constructSpokePoolClientsWithStartBlocks( const spokePoolSigners = await getSpokePoolSigners(baseSigner, enabledChains); const spokePools = await Promise.all( enabledChains.map(async (chainId) => { - // Grab latest spoke pool as of `toBlockOverride[1]`. If `toBlockOverride[1]` is undefined, then grabs current - // spoke pool. 
- const latestSpokePool = hubPoolClient.getSpokePoolForBlock(chainId, toBlockOverride[1]); - const spokePoolContract = new Contract(latestSpokePool, SpokePool.abi, spokePoolSigners[chainId]); - const spokePoolActivationBlock = hubPoolClient.getSpokePoolActivationBlock(chainId, latestSpokePool); - const time = (await hubPoolClient.hubPool.provider.getBlock(spokePoolActivationBlock)).timestamp; - - // Improve BlockFinder efficiency by clamping its search space lower bound to the SpokePool deployment block. - const hints = { lowBlock: getDeploymentBlockNumber("SpokePool", chainId) }; - const registrationBlock = await getBlockForTimestamp(chainId, time, blockFinder, redis, hints); + const spokePoolAddr = hubPoolClient.getSpokePoolForBlock(chainId, toBlockOverride[1]); + const spokePoolContract = SpokePool.connect(spokePoolAddr, spokePoolSigners[chainId]); + const registrationBlock = await resolveSpokePoolActivationBlock(chainId, hubPoolClient, toBlockOverride[1]); return { chainId, contract: spokePoolContract, registrationBlock }; }) ); diff --git a/src/common/Config.ts b/src/common/Config.ts index 02a679cf0..77c1ba2d4 100644 --- a/src/common/Config.ts +++ b/src/common/Config.ts @@ -23,7 +23,7 @@ export class CommonConfig { readonly arweaveGateway: ArweaveGatewayInterface; // State we'll load after we update the config store client and fetch all chains we want to support. - public multiCallChunkSize: { [chainId: number]: number }; + public multiCallChunkSize: { [chainId: number]: number } = {}; public toBlockOverride: Record = {}; constructor(env: ProcessEnv) { @@ -91,8 +91,6 @@ export class CommonConfig { * @param chainIdIndices All expected chain ID's that could be supported by this config. */ loadAndValidateConfigForChains(chainIdIndices: number[]): void { - const multiCallChunkSize: { [chainId: number]: number } = {}; - for (const chainId of chainIdIndices) { // Validate that there is a block range end block buffer for each chain. 
if (Object.keys(this.blockRangeEndBlockBuffer).length > 0) { @@ -116,9 +114,9 @@ export class CommonConfig { process.env[`MULTICALL_CHUNK_SIZE_CHAIN_${chainId}`] ?? DEFAULT_CHAIN_MULTICALL_CHUNK_SIZE[chainId] ?? DEFAULT_MULTICALL_CHUNK_SIZE - ); + ); assert(chunkSize > 0, `Chain ${chainId} multicall chunk size (${chunkSize}) must be greater than 0`); - multiCallChunkSize[chainId] = chunkSize; + this.multiCallChunkSize[chainId] = chunkSize; // Load any toBlock overrides. if (process.env[`TO_BLOCK_OVERRIDE_${chainId}`] !== undefined) { @@ -127,7 +125,5 @@ export class CommonConfig { this.toBlockOverride[chainId] = toBlock; } } - - this.multiCallChunkSize = multiCallChunkSize; } } diff --git a/src/common/Constants.ts b/src/common/Constants.ts index 0c0709c61..83e224b17 100644 --- a/src/common/Constants.ts +++ b/src/common/Constants.ts @@ -3,7 +3,7 @@ import { CHAIN_IDs, TOKEN_SYMBOLS_MAP, ethers } from "../utils"; // Maximum supported version of the configuration loaded into the Across ConfigStore. // It protects bots from running outdated code against newer version of the on-chain config store. // @dev Incorrectly setting this value may lead to incorrect behaviour and potential loss of funds. -export const CONFIG_STORE_VERSION = 3; +export const CONFIG_STORE_VERSION = 4; export const RELAYER_MIN_FEE_PCT = 0.0003; @@ -18,6 +18,7 @@ export const DATAWORKER_FAST_LOOKBACK: { [chainId: number]: number } = { 288: 11520, 324: 4 * 24 * 60 * 60, 8453: 172800, // Same as Optimism. + 34443: 172800, // Same as Optimism. 
42161: 1382400, 59144: 115200, // 1 block every 3 seconds }; @@ -54,12 +55,14 @@ export const DEFAULT_MIN_DEPOSIT_CONFIRMATIONS = { 288: 0, 324: 120, 8453: 120, + 34443: 120, 42161: 0, 59144: 30, // Testnets: 5: 0, 280: 0, 420: 0, + 919: 0, 59140: 0, 80001: 0, 84531: 0, @@ -77,12 +80,14 @@ export const MIN_DEPOSIT_CONFIRMATIONS: { [threshold: number | string]: { [chain 288: 0, 324: 0, 8453: 60, + 34443: 60, 42161: 0, 59144: 1, // Testnets: 5: 0, 280: 0, 420: 0, + 919: 0, 59140: 0, 80001: 0, 84531: 0, @@ -95,12 +100,14 @@ export const MIN_DEPOSIT_CONFIRMATIONS: { [threshold: number | string]: { [chain 288: 0, 324: 0, 8453: 60, + 34443: 60, 42161: 0, 59144: 1, // Testnets: 5: 0, 280: 0, 420: 0, + 919: 0, 59140: 0, 80001: 0, 84531: 0, @@ -122,12 +129,14 @@ export const CHAIN_MAX_BLOCK_LOOKBACK = { 288: 4990, 324: 10000, 8453: 1500, + 34443: 1500, 42161: 10000, 59144: 5000, // Testnets: 5: 10000, 280: 10000, 420: 10000, + 919: 10000, 59140: 10000, 80001: 10000, 84531: 10000, @@ -149,12 +158,14 @@ export const BUNDLE_END_BLOCK_BUFFERS = { 288: 0, // **UPDATE** 288 is disabled so there should be no buffer. 324: 120, // ~1s/block. ZkSync is a centralized sequencer but is relatively unstable so this is kept higher than 0 8453: 60, // 2s/block. Same finality profile as Optimism + 34443: 60, // 2s/block. Same finality profile as Optimism 42161: 240, // ~0.25s/block. Arbitrum is a centralized sequencer 59144: 40, // At 3s/block, 2 mins = 40 blocks. // Testnets: 5: 0, 280: 0, 420: 0, + 919: 0, 59140: 0, 80001: 0, 84531: 0, @@ -173,6 +184,7 @@ export const DEFAULT_MULTICALL_CHUNK_SIZE = 100; export const DEFAULT_CHAIN_MULTICALL_CHUNK_SIZE: { [chainId: number]: number } = { 10: 75, 8453: 75, + 59144: 50, }; // List of proposal block numbers to ignore. 
This should be ignored because they are administrative bundle proposals @@ -194,6 +206,7 @@ export const CHAIN_CACHE_FOLLOW_DISTANCE: { [chainId: number]: number } = { 288: 0, 324: 512, 8453: 120, + 34443: 120, 42161: 32, 59144: 100, // Linea has a soft-finality of 1 block. This value is padded - but at 3s/block the padding is 5 minutes 534352: 0, @@ -201,6 +214,7 @@ export const CHAIN_CACHE_FOLLOW_DISTANCE: { [chainId: number]: number } = { 5: 0, 280: 0, 420: 0, + 919: 0, 59140: 0, 80001: 0, 84531: 0, @@ -222,6 +236,7 @@ export const DEFAULT_NO_TTL_DISTANCE: { [chainId: number]: number } = { 288: 86400, 324: 172800, 8453: 86400, + 34443: 86400, 59144: 57600, 42161: 691200, 534352: 57600, @@ -234,6 +249,7 @@ export const DEFAULT_GAS_FEE_SCALERS: { 1: { maxFeePerGasScaler: 3, maxPriorityFeePerGasScaler: 1.2 }, 10: { maxFeePerGasScaler: 2, maxPriorityFeePerGasScaler: 1 }, 8453: { maxFeePerGasScaler: 2, maxPriorityFeePerGasScaler: 1 }, + 34443: { maxFeePerGasScaler: 2, maxPriorityFeePerGasScaler: 1 }, }; // This is how many seconds stale the block number can be for us to use it for evaluating the reorg distance in the cache provider. @@ -251,6 +267,7 @@ export const multicall3Addresses = { 288: "0xcA11bde05977b3631167028862bE2a173976CA11", 324: "0xF9cda624FBC7e059355ce98a31693d299FACd963", 8453: "0xcA11bde05977b3631167028862bE2a173976CA11", + 34443: "0xcA11bde05977b3631167028862bE2a173976CA11", 42161: "0xcA11bde05977b3631167028862bE2a173976CA11", 59144: "0xcA11bde05977b3631167028862bE2a173976CA11", 534352: "0xcA11bde05977b3631167028862bE2a173976CA11", @@ -273,7 +290,7 @@ export type Multicall2Call = { // These are the spokes that can hold both ETH and WETH, so they should be added together when caclulating whether // a bundle execution is possible with the funds in the pool. -export const spokesThatHoldEthAndWeth = [10, 324, 8453, 59144]; +export const spokesThatHoldEthAndWeth = [10, 324, 8453, 34443, 59144]; /** * An official mapping of chain IDs to CCTP domains. 
This mapping is separate from chain identifiers @@ -317,5 +334,6 @@ export const RELAYER_DEFAULT_SPOKEPOOL_INDEXER = "./dist/src/libexec/RelayerSpok export const DEFAULT_ARWEAVE_GATEWAY = { url: "arweave.net", port: 443, protocol: "https" }; -// Chains with slow (> 2 day liveness) canonical L2-->L1 bridges. +// Chains with slow (> 2 day liveness) canonical L2-->L1 bridges that we prioritize taking repayment on. +// This does not include all 7-day withdrawal chains because we don't necessarily prefer being repaid on some of these 7-day chains, like Mode. export const SLOW_WITHDRAWAL_CHAINS = [CHAIN_IDs.BASE, CHAIN_IDs.ARBITRUM, CHAIN_IDs.OPTIMISM]; diff --git a/src/common/ContractAddresses.ts b/src/common/ContractAddresses.ts index a69bc7740..543322569 100644 --- a/src/common/ContractAddresses.ts +++ b/src/common/ContractAddresses.ts @@ -37,6 +37,25 @@ const CCTP_MESSAGE_TRANSMITTER_CONTRACT_ABI = [ stateMutability: "nonpayable", type: "function", }, + { + inputs: [ + { + internalType: "bytes32", + name: "", + type: "bytes32", + }, + ], + name: "usedNonces", + outputs: [ + { + internalType: "uint256", + name: "", + type: "uint256", + }, + ], + stateMutability: "view", + type: "function", + }, ]; export const CCTP_TOKEN_MESSENGER_CONTRACT_ABI = [ @@ -242,7 +261,7 @@ export const LINEA_TOKEN_BRIDGE_CONTRACT_ABI = [ type: "address", }, { - indexed: false, + indexed: true, internalType: "address", name: "recipient", type: "address", @@ -254,13 +273,13 @@ export const LINEA_TOKEN_BRIDGE_CONTRACT_ABI = [ type: "address", }, { - indexed: true, + indexed: false, internalType: "uint256", name: "amount", type: "uint256", }, ], - name: "BridgingInitiated", + name: "BridgingInitiatedV2", type: "event", }, { @@ -279,19 +298,19 @@ export const LINEA_TOKEN_BRIDGE_CONTRACT_ABI = [ type: "address", }, { - indexed: true, + indexed: false, internalType: "uint256", name: "amount", type: "uint256", }, { - indexed: false, + indexed: true, internalType: "address", name: "recipient", type: 
"address", }, ], - name: "BridgingFinalized", + name: "BridgingFinalizedV2", type: "event", }, { @@ -395,6 +414,121 @@ export const LINEA_USDC_BRIDGE_CONTRACT_ABI = [ }, ]; +const OP_WETH_ABI = [ + { + constant: false, + inputs: [{ name: "wad", type: "uint256" }], + name: "withdraw", + outputs: [], + payable: false, + stateMutability: "nonpayable", + type: "function", + }, + { + constant: false, + inputs: [], + name: "deposit", + outputs: [], + payable: true, + stateMutability: "payable", + type: "function", + }, + { + constant: true, + inputs: [{ name: "", type: "address" }], + name: "balanceOf", + outputs: [{ name: "", type: "uint256" }], + payable: false, + stateMutability: "view", + type: "function", + }, + { + anonymous: false, + inputs: [ + { + indexed: true, + internalType: "address", + name: "dst", + type: "address", + }, + { + indexed: false, + internalType: "uint256", + name: "wad", + type: "uint256", + }, + ], + name: "Deposit", + type: "event", + }, +]; + +const OP_STANDARD_BRIDGE_ABI = [ + { + anonymous: false, + inputs: [ + { indexed: true, internalType: "address", name: "_l1Token", type: "address" }, + { indexed: true, internalType: "address", name: "_l2Token", type: "address" }, + { indexed: true, internalType: "address", name: "_from", type: "address" }, + { indexed: false, internalType: "address", name: "_to", type: "address" }, + { indexed: false, internalType: "uint256", name: "_amount", type: "uint256" }, + { indexed: false, internalType: "bytes", name: "_data", type: "bytes" }, + ], + name: "DepositFinalized", + type: "event", + }, +]; + +const OVM_STANDARD_BRIDGE_ABI = [ + { + anonymous: false, + inputs: [ + { indexed: true, internalType: "address", name: "_l1Token", type: "address" }, + { indexed: true, internalType: "address", name: "_l2Token", type: "address" }, + { indexed: true, internalType: "address", name: "_from", type: "address" }, + { indexed: false, internalType: "address", name: "_to", type: "address" }, + { indexed: false, 
internalType: "uint256", name: "_amount", type: "uint256" }, + { indexed: false, internalType: "bytes", name: "_data", type: "bytes" }, + ], + name: "ERC20DepositInitiated", + type: "event", + }, + { + anonymous: false, + inputs: [ + { indexed: true, internalType: "address", name: "_from", type: "address" }, + { indexed: true, internalType: "address", name: "_to", type: "address" }, + { indexed: false, internalType: "uint256", name: "_amount", type: "uint256" }, + { indexed: false, internalType: "bytes", name: "_data", type: "bytes" }, + ], + name: "ETHDepositInitiated", + type: "event", + }, + { + inputs: [ + { internalType: "uint32", name: "_l2Gas", type: "uint32" }, + { internalType: "bytes", name: "_data", type: "bytes" }, + ], + name: "depositETH", + outputs: [], + stateMutability: "payable", + type: "function", + }, + { + inputs: [ + { internalType: "address", name: "_l1Token", type: "address" }, + { internalType: "address", name: "_l2Token", type: "address" }, + { internalType: "uint256", name: "_amount", type: "uint256" }, + { internalType: "uint32", name: "_l2Gas", type: "uint32" }, + { internalType: "bytes", name: "_data", type: "bytes" }, + ], + name: "depositERC20", + outputs: [], + stateMutability: "nonpayable", + type: "function", + }, +]; + // Constants file exporting hardcoded contract addresses per chain. export const CONTRACT_ADDRESSES: { [chainId: number]: { @@ -583,7 +717,7 @@ export const CONTRACT_ADDRESSES: { // OVM, ZkSync, Linea, and Polygon cant deposit WETH directly so we use an atomic depositor contract that unwraps WETH and // bridges ETH other the canonical bridge. atomicDepositor: { - address: "0x6e41f79772c3CF7D6d15d17d899e129d5aAAA740", + address: "0x24d8b91aB9c461d7c0D6fB9F5a294CEA61D11710", abi: [ { stateMutability: "payable", type: "fallback" }, { @@ -658,109 +792,17 @@ export const CONTRACT_ADDRESSES: { // Stack chain to the name to differentiate. This one is for Optimism. 
ovmStandardBridge_10: { address: "0x99C9fc46f92E8a1c0deC1b1747d010903E884bE1", - abi: [ - { - anonymous: false, - inputs: [ - { indexed: true, internalType: "address", name: "_l1Token", type: "address" }, - { indexed: true, internalType: "address", name: "_l2Token", type: "address" }, - { indexed: true, internalType: "address", name: "_from", type: "address" }, - { indexed: false, internalType: "address", name: "_to", type: "address" }, - { indexed: false, internalType: "uint256", name: "_amount", type: "uint256" }, - { indexed: false, internalType: "bytes", name: "_data", type: "bytes" }, - ], - name: "ERC20DepositInitiated", - type: "event", - }, - { - anonymous: false, - inputs: [ - { indexed: true, internalType: "address", name: "_from", type: "address" }, - { indexed: true, internalType: "address", name: "_to", type: "address" }, - { indexed: false, internalType: "uint256", name: "_amount", type: "uint256" }, - { indexed: false, internalType: "bytes", name: "_data", type: "bytes" }, - ], - name: "ETHDepositInitiated", - type: "event", - }, - { - inputs: [ - { internalType: "uint32", name: "_l2Gas", type: "uint32" }, - { internalType: "bytes", name: "_data", type: "bytes" }, - ], - name: "depositETH", - outputs: [], - stateMutability: "payable", - type: "function", - }, - { - inputs: [ - { internalType: "address", name: "_l1Token", type: "address" }, - { internalType: "address", name: "_l2Token", type: "address" }, - { internalType: "uint256", name: "_amount", type: "uint256" }, - { internalType: "uint32", name: "_l2Gas", type: "uint32" }, - { internalType: "bytes", name: "_data", type: "bytes" }, - ], - name: "depositERC20", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - ], + abi: OVM_STANDARD_BRIDGE_ABI, }, // Since there are multiple ovmStandardBridges on mainnet for different OP Stack chains, we append the chain id of the Op // Stack chain to the name to differentiate. This one is for Base. 
ovmStandardBridge_8453: { address: "0x3154Cf16ccdb4C6d922629664174b904d80F2C35", - abi: [ - { - anonymous: false, - inputs: [ - { indexed: true, internalType: "address", name: "_l1Token", type: "address" }, - { indexed: true, internalType: "address", name: "_l2Token", type: "address" }, - { indexed: true, internalType: "address", name: "_from", type: "address" }, - { indexed: false, internalType: "address", name: "_to", type: "address" }, - { indexed: false, internalType: "uint256", name: "_amount", type: "uint256" }, - { indexed: false, internalType: "bytes", name: "_data", type: "bytes" }, - ], - name: "ERC20DepositInitiated", - type: "event", - }, - { - anonymous: false, - inputs: [ - { indexed: true, internalType: "address", name: "_from", type: "address" }, - { indexed: true, internalType: "address", name: "_to", type: "address" }, - { indexed: false, internalType: "uint256", name: "_amount", type: "uint256" }, - { indexed: false, internalType: "bytes", name: "_data", type: "bytes" }, - ], - name: "ETHDepositInitiated", - type: "event", - }, - { - inputs: [ - { internalType: "uint32", name: "_l2Gas", type: "uint32" }, - { internalType: "bytes", name: "_data", type: "bytes" }, - ], - name: "depositETH", - outputs: [], - stateMutability: "payable", - type: "function", - }, - { - inputs: [ - { internalType: "address", name: "_l1Token", type: "address" }, - { internalType: "address", name: "_l2Token", type: "address" }, - { internalType: "uint256", name: "_amount", type: "uint256" }, - { internalType: "uint32", name: "_l2Gas", type: "uint32" }, - { internalType: "bytes", name: "_data", type: "bytes" }, - ], - name: "depositERC20", - outputs: [], - stateMutability: "nonpayable", - type: "function", - }, - ], + abi: OVM_STANDARD_BRIDGE_ABI, + }, + ovmStandardBridge_34443: { + address: "0x735aDBbE72226BD52e818E7181953f42E3b0FF21", + abi: OVM_STANDARD_BRIDGE_ABI, }, polygonRootChainManager: { address: "0xA0c68C638235ee32657e8f720a23ceC1bFc77C77", @@ -964,6 +1006,22 @@ 
export const CONTRACT_ADDRESSES: { }, ], }, + hubPool: { + address: "0xc186fA914353c44b2E33eBE05f21846F1048bEda", + abi: [ + { + anonymous: false, + inputs: [ + { indexed: false, internalType: "address", name: "l1Token", type: "address" }, + { indexed: false, internalType: "address", name: "l2Token", type: "address" }, + { indexed: false, internalType: "uint256", name: "amount", type: "uint256" }, + { indexed: false, internalType: "address", name: "to", type: "address" }, + ], + name: "TokensRelayed", + type: "event", + }, + ], + }, }, 10: { daiOptimismBridge: { @@ -1000,53 +1058,11 @@ export const CONTRACT_ADDRESSES: { }, ovmStandardBridge: { address: "0x4200000000000000000000000000000000000010", - abi: [ - { - anonymous: false, - inputs: [ - { indexed: true, internalType: "address", name: "_l1Token", type: "address" }, - { indexed: true, internalType: "address", name: "_l2Token", type: "address" }, - { indexed: true, internalType: "address", name: "_from", type: "address" }, - { indexed: false, internalType: "address", name: "_to", type: "address" }, - { indexed: false, internalType: "uint256", name: "_amount", type: "uint256" }, - { indexed: false, internalType: "bytes", name: "_data", type: "bytes" }, - ], - name: "DepositFinalized", - type: "event", - }, - ], + abi: OP_STANDARD_BRIDGE_ABI, }, weth: { address: "0x4200000000000000000000000000000000000006", - abi: [ - { - constant: false, - inputs: [{ name: "wad", type: "uint256" }], - name: "withdraw", - outputs: [], - payable: false, - stateMutability: "nonpayable", - type: "function", - }, - { - constant: false, - inputs: [], - name: "deposit", - outputs: [], - payable: true, - stateMutability: "payable", - type: "function", - }, - { - constant: true, - inputs: [{ name: "", type: "address" }], - name: "balanceOf", - outputs: [{ name: "", type: "uint256" }], - payable: false, - stateMutability: "view", - type: "function", - }, - ], + abi: OP_WETH_ABI, }, eth: { address: 
"0xDeadDeAddeAddEAddeadDEaDDEAdDeaDDeAD0000", @@ -1130,58 +1146,28 @@ export const CONTRACT_ADDRESSES: { }, weth: { address: "0x5AEa5775959fBC2557Cc8789bC1bf90A239D9a91", - }, - }, - 8453: { - ovmStandardBridge: { - address: "0x4200000000000000000000000000000000000010", abi: [ { anonymous: false, inputs: [ - { indexed: true, internalType: "address", name: "_l1Token", type: "address" }, - { indexed: true, internalType: "address", name: "_l2Token", type: "address" }, - { indexed: true, internalType: "address", name: "_from", type: "address" }, - { indexed: false, internalType: "address", name: "_to", type: "address" }, + { indexed: true, internalType: "address", name: "from", type: "address" }, + { indexed: true, internalType: "address", name: "_to", type: "address" }, { indexed: false, internalType: "uint256", name: "_amount", type: "uint256" }, - { indexed: false, internalType: "bytes", name: "_data", type: "bytes" }, ], - name: "DepositFinalized", + name: "Transfer", type: "event", }, ], }, + }, + 8453: { + ovmStandardBridge: { + address: "0x4200000000000000000000000000000000000010", + abi: OP_STANDARD_BRIDGE_ABI, + }, weth: { address: "0x4200000000000000000000000000000000000006", - abi: [ - { - constant: false, - inputs: [{ name: "wad", type: "uint256" }], - name: "withdraw", - outputs: [], - payable: false, - stateMutability: "nonpayable", - type: "function", - }, - { - constant: false, - inputs: [], - name: "deposit", - outputs: [], - payable: true, - stateMutability: "payable", - type: "function", - }, - { - constant: true, - inputs: [{ name: "", type: "address" }], - name: "balanceOf", - outputs: [{ name: "", type: "uint256" }], - payable: false, - stateMutability: "view", - type: "function", - }, - ], + abi: OP_WETH_ABI, }, eth: { address: "0xDeadDeAddeAddEAddeadDEaDDEAdDeaDDeAD0000", @@ -1195,6 +1181,19 @@ export const CONTRACT_ADDRESSES: { abi: CCTP_TOKEN_MESSENGER_CONTRACT_ABI, }, }, + 34443: { + ovmStandardBridge: { + address: 
"0x4200000000000000000000000000000000000010", + abi: OP_STANDARD_BRIDGE_ABI, + }, + weth: { + address: "0x4200000000000000000000000000000000000006", + abi: OP_WETH_ABI, + }, + eth: { + address: "0xDeadDeAddeAddEAddeadDEaDDEAdDeaDDeAD0000", + }, + }, 42161: { erc20Gateway: { abi: [ diff --git a/src/dataworker/Dataworker.ts b/src/dataworker/Dataworker.ts index b4c81e512..4bae77879 100644 --- a/src/dataworker/Dataworker.ts +++ b/src/dataworker/Dataworker.ts @@ -1,6 +1,6 @@ import assert from "assert"; import { Contract, utils as ethersUtils } from "ethers"; -import { utils as sdkUtils } from "@across-protocol/sdk-v2"; +import { utils as sdkUtils } from "@across-protocol/sdk"; import { bnZero, winston, @@ -42,7 +42,7 @@ import { } from "./DataworkerUtils"; import _ from "lodash"; import { CONTRACT_ADDRESSES, spokePoolClientsToProviders } from "../common"; -import * as sdk from "@across-protocol/sdk-v2"; +import * as sdk from "@across-protocol/sdk"; import { BundleDepositsV3, BundleExcessSlowFills, @@ -383,6 +383,7 @@ export class Dataworker { blockRangesForProposal, spokePoolClients, latestBlockSearched, + false, // Don't load data from arweave when proposing. logData ); @@ -470,11 +471,12 @@ export class Dataworker { blockRangesForProposal: number[][], spokePoolClients: SpokePoolClientsByChain, latestMainnetBundleEndBlock: number, + loadDataFromArweave = false, logData = false ): Promise { const timerStart = Date.now(); const { bundleDepositsV3, bundleFillsV3, bundleSlowFillsV3, unexecutableSlowFills, expiredDepositsToRefundV3 } = - await this.clients.bundleDataClient.loadData(blockRangesForProposal, spokePoolClients); + await this.clients.bundleDataClient.loadData(blockRangesForProposal, spokePoolClients, loadDataFromArweave); // Prepare information about what we need to store to // Arweave for the bundle. 
We will be doing this at a // later point so that we can confirm that this data is @@ -627,7 +629,8 @@ export class Dataworker { widestPossibleExpectedBlockRange: number[][], rootBundle: PendingRootBundle, spokePoolClients: { [chainId: number]: SpokePoolClient }, - earliestBlocksInSpokePoolClients: { [chainId: number]: number } + earliestBlocksInSpokePoolClients: { [chainId: number]: number }, + loadDataFromArweave = false ): Promise< // If valid is false, we get a reason and we might get expected trees. | { @@ -834,7 +837,7 @@ export class Dataworker { // @dev only throw this error if the hub chain ID is 1, suggesting we're running on production. if (versionAtProposalBlock <= sdk.constants.TRANSFER_THRESHOLD_MAX_CONFIG_STORE_VERSION && hubPoolChainId === 1) { throw new Error( - "Must use relayer-v2 code at commit 412ddc30af72c2ac78f9e4c8dccfccfd0eb478ab to validate a bundle with transferThreshold set" + "Must use relayer code at commit 412ddc30af72c2ac78f9e4c8dccfccfd0eb478ab to validate a bundle with transferThreshold set" ); } @@ -843,6 +846,7 @@ export class Dataworker { blockRangesImpliedByBundleEndBlocks, spokePoolClients, rootBundle.proposalBlockNumber, + loadDataFromArweave, logData ); @@ -1039,7 +1043,8 @@ export class Dataworker { const rootBundleData = await this._proposeRootBundle( blockNumberRanges, spokePoolClients, - matchingRootBundle.blockNumber + matchingRootBundle.blockNumber, + true // Load data from arweave when executing for speed. ); const { slowFillLeaves: leaves, slowFillTree: tree } = rootBundleData; @@ -1057,7 +1062,11 @@ export class Dataworker { continue; } - const leavesForChain = leaves.filter((leaf) => leaf.chainId === chainId); + // Filter out slow fill leaves for other chains and also expired deposits. 
+ const currentTime = client.getCurrentTime(); + const leavesForChain = leaves.filter( + (leaf) => leaf.chainId === chainId && leaf.relayData.fillDeadline >= currentTime + ); const unexecutedLeaves = leavesForChain.filter((leaf) => { const executedLeaf = slowFillsForChain.find( (event) => @@ -1327,7 +1336,8 @@ export class Dataworker { widestPossibleExpectedBlockRange, pendingRootBundle, spokePoolClients, - earliestBlocksInSpokePoolClients + earliestBlocksInSpokePoolClients, + true // Load data from arweave when executing leaves for speed. ); if (!valid) { @@ -1372,6 +1382,17 @@ export class Dataworker { return leafCount; } + // At this point, check again that there are still unexecuted pool rebalance leaves. This is done because the above + // logic, to reconstruct this pool rebalance root and the prerequisite spoke pool client updates, can take a while. + const pendingProposal: PendingRootBundle = await this.clients.hubPoolClient.hubPool.rootBundleProposal(); + if (pendingProposal.unclaimedPoolRebalanceLeafCount === 0) { + this.logger.debug({ + at: "Dataworker#executePoolRebalanceLeaves", + message: "Exiting early due to dataworker function collision", + }); + return leafCount; + } + const executedLeaves = this.clients.hubPoolClient.getExecutedLeavesForRootBundle( this.clients.hubPoolClient.getLatestProposedRootBundle(), this.clients.hubPoolClient.latestBlockSearched @@ -1530,7 +1551,6 @@ export class Dataworker { root: tree.getHexRoot(), leafId: leaf.leafId, rebalanceChain: leaf.chainId, - chainId: hubPoolChainId, token: leaf.l1Tokens, netSendAmounts: leaf.netSendAmounts, }); @@ -1958,7 +1978,8 @@ export class Dataworker { const { relayerRefundLeaves: leaves, relayerRefundTree: tree } = await this._proposeRootBundle( blockNumberRanges, spokePoolClients, - matchingRootBundle.blockNumber + matchingRootBundle.blockNumber, + true // load data from Arweave for speed purposes ); if (tree.getHexRoot() !== rootBundleRelay.relayerRefundRoot) { @@ -2217,6 +2238,7 @@ 
export class Dataworker { // FIXME: Temporary fix to disable root cache rebalancing and to keep the // executor running for tonight (2023-08-28) until we can fix the // root cache rebalancing bug. + // eslint-disable-next-line @typescript-eslint/no-misused-promises if (!this.rootCache[key] || process.env.DATAWORKER_DISABLE_REBALANCE_ROOT_CACHE === "true") { this.rootCache[key] = _buildPoolRebalanceRoot( latestMainnetBlock, diff --git a/src/dataworker/DataworkerClientHelper.ts b/src/dataworker/DataworkerClientHelper.ts index cd80818af..888f72b11 100644 --- a/src/dataworker/DataworkerClientHelper.ts +++ b/src/dataworker/DataworkerClientHelper.ts @@ -13,7 +13,7 @@ import { BundleDataClient, HubPoolClient, TokenClient } from "../clients"; import { getBlockForChain } from "./DataworkerUtils"; import { Dataworker } from "./Dataworker"; import { ProposedRootBundle, SpokePoolClientsByChain } from "../interfaces"; -import { caching } from "@across-protocol/sdk-v2"; +import { caching } from "@across-protocol/sdk"; export interface DataworkerClients extends Clients { tokenClient: TokenClient; diff --git a/src/dataworker/DataworkerConfig.ts b/src/dataworker/DataworkerConfig.ts index eb21d1e81..bec59abd3 100644 --- a/src/dataworker/DataworkerConfig.ts +++ b/src/dataworker/DataworkerConfig.ts @@ -10,7 +10,8 @@ export class DataworkerConfig extends CommonConfig { // These variables can be toggled to choose whether the bot will go through the dataworker logic. 
readonly disputerEnabled: boolean; readonly proposerEnabled: boolean; - readonly executorEnabled: boolean; + readonly l2ExecutorEnabled: boolean; + readonly l1ExecutorEnabled: boolean; readonly finalizerEnabled: boolean; // This variable can be toggled to bypass the proposer logic and always attempt to propose @@ -49,7 +50,8 @@ export class DataworkerConfig extends CommonConfig { MAX_RELAYER_REPAYMENT_LEAF_SIZE_OVERRIDE, DISPUTER_ENABLED, PROPOSER_ENABLED, - EXECUTOR_ENABLED, + L2_EXECUTOR_ENABLED, + L1_EXECUTOR_ENABLED, SPOKE_ROOTS_LOOKBACK_COUNT, SEND_DISPUTES, SEND_PROPOSALS, @@ -85,9 +87,10 @@ export class DataworkerConfig extends CommonConfig { : toBNWei("500000"); this.disputerEnabled = DISPUTER_ENABLED === "true"; this.proposerEnabled = PROPOSER_ENABLED === "true"; - this.executorEnabled = EXECUTOR_ENABLED === "true"; - if (this.executorEnabled) { - assert(this.spokeRootsLookbackCount > 0, "must set spokeRootsLookbackCount > 0 if executor enabled"); + this.l2ExecutorEnabled = L2_EXECUTOR_ENABLED === "true"; + this.l1ExecutorEnabled = L1_EXECUTOR_ENABLED === "true"; + if (this.l2ExecutorEnabled) { + assert(this.spokeRootsLookbackCount > 0, "must set spokeRootsLookbackCount > 0 if L2 executor enabled"); } else if (this.disputerEnabled || this.proposerEnabled) { // should set spokeRootsLookbackCount == 0 if executor disabled and proposer/disputer enabled this.spokeRootsLookbackCount = 0; diff --git a/src/dataworker/DataworkerUtils.ts b/src/dataworker/DataworkerUtils.ts index ecd47dda3..f7fb9f5c6 100644 --- a/src/dataworker/DataworkerUtils.ts +++ b/src/dataworker/DataworkerUtils.ts @@ -1,5 +1,5 @@ import assert from "assert"; -import { utils, interfaces, caching } from "@across-protocol/sdk-v2"; +import { utils, interfaces, caching } from "@across-protocol/sdk"; import { SpokePoolClient } from "../clients"; import { spokesThatHoldEthAndWeth } from "../common/Constants"; import { CONTRACT_ADDRESSES } from "../common/ContractAddresses"; @@ -69,7 +69,7 @@ export 
function getEndBlockBuffers( return chainIdListForBundleEvaluationBlockNumbers.map((chainId: number) => blockRangeEndBlockBuffer[chainId] ?? 0); } -// TODO: Move to SDK-v2 since this implements UMIP logic about validating block ranges. +// TODO: Move to SDK since this implements UMIP logic about validating block ranges. // Return true if we won't be able to construct a root bundle for the bundle block ranges ("blockRanges") because // the bundle wants to look up data for events that weren't in the spoke pool client's search range. export async function blockRangesAreInvalidForSpokeClients( @@ -246,14 +246,16 @@ export function getRefundsFromBundle( if (refunds === undefined) { return; } + // @dev use shallow copy so that modifying combinedRefunds doesn't modify the original refunds object. + const refundsShallowCopy = { ...refunds }; if (combinedRefunds[repaymentChainId][l2TokenAddress] === undefined) { - combinedRefunds[repaymentChainId][l2TokenAddress] = refunds; + combinedRefunds[repaymentChainId][l2TokenAddress] = refundsShallowCopy; } else { // Each refunds object should have a unique refund address so we can add new ones to the // existing dictionary. combinedRefunds[repaymentChainId][l2TokenAddress] = { ...combinedRefunds[repaymentChainId][l2TokenAddress], - ...refunds, + ...refundsShallowCopy, }; } }); @@ -343,7 +345,11 @@ export function _buildRelayerRefundRoot( return; } - const l2TokenCounterpart = clients.hubPoolClient.getL2TokenForL1TokenAtBlock(leaf.l1Tokens[index], leaf.chainId); + const l2TokenCounterpart = clients.hubPoolClient.getL2TokenForL1TokenAtBlock( + leaf.l1Tokens[index], + leaf.chainId, + endBlockForMainnet + ); // If we've already seen this leaf, then skip. 
const existingLeaf = relayerRefundLeaves.find( (relayerRefundLeaf) => @@ -420,7 +426,7 @@ export async function _buildPoolRebalanceRoot( const l1TokenCounterpart = clients.hubPoolClient.getL1TokenForL2TokenAtBlock( l2TokenAddress, repaymentChainId, - latestMainnetBlock + mainnetBundleEndBlock ); updateRunningBalance(runningBalances, repaymentChainId, l1TokenCounterpart, totalRefundAmount); @@ -443,7 +449,7 @@ export async function _buildPoolRebalanceRoot( const l1TokenCounterpart = clients.hubPoolClient.getL1TokenForL2TokenAtBlock( outputToken, destinationChainId, - latestMainnetBlock + mainnetBundleEndBlock ); const lpFee = deposit.lpFeePct.mul(deposit.inputAmount).div(fixedPointAdjustment); updateRunningBalance(runningBalances, destinationChainId, l1TokenCounterpart, deposit.inputAmount.sub(lpFee)); @@ -467,7 +473,7 @@ export async function _buildPoolRebalanceRoot( const l1TokenCounterpart = clients.hubPoolClient.getL1TokenForL2TokenAtBlock( outputToken, destinationChainId, - latestMainnetBlock + mainnetBundleEndBlock ); const lpFee = deposit.lpFeePct.mul(deposit.inputAmount).div(fixedPointAdjustment); updateRunningBalance(runningBalances, destinationChainId, l1TokenCounterpart, lpFee.sub(deposit.inputAmount)); @@ -503,7 +509,7 @@ export async function _buildPoolRebalanceRoot( const l1TokenCounterpart = clients.hubPoolClient.getL1TokenForL2TokenAtBlock( inputToken, originChainId, - latestMainnetBlock + mainnetBundleEndBlock ); updateRunningBalance(runningBalances, originChainId, l1TokenCounterpart, deposit.inputAmount); }); diff --git a/src/dataworker/PoolRebalanceUtils.ts b/src/dataworker/PoolRebalanceUtils.ts index 1236b4847..5de310951 100644 --- a/src/dataworker/PoolRebalanceUtils.ts +++ b/src/dataworker/PoolRebalanceUtils.ts @@ -1,4 +1,4 @@ -import { utils as sdkUtils } from "@across-protocol/sdk-v2"; +import { utils as sdkUtils } from "@across-protocol/sdk"; import { ConfigStoreClient, HubPoolClient, SpokePoolClient } from "../clients"; import { Clients } 
from "../common"; import * as interfaces from "../interfaces"; @@ -231,7 +231,7 @@ export function getRunningBalanceForL1Token( // This returns a possible next block range that could be submitted as a new root bundle, or used as a reference // when evaluating pending root bundle. The block end numbers must be less than the latest blocks for each chain ID -// (because we can't evaluate events in the future), and greater than the the expected start blocks, which are the +// (because we can't evaluate events in the future), and greater than the expected start blocks, which are the // greater of 0 and the latest bundle end block for an executed root bundle proposal + 1. export function getWidestPossibleExpectedBlockRange( chainIdListForBundleEvaluationBlockNumbers: number[], diff --git a/src/dataworker/README.md b/src/dataworker/README.md index 89f07f0c1..d88d590f2 100644 --- a/src/dataworker/README.md +++ b/src/dataworker/README.md @@ -74,7 +74,7 @@ These instructions can very easily be modified to conduct an "attack" on Across: On the other hand, the longer the challenge period, the slower that Across can respond to capital requirements. Across essentially can only move capital around as often as the challenge period, so every two hours currently. -Every two hours, the Dataworker will propose capital reallocation instructions to Across based on the previous two hours' worth of token flows. This is where the concept of "bundle block ranges" comes into play. Each proposed root bundle includes something called the ["bundle evaluation block numbers"](https://github.com/across-protocol/contracts-v2/blob/master/contracts/HubPool.sol#L149). These are included by the Dataworker [at proposal time](https://github.com/across-protocol/contracts-v2/blob/master/contracts/HubPool.sol#L567) and are used by everyone else to validate their proposed Merkle roots. 
+Every two hours, the Dataworker will propose capital reallocation instructions to Across based on the previous two hours' worth of token flows. This is where the concept of "bundle block ranges" comes into play. Each proposed root bundle includes something called the ["bundle evaluation block numbers"](https://github.com/across-protocol/contracts/blob/master/contracts/HubPool.sol#L149). These are included by the Dataworker [at proposal time](https://github.com/across-protocol/contracts/blob/master/contracts/HubPool.sol#L567) and are used by everyone else to validate their proposed Merkle roots. These end blocks inform all actors which block ranges, per chain, they included Bridge deposit and fill information for to construct their Merkle roots containing instructions for how Across should move capital around. @@ -89,7 +89,7 @@ The implied block range for a chain is simply: ### `bundleEvaluationBlockNumbers` is an array of numbers, how do you know which chain each block references? -This is why Across must define a canonical "chain ID index" as shown [here](https://github.com/across-protocol/relayer-v2/blob/master/src/common/Constants.ts#L9) which contains an append-only list of chain ID's. This index list is used by an example Dataworker implementation and matches identically the same canonical list defined in the [Across UMIP](https://github.com/UMAprotocol/UMIPs/blob/master/UMIPs/umip-157.md#global-constants). +This is why Across must define a canonical "chain ID index" as shown [here](https://github.com/across-protocol/relayer/blob/master/src/common/Constants.ts#L9) which contains an append-only list of chain ID's. This index list is used by an example Dataworker implementation and matches identically the same canonical list defined in the [Across UMIP](https://github.com/UMAprotocol/UMIPs/blob/master/UMIPs/umip-157.md#global-constants). 
When evaluating a `bundleEvaluationBlockNumbers`, the index of the block number must be matched with the index in the "chain ID index" list to figure out which chain the block number refers to. For example, if the `bundleEvaluationBlockNumbers=[1,2,3,4,5]` and the chain ID index list is `[1,10,137,288,42161]`, then the end block of `2` must be used as the end block in a block range for chain ID `10` (i.e. Optimism). @@ -97,7 +97,7 @@ One assumption in Across, is that each chain that Across supports must have an e ## Determining bundle start blocks when evaluating a pending root bundle proposal -`B` is trivially known since it is emitted in the [`ProposedRootBundle`](https://github.com/across-protocol/contracts-v2/blob/master/contracts/HubPool.sol#L152) event during the creation of each new pending bundle proposal. We therefore need to find `A`, the bundle start block `<= B` to evaluating the root bundle. +`B` is trivially known since it is emitted in the [`ProposedRootBundle`](https://github.com/across-protocol/contracts/blob/master/contracts/HubPool.sol#L152) event during the creation of each new pending bundle proposal. We therefore need to find `A`, the bundle start block `<= B` to evaluate the root bundle. ```mermaid flowchart LR @@ -123,7 +123,7 @@ flowchart TD ### Validating fills -A fill must match a deposit on every shared parameter that they have in common. The matched deposit does not have to be in the same bundle as the fill. A fill contains the following [event parameter](https://github.com/across-protocol/contracts-v2/blob/master/contracts/SpokePool.sol#L139)'s: +A fill must match a deposit on every shared parameter that they have in common. The matched deposit does not have to be in the same bundle as the fill.
A fill contains the following [event parameter](https://github.com/across-protocol/contracts/blob/master/contracts/SpokePool.sol#L139)'s: ```solidity event FilledRelay( @@ -145,7 +145,7 @@ event FilledRelay( ); ``` -A [deposit](https://github.com/across-protocol/contracts-v2/blob/master/contracts/SpokePool.sol#L119) contains: +A [deposit](https://github.com/across-protocol/contracts/blob/master/contracts/SpokePool.sol#L119) contains: ```solidity event FundsDeposited( @@ -198,13 +198,13 @@ Excesses from slow fills are only created when a partial fill completes a deposi At this point we have a running balance for the token for each chain. We also know all of the refund and slow fill payments that we need to instruct each SpokePool to reserve payments for. We can finally figure out how many LP funds to send out of the HubPool to each SpokePool. -This is where we'll incorporate the section on [SpokePool targets and thresholds](#spokepool-targets-and-thresholds) to determine how much of the running balances to move over to the `netSendAmount` value in a [PoolRebalanceLeaf](https://github.com/across-protocol/contracts-v2/blob/master/contracts/interfaces/HubPoolInterface.sol#L22). Inside the HubPool's code, only the positive `netSendAmounts` are [sent out of the HubPool](https://github.com/across-protocol/contracts-v2/blob/master/contracts/HubPool.sol#L893) to the SpokePools via the canonical bridges. Conversely, the `runningBalances` are simply accounting values to keep track of the running count of SpokePool balances. Whenever a portion of the `runningBalances` are included in the `netSendAmounts`, the running balances should be decremented accordingly to account for the tokens being sent out of the Hub or SpokePool. 
+This is where we'll incorporate the section on [SpokePool targets and thresholds](#spokepool-targets-and-thresholds) to determine how much of the running balances to move over to the `netSendAmount` value in a [PoolRebalanceLeaf](https://github.com/across-protocol/contracts/blob/master/contracts/interfaces/HubPoolInterface.sol#L22). Inside the HubPool's code, only the positive `netSendAmounts` are [sent out of the HubPool](https://github.com/across-protocol/contracts/blob/master/contracts/HubPool.sol#L893) to the SpokePools via the canonical bridges. Conversely, the `runningBalances` are simply accounting values to keep track of the running count of SpokePool balances. Whenever a portion of the `runningBalances` are included in the `netSendAmounts`, the running balances should be decremented accordingly to account for the tokens being sent out of the Hub or SpokePool. ## Completing the `RelayerRefundLeaf` If a `runningBalance` is below its target for a particular chain, the Dataworker might include a positive `netSendAmount` for that chain to instruct the HubPool to send tokens to the SpokePool. -However, if a `runningBalance` is above its target, the Dataworker might want to send tokens from the SpokePool to the Hub. This is achieved by setting a negative `netSendAmount`. At the HubPool level, negative `netSendAmounts` do nothing. However, the `RelayerRefundLeaf` has a property called [`amountToReturn`](https://github.com/across-protocol/contracts-v2/blob/master/contracts/interfaces/SpokePoolInterface.sol#L12) which is supposed to be set equal the negative of any negative `netSendAmounts`. Any positive `amountToReturn` values result in [tokens being sent from the SpokePool](https://github.com/across-protocol/contracts-v2/blob/master/contracts/SpokePool.sol#L923) back to the Hub via the canonical bridge. +However, if a `runningBalance` is above its target, the Dataworker might want to send tokens from the SpokePool to the Hub. 
This is achieved by setting a negative `netSendAmount`. At the HubPool level, negative `netSendAmounts` do nothing. However, the `RelayerRefundLeaf` has a property called [`amountToReturn`](https://github.com/across-protocol/contracts/blob/master/contracts/interfaces/SpokePoolInterface.sol#L12) which is supposed to be set equal to the negative of any negative `netSendAmounts`. Any positive `amountToReturn` values result in [tokens being sent from the SpokePool](https://github.com/across-protocol/contracts/blob/master/contracts/SpokePool.sol#L923) back to the Hub via the canonical bridge. ## Conclusion @@ -231,7 +231,7 @@ This is everything that the Dataworker needs to construct a root bundle! All tha Root bundle merkle leaf formats -- [PoolRebalanceLeaf](https://github.com/across-protocol/contracts-v2/blob/master/contracts/interfaces/HubPoolInterface.sol#L11): One per chain -- [RelayerRefundLeaf](https://github.com/across-protocol/contracts-v2/blob/master/contracts/interfaces/SpokePoolInterface.sol#L9) One per token per chain -- [SlowFillLeaf](https://github.com/across-protocol/contracts-v2/blob/master/contracts/interfaces/SpokePoolInterface.sol#L29) One per unfilled deposit -- [RootBundle](https://github.com/across-protocol/contracts-v2/blob/master/contracts/interfaces/HubPoolInterface.sol#L53) how the Dataworker's proposal is stored in the HubPool throughout its pending challenge window +- [PoolRebalanceLeaf](https://github.com/across-protocol/contracts/blob/master/contracts/interfaces/HubPoolInterface.sol#L11): One per chain +- [RelayerRefundLeaf](https://github.com/across-protocol/contracts/blob/master/contracts/interfaces/SpokePoolInterface.sol#L9) One per token per chain +- [SlowFillLeaf](https://github.com/across-protocol/contracts/blob/master/contracts/interfaces/SpokePoolInterface.sol#L29) One per unfilled deposit +- [RootBundle](https://github.com/across-protocol/contracts/blob/master/contracts/interfaces/HubPoolInterface.sol#L53) how the Dataworker's
proposal is stored in the HubPool throughout its pending challenge window diff --git a/src/dataworker/index.ts b/src/dataworker/index.ts index 110186013..7b5e1f90b 100644 --- a/src/dataworker/index.ts +++ b/src/dataworker/index.ts @@ -128,29 +128,33 @@ export async function runDataworker(_logger: winston.Logger, baseSigner: Signer) logger[startupLogLevel(config)]({ at: "Dataworker#index", message: "Proposer disabled" }); } - if (config.executorEnabled) { + if (config.l2ExecutorEnabled || config.l1ExecutorEnabled) { const balanceAllocator = new BalanceAllocator(spokePoolClientsToProviders(spokePoolClients)); - poolRebalanceLeafExecutionCount = await dataworker.executePoolRebalanceLeaves( - spokePoolClients, - balanceAllocator, - config.sendingExecutionsEnabled, - fromBlocks - ); + if (config.l1ExecutorEnabled) { + poolRebalanceLeafExecutionCount = await dataworker.executePoolRebalanceLeaves( + spokePoolClients, + balanceAllocator, + config.sendingExecutionsEnabled, + fromBlocks + ); + } - // Execute slow relays before relayer refunds to give them priority for any L2 funds. - await dataworker.executeSlowRelayLeaves( - spokePoolClients, - balanceAllocator, - config.sendingExecutionsEnabled, - fromBlocks - ); - await dataworker.executeRelayerRefundLeaves( - spokePoolClients, - balanceAllocator, - config.sendingExecutionsEnabled, - fromBlocks - ); + if (config.l2ExecutorEnabled) { + // Execute slow relays before relayer refunds to give them priority for any L2 funds. 
+ await dataworker.executeSlowRelayLeaves( + spokePoolClients, + balanceAllocator, + config.sendingExecutionsEnabled, + fromBlocks + ); + await dataworker.executeRelayerRefundLeaves( + spokePoolClients, + balanceAllocator, + config.sendingExecutionsEnabled, + fromBlocks + ); + } } else { logger[startupLogLevel(config)]({ at: "Dataworker#index", message: "Executor disabled" }); } @@ -197,7 +201,7 @@ export async function runDataworker(_logger: winston.Logger, baseSigner: Signer) pendingProposal, }); } else { - await clients.multiCallerClient.executeTransactionQueue(); + await clients.multiCallerClient.executeTxnQueues(); } }; diff --git a/src/finalizer/index.ts b/src/finalizer/index.ts index 69cc57bb1..b19e5f17a 100644 --- a/src/finalizer/index.ts +++ b/src/finalizer/index.ts @@ -1,4 +1,4 @@ -import { utils as sdkUtils } from "@across-protocol/sdk-v2"; +import { utils as sdkUtils } from "@across-protocol/sdk"; import assert from "assert"; import { BigNumber, Contract, constants } from "ethers"; import { getAddress } from "ethers/lib/utils"; @@ -17,7 +17,6 @@ import { import { DataworkerConfig } from "../dataworker/DataworkerConfig"; import { SpokePoolClientsByChain } from "../interfaces"; import { - CHAIN_IDs, Signer, blockExplorerLink, config, @@ -40,37 +39,75 @@ import { scrollFinalizer, zkSyncFinalizer, } from "./utils"; +import { assert as ssAssert, enums } from "superstruct"; const { isDefined } = sdkUtils; config(); let logger: winston.Logger; -const chainFinalizers: { [chainId: number]: ChainFinalizer } = { - 10: opStackFinalizer, - 137: polygonFinalizer, - 280: zkSyncFinalizer, - 324: zkSyncFinalizer, - 8453: opStackFinalizer, - 42161: arbitrumOneFinalizer, - 59144: lineaL2ToL1Finalizer, - 534352: scrollFinalizer, -}; +/** + * The finalization type is used to determine the direction of the finalization. + */ +type FinalizationType = "l1->l2" | "l2->l1" | "l1<->l2"; /** - * A list of finalizers that should be run for each chain. 
Note: we do this - * because some chains have multiple finalizers that need to be run. - * Mainly related to CCTP and Linea + * A list of finalizers that can be used to finalize messages on a chain. These are + * broken down into two categories: finalizers that finalize messages on L1 and finalizers + * that finalize messages on L2. + * @note: finalizeOnL1 is used to finalize L2 -> L1 messages (from the spoke chain to mainnet) + * @note: finalizeOnL2 is used to finalize L1 -> L2 messages (from mainnet to the spoke chain) */ -const chainFinalizerOverrides: { [chainId: number]: ChainFinalizer[] } = { +const chainFinalizers: { [chainId: number]: { finalizeOnL2: ChainFinalizer[]; finalizeOnL1: ChainFinalizer[] } } = { // Mainnets - 10: [opStackFinalizer, cctpL1toL2Finalizer, cctpL2toL1Finalizer], - 137: [polygonFinalizer, cctpL1toL2Finalizer, cctpL2toL1Finalizer], - 8453: [opStackFinalizer, cctpL1toL2Finalizer, cctpL2toL1Finalizer], - 42161: [arbitrumOneFinalizer, cctpL1toL2Finalizer, cctpL2toL1Finalizer], - 59144: [lineaL1ToL2Finalizer, lineaL2ToL1Finalizer], + 10: { + finalizeOnL1: [opStackFinalizer, cctpL2toL1Finalizer], + finalizeOnL2: [cctpL1toL2Finalizer], + }, + 137: { + finalizeOnL1: [polygonFinalizer, cctpL2toL1Finalizer], + finalizeOnL2: [cctpL1toL2Finalizer], + }, + 324: { + finalizeOnL1: [zkSyncFinalizer], + finalizeOnL2: [], + }, + 8453: { + finalizeOnL1: [opStackFinalizer, cctpL2toL1Finalizer], + finalizeOnL2: [cctpL1toL2Finalizer], + }, + 42161: { + finalizeOnL1: [arbitrumOneFinalizer, cctpL2toL1Finalizer], + finalizeOnL2: [cctpL1toL2Finalizer], + }, + 59144: { + finalizeOnL1: [lineaL2ToL1Finalizer], + finalizeOnL2: [lineaL1ToL2Finalizer], + }, + 280: { + finalizeOnL1: [zkSyncFinalizer], + finalizeOnL2: [], + }, + 534352: { + finalizeOnL1: [scrollFinalizer], + finalizeOnL2: [], + }, + 34443: { + finalizeOnL1: [opStackFinalizer], + finalizeOnL2: [], + }, // Testnets - 84532: [cctpL1toL2Finalizer, cctpL2toL1Finalizer], - 59140: [lineaL1ToL2Finalizer, 
lineaL2ToL1Finalizer], + 84532: { + finalizeOnL1: [cctpL2toL1Finalizer], + finalizeOnL2: [cctpL1toL2Finalizer], + }, + 59140: { + finalizeOnL1: [lineaL2ToL1Finalizer], + finalizeOnL2: [lineaL1ToL2Finalizer], + }, + 919: { + finalizeOnL1: [opStackFinalizer], + finalizeOnL2: [], + }, }; function enrichL1ToL2AddressesToFinalize(l1ToL2AddressesToFinalize: string[], addressesToEnsure: string[]): string[] { @@ -91,7 +128,8 @@ export async function finalize( spokePoolClients: SpokePoolClientsByChain, configuredChainIds: number[], l1ToL2AddressesToFinalize: string[], - submitFinalizationTransactions: boolean + submitFinalizationTransactions: boolean, + finalizationStrategy: FinalizationType ): Promise { const hubChainId = hubPoolClient.chainId; @@ -114,9 +152,23 @@ export async function finalize( return; } - // We want to first resolve a possible override for the finalizer, and - // then fallback to the default finalizer. - const chainSpecificFinalizers = (chainFinalizerOverrides[chainId] ?? [chainFinalizers[chainId]]).filter(isDefined); + // We should only finalize the direction that has been specified in + // the finalization strategy. + const chainSpecificFinalizers: ChainFinalizer[] = []; + switch (finalizationStrategy) { + case "l1->l2": + chainSpecificFinalizers.push(...chainFinalizers[chainId].finalizeOnL2); + break; + case "l2->l1": + chainSpecificFinalizers.push(...chainFinalizers[chainId].finalizeOnL1); + break; + case "l1<->l2": + chainSpecificFinalizers.push( + ...chainFinalizers[chainId].finalizeOnL1, + ...chainFinalizers[chainId].finalizeOnL2 + ); + break; + } assert(chainSpecificFinalizers?.length > 0, `No finalizer available for chain ${chainId}`); const network = getNetworkName(chainId); @@ -129,10 +181,8 @@ export async function finalize( hubPoolClient.hubPool.address, CONTRACT_ADDRESSES[hubChainId]?.atomicDepositor?.address, ]; - // For linea specifically, we want to include the Linea Spokepool as well. 
- if (sdkUtils.chainIsLinea(chainId)) { - addressesToEnsure.push(spokePoolClients[CHAIN_IDs.LINEA].spokePool.address); - } + // Add the spoke pool address to the list of addresses to ensure. + addressesToEnsure.push(spokePoolClients[chainId].spokePool.address); l1ToL2AddressesToFinalize = enrichL1ToL2AddressesToFinalize(l1ToL2AddressesToFinalize, addressesToEnsure); } @@ -358,6 +408,7 @@ export class FinalizerConfig extends DataworkerConfig { readonly maxFinalizerLookback: number; readonly chainsToFinalize: number[]; readonly addressesToMonitorForL1L2Finalizer: string[]; + readonly finalizationStrategy: FinalizationType; constructor(env: ProcessEnv) { const { FINALIZER_MAX_TOKENBRIDGE_LOOKBACK, FINALIZER_CHAINS, L1_L2_FINALIZER_MONITOR_ADDRESS } = env; @@ -372,6 +423,10 @@ export class FinalizerConfig extends DataworkerConfig { Number.isInteger(this.maxFinalizerLookback), `Invalid FINALIZER_MAX_TOKENBRIDGE_LOOKBACK: ${FINALIZER_MAX_TOKENBRIDGE_LOOKBACK}` ); + + const _finalizationStrategy = (env.FINALIZATION_STRATEGY ?? "l1<->l2").toLowerCase(); + ssAssert(_finalizationStrategy, enums(["l1->l2", "l2->l1", "l1<->l2"])); + this.finalizationStrategy = _finalizationStrategy; } } @@ -401,7 +456,8 @@ export async function runFinalizer(_logger: winston.Logger, baseSigner: Signer): spokePoolClients, config.chainsToFinalize.length === 0 ?
availableChains : config.chainsToFinalize, config.addressesToMonitorForL1L2Finalizer, - config.sendingFinalizationsEnabled + config.sendingFinalizationsEnabled, + config.finalizationStrategy ); } else { logger[startupLogLevel(config)]({ at: "Dataworker#index", message: "Finalizer disabled" }); @@ -413,6 +469,7 @@ export async function runFinalizer(_logger: winston.Logger, baseSigner: Signer): message: `Time to loop: ${Math.round((loopEndPostFinalizations - loopStart) / 1000)}s`, timeToUpdateSpokeClients: Math.round((loopStartPostSpokePoolUpdates - loopStart) / 1000), timeToFinalize: Math.round((loopEndPostFinalizations - loopStartPostSpokePoolUpdates) / 1000), + strategy: config.finalizationStrategy, }); if (await processEndPollingLoop(logger, "Dataworker", config.pollingDelay)) { diff --git a/src/finalizer/utils/arbitrum.ts b/src/finalizer/utils/arbitrum.ts index 52aa72343..682e8e4ed 100644 --- a/src/finalizer/utils/arbitrum.ts +++ b/src/finalizer/utils/arbitrum.ts @@ -10,6 +10,9 @@ import { getCurrentTime, getRedisCache, getBlockForTimestamp, + getL1TokenInfo, + compareAddressesSimple, + TOKEN_SYMBOLS_MAP, } from "../../utils"; import { TokensBridged } from "../../interfaces"; import { HubPoolClient, SpokePoolClient } from "../../clients"; @@ -28,20 +31,24 @@ export async function arbitrumOneFinalizer( // Arbitrum takes 7 days to finalize withdrawals, so don't look up events younger than that. 
const redis = await getRedisCache(logger); - const [fromBlock, toBlock] = await Promise.all([ - getBlockForTimestamp(chainId, getCurrentTime() - 9 * 60 * 60 * 24, undefined, redis), - getBlockForTimestamp(chainId, getCurrentTime() - 7 * 60 * 60 * 24, undefined, redis), - ]); + const latestBlockToFinalize = await getBlockForTimestamp( + chainId, + getCurrentTime() - 7 * 60 * 60 * 24, + undefined, + redis + ); logger.debug({ at: "Finalizer#ArbitrumFinalizer", message: "Arbitrum TokensBridged event filter", - fromBlock, - toBlock, + toBlock: latestBlockToFinalize, }); // Skip events that are likely not past the seven day challenge period. - const olderTokensBridgedEvents = spokePoolClient - .getTokensBridged() - .filter((e) => e.blockNumber <= toBlock && e.blockNumber >= fromBlock); + const olderTokensBridgedEvents = spokePoolClient.getTokensBridged().filter( + (e) => + e.blockNumber <= latestBlockToFinalize && + // USDC withdrawals for Arbitrum should be finalized via the CCTP Finalizer. + !compareAddressesSimple(e.l2TokenAddress, TOKEN_SYMBOLS_MAP["USDC"].addresses[CHAIN_ID]) + ); return await multicallArbitrumFinalizations(olderTokensBridgedEvents, signer, hubPoolClient, logger); } @@ -55,12 +62,7 @@ async function multicallArbitrumFinalizations( const finalizableMessages = await getFinalizableMessages(logger, tokensBridged, hubSigner); const callData = await Promise.all(finalizableMessages.map((message) => finalizeArbitrum(message.message))); const crossChainTransfers = finalizableMessages.map(({ info: { l2TokenAddress, amountToReturn } }) => { - const l1TokenCounterpart = hubPoolClient.getL1TokenForL2TokenAtBlock( - l2TokenAddress, - CHAIN_ID, - hubPoolClient.latestBlockSearched - ); - const l1TokenInfo = hubPoolClient.getTokenInfo(1, l1TokenCounterpart); + const l1TokenInfo = getL1TokenInfo(l2TokenAddress, CHAIN_ID); const amountFromWei = convertFromWei(amountToReturn.toString(), l1TokenInfo.decimals); const withdrawal: CrossChainMessage = { originationChainId: 
CHAIN_ID, @@ -172,13 +174,15 @@ async function getMessageOutboxStatusAndProof( try { const l2ToL1Messages = await l2Receipt.getL2ToL1Messages(l1Signer); if (l2ToL1Messages.length === 0 || l2ToL1Messages.length - 1 < logIndex) { - const error = new Error(`No outgoing messages found in transaction:${event.transactionHash}`); + const error = new Error( + `No outgoing messages found in transaction:${event.transactionHash} for l2 token ${event.l2TokenAddress}` + ); logger.warn({ at: "ArbitrumFinalizer", message: "Arbitrum transaction that emitted TokensBridged event unexpectedly contains 0 L2-to-L1 messages 🤢!", logIndex, l2ToL1Messages: l2ToL1Messages.length, - txnHash: event.transactionHash, + event, reason: error.stack || error.message || error.toString(), notificationPath: "across-error", }); diff --git a/src/finalizer/utils/cctp/l1ToL2.ts b/src/finalizer/utils/cctp/l1ToL2.ts index 3c108dd17..e80c786ec 100644 --- a/src/finalizer/utils/cctp/l1ToL2.ts +++ b/src/finalizer/utils/cctp/l1ToL2.ts @@ -2,22 +2,25 @@ import { TransactionReceipt, TransactionRequest } from "@ethersproject/abstract-provider"; import { ethers } from "ethers"; import { HubPoolClient, SpokePoolClient } from "../../../clients"; -import { CHAIN_MAX_BLOCK_LOOKBACK, CONTRACT_ADDRESSES, Multicall2Call } from "../../../common"; +import { CHAIN_MAX_BLOCK_LOOKBACK, CONTRACT_ADDRESSES, Multicall2Call, chainIdsToCctpDomains } from "../../../common"; import { Contract, EventSearchConfig, Signer, TOKEN_SYMBOLS_MAP, + assert, formatUnitsForToken, getBlockForTimestamp, getCachedProvider, getCurrentTime, getNetworkName, getRedisCache, + groupObjectCountsByProp, + isDefined, paginatedEventQuery, winston, } from "../../../utils"; -import { DecodedCCTPMessage, resolveCCTPRelatedTxns } from "../../../utils/CCTPUtils"; +import { CCTPMessageStatus, DecodedCCTPMessage, resolveCCTPRelatedTxns } from "../../../utils/CCTPUtils"; import { FinalizerPromise, CrossChainMessage } from "../../types"; import { 
getBlockRangeByHoursOffsets } from "../linea/common"; import { uniqWith } from "lodash"; @@ -29,27 +32,40 @@ export async function cctpL1toL2Finalizer( spokePoolClient: SpokePoolClient, l1ToL2AddressesToFinalize: string[] ): Promise { - // Let's just assume for now CCTP transfers don't take longer than 1 day and can - // happen very quickly. - const lookback = getCurrentTime() - 60 * 60 * 24; + const lookback = getCurrentTime() - 60 * 60 * 24 * 7; const redis = await getRedisCache(logger); const fromBlock = await getBlockForTimestamp(hubPoolClient.chainId, lookback, undefined, redis); logger.debug({ - at: "Finalizer#CCTPL1ToL2Finalizer", + at: `Finalizer#CCTPL1ToL2Finalizer:${spokePoolClient.chainId}`, message: `MessageSent event filter for L1 to ${getNetworkName(spokePoolClient.chainId)}`, fromBlock, }); + const cctpMessageReceiverDetails = CONTRACT_ADDRESSES[spokePoolClient.chainId].cctpMessageTransmitter; + const contract = new ethers.Contract( + cctpMessageReceiverDetails.address, + cctpMessageReceiverDetails.abi, + spokePoolClient.spokePool.provider + ); const decodedMessages = await resolveRelatedTxnReceipts( l1ToL2AddressesToFinalize, hubPoolClient.chainId, spokePoolClient.chainId, fromBlock ); - const cctpMessageReceiverDetails = CONTRACT_ADDRESSES[spokePoolClient.chainId].cctpMessageTransmitter; - const contract = new ethers.Contract(cctpMessageReceiverDetails.address, cctpMessageReceiverDetails.abi, signer); + const unprocessedMessages = decodedMessages.filter((message) => message.status === "ready"); + const statusesGrouped = groupObjectCountsByProp( + decodedMessages, + (message: { status: CCTPMessageStatus }) => message.status + ); + logger.debug({ + at: `Finalizer#CCTPL1ToL2Finalizer:${spokePoolClient.chainId}`, + message: `Detected ${unprocessedMessages.length} ready to finalize messages for CCTP L1 to ${spokePoolClient.chainId}`, + statusesGrouped, + }); + return { - crossChainMessages: await generateDepositData(decodedMessages, 
hubPoolClient.chainId, spokePoolClient.chainId), - callData: await generateMultiCallData(contract, decodedMessages), + crossChainMessages: await generateDepositData(unprocessedMessages, hubPoolClient.chainId, spokePoolClient.chainId), + callData: await generateMultiCallData(contract, unprocessedMessages), }; } @@ -65,7 +81,7 @@ async function findRelevantTxnReceiptsForCCTPDeposits( ); const eventFilter = tokenMessengerContract.filters.DepositForBurn( undefined, - TOKEN_SYMBOLS_MAP._USDC.addresses[currentChainId], // Filter by only USDC token deposits + TOKEN_SYMBOLS_MAP.USDC.addresses[currentChainId], // Filter by only USDC token deposits undefined, addressesToSearch // All depositors that we are monitoring for ); @@ -102,6 +118,7 @@ async function generateMultiCallData( messageTransmitter: Contract, messages: DecodedCCTPMessage[] ): Promise { + assert(messages.every((message) => isDefined(message.attestation))); return Promise.all( messages.map(async (message) => { const txn = (await messageTransmitter.populateTransaction.receiveMessage( diff --git a/src/finalizer/utils/cctp/l2ToL1.ts b/src/finalizer/utils/cctp/l2ToL1.ts index 717176fbb..2c4cbdece 100644 --- a/src/finalizer/utils/cctp/l2ToL1.ts +++ b/src/finalizer/utils/cctp/l2ToL1.ts @@ -2,17 +2,22 @@ import { TransactionRequest } from "@ethersproject/abstract-provider"; import { ethers } from "ethers"; import { HubPoolClient, SpokePoolClient } from "../../../clients"; -import { CONTRACT_ADDRESSES, Multicall2Call } from "../../../common"; +import { CONTRACT_ADDRESSES, Multicall2Call, chainIdsToCctpDomains } from "../../../common"; import { Contract, Signer, + TOKEN_SYMBOLS_MAP, + assert, + compareAddressesSimple, getBlockForTimestamp, getCurrentTime, getNetworkName, getRedisCache, + groupObjectCountsByProp, + isDefined, winston, } from "../../../utils"; -import { DecodedCCTPMessage, resolveCCTPRelatedTxns } from "../../../utils/CCTPUtils"; +import { CCTPMessageStatus, DecodedCCTPMessage, resolveCCTPRelatedTxns } 
from "../../../utils/CCTPUtils"; import { FinalizerPromise, CrossChainMessage } from "../../types"; export async function cctpL2toL1Finalizer( @@ -21,22 +26,39 @@ export async function cctpL2toL1Finalizer( hubPoolClient: HubPoolClient, spokePoolClient: SpokePoolClient ): Promise { - // Let's just assume for now CCTP transfers don't take longer than 1 day and can - // happen very quickly. - const lookback = getCurrentTime() - 60 * 60 * 24; + const lookback = getCurrentTime() - 60 * 60 * 24 * 7; const redis = await getRedisCache(logger); - const fromBlock = await getBlockForTimestamp(hubPoolClient.chainId, lookback, undefined, redis); + const fromBlock = await getBlockForTimestamp(spokePoolClient.chainId, lookback, undefined, redis); logger.debug({ - at: "Finalizer#CCTPL2ToL1Finalizer", + at: `Finalizer#CCTPL2ToL1Finalizer:${spokePoolClient.chainId}`, message: `MessageSent event filter for ${getNetworkName(spokePoolClient.chainId)} to L1`, fromBlock, }); - const decodedMessages = await resolveRelatedTxnReceipts(spokePoolClient, hubPoolClient.chainId, fromBlock); const cctpMessageReceiverDetails = CONTRACT_ADDRESSES[hubPoolClient.chainId].cctpMessageTransmitter; - const contract = new ethers.Contract(cctpMessageReceiverDetails.address, cctpMessageReceiverDetails.abi, signer); + const contract = new ethers.Contract( + cctpMessageReceiverDetails.address, + cctpMessageReceiverDetails.abi, + hubPoolClient.hubPool.provider + ); + const decodedMessages = await resolveRelatedTxnReceipts(spokePoolClient, hubPoolClient.chainId, fromBlock); + const unprocessedMessages = decodedMessages.filter((message) => message.status === "ready"); + const statusesGrouped = groupObjectCountsByProp( + decodedMessages, + (message: { status: CCTPMessageStatus }) => message.status + ); + logger.debug({ + at: `Finalizer#CCTPL2ToL1Finalizer:${spokePoolClient.chainId}`, + message: `Detected ${unprocessedMessages.length} ready to finalize messages for CCTP ${spokePoolClient.chainId} to L1`, + 
statusesGrouped, + }); + return { - crossChainMessages: await generateWithdrawalData(decodedMessages, spokePoolClient.chainId, hubPoolClient.chainId), - callData: await generateMultiCallData(contract, decodedMessages), + crossChainMessages: await generateWithdrawalData( + unprocessedMessages, + spokePoolClient.chainId, + hubPoolClient.chainId + ), + callData: await generateMultiCallData(contract, unprocessedMessages), }; } @@ -45,14 +67,25 @@ async function resolveRelatedTxnReceipts( targetDestinationChainId: number, latestBlockToFinalize: number ): Promise { + const sourceChainId = client.chainId; + // Dedup the txnReceipt list because there might be multiple tokens bridged events in the same txn hash. + + const uniqueTxnHashes = new Set(); + client + .getTokensBridged() + .filter( + (bridgeEvent) => + bridgeEvent.blockNumber >= latestBlockToFinalize && + compareAddressesSimple(bridgeEvent.l2TokenAddress, TOKEN_SYMBOLS_MAP.USDC.addresses[sourceChainId]) + ) + .forEach((bridgeEvent) => uniqueTxnHashes.add(bridgeEvent.transactionHash)); + // Resolve the receipts to all collected txns const txnReceipts = await Promise.all( - client - .getTokensBridged() - .filter((bridgeEvent) => bridgeEvent.blockNumber >= latestBlockToFinalize) - .map((bridgeEvent) => client.spokePool.provider.getTransactionReceipt(bridgeEvent.transactionHash)) + Array.from(uniqueTxnHashes).map((hash) => client.spokePool.provider.getTransactionReceipt(hash)) ); - return resolveCCTPRelatedTxns(txnReceipts, client.chainId, targetDestinationChainId); + + return resolveCCTPRelatedTxns(txnReceipts, sourceChainId, targetDestinationChainId); } /** @@ -65,6 +98,7 @@ async function generateMultiCallData( messageTransmitter: Contract, messages: DecodedCCTPMessage[] ): Promise { + assert(messages.every((message) => isDefined(message.attestation))); return Promise.all( messages.map(async (message) => { const txn = (await messageTransmitter.populateTransaction.receiveMessage( diff --git 
a/src/finalizer/utils/linea/common.ts b/src/finalizer/utils/linea/common.ts index 4e3bde269..a6457cbcc 100644 --- a/src/finalizer/utils/linea/common.ts +++ b/src/finalizer/utils/linea/common.ts @@ -2,7 +2,7 @@ import { LineaSDK, Message, OnChainMessageStatus } from "@consensys/linea-sdk"; import { L1MessageServiceContract, L2MessageServiceContract } from "@consensys/linea-sdk/dist/lib/contracts"; import { L1ClaimingService } from "@consensys/linea-sdk/dist/lib/sdk/claiming/L1ClaimingService"; import { MessageSentEvent } from "@consensys/linea-sdk/dist/typechain/L2MessageService"; -import { Linea_Adapter__factory } from "@across-protocol/contracts-v2"; +import { Linea_Adapter__factory } from "@across-protocol/contracts"; import { BigNumber, Contract, @@ -16,6 +16,7 @@ import { getNodeUrlList, getRedisCache, paginatedEventQuery, + retryAsync, } from "../../../utils"; import { HubPoolClient } from "../../../clients"; import { CONTRACT_ADDRESSES } from "../../../common"; @@ -82,8 +83,10 @@ export function makeGetMessagesWithStatusByTxHash( }; }); + // The Linea SDK MessageServiceContract constructs its own Provider without our retry logic so we retry each call + // twice with a 1 second delay between in case of intermittent RPC failures. 
const messageStatus = await Promise.all( - messages.map((message) => dstClaimingService.getMessageStatus(message.messageHash)) + messages.map((message) => retryAsync(() => dstClaimingService.getMessageStatus(message.messageHash), 2, 1)) ); return messages.map((message, index) => ({ ...message, @@ -203,7 +206,7 @@ export async function findMessageFromTokenBridge( ): Promise { const bridgeEvents = await paginatedEventQuery( bridgeContract, - bridgeContract.filters.BridgingInitiated(l1ToL2AddressesToFinalize), + bridgeContract.filters.BridgingInitiatedV2(l1ToL2AddressesToFinalize), searchConfig ); const messageSent = messageServiceContract.contract.interface.getEventTopic("MessageSent"); diff --git a/src/finalizer/utils/linea/l1ToL2.ts b/src/finalizer/utils/linea/l1ToL2.ts index c2575ccc7..478be8bc6 100644 --- a/src/finalizer/utils/linea/l1ToL2.ts +++ b/src/finalizer/utils/linea/l1ToL2.ts @@ -1,10 +1,10 @@ -import { utils as sdkUtils } from "@across-protocol/sdk-v2"; +import { utils as sdkUtils } from "@across-protocol/sdk"; import { OnChainMessageStatus } from "@consensys/linea-sdk"; import { Contract } from "ethers"; import { groupBy } from "lodash"; import { HubPoolClient, SpokePoolClient } from "../../../clients"; import { CHAIN_MAX_BLOCK_LOOKBACK, CONTRACT_ADDRESSES } from "../../../common"; -import { EventSearchConfig, Signer, convertFromWei, winston } from "../../../utils"; +import { EventSearchConfig, Signer, convertFromWei, retryAsync, winston } from "../../../utils"; import { CrossChainMessage, FinalizerPromise } from "../../types"; import { determineMessageType, @@ -40,9 +40,7 @@ export async function lineaL1ToL2Finalizer( ); // Optimize block range for querying Linea's MessageSent events on L1. - // We want to conservatively query for events that are between 0 and 24 hours old - // because Linea L1->L2 messages are claimable after ~20 mins. 
- const { fromBlock, toBlock } = await getBlockRangeByHoursOffsets(l1ChainId, 24, 0); + const { fromBlock, toBlock } = await getBlockRangeByHoursOffsets(l1ChainId, 24 * 7, 0); logger.debug({ at: "Finalizer#LineaL1ToL2Finalizer", message: "Linea MessageSent event filter", @@ -71,7 +69,10 @@ export async function lineaL1ToL2Finalizer( } = event; // It's unlikely that our multicall will have multiple transactions to bridge to Linea // so we can grab the statuses individually. - const messageStatus = await l2MessageServiceContract.getMessageStatus(_messageHash); + + // The Linea SDK MessageServiceContract constructs its own Provider without our retry logic so we retry each call + // twice with a 1 second delay between in case of intermittent RPC failures. + const messageStatus = await retryAsync(() => l2MessageServiceContract.getMessageStatus(_messageHash), 2, 1); return { messageSender: _from, destination: _to, diff --git a/src/finalizer/utils/linea/l2ToL1.ts b/src/finalizer/utils/linea/l2ToL1.ts index c4d68c4f9..6649b7ba7 100644 --- a/src/finalizer/utils/linea/l2ToL1.ts +++ b/src/finalizer/utils/linea/l2ToL1.ts @@ -3,7 +3,7 @@ import { Wallet } from "ethers"; import { groupBy } from "lodash"; import { HubPoolClient, SpokePoolClient } from "../../../clients"; -import { Signer, winston, convertFromWei } from "../../../utils"; +import { Signer, winston, convertFromWei, getL1TokenInfo } from "../../../utils"; import { FinalizerPromise, CrossChainMessage } from "../../types"; import { TokensBridged } from "../../../interfaces"; import { @@ -27,9 +27,8 @@ export async function lineaL2ToL1Finalizer( const getMessagesWithStatusByTxHash = makeGetMessagesWithStatusByTxHash(l2Contract, l1ClaimingService); // Optimize block range for querying relevant source events on L2. 
- // We want to conservatively query for events that are between 8 and 72 hours old - // because Linea L2->L1 messages are claimable after 6 - 32 hours - const { fromBlock, toBlock } = await getBlockRangeByHoursOffsets(l2ChainId, 72, 8); + // Linea L2->L1 messages are claimable after 6 - 32 hours + const { fromBlock, toBlock } = await getBlockRangeByHoursOffsets(l2ChainId, 24 * 8, 6); logger.debug({ at: "Finalizer#LineaL2ToL1Finalizer", message: "Linea TokensBridged event filter", @@ -103,12 +102,7 @@ export async function lineaL2ToL1Finalizer( // Populate cross chain transfers for claimed messages const transfers = claimable.map(({ tokensBridged }) => { const { l2TokenAddress, amountToReturn } = tokensBridged; - const l1TokenCounterpart = hubPoolClient.getL1TokenForL2TokenAtBlock( - l2TokenAddress, - l2ChainId, - hubPoolClient.latestBlockSearched - ); - const { decimals, symbol: l1TokenSymbol } = hubPoolClient.getTokenInfo(l1ChainId, l1TokenCounterpart); + const { decimals, symbol: l1TokenSymbol } = getL1TokenInfo(l2TokenAddress, l2ChainId); const amountFromWei = convertFromWei(amountToReturn.toString(), decimals); const transfer: CrossChainMessage = { originationChainId: l2ChainId, diff --git a/src/finalizer/utils/opStack.ts b/src/finalizer/utils/opStack.ts index fcabc7c1d..0b4edbf41 100644 --- a/src/finalizer/utils/opStack.ts +++ b/src/finalizer/utils/opStack.ts @@ -2,19 +2,23 @@ import assert from "assert"; import { groupBy } from "lodash"; import * as optimismSDK from "@eth-optimism/sdk"; import { HubPoolClient, SpokePoolClient } from "../../clients"; -import { L1Token, TokensBridged } from "../../interfaces"; +import { TokensBridged } from "../../interfaces"; import { BigNumber, + CHAIN_IDs, chainIsOPStack, + compareAddressesSimple, convertFromWei, getBlockForTimestamp, getCachedProvider, getCurrentTime, + getL1TokenInfo, getNetworkName, getRedisCache, getUniqueLogIndex, groupObjectCountsByProp, Signer, + TOKEN_SYMBOLS_MAP, winston, } from "../../utils"; 
import { Multicall2Call } from "../../common"; @@ -30,7 +34,7 @@ interface CrossChainMessageWithStatus extends CrossChainMessageWithEvent { logIndex: number; } -type OVM_CHAIN_ID = 10 | 8453; +type OVM_CHAIN_ID = 10 | 8453 | 34443; type OVM_CROSS_CHAIN_MESSENGER = optimismSDK.CrossChainMessenger; export async function opStackFinalizer( @@ -51,21 +55,22 @@ export async function opStackFinalizer( // - Don't submit proofs for finalizations older than 1 day // - Don't try to withdraw tokens that are not past the 7 day challenge period const redis = await getRedisCache(logger); - const [earliestBlockToFinalize, latestBlockToProve] = await Promise.all([ - getBlockForTimestamp(chainId, getCurrentTime() - 7 * 60 * 60 * 24, undefined, redis), - getBlockForTimestamp(chainId, getCurrentTime() - 60 * 60 * 24, undefined, redis), - ]); + const latestBlockToProve = await getBlockForTimestamp(chainId, getCurrentTime() - 7 * 60 * 60 * 24, undefined, redis); const { recentTokensBridgedEvents = [], olderTokensBridgedEvents = [] } = groupBy( - spokePoolClient.getTokensBridged(), + spokePoolClient.getTokensBridged().filter( + (e) => + // USDC withdrawals for Base and Optimism should be finalized via the CCTP Finalizer. + !compareAddressesSimple(e.l2TokenAddress, TOKEN_SYMBOLS_MAP["USDC"].addresses[chainId]) || + !(chainId === CHAIN_IDs.BASE || chainId === CHAIN_IDs.OPTIMISM) + ), (e) => { if (e.blockNumber >= latestBlockToProve) { return "recentTokensBridgedEvents"; - } else if (e.blockNumber <= earliestBlockToFinalize) { + } else { return "olderTokensBridgedEvents"; } } ); - // First submit proofs for any newly withdrawn tokens. 
You can submit proofs for any withdrawals that have been // snapshotted on L1, so it takes roughly 1 hour from the withdrawal time logger.debug({ @@ -87,7 +92,7 @@ export async function opStackFinalizer( logger.debug({ at: "Finalizer", message: `Earliest TokensBridged block to attempt to finalize for ${networkName}`, - earliestBlockToFinalize, + earliestBlockToFinalize: latestBlockToProve, }); const finalizations = await multicallOptimismFinalizations( @@ -129,14 +134,14 @@ async function getCrossChainMessages( return ( await Promise.all( - tokensBridged.map( - async (l2Event, i) => - ( - await crossChainMessenger.getMessagesByTransaction(l2Event.transactionHash, { - direction: optimismSDK.MessageDirection.L2_TO_L1, - }) - )[logIndexesForMessage[i]] - ) + tokensBridged.map(async (l2Event, i) => { + const withdrawals = await crossChainMessenger.getMessagesByTransaction(l2Event.transactionHash, { + direction: optimismSDK.MessageDirection.L2_TO_L1, + }); + const logIndexOfEvent = logIndexesForMessage[i]; + assert(logIndexOfEvent < withdrawals.length); + return withdrawals[logIndexOfEvent]; + }) ) ).map((message, i) => { return { @@ -217,13 +222,6 @@ async function getOptimismFinalizableMessages( ); } -function getL1TokenInfoForOptimismToken(chainId: OVM_CHAIN_ID, hubPoolClient: HubPoolClient, l2Token: string): L1Token { - return hubPoolClient.getL1TokenInfoForL2Token( - SpokePoolClient.getExecutedRefundLeafL2Token(chainId, l2Token), - chainId - ); -} - async function finalizeOptimismMessage( _chainId: OVM_CHAIN_ID, crossChainMessenger: OVM_CROSS_CHAIN_MESSENGER, @@ -274,7 +272,7 @@ async function multicallOptimismFinalizations( ) ); const withdrawals = finalizableMessages.map((message) => { - const l1TokenInfo = getL1TokenInfoForOptimismToken(chainId, hubPoolClient, message.event.l2TokenAddress); + const l1TokenInfo = getL1TokenInfo(message.event.l2TokenAddress, chainId); const amountFromWei = convertFromWei(message.event.amountToReturn.toString(), 
l1TokenInfo.decimals); const withdrawal: CrossChainMessage = { originationChainId: chainId, @@ -306,7 +304,7 @@ async function multicallOptimismL1Proofs( provableMessages.map((message) => proveOptimismMessage(chainId, crossChainMessenger, message, message.logIndex)) ); const withdrawals = provableMessages.map((message) => { - const l1TokenInfo = getL1TokenInfoForOptimismToken(chainId, hubPoolClient, message.event.l2TokenAddress); + const l1TokenInfo = getL1TokenInfo(message.event.l2TokenAddress, chainId); const amountFromWei = convertFromWei(message.event.amountToReturn.toString(), l1TokenInfo.decimals); const proof: CrossChainMessage = { originationChainId: chainId, diff --git a/src/finalizer/utils/polygon.ts b/src/finalizer/utils/polygon.ts index 61560be44..b7034b3cd 100644 --- a/src/finalizer/utils/polygon.ts +++ b/src/finalizer/utils/polygon.ts @@ -12,6 +12,9 @@ import { getCurrentTime, getRedisCache, getBlockForTimestamp, + getL1TokenInfo, + compareAddressesSimple, + TOKEN_SYMBOLS_MAP, } from "../../utils"; import { EthersError, TokensBridged } from "../../interfaces"; import { HubPoolClient, SpokePoolClient } from "../../clients"; @@ -45,7 +48,7 @@ export async function polygonFinalizer( const { chainId } = spokePoolClient; const posClient = await getPosClient(signer); - const lookback = getCurrentTime() - 60 * 60 * 24; + const lookback = getCurrentTime() - 60 * 60 * 24 * 7; const redis = await getRedisCache(logger); const fromBlock = await getBlockForTimestamp(chainId, lookback, undefined, redis); @@ -109,6 +112,18 @@ async function getFinalizableTransactions( const exitStatus = await Promise.all( checkpointedTokensBridged.map(async (_, i) => { const payload = payloads[i]; + const { chainId, l2TokenAddress } = tokensBridged[i]; + + // @dev we can't filter out USDC CCTP withdrawals until after we build the payloads for exit + // because those functions take in a third 'logIndex' parameter which does assume that USDC CCTP + // withdrawals are accounted for. 
For example, if an L2 withdrawal transaction contains two withdrawals: one USDC + // one followed by a non-USDC one, the USDC 'logIndex' as far as building the payload is concerned + // will be 0 and the non-USDC 'logIndex' will be 1. This is why we can't filter out USDC CCTP withdrawals + // until after we've computed payloads. + if (compareAddressesSimple(l2TokenAddress, TOKEN_SYMBOLS_MAP.USDC.addresses[chainId])) { + return { status: "USDC_CCTP_L2_WITHDRAWAL" }; + } + try { // If we can estimate gas for exit transaction call, then we can exit the burn tx, otherwise its likely // been processed. Note this will capture mislabel some exit txns that fail for other reasons as "exit @@ -204,9 +219,7 @@ async function resolvePolygonRetrievalFinalizations( }) ); const callData = await Promise.all( - tokensInFinalizableMessages.map((l2Token) => - retrieveTokenFromMainnetTokenBridger(l2Token, hubSigner, hubPoolClient) - ) + tokensInFinalizableMessages.map((l2Token) => retrieveTokenFromMainnetTokenBridger(l2Token, hubSigner)) ); const crossChainMessages = finalizableMessages.map((finalizableMessage) => resolveCrossChainTransferStructure(finalizableMessage, "misc", hubPoolClient) @@ -223,12 +236,7 @@ function resolveCrossChainTransferStructure( hubPoolClient: HubPoolClient ): CrossChainMessage { const { l2TokenAddress, amountToReturn } = finalizableMessage; - const l1TokenCounterpart = hubPoolClient.getL1TokenForL2TokenAtBlock( - l2TokenAddress, - CHAIN_ID, - hubPoolClient.latestBlockSearched - ); - const l1TokenInfo = hubPoolClient.getTokenInfo(1, l1TokenCounterpart); + const l1TokenInfo = getL1TokenInfo(l2TokenAddress, CHAIN_ID); const amountFromWei = convertFromWei(amountToReturn.toString(), l1TokenInfo.decimals); const transferBase = { originationChainId: CHAIN_ID, @@ -246,12 +254,8 @@ function getMainnetTokenBridger(mainnetSigner: Signer): Contract { return getDeployedContract("PolygonTokenBridger", 1, mainnetSigner); } -async function 
retrieveTokenFromMainnetTokenBridger( - l2Token: string, - mainnetSigner: Signer, - hubPoolClient: HubPoolClient -): Promise { - const l1Token = hubPoolClient.getL1TokenForL2TokenAtBlock(l2Token, CHAIN_ID, hubPoolClient.latestBlockSearched); +async function retrieveTokenFromMainnetTokenBridger(l2Token: string, mainnetSigner: Signer): Promise { + const l1Token = getL1TokenInfo(l2Token, CHAIN_ID).address; const mainnetTokenBridger = getMainnetTokenBridger(mainnetSigner); const callData = await mainnetTokenBridger.populateTransaction.retrieve(l1Token); return { diff --git a/src/finalizer/utils/scroll.ts b/src/finalizer/utils/scroll.ts index a5cf2c33b..8d4776ce7 100644 --- a/src/finalizer/utils/scroll.ts +++ b/src/finalizer/utils/scroll.ts @@ -1,5 +1,5 @@ /* eslint-disable @typescript-eslint/no-unused-vars */ -import { utils as sdkUtils } from "@across-protocol/sdk-v2"; +import { utils as sdkUtils } from "@across-protocol/sdk"; import { TransactionRequest } from "@ethersproject/abstract-provider"; import axios from "axios"; import { HubPoolClient, SpokePoolClient } from "../../clients"; diff --git a/src/finalizer/utils/zkSync.ts b/src/finalizer/utils/zkSync.ts index 010807d5e..5ae4cc0ae 100644 --- a/src/finalizer/utils/zkSync.ts +++ b/src/finalizer/utils/zkSync.ts @@ -1,4 +1,4 @@ -import { interfaces, utils as sdkUtils } from "@across-protocol/sdk-v2"; +import { interfaces, utils as sdkUtils } from "@across-protocol/sdk"; import { Contract, Wallet, Signer } from "ethers"; import { groupBy } from "lodash"; import { Provider as zksProvider, Wallet as zkWallet } from "zksync-web3"; @@ -9,6 +9,7 @@ import { getBlockForTimestamp, getCurrentTime, getEthAddressForChain, + getL1TokenInfo, getRedisCache, getUniqueLogIndex, winston, @@ -43,21 +44,23 @@ export async function zkSyncFinalizer( const wallet = new zkWallet((signer as Wallet).privateKey, l2Provider, l1Provider); // Zksync takes 1 day to finalize so ignore any events - // older than 2 days and earlier than 1 day. 
+ // earlier than 1 day. const redis = await getRedisCache(logger); - const [fromBlock, toBlock] = await Promise.all([ - getBlockForTimestamp(l2ChainId, getCurrentTime() - 2 * 60 * 60 * 24, undefined, redis), - getBlockForTimestamp(l2ChainId, getCurrentTime() - 1 * 60 * 60 * 24, undefined, redis), - ]); + const latestBlockToFinalize = await getBlockForTimestamp( + l2ChainId, + getCurrentTime() - 1 * 60 * 60 * 24, + undefined, + redis + ); + logger.debug({ at: "Finalizer#ZkSyncFinalizer", message: "ZkSync TokensBridged event filter", - fromBlock, - toBlock, + toBlock: latestBlockToFinalize, }); const withdrawalsToQuery = spokePoolClient .getTokensBridged() - .filter(({ blockNumber }) => blockNumber >= fromBlock && blockNumber <= toBlock); + .filter(({ blockNumber }) => blockNumber <= latestBlockToFinalize); const statuses = await sortWithdrawals(l2Provider, withdrawalsToQuery); const l2Finalized = statuses["finalized"] ?? []; const candidates = await filterMessageLogs(wallet, l2Finalized); @@ -65,12 +68,7 @@ export async function zkSyncFinalizer( const txns = await prepareFinalizations(l1ChainId, l2ChainId, withdrawalParams); const withdrawals = candidates.map(({ l2TokenAddress, amountToReturn }) => { - const l1TokenCounterpart = hubPoolClient.getL1TokenForL2TokenAtBlock( - l2TokenAddress, - l2ChainId, - hubPoolClient.latestBlockSearched - ); - const { decimals, symbol: l1TokenSymbol } = hubPoolClient.getTokenInfo(l1ChainId, l1TokenCounterpart); + const { decimals, symbol: l1TokenSymbol } = getL1TokenInfo(l2TokenAddress, l2ChainId); const amountFromWei = convertFromWei(amountToReturn.toString(), decimals); const withdrawal: CrossChainMessage = { originationChainId: l2ChainId, diff --git a/src/interfaces/BundleData.ts b/src/interfaces/BundleData.ts index 8aad15c16..b2739085c 100644 --- a/src/interfaces/BundleData.ts +++ b/src/interfaces/BundleData.ts @@ -1,4 +1,4 @@ -import { interfaces } from "@across-protocol/sdk-v2"; +import { interfaces } from 
"@across-protocol/sdk"; import { BigNumber } from "../utils"; export type ExpiredDepositsToRefundV3 = { [originChainId: number]: { diff --git a/src/interfaces/InventoryManagement.ts b/src/interfaces/InventoryManagement.ts index 9cd9303fb..4f2e76c11 100644 --- a/src/interfaces/InventoryManagement.ts +++ b/src/interfaces/InventoryManagement.ts @@ -1,20 +1,59 @@ -import { BigNumber } from "ethers"; +import { BigNumber, utils as ethersUtils } from "ethers"; +import { TOKEN_SYMBOLS_MAP } from "../utils"; +export type TokenBalanceConfig = { + targetOverageBuffer: BigNumber; // Max multiplier for targetPct, to give flexibility in repayment chain selection. + targetPct: BigNumber; // The desired amount of the given token on the L2 chainId. + thresholdPct: BigNumber; // Threshold, below which, we will execute a rebalance. + unwrapWethThreshold?: BigNumber; // Threshold for ETH to trigger WETH unwrapping to maintain ETH balance. + unwrapWethTarget?: BigNumber; // Amount of WETH to unwrap to refill ETH. Unused if unwrapWethThreshold is undefined. +}; + +export type ChainTokenConfig = { + [chainId: string]: TokenBalanceConfig; +}; + +// AliasConfig permits a single HubPool token to map onto multiple tokens on a remote chain. +export type ChainTokenInventory = { + [symbol: string]: ChainTokenConfig; +}; + +/** + * Example configuration: + * - DAI on chains 10 & 42161. + * - Bridged USDC (USDC.e, USDbC) on chains 10, 137, 324, 8453, 42161 & 59144. + * - Native USDC on Polygon. + * + * All token allocations are "global", so Polygon will be allocated a total of 8% of all USDC: + * - 4% of global USDC as Native USDC, and + * - 4% as Bridged USDC. 
+ * + * "tokenConfig": { + * "DAI": { + * "10": { "targetPct": 8, "thresholdPct": 4 }, + * "42161": { "targetPct": 8, "thresholdPct": 4 }, + * }, + * "USDC": { + * "USDC.e": { + * "10": { "targetPct": 8, "thresholdPct": 4 }, + * "137": { "targetPct": 4, "thresholdPct": 2 }, + * "324": { "targetPct": 8, "thresholdPct": 4 }, + * "42161": { "targetPct": 8, "thresholdPct": 4 }, + * "59144": { "targetPct": 5, "thresholdPct": 2 } + * }, + * "USDbC": { + * "8453": { "targetPct": 5, "thresholdPct": 2 } + * }, + * "USDC": { + * "137": { "targetPct": 4, "thresholdPct": 2 } + * } + * } + * } + */ export interface InventoryConfig { - tokenConfig: { - [l1Token: string]: { - [chainId: string]: { - targetOverageBuffer: BigNumber; // The relayer will be allowed to hold this multiple times the targetPct - // of the full token balance on this chain. - targetPct: BigNumber; // The desired amount of the given token on the L2 chainId. - thresholdPct: BigNumber; // Threshold, below which, we will execute a rebalance. - unwrapWethThreshold?: BigNumber; // Threshold for ETH on this chain to trigger WETH unwrapping to maintain - // ETH balance - unwrapWethTarget?: BigNumber; // Amount of WETH to unwrap to refill ETH balance. Unused if unwrapWethThreshold - // is undefined. - }; - }; - }; + // tokenConfig can map to a single token allocation, or a set of allocations that all map to the same HubPool token. + tokenConfig: { [l1Token: string]: ChainTokenConfig } | { [l1Token: string]: ChainTokenInventory }; + // If ETH balance on chain is above threshold, wrap the excess over the target to WETH. 
wrapEtherTargetPerChain: { [chainId: number]: BigNumber; @@ -25,3 +64,9 @@ export interface InventoryConfig { }; wrapEtherThreshold: BigNumber; } + +export function isAliasConfig(config: ChainTokenConfig | ChainTokenInventory): config is ChainTokenInventory { + return ( + Object.keys(config).every((k) => ethersUtils.isAddress(k)) || Object.keys(config).every((k) => TOKEN_SYMBOLS_MAP[k]) + ); +} diff --git a/src/interfaces/index.ts b/src/interfaces/index.ts index 243985ddc..731e0b93f 100644 --- a/src/interfaces/index.ts +++ b/src/interfaces/index.ts @@ -1,4 +1,5 @@ -import { interfaces } from "@across-protocol/sdk-v2"; +import { BigNumber } from "ethers"; +import { interfaces } from "@across-protocol/sdk"; export * from "./InventoryManagement"; export * from "./SpokePool"; @@ -8,7 +9,16 @@ export * from "./Report"; export * from "./Arweave"; // Bridge interfaces -export type OutstandingTransfers = interfaces.OutstandingTransfers; +export interface OutstandingTransfers { + [address: string]: { + [l1Token: string]: { + [l2Token: string]: { + totalAmount: BigNumber; + depositTxHashes: string[]; + }; + }; + }; +} // Common interfaces export type SortableEvent = interfaces.SortableEvent; diff --git a/src/libexec/RelayerSpokePoolIndexer.ts b/src/libexec/RelayerSpokePoolIndexer.ts index 32c6c4a49..db41dc595 100644 --- a/src/libexec/RelayerSpokePoolIndexer.ts +++ b/src/libexec/RelayerSpokePoolIndexer.ts @@ -2,7 +2,7 @@ import assert from "assert"; import minimist from "minimist"; import { setTimeout } from "node:timers/promises"; import { Contract, Event, EventFilter, providers as ethersProviders, utils as ethersUtils } from "ethers"; -import { utils as sdkUtils } from "@across-protocol/sdk-v2"; +import { utils as sdkUtils } from "@across-protocol/sdk"; import * as utils from "../../scripts/utils"; import { SpokePoolClientMessage } from "../clients"; import { @@ -11,6 +11,7 @@ import { exit, isDefined, getBlockForTimestamp, + getChainQuorum, getDeploymentBlockNumber, 
getNetworkName, getOriginFromURL, @@ -29,7 +30,6 @@ type EventSearchConfig = sdkUtils.EventSearchConfig; type ScraperOpts = { lookback?: number; // Event lookback (in seconds). finality?: number; // Event finality (in blocks). - quorum?: number; // Provider quorum to apply. deploymentBlock: number; // SpokePool deployment block maxBlockRange?: number; // Maximum block range for paginated getLogs queries. filterArgs?: { [event: string]: string[] }; // Event-specific filter criteria to apply. @@ -209,17 +209,18 @@ async function listen( */ async function run(argv: string[]): Promise { const minimistOpts = { - string: ["relayer"], + string: ["lookback", "relayer"], }; const args = minimist(argv, minimistOpts); - const { chainId, finality = 32, quorum = 1, lookback = 7200, relayer = null, maxBlockRange = 10_000 } = args; + const { chainId, finality = 32, lookback = "5400", relayer = null, maxBlockRange = 10_000 } = args; assert(Number.isInteger(chainId), "chainId must be numeric "); assert(Number.isInteger(finality), "finality must be numeric "); - assert(Number.isInteger(quorum), "quorum must be numeric "); - assert(Number.isInteger(lookback), "lookback must be numeric"); assert(Number.isInteger(maxBlockRange), "maxBlockRange must be numeric"); - assert(!isDefined(relayer) || ethersUtils.isAddress(relayer), "relayer address is invalid"); + assert(!isDefined(relayer) || ethersUtils.isAddress(relayer), `relayer address is invalid (${relayer})`); + + const { quorum = getChainQuorum(chainId) } = args; + assert(Number.isInteger(quorum), "quorum must be numeric "); chain = getNetworkName(chainId); @@ -229,17 +230,24 @@ async function run(argv: string[]): Promise { const latestBlock = await quorumProvider.getBlock("latest"); const deploymentBlock = getDeploymentBlockNumber("SpokePool", chainId); - const startBlock = Math.max( - deploymentBlock, - await getBlockForTimestamp(chainId, latestBlock.timestamp - lookback, blockFinder, cache) - ); - const nBlocks = 
latestBlock.number - startBlock; + let startBlock: number; + if (/^@[0-9]+$/.test(lookback)) { + // Lookback to a specific block (lookback = @). + startBlock = Number(lookback.slice(1)); + } else { + // Resolve `lookback` seconds from head to a specific block. + assert(Number.isInteger(Number(lookback)), `Invalid lookback (${lookback})`); + startBlock = Math.max( + deploymentBlock, + await getBlockForTimestamp(chainId, latestBlock.timestamp - lookback, blockFinder, cache) + ); + } const opts = { finality, quorum, deploymentBlock, - lookback: nBlocks, + lookback: latestBlock.number - startBlock, maxBlockRange, filterArgs: getEventFilterArgs(relayer), }; @@ -248,12 +256,12 @@ async function run(argv: string[]): Promise { const spokePool = await utils.getSpokePoolContract(chainId); process.on("SIGHUP", () => { - logger.debug({ at: "Relayer#run", message: "Received SIGHUP, stopping..." }); + logger.debug({ at: "Relayer#run", message: `Received SIGHUP in ${chain} listener, stopping...` }); stop = true; }); process.on("disconnect", () => { - logger.debug({ at: "Relayer::run", message: "Parent disconnected, stopping..." 
}); + logger.debug({ at: "Relayer::run", message: `${chain} parent disconnected, stopping...` }); stop = true; }); @@ -286,6 +294,20 @@ async function run(argv: string[]): Promise { try { providers = getWSProviders(chainId, quorum); assert(providers.length > 0, `Insufficient providers for ${chain} (required ${quorum} by quorum)`); + providers.forEach((provider) => { + provider._websocket.on("error", (err) => + logger.debug({ at: "RelayerSpokePoolIndexer::run", message: `Caught ${chain} provider error.`, err }) + ); + + provider._websocket.on("close", () => { + logger.debug({ + at: "RelayerSpokePoolIndexer::run", + message: `${chain} provider connection closed.`, + provider: getOriginFromURL(provider.connection.url), + }); + }); + }); + logger.debug({ at: "RelayerSpokePoolIndexer::run", message: `Starting ${chain} listener.`, events, opts }); await listen(eventMgr, spokePool, events, providers, opts); } catch (err) { @@ -310,11 +332,12 @@ if (require.main === module) { process.exitCode = NODE_SUCCESS; }) .catch((error) => { - logger.error({ at: "RelayerSpokePoolIndexer", message: "Process exited with error.", error }); + logger.error({ at: "RelayerSpokePoolIndexer", message: `${chain} listener exited with error.`, error }); process.exitCode = NODE_APP_ERR; }) .finally(async () => { await disconnectRedisClients(); + logger.debug({ at: "RelayerSpokePoolIndexer", message: `Exiting ${chain} listener.` }); exit(process.exitCode); }); } diff --git a/src/monitor/Monitor.ts b/src/monitor/Monitor.ts index 752441224..4a7998992 100644 --- a/src/monitor/Monitor.ts +++ b/src/monitor/Monitor.ts @@ -3,12 +3,12 @@ import { spokePoolClientsToProviders } from "../common"; import { BalanceType, BundleAction, + DepositWithBlock, + FillStatus, L1Token, RelayerBalanceReport, RelayerBalanceTable, TokenTransfer, - TransfersByChain, - TransfersByTokens, } from "../interfaces"; import { BigNumber, @@ -18,6 +18,7 @@ import { createFormatFunction, ERC20, ethers, + fillStatusArray, 
blockExplorerLink, blockExplorerLinks, getEthAddressForChain, @@ -25,19 +26,28 @@ import { getNativeTokenSymbol, getNetworkName, getUnfilledDeposits, + mapAsync, providers, toBN, toBNWei, winston, + TOKEN_SYMBOLS_MAP, + compareAddressesSimple, + CHAIN_IDs, } from "../utils"; import { MonitorClients, updateMonitorClients } from "./MonitorClientHelper"; import { MonitorConfig } from "./MonitorConfig"; import { CombinedRefunds } from "../dataworker/DataworkerUtils"; -export const REBALANCE_FINALIZE_GRACE_PERIOD = 60 * 60 * 4; // 4 hours. +export const REBALANCE_FINALIZE_GRACE_PERIOD = process.env.REBALANCE_FINALIZE_GRACE_PERIOD + ? Number(process.env.REBALANCE_FINALIZE_GRACE_PERIOD) + : 60 * 60; +// 60 minutes, which is the length of the challenge window, so if a rebalance takes longer than this to finalize, +// then its finalizing after the subsequent challenge period has started, which is sub-optimal. + +// bundle frequency. export const ALL_CHAINS_NAME = "All chains"; -export const UNKNOWN_TRANSFERS_NAME = "Unknown transfers (incoming, outgoing, net)"; const ALL_BALANCE_TYPES = [ BalanceType.CURRENT, BalanceType.PENDING, @@ -46,13 +56,6 @@ const ALL_BALANCE_TYPES = [ BalanceType.TOTAL, ]; -interface CategorizedTransfers { - all: TokenTransfer[]; - bond: TokenTransfer[]; - v1: TokenTransfer[]; - other: TokenTransfer[]; -} - type BalanceRequest = { chainId: number; token: string; account: string }; export class Monitor { @@ -177,7 +180,16 @@ export class Monitor { } async reportUnfilledDeposits(): Promise { - const unfilledDeposits = await getUnfilledDeposits(this.clients.spokePoolClients, this.clients.hubPoolClient); + const { hubPoolClient, spokePoolClients } = this.clients; + const unfilledDeposits: Record = Object.fromEntries( + await mapAsync(Object.values(spokePoolClients), async ({ chainId: destinationChainId }) => { + const deposits = getUnfilledDeposits(destinationChainId, spokePoolClients, hubPoolClient).map( + ({ deposit }) => deposit + ); + const 
fillStatus = await fillStatusArray(spokePoolClients[destinationChainId].spokePool, deposits); + return [destinationChainId, deposits.filter((_, idx) => fillStatus[idx] !== FillStatus.Filled)]; + }) + ); // Group unfilled amounts by chain id and token id. const unfilledAmountByChainAndToken: { [chainId: number]: { [tokenAddress: string]: BigNumber } } = {}; @@ -185,7 +197,7 @@ export class Monitor { const chainId = Number(_destinationChainId); unfilledAmountByChainAndToken[chainId] ??= {}; - deposits.forEach(({ deposit: { outputToken, outputAmount } }) => { + deposits.forEach(({ outputToken, outputAmount }) => { const unfilledAmount = unfilledAmountByChainAndToken[chainId][outputToken] ?? bnZero; unfilledAmountByChainAndToken[chainId][outputToken] = unfilledAmount.add(outputAmount); }); @@ -201,10 +213,19 @@ export class Monitor { const chainId = parseInt(chainIdStr); mrkdwn += `*Destination: ${getNetworkName(chainId)}*\n`; for (const tokenAddress of Object.keys(amountByToken)) { - const tokenInfo = this.clients.hubPoolClient.getL1TokenInfoForL2Token(tokenAddress, chainId); + let symbol: string; + let unfilledAmount: string; + try { + let decimals: number; + ({ symbol, decimals } = this.clients.hubPoolClient.getTokenInfoForAddress(tokenAddress, chainId)); + unfilledAmount = convertFromWei(amountByToken[tokenAddress].toString(), decimals); + } catch { + symbol = tokenAddress; // Using the address helps investigation. + unfilledAmount = amountByToken[tokenAddress].toString(); + } + // Convert to number of tokens for readability. 
- const unfilledAmount = convertFromWei(amountByToken[tokenAddress].toString(), tokenInfo.decimals); - mrkdwn += `${tokenInfo.symbol}: ${unfilledAmount}\n`; + mrkdwn += `${symbol}: ${unfilledAmount}\n`; } } @@ -215,14 +236,22 @@ export class Monitor { async reportRelayerBalances(): Promise { const relayers = this.monitorConfig.monitoredRelayers; - const allL1Tokens = this.clients.hubPoolClient.getL1Tokens(); + const allL1Tokens = [...this.clients.hubPoolClient.getL1Tokens()]; // @dev deep clone since we modify the + // array below and we don't want to modify the HubPoolClient's version + // @dev Handle special case for L1 USDC which is mapped to two L2 tokens on some chains, so we can more easily + // see L2 Bridged USDC balance versus Native USDC. Add USDC.e right after the USDC element. + const indexOfUsdc = allL1Tokens.findIndex(({ symbol }) => symbol === "USDC"); + allL1Tokens.splice(indexOfUsdc, 0, { + symbol: "USDC.e", + address: TOKEN_SYMBOLS_MAP["USDC.e"].addresses[this.clients.hubPoolClient.chainId], + decimals: 6, + }); const chainIds = this.monitorChains; - const allChainNames = chainIds.map(getNetworkName).concat([ALL_CHAINS_NAME, UNKNOWN_TRANSFERS_NAME]); + const allChainNames = chainIds.map(getNetworkName).concat([ALL_CHAINS_NAME]); const reports = this.initializeBalanceReports(relayers, allL1Tokens, allChainNames); await this.updateCurrentRelayerBalances(reports); await this.updateLatestAndFutureRelayerRefunds(reports); - this.updateUnknownTransfers(reports); for (const relayer of relayers) { const report = reports[relayer]; @@ -266,18 +295,13 @@ export class Monitor { // Update current balances of all tokens on each supported chain for each relayer. 
async updateCurrentRelayerBalances(relayerBalanceReport: RelayerBalanceReport): Promise { const { hubPoolClient } = this.clients; - const l1Tokens = hubPoolClient.getL1Tokens(); + const _l1Tokens = hubPoolClient.getL1Tokens(); for (const relayer of this.monitorConfig.monitoredRelayers) { for (const chainId of this.monitorChains) { - const l2ToL1Tokens = Object.fromEntries( - l1Tokens - .filter(({ address: l1Token }) => hubPoolClient.l2TokenEnabledForL1Token(l1Token, chainId)) - .map((l1Token) => { - const l2Token = hubPoolClient.getL2TokenForL1TokenAtBlock(l1Token.address, chainId); - return [l2Token, l1Token]; - }) + const l1Tokens = _l1Tokens.filter(({ address: l1Token }) => + hubPoolClient.l2TokenEnabledForL1Token(l1Token, chainId) ); - + const l2ToL1Tokens = this.getL2ToL1TokenMap(l1Tokens, chainId); const l2TokenAddresses = Object.keys(l2ToL1Tokens); const tokenBalances = await this._getBalances( l2TokenAddresses.map((address) => ({ @@ -289,9 +313,23 @@ export class Monitor { for (let i = 0; i < l2TokenAddresses.length; i++) { const tokenInfo = l2ToL1Tokens[l2TokenAddresses[i]]; + let l1TokenSymbol = tokenInfo.symbol; + + // @dev Handle special case for USDC so we can see Bridged USDC and Native USDC balances split out. + // HubChain USDC balance will be grouped with Native USDC balance arbitrarily. + const l2TokenAddress = l2TokenAddresses[i]; + if ( + l1TokenSymbol === "USDC" && + chainId !== hubPoolClient.chainId && + (compareAddressesSimple(TOKEN_SYMBOLS_MAP["USDC.e"].addresses[chainId], l2TokenAddress) || + compareAddressesSimple(TOKEN_SYMBOLS_MAP["USDbC"].addresses[chainId], l2TokenAddress)) + ) { + l1TokenSymbol = "USDC.e"; + } + this.updateRelayerBalanceTable( relayerBalanceReport[relayer], - tokenInfo.symbol, + l1TokenSymbol, getNetworkName(chainId), BalanceType.CURRENT, tokenBalances[i] @@ -301,6 +339,32 @@ export class Monitor { } } + // Returns a dictionary of L2 token addresses on this chain to their mapped L1 token info. 
For example, this + // will return a dictionary for Optimism including WETH, WBTC, USDC, USDC.e, USDT entries where the key is + // the token's Optimism address and the value is the equivalent L1 token info. + protected getL2ToL1TokenMap(l1Tokens: L1Token[], chainId: number): { [l2TokenAddress: string]: L1Token } { + return Object.fromEntries( + l1Tokens + .map((l1Token) => { + // @dev l2TokenSymbols is a list of all keys in TOKEN_SYMBOLS_MAP where the hub chain address is equal to the + // l1 token address. + const l2TokenSymbols = Object.entries(TOKEN_SYMBOLS_MAP) + .filter( + ([, { addresses }]) => + addresses[this.clients.hubPoolClient.chainId]?.toLowerCase() === l1Token.address.toLowerCase() + ) + .map(([symbol]) => symbol); + + // Create an entry for all L2 tokens that share a symbol with the L1 token. This includes tokens + // like USDC which has multiple L2 tokens mapped to the same L1 token for a given chain ID. + return l2TokenSymbols + .filter((symbol) => TOKEN_SYMBOLS_MAP[symbol].addresses[chainId] !== undefined) + .map((symbol) => [TOKEN_SYMBOLS_MAP[symbol].addresses[chainId], l1Token]); + }) + .flat() + ); + } + async checkBalances(): Promise { const { monitoredBalances } = this.monitorConfig; const balances = await this._getBalances(monitoredBalances); @@ -509,10 +573,14 @@ export class Monitor { // Again, this would give false negatives for transfers that have been stuck for longer than one bundle if the // current time is within the grace period of last executed bundle. But this is a good trade off for simpler code. 
const lastFullyExecutedBundleTime = lastFullyExecutedBundle.challengePeriodEndTimestamp; - if ( - lastFullyExecutedBundleTime + REBALANCE_FINALIZE_GRACE_PERIOD > - this.clients.hubPoolClient.hubPool.getCurrentTime() - ) { + const currentTime = Number(await this.clients.hubPoolClient.hubPool.getCurrentTime()); + if (lastFullyExecutedBundleTime + REBALANCE_FINALIZE_GRACE_PERIOD > currentTime) { + this.logger.debug({ + at: "Monitor#checkStuckRebalances", + message: `Within ${REBALANCE_FINALIZE_GRACE_PERIOD / 60}min grace period of last bundle execution`, + lastFullyExecutedBundleTime, + currentTime, + }); return; } @@ -525,11 +593,18 @@ export class Monitor { } const spokePoolAddress = this.clients.spokePoolClients[chainId].spokePool.address; for (const l1Token of allL1Tokens) { - const transferBalance = this.clients.crossChainTransferClient.getOutstandingCrossChainTransferAmount( - spokePoolAddress, - chainId, - l1Token.address - ); + // Outstanding transfers are mapped to either the spoke pool or the hub pool, depending on which + // chain events are queried. Some only allow us to index on the fromAddress, the L1 originator or the + // HubPool, while others only allow us to index on the toAddress, the L2 recipient or the SpokePool. 
+ const transferBalance = this.clients.crossChainTransferClient + .getOutstandingCrossChainTransferAmount(spokePoolAddress, chainId, l1Token.address) + .add( + this.clients.crossChainTransferClient.getOutstandingCrossChainTransferAmount( + this.clients.hubPoolClient.hubPool.address, + chainId, + l1Token.address + ) + ); const outstandingDepositTxs = blockExplorerLinks( this.clients.crossChainTransferClient.getOutstandingCrossChainTransferTxs( spokePoolAddress, @@ -537,6 +612,15 @@ export class Monitor { l1Token.address ), 1 + ).concat( + blockExplorerLinks( + this.clients.crossChainTransferClient.getOutstandingCrossChainTransferTxs( + this.clients.hubPoolClient.hubPool.address, + chainId, + l1Token.address + ), + 1 + ) ); if (transferBalance.gt(0)) { @@ -567,22 +651,51 @@ export class Monitor { for (const refunds of nextBundleRefunds) { for (const relayer of this.monitorConfig.monitoredRelayers) { this.updateRelayerRefunds(refunds, relayerBalanceReport[relayer], relayer, BalanceType.NEXT); - this.updateCrossChainTransfers(relayer, relayerBalanceReport[relayer]); } } + for (const relayer of this.monitorConfig.monitoredRelayers) { + this.updateCrossChainTransfers(relayer, relayerBalanceReport[relayer]); + } } updateCrossChainTransfers(relayer: string, relayerBalanceTable: RelayerBalanceTable): void { const allL1Tokens = this.clients.hubPoolClient.getL1Tokens(); for (const chainId of this.crossChainAdapterSupportedChains) { for (const l1Token of allL1Tokens) { - const transferBalance = this.clients.crossChainTransferClient.getOutstandingCrossChainTransferAmount( - relayer, - chainId, - l1Token.address - ); + // Handle special case for USDC which has multiple L2 tokens we might hold in inventory mapped to a single + // L1 token. + if (l1Token.symbol === "USDC" && chainId !== this.clients.hubPoolClient.chainId) { + const bridgedUsdcAddress = + TOKEN_SYMBOLS_MAP[chainId === CHAIN_IDs.BASE ? 
"USDbC" : "USDC.e"].addresses[chainId]; + const nativeUsdcAddress = TOKEN_SYMBOLS_MAP["USDC"].addresses[chainId]; + for (const [l2Address, symbol] of [ + [bridgedUsdcAddress, "USDC.e"], + [nativeUsdcAddress, "USDC"], + ]) { + if (l2Address !== undefined) { + const bridgedTransferBalance = + this.clients.crossChainTransferClient.getOutstandingCrossChainTransferAmount( + relayer, + chainId, + l1Token.address, + l2Address + ); + this.updateRelayerBalanceTable( + relayerBalanceTable, + symbol, + getNetworkName(chainId), + BalanceType.PENDING_TRANSFERS, + bridgedTransferBalance + ); + } + } + } else { + const transferBalance = this.clients.crossChainTransferClient.getOutstandingCrossChainTransferAmount( + relayer, + chainId, + l1Token.address + ); - if (transferBalance.gt(bnZero)) { this.updateRelayerBalanceTable( relayerBalanceTable, l1Token.symbol, @@ -595,157 +708,6 @@ export class Monitor { } } - updateUnknownTransfers(relayerBalanceReport: RelayerBalanceReport): void { - const { hubPoolClient, spokePoolClients } = this.clients; - - for (const relayer of this.monitorConfig.monitoredRelayers) { - const report = relayerBalanceReport[relayer]; - const transfersPerChain: TransfersByChain = this.clients.tokenTransferClient.getTokenTransfers(relayer); - - let mrkdwn = ""; - for (const chainId of this.monitorChains) { - const spokePoolClient = spokePoolClients[chainId]; - const transfersPerToken: TransfersByTokens = transfersPerChain[chainId]; - const l2ToL1Tokens = Object.fromEntries( - Object.keys(transfersPerToken).map((l2Token) => [ - l2Token, - hubPoolClient.getL1TokenForL2TokenAtBlock(l2Token, chainId, hubPoolClient.latestBlockSearched), - ]) - ); - - let currentChainMrkdwn = ""; - for (const l2Token of Object.keys(l2ToL1Tokens)) { - let currentTokenMrkdwn = ""; - - const tokenInfo = hubPoolClient.getL1TokenInfoForL2Token(l2Token, chainId); - const transfers = transfersPerToken[l2Token]; - // Skip if there has been no transfers of this token. 
- if (!transfers) { - continue; - } - - let totalOutgoingAmount = bnZero; - // Filter v2 fills and bond payments from outgoing transfers. - const fillTransactionHashes = spokePoolClient.getFillsForRelayer(relayer).map((fill) => fill.transactionHash); - const outgoingTransfers = this.categorizeUnknownTransfers(transfers.outgoing, fillTransactionHashes); - if (outgoingTransfers.all.length > 0) { - currentTokenMrkdwn += "Outgoing:\n"; - totalOutgoingAmount = totalOutgoingAmount.add(this.getTotalTransferAmount(outgoingTransfers.all)); - currentTokenMrkdwn += this.formatCategorizedTransfers(outgoingTransfers, tokenInfo.decimals, chainId); - } - - let totalIncomingAmount = bnZero; - // Filter v2 refunds and bond repayments from incoming transfers. - const refundTransactionHashes = spokePoolClient - .getRelayerRefundExecutions() - .map((refund) => refund.transactionHash); - const incomingTransfers = this.categorizeUnknownTransfers(transfers.incoming, refundTransactionHashes); - if (incomingTransfers.all.length > 0) { - currentTokenMrkdwn += "Incoming:\n"; - totalIncomingAmount = totalIncomingAmount.add(this.getTotalTransferAmount(incomingTransfers.all)); - currentTokenMrkdwn += this.formatCategorizedTransfers(incomingTransfers, tokenInfo.decimals, chainId); - } - - // Record if there are net outgoing transfers. - const netTransfersAmount = totalIncomingAmount.sub(totalOutgoingAmount); - if (!netTransfersAmount.eq(bnZero)) { - const netAmount = convertFromWei(netTransfersAmount.toString(), tokenInfo.decimals); - currentTokenMrkdwn = `*${tokenInfo.symbol}: Net ${netAmount}*\n` + currentTokenMrkdwn; - currentChainMrkdwn += currentTokenMrkdwn; - - // Report (incoming, outgoing, net) amounts. 
- this.incrementBalance( - report, - tokenInfo.symbol, - UNKNOWN_TRANSFERS_NAME, - BalanceType.CURRENT, - totalIncomingAmount - ); - this.incrementBalance( - report, - tokenInfo.symbol, - UNKNOWN_TRANSFERS_NAME, - BalanceType.PENDING, - totalOutgoingAmount.mul(-1) - ); - this.incrementBalance( - report, - tokenInfo.symbol, - UNKNOWN_TRANSFERS_NAME, - BalanceType.NEXT, - netTransfersAmount - ); - } - } - - // We only add to the markdown message if there was any unknown transfer for any token on this current chain. - if (currentChainMrkdwn) { - currentChainMrkdwn = `*[${getNetworkName(chainId)}]*\n` + currentChainMrkdwn; - mrkdwn += currentChainMrkdwn + "\n\n"; - } - } - - if (mrkdwn) { - this.logger.info({ - at: "Monitor#updateUnknownTransfers", - message: `Transfers that are not fills for relayer ${relayer} 🦨`, - mrkdwn, - }); - } - } - } - - categorizeUnknownTransfers(transfers: TokenTransfer[], excludeTransactionHashes: string[]): CategorizedTransfers { - // Exclude specified transaction hashes. 
- const allUnknownOutgoingTransfers = transfers.filter((transfer) => { - return !excludeTransactionHashes.includes(transfer.transactionHash); - }); - - const hubPoolAddress = this.clients.hubPoolClient.hubPool.address; - const v1 = []; - const other = []; - const bond = []; - const v1Addresses = this.monitorConfig.knownV1Addresses; - for (const transfer of allUnknownOutgoingTransfers) { - if (transfer.from === hubPoolAddress || transfer.to === hubPoolAddress) { - bond.push(transfer); - } else if (v1Addresses.includes(transfer.from) || v1Addresses.includes(transfer.to)) { - v1.push(transfer); - } else { - other.push(transfer); - } - } - return { bond, v1, other, all: allUnknownOutgoingTransfers }; - } - - formatCategorizedTransfers(transfers: CategorizedTransfers, decimals: number, chainId: number): string { - let mrkdwn = this.formatKnownTransfers(transfers.bond, decimals, "bond"); - mrkdwn += this.formatKnownTransfers(transfers.v1, decimals, "v1"); - mrkdwn += this.formatOtherTransfers(transfers.other, decimals, chainId); - return mrkdwn + "\n"; - } - - formatKnownTransfers(transfers: TokenTransfer[], decimals: number, transferType: string): string { - if (transfers.length === 0) { - return ""; - } - - const totalAmount = this.getTotalTransferAmount(transfers); - return `${transferType}: ${convertFromWei(totalAmount.toString(), decimals)}\n`; - } - - formatOtherTransfers(transfers: TokenTransfer[], decimals: number, chainId: number): string { - if (transfers.length === 0) { - return ""; - } - - const totalAmount = this.getTotalTransferAmount(transfers); - let mrkdwn = `other: ${convertFromWei(totalAmount.toString(), decimals)}\n`; - const transactionHashes = [...new Set(transfers.map((transfer) => transfer.transactionHash))]; - mrkdwn += blockExplorerLinks(transactionHashes, chainId); - return mrkdwn; - } - getTotalTransferAmount(transfers: TokenTransfer[]): BigNumber { return transfers.map((transfer) => transfer.value).reduce((a, b) => a.add(b)); } @@ -789,14 
+751,18 @@ export class Monitor { const totalRefundAmount = fillsToRefund[tokenAddress][relayer]; const tokenInfo = this.clients.hubPoolClient.getL1TokenInfoForL2Token(tokenAddress, chainId); + + let tokenSymbol = tokenInfo.symbol; + if ( + tokenSymbol === "USDC" && + chainId !== this.clients.hubPoolClient.chainId && + (compareAddressesSimple(TOKEN_SYMBOLS_MAP["USDC.e"].addresses[chainId], tokenAddress) || + compareAddressesSimple(TOKEN_SYMBOLS_MAP["USDbC"].addresses[chainId], tokenAddress)) + ) { + tokenSymbol = "USDC.e"; + } const amount = totalRefundAmount ?? bnZero; - this.updateRelayerBalanceTable( - relayerBalanceTable, - tokenInfo.symbol, - getNetworkName(chainId), - balanceType, - amount - ); + this.updateRelayerBalanceTable(relayerBalanceTable, tokenSymbol, getNetworkName(chainId), balanceType, amount); } } } diff --git a/src/monitor/index.ts b/src/monitor/index.ts index d052e8aa7..9732a8f27 100644 --- a/src/monitor/index.ts +++ b/src/monitor/index.ts @@ -8,14 +8,11 @@ let logger: winston.Logger; export async function runMonitor(_logger: winston.Logger, baseSigner: Signer): Promise { logger = _logger; const config = new MonitorConfig(process.env); - let clients; + const clients = await constructMonitorClients(config, logger, baseSigner); + const acrossMonitor = new Monitor(logger, config, clients); try { logger[startupLogLevel(config)]({ at: "Monitor#index", message: "Monitor started 🔭", config }); - - clients = await constructMonitorClients(config, logger, baseSigner); - const acrossMonitor = new Monitor(logger, config, clients); - for (;;) { const loopStart = Date.now(); @@ -58,7 +55,7 @@ export async function runMonitor(_logger: winston.Logger, baseSigner: Signer): P logger.debug({ at: "Monitor#index", message: "CheckBalances monitor disabled" }); } - await clients.multiCallerClient.executeTransactionQueue(); + await clients.multiCallerClient.executeTxnQueues(); logger.debug({ at: "Monitor#index", message: `Time to loop: ${(Date.now() - loopStart) / 
1000}s` }); diff --git a/src/relayer/Relayer.ts b/src/relayer/Relayer.ts index ca79d63c7..945c77fcd 100644 --- a/src/relayer/Relayer.ts +++ b/src/relayer/Relayer.ts @@ -1,10 +1,11 @@ import assert from "assert"; -import { utils as sdkUtils } from "@across-protocol/sdk-v2"; +import { utils as sdkUtils } from "@across-protocol/sdk"; import { utils as ethersUtils } from "ethers"; import { FillStatus, L1Token, V3Deposit, V3DepositWithBlock } from "../interfaces"; import { BigNumber, bnZero, + bnUint256Max, RelayerUnfilledDeposit, blockExplorerLink, createFormatFunction, @@ -13,9 +14,9 @@ import { getNetworkName, getUnfilledDeposits, isDefined, - toBNWei, winston, fixedPointAdjustment, + TransactionResponse, } from "../utils"; import { RelayerClients } from "./RelayerClientHelper"; import { RelayerConfig } from "./RelayerConfig"; @@ -26,9 +27,17 @@ const UNPROFITABLE_DEPOSIT_NOTICE_PERIOD = 60 * 60; // 1 hour type RepaymentFee = { paymentChainId: number; lpFeePct: BigNumber }; type BatchLPFees = { [depositKey: string]: RepaymentFee[] }; +type RepaymentChainProfitability = { + gasLimit: BigNumber; + gasCost: BigNumber; + relayerFeePct: BigNumber; + lpFeePct: BigNumber; +}; export class Relayer { public readonly relayerAddress: string; + public readonly fillStatus: { [depositHash: string]: number } = {}; + private pendingTxnReceipts: { [chainId: number]: Promise } = {}; constructor( relayerAddress: string, @@ -36,6 +45,19 @@ export class Relayer { readonly clients: RelayerClients, readonly config: RelayerConfig ) { + Object.values(clients.spokePoolClients).forEach(({ chainId }) => { + if (!isDefined(config.minDepositConfirmations[chainId])) { + const chain = getNetworkName(chainId); + logger.warn({ + at: "Relayer::constructor", + message: `${chain} deposit confirmation configuration is missing.`, + }); + config.minDepositConfirmations[chainId] = [ + { usdThreshold: bnUint256Max, minConfirmations: Number.MAX_SAFE_INTEGER }, + ]; + } + }); + this.relayerAddress = 
getAddress(relayerAddress); } @@ -47,9 +69,10 @@ export class Relayer { * @returns A boolean indicator determining whether the relayer configuration permits the deposit to be filled. */ filterDeposit({ deposit, version: depositVersion, invalidFills }: RelayerUnfilledDeposit): boolean { - const { depositId, originChainId, destinationChainId, depositor, recipient, inputToken, outputToken } = deposit; - const { acrossApiClient, configStoreClient, hubPoolClient } = this.clients; - const { ignoredAddresses, relayerTokens, acceptInvalidFills } = this.config; + const { depositId, originChainId, destinationChainId, depositor, recipient, inputToken, blockNumber } = deposit; + const { acrossApiClient, configStoreClient, hubPoolClient, profitClient, spokePoolClients } = this.clients; + const { ignoredAddresses, relayerTokens, acceptInvalidFills, minDepositConfirmations } = this.config; + const [srcChain, dstChain] = [getNetworkName(originChainId), getNetworkName(destinationChainId)]; // If we don't have the latest code to support this deposit, skip it. if (depositVersion > configStoreClient.configStoreVersion) { @@ -74,16 +97,34 @@ export class Relayer { return false; } + // Ensure that the individual deposit meets the minimum deposit confirmation requirements for its value. 
+ const fillAmountUsd = profitClient.getFillAmountInUsd(deposit); + const { minConfirmations } = minDepositConfirmations[originChainId].find(({ usdThreshold }) => + usdThreshold.gte(fillAmountUsd) + ); + const { latestBlockSearched } = spokePoolClients[originChainId]; + if (latestBlockSearched - blockNumber < minConfirmations) { + this.logger.debug({ + at: "Relayer::evaluateFill", + message: `Skipping ${srcChain} deposit due to insufficient deposit confirmations.`, + depositId, + blockNumber, + confirmations: latestBlockSearched - blockNumber, + minConfirmations, + transactionHash: deposit.transactionHash, + }); + return false; + } + // Skip deposits with quoteTimestamp in the future (impossible to know HubPool utilization => LP fee cannot be computed). if (deposit.quoteTimestamp > hubPoolClient.currentTime) { return false; } if (ignoredAddresses?.includes(getAddress(depositor)) || ignoredAddresses?.includes(getAddress(recipient))) { - const [origin, destination] = [getNetworkName(originChainId), getNetworkName(destinationChainId)]; this.logger.debug({ at: "Relayer::filterDeposit", - message: `Ignoring ${origin} deposit destined for ${destination}.`, + message: `Ignoring ${srcChain} deposit destined for ${dstChain}.`, depositor, recipient, transactionHash: deposit.transactionHash, @@ -105,7 +146,7 @@ export class Relayer { } // It would be preferable to use host time since it's more reliably up-to-date, but this creates issues in test. - const currentTime = this.clients.spokePoolClients[destinationChainId].getCurrentTime(); + const currentTime = spokePoolClients[destinationChainId].getCurrentTime(); if (deposit.fillDeadline <= currentTime) { return false; } @@ -114,8 +155,8 @@ export class Relayer { return false; } - if (!hubPoolClient.areTokensEquivalent(inputToken, originChainId, outputToken, destinationChainId)) { - this.logger.warn({ + if (!this.clients.inventoryClient.validateOutputToken(deposit)) { + this.logger[this.config.sendingRelaysEnabled ? 
"warn" : "debug"]({ at: "Relayer::filterDeposit", message: "Skipping deposit including in-protocol token swap.", deposit, @@ -177,20 +218,21 @@ export class Relayer { * not to support. * @returns An array of filtered RelayerUnfilledDeposit objects. */ - private async _getUnfilledDeposits(): Promise> { + private _getUnfilledDeposits(): Record { const { hubPoolClient, spokePoolClients } = this.clients; + const { relayerDestinationChains } = this.config; - const unfilledDeposits = await getUnfilledDeposits(spokePoolClients, hubPoolClient, this.logger); - - // Filter the resulting unfilled deposits according to relayer configuration. - Object.keys(unfilledDeposits).forEach((_destinationChainId) => { - const destinationChainId = Number(_destinationChainId); - unfilledDeposits[destinationChainId] = unfilledDeposits[destinationChainId].filter((deposit) => - this.filterDeposit(deposit) - ); - }); - - return unfilledDeposits; + // Filter the resulting deposits according to relayer configuration. + return Object.fromEntries( + Object.values(spokePoolClients) + .filter(({ chainId }) => relayerDestinationChains?.includes(chainId) ?? true) + .map(({ chainId: destinationChainId }) => [ + destinationChainId, + getUnfilledDeposits(destinationChainId, spokePoolClients, hubPoolClient, this.fillStatus).filter((deposit) => + this.filterDeposit(deposit) + ), + ]) + ); } /** @@ -213,41 +255,41 @@ export class Relayer { return true; } - computeRequiredDepositConfirmations(deposits: V3Deposit[]): { [chainId: number]: number } { - const { profitClient } = this.clients; + computeRequiredDepositConfirmations( + deposits: V3Deposit[], + destinationChainId: number + ): { [chainId: number]: number } { + const { profitClient, tokenClient } = this.clients; const { minDepositConfirmations } = this.config; // Sum the total unfilled deposit amount per origin chain and set a MDC for that chain. 
- const unfilledDepositAmountsPerChain: { [chainId: number]: BigNumber } = deposits.reduce((agg, deposit) => { - const unfilledAmountUsd = profitClient.getFillAmountInUsd(deposit, deposit.outputAmount); - agg[deposit.originChainId] = (agg[deposit.originChainId] ?? bnZero).add(unfilledAmountUsd); - return agg; - }, {}); - - // Sort thresholds in ascending order. - const minimumDepositConfirmationThresholds = Object.keys(minDepositConfirmations) - .filter((x) => x !== "default") - .sort((x, y) => Number(x) - Number(y)); + // Filter out deposits where the relayer doesn't have the balance to make the fill. + const unfilledDepositAmountsPerChain: { [chainId: number]: BigNumber } = deposits + .filter((deposit) => tokenClient.hasBalanceForFill(deposit)) + .reduce((agg, deposit) => { + const unfilledAmountUsd = profitClient.getFillAmountInUsd(deposit); + agg[deposit.originChainId] = (agg[deposit.originChainId] ?? bnZero).add(unfilledAmountUsd); + return agg; + }, {}); // Set the MDC for each origin chain equal to lowest threshold greater than the unfilled USD deposit amount. - // If we can't find a threshold greater than the USD amount, then use the default. const mdcPerChain = Object.fromEntries( Object.entries(unfilledDepositAmountsPerChain).map(([chainId, unfilledAmount]) => { - const usdThreshold = minimumDepositConfirmationThresholds.find( - (usdThreshold) => - toBNWei(usdThreshold).gte(unfilledAmount) && isDefined(minDepositConfirmations[usdThreshold][chainId]) + const { minConfirmations } = minDepositConfirmations[chainId].find(({ usdThreshold }) => + usdThreshold.gte(unfilledAmount) ); // If no thresholds are greater than unfilled amount, then use fallback which should have largest MDCs. - return [chainId, minDepositConfirmations[usdThreshold ?? 
"default"][chainId]]; + return [chainId, minConfirmations]; }) ); + + const dstChain = getNetworkName(destinationChainId); this.logger.debug({ at: "Relayer::computeRequiredDepositConfirmations", - message: "Setting minimum deposit confirmation based on origin chain aggregate deposit amount", + message: `Setting minimum ${dstChain} deposit confirmation based on origin chain aggregate deposit amount.`, unfilledDepositAmountsPerChain, mdcPerChain, - minDepositConfirmations, }); return mdcPerChain; @@ -302,13 +344,12 @@ export class Relayer { const l1Token = hubPoolClient.getL1TokenInfoForL2Token(inputToken, originChainId); const selfRelay = [depositor, recipient].every((address) => address === this.relayerAddress); if (tokenClient.hasBalanceForFill(deposit) && !selfRelay) { - const { - repaymentChainId, - realizedLpFeePct, - relayerFeePct, - gasLimit: _gasLimit, - gasCost, - } = await this.resolveRepaymentChain(deposit, l1Token, lpFees); + const { repaymentChainId, repaymentChainProfitability } = await this.resolveRepaymentChain( + deposit, + l1Token, + lpFees + ); + const { relayerFeePct, gasCost, gasLimit: _gasLimit, lpFeePct: realizedLpFeePct } = repaymentChainProfitability; if (isDefined(repaymentChainId)) { const gasLimit = isMessageEmpty(resolveDepositMessage(deposit)) ? undefined : _gasLimit; this.fillRelay(deposit, repaymentChainId, realizedLpFeePct, gasLimit); @@ -319,15 +360,11 @@ export class Relayer { profitClient.captureUnprofitableFill(deposit, realizedLpFeePct, relayerFeePct, gasCost); } } else if (selfRelay) { - const { realizedLpFeePct } = await hubPoolClient.computeRealizedLpFeePct({ - ...deposit, - paymentChainId: destinationChainId, - }); - // A relayer can fill its own deposit without an ERC20 transfer. Only bypass profitability requirements if the // relayer is both the depositor and the recipient, because a deposit on a cheap SpokePool chain could cause // expensive fills on (for example) mainnet. 
- this.fillRelay(deposit, destinationChainId, realizedLpFeePct); + const { lpFeePct } = lpFees.find((lpFee) => lpFee.paymentChainId === destinationChainId); + this.fillRelay(deposit, destinationChainId, lpFeePct); } else { // TokenClient.getBalance returns that we don't have enough balance to submit the fast fill. // At this point, capture the shortfall so that the inventory manager can rebalance the token inventory. @@ -407,21 +444,42 @@ export class Relayer { return lpFees; } + protected async executeFills(chainId: number, simulate = false): Promise { + const { + pendingTxnReceipts, + clients: { multiCallerClient }, + } = this; + + if (isDefined(pendingTxnReceipts[chainId])) { + this.logger.info({ + at: "Relayer::executeFills", + message: `${getNetworkName(chainId)} transaction queue has pending fills; skipping...`, + }); + multiCallerClient.clearTransactionQueue(chainId); + return []; + } + pendingTxnReceipts[chainId] = multiCallerClient.executeTxnQueue(chainId, simulate); + const txnReceipts = await pendingTxnReceipts[chainId]; + delete pendingTxnReceipts[chainId]; + + return txnReceipts.map(({ hash }) => hash); + } + async checkForUnfilledDepositsAndFill( sendSlowRelays = true, simulate = false - ): Promise<{ [chainId: number]: string[] }> { + ): Promise<{ [chainId: number]: Promise }> { const { profitClient, spokePoolClients, tokenClient, multiCallerClient } = this.clients; // Flush any pre-existing enqueued transactions that might not have been executed. multiCallerClient.clearTransactionQueue(); - const txnReceipts: { [chainId: number]: string[] } = Object.fromEntries( + const txnReceipts: { [chainId: number]: Promise } = Object.fromEntries( Object.values(spokePoolClients).map(({ chainId }) => [chainId, []]) ); // Fetch unfilled deposits and filter out deposits upfront before we compute the minimum deposit confirmation // per chain, which is based on the deposit volume we could fill. 
- const unfilledDeposits = await this._getUnfilledDeposits(); + const unfilledDeposits = this._getUnfilledDeposits(); const allUnfilledDeposits = Object.values(unfilledDeposits) .flat() .map(({ deposit }) => deposit); @@ -434,31 +492,36 @@ export class Relayer { return txnReceipts; } - const mdcPerChain = this.computeRequiredDepositConfirmations(allUnfilledDeposits); - const maxBlockNumbers = Object.fromEntries( - Object.values(spokePoolClients).map(({ chainId, latestBlockSearched }) => [ - chainId, - latestBlockSearched - mdcPerChain[chainId], - ]) - ); - const lpFees = await this.batchComputeLpFees(allUnfilledDeposits); - await sdkUtils.forEachAsync(Object.entries(unfilledDeposits), async ([chainId, unfilledDeposits]) => { - if (unfilledDeposits.length === 0) { + await sdkUtils.forEachAsync(Object.entries(unfilledDeposits), async ([chainId, _deposits]) => { + if (_deposits.length === 0) { return; } - await this.evaluateFills( - unfilledDeposits.map(({ deposit, fillStatus }) => ({ ...deposit, fillStatus })), - lpFees, - maxBlockNumbers, - sendSlowRelays + const destinationChainId = Number(chainId); + const deposits = _deposits.map(({ deposit }) => deposit); + const fillStatus = await sdkUtils.fillStatusArray(spokePoolClients[destinationChainId].spokePool, deposits); + + const unfilledDeposits = deposits + .map((deposit, idx) => ({ ...deposit, fillStatus: fillStatus[idx] })) + .filter(({ fillStatus, ...deposit }) => { + // Track the fill status for faster filtering on subsequent loops. 
+ const depositHash = spokePoolClients[deposit.destinationChainId].getDepositHash(deposit); + this.fillStatus[depositHash] = fillStatus; + return fillStatus !== FillStatus.Filled; + }); + + const mdcPerChain = this.computeRequiredDepositConfirmations(unfilledDeposits, destinationChainId); + const maxBlockNumbers = Object.fromEntries( + Object.values(spokePoolClients).map(({ chainId, latestBlockSearched }) => [ + chainId, + latestBlockSearched - mdcPerChain[chainId], + ]) ); + await this.evaluateFills(unfilledDeposits, lpFees, maxBlockNumbers, sendSlowRelays); - const destinationChainId = Number(chainId); if (multiCallerClient.getQueuedTransactions(destinationChainId).length > 0) { - const receipts = await multiCallerClient.executeTxnQueues(simulate, [destinationChainId]); - txnReceipts[destinationChainId] = receipts[destinationChainId]; + txnReceipts[destinationChainId] = this.executeFills(destinationChainId, simulate); } }); @@ -521,7 +584,7 @@ export class Relayer { const { spokePoolClients, multiCallerClient } = this.clients; this.logger.debug({ at: "Relayer::fillRelay", - message: "Filling v3 deposit.", + message: `Filling v3 deposit ${deposit.depositId} with repayment on ${repaymentChainId}.`, deposit, repaymentChainId, realizedLpFeePct, @@ -549,16 +612,23 @@ export class Relayer { multiCallerClient.enqueueTransaction({ contract, chainId, method, args, gasLimit, message, mrkdwn }); } + /** + * @notice Returns repayment chain choice for deposit given repayment fees and the hubPoolToken associated with the + * deposit inputToken. + * @param deposit + * @param hubPoolToken L1 token object associated with the deposit inputToken. + * @param repaymentFees + * @returns repaymentChainId is defined if and only if a profitable repayment chain is found. + * @returns repaymentChainProfitability contains the profitability data of the repaymentChainId if it is defined + * or the profitability data of the most preferred repayment chain otherwise. 
+ */ protected async resolveRepaymentChain( deposit: V3DepositWithBlock, hubPoolToken: L1Token, repaymentFees: RepaymentFee[] ): Promise<{ - gasLimit: BigNumber; repaymentChainId?: number; - realizedLpFeePct: BigNumber; - relayerFeePct: BigNumber; - gasCost: BigNumber; + repaymentChainProfitability: RepaymentChainProfitability; }> { const { inventoryClient, profitClient } = this.clients; const { depositId, originChainId, destinationChainId, inputAmount, outputAmount, transactionHash } = deposit; @@ -566,54 +636,128 @@ export class Relayer { const destinationChain = getNetworkName(destinationChainId); const start = performance.now(); - const preferredChainId = await inventoryClient.determineRefundChainId(deposit, hubPoolToken.address); + const preferredChainIds = await inventoryClient.determineRefundChainId(deposit, hubPoolToken.address); + assert(preferredChainIds.length > 0, `No preferred repayment chains found for deposit ${depositId}.`); this.logger.debug({ at: "Relayer::resolveRepaymentChain", - message: `Determined preferred repayment chain ${preferredChainId} for deposit from ${originChain} to ${destinationChain} in ${ + message: `Determined eligible repayment chains ${JSON.stringify( + preferredChainIds + )} for deposit ${depositId} from ${originChain} to ${destinationChain} in ${ Math.round(performance.now() - start) / 1000 }s.`, }); - const repaymentFee = repaymentFees?.find(({ paymentChainId }) => paymentChainId === preferredChainId); - assert(isDefined(repaymentFee)); - const { lpFeePct } = repaymentFee; + const _repaymentFees = preferredChainIds.map((chainId) => + repaymentFees.find(({ paymentChainId }) => paymentChainId === chainId) + ); + const lpFeePcts = _repaymentFees.map(({ lpFeePct }) => lpFeePct); + + // For each eligible repayment chain, compute profitability and pick the one that is profitable. If none are + // profitable, then finally check the destination chain even if its not a preferred repayment chain. 
The idea + // here is that depositors are receiving quoted lp fees from the API that assumes repayment on the destination + // chain, so we should honor all repayments on the destination chain if it's profitable, even if it doesn't + // fit within our inventory management. + + const getRepaymentChainProfitability = async ( + preferredChainId: number, + lpFeePct: BigNumber + ): Promise<{ profitable: boolean; gasLimit: BigNumber; gasCost: BigNumber; relayerFeePct: BigNumber }> => { + const { + profitable, + nativeGasCost: gasLimit, + tokenGasCost: gasCost, + netRelayerFeePct: relayerFeePct, // net relayer fee is equal to total fee minus the lp fee. + } = await profitClient.isFillProfitable(deposit, lpFeePct, hubPoolToken, preferredChainId); + return { + profitable, + gasLimit, + gasCost, + relayerFeePct, + }; + }; - const { - profitable, - nativeGasCost: gasLimit, - tokenGasCost: gasCost, - grossRelayerFeePct: relayerFeePct, // gross relayer fee is equal to total fee minus the lp fee. - } = await profitClient.isFillProfitable(deposit, lpFeePct, hubPoolToken); - // If preferred chain is different from the destination chain and the preferred chain - // is not profitable, then check if the destination chain is profitable. + const repaymentChainProfitabilities = await Promise.all( + preferredChainIds.map(async (preferredChainId, i) => { + const lpFeePct = lpFeePcts[i]; + assert(isDefined(lpFeePct), `Missing lp fee pct for chain potential repayment chain ${preferredChainId}`); + return getRepaymentChainProfitability(preferredChainId, lpFeePcts[i]); + }) + ); + const profitableRepaymentChainIds = preferredChainIds.filter((_, i) => repaymentChainProfitabilities[i].profitable); + + // @dev preferredChainId will not be defined until a chain is found to be profitable. + let preferredChain: number | undefined = undefined; + + // @dev The following internal function should be the only one used to set `preferredChain` above. 
+ const getProfitabilityDataForPreferredChainIndex = (preferredChainIndex: number): RepaymentChainProfitability => { + const lpFeePct = lpFeePcts[preferredChainIndex]; + const { gasLimit, gasCost, relayerFeePct } = repaymentChainProfitabilities[preferredChainIndex]; + return { + gasLimit, + gasCost, + relayerFeePct, + lpFeePct, + }; + }; + let profitabilityData: RepaymentChainProfitability = getProfitabilityDataForPreferredChainIndex(0); + + // If there are any profitable repayment chains, then set preferred chain to the first one since the preferred + // chains are given to us by the InventoryClient sorted in priority order. + + if (profitableRepaymentChainIds.length > 0) { + preferredChain = profitableRepaymentChainIds[0]; + const preferredChainIndex = preferredChainIds.indexOf(preferredChain); + profitabilityData = getProfitabilityDataForPreferredChainIndex(preferredChainIndex); + this.logger.debug({ + at: "Relayer::resolveRepaymentChain", + message: `Selected preferred repayment chain ${preferredChain} for deposit ${depositId}, #${ + preferredChainIndex + 1 + } in eligible chains ${JSON.stringify(preferredChainIds)} list.`, + profitableRepaymentChainIds, + }); + } + + // If none of the preferred chains are profitable and they also don't include the destination chain, + // then check if the destination chain is profitable. // This assumes that the depositor is getting quotes from the /suggested-fees endpoint - // in the frontend-v2 repo which assumes that repayment is the destination chain. If this is profitable, then + // in the frontend repo which assumes that repayment is the destination chain. If this is profitable, then // go ahead and use the preferred chain as repayment and log the lp fee delta. This is a temporary solution // so that depositors can continue to quote lp fees assuming repayment is on the destination chain until - // we come up with a smarter profitability check. 
- if (!profitable && preferredChainId !== destinationChainId) { + // we come up with a smarter fee quoting algorithm that takes into account relayer inventory management more + // accurately. + if (!isDefined(preferredChain) && !preferredChainIds.includes(destinationChainId)) { this.logger.debug({ at: "Relayer::resolveRepaymentChain", - message: `Preferred chain ${preferredChainId} is not profitable. Checking destination chain ${destinationChainId} profitability.`, + message: `Preferred chains ${JSON.stringify( + preferredChainIds + )} are not profitable. Checking destination chain ${destinationChainId} profitability.`, deposit: { originChain, depositId, destinationChain, transactionHash }, }); + // Evaluate destination chain profitability to see if we can reset preferred chain. const { lpFeePct: destinationChainLpFeePct } = repaymentFees.find( ({ paymentChainId }) => paymentChainId === destinationChainId ); - assert(isDefined(lpFeePct)); - + assert(isDefined(destinationChainLpFeePct)); const fallbackProfitability = await profitClient.isFillProfitable( deposit, destinationChainLpFeePct, - hubPoolToken + hubPoolToken, + destinationChainId ); + + // If destination chain is profitable, then use the top preferred chain as a favor to the depositor + // but log that we might be taking a loss. This is to not penalize an honest depositor who set their + // fees according to the API that assumes destination chain repayment. if (fallbackProfitability.profitable) { + preferredChain = preferredChainIds[0]; + const deltaRelayerFee = profitabilityData.relayerFeePct.sub(fallbackProfitability.netRelayerFeePct); // This is the delta in the gross relayer fee. If negative, then the destination chain would have had a higher // gross relayer fee, and therefore represents a virtual loss to the relayer. However, the relayer is // maintaining its inventory allocation by sticking to its preferred repayment chain. 
- const deltaRelayerFee = relayerFeePct.sub(fallbackProfitability.grossRelayerFeePct); this.logger[this.config.sendingRelaysEnabled ? "info" : "debug"]({ at: "Relayer::resolveRepaymentChain", - message: `🦦 Taking repayment for filling deposit ${depositId} on preferred chain ${preferredChainId} is unprofitable but taking repayment on destination chain ${destinationChainId} is profitable. Electing to take repayment on preferred chain as favor to depositor who assumed repayment on destination chain in their quote. Delta in gross relayer fee: ${formatFeePct( + message: `🦦 Taking repayment for filling deposit ${depositId} on preferred chains ${JSON.stringify( + preferredChainIds + )} is unprofitable but taking repayment on destination chain ${destinationChainId} is profitable. Electing to take repayment on top preferred chain ${preferredChain} as favor to depositor who assumed repayment on destination chain in their quote. Delta in net relayer fee: ${formatFeePct( deltaRelayerFee )}%`, deposit: { @@ -622,33 +766,24 @@ export class Relayer { token: hubPoolToken.symbol, txnHash: blockExplorerLink(transactionHash, originChainId), }, - preferredChain: getNetworkName(preferredChainId), - preferredChainLpFeePct: `${formatFeePct(lpFeePct)}%`, + preferredChain: getNetworkName(preferredChain), + preferredChainLpFeePct: `${formatFeePct(profitabilityData.lpFeePct)}%`, destinationChainLpFeePct: `${formatFeePct(destinationChainLpFeePct)}%`, // The delta will cut into the gross relayer fee. If negative, then taking the repayment on destination chain // would have been more profitable to the relayer because the lp fee would have been lower. - deltaLpFeePct: `${formatFeePct(destinationChainLpFeePct.sub(lpFeePct))}%`, + deltaLpFeePct: `${formatFeePct(destinationChainLpFeePct.sub(profitabilityData.lpFeePct))}%`, // relayer fee is the gross relayer fee using the destination chain lp fee: inputAmount - outputAmount - lpFee. 
- preferredChainRelayerFeePct: `${formatFeePct(relayerFeePct)}%`, - destinationChainRelayerFeePct: `${formatFeePct(fallbackProfitability.grossRelayerFeePct)}%`, + preferredChainRelayerFeePct: `${formatFeePct(profitabilityData.relayerFeePct)}%`, + destinationChainRelayerFeePct: `${formatFeePct(fallbackProfitability.netRelayerFeePct)}%`, deltaRelayerFee: `${formatFeePct(deltaRelayerFee)}%`, }); - - // We've checked that the user set the output amount honestly and assumed that the payment would be on - // destination chain, therefore we will fill them using the original preferred chain to maintain - // inventory assumptions and also quote the original relayer fee pct. - return { - repaymentChainId: preferredChainId, - realizedLpFeePct: lpFeePct, - relayerFeePct, - gasCost, - gasLimit, - }; } else { // If preferred chain is not profitable and neither is fallback, then return the original profitability result. this.logger.debug({ at: "Relayer::resolveRepaymentChain", - message: `Taking repayment on destination chain ${destinationChainId} would also not be profitable.`, + message: `Taking repayment for deposit ${depositId} with preferred chains ${JSON.stringify( + preferredChainIds + )} on destination chain ${destinationChainId} would also not be profitable.`, deposit: { originChain, depositId, @@ -658,37 +793,18 @@ export class Relayer { inputAmount, outputAmount, }, - preferredChain: getNetworkName(preferredChainId), - preferredChainLpFeePct: `${formatFeePct(lpFeePct)}%`, + preferredChain: getNetworkName(preferredChainIds[0]), + preferredChainLpFeePct: `${formatFeePct(profitabilityData.lpFeePct)}%`, destinationChainLpFeePct: `${formatFeePct(destinationChainLpFeePct)}%`, - preferredChainRelayerFeePct: `${formatFeePct(relayerFeePct)}%`, - destinationChainRelayerFeePct: `${formatFeePct(fallbackProfitability.grossRelayerFeePct)}%`, + preferredChainRelayerFeePct: `${formatFeePct(profitabilityData.relayerFeePct)}%`, + destinationChainRelayerFeePct: 
`${formatFeePct(fallbackProfitability.netRelayerFeePct)}%`, }); } } - this.logger.debug({ - at: "Relayer::resolveRepaymentChain", - message: `Preferred chain ${preferredChainId} is${profitable ? "" : " not"} profitable.`, - deposit: { - originChain, - depositId, - destinationChain, - transactionHash, - token: hubPoolToken.symbol, - inputAmount, - outputAmount, - }, - preferredChainLpFeePct: `${formatFeePct(lpFeePct)}%`, - preferredChainRelayerFeePct: `${formatFeePct(relayerFeePct)}%`, - }); - return { - repaymentChainId: profitable ? preferredChainId : undefined, - realizedLpFeePct: lpFeePct, - relayerFeePct, - gasCost, - gasLimit, + repaymentChainProfitability: profitabilityData, + repaymentChainId: preferredChain, }; } @@ -700,16 +816,17 @@ export class Relayer { const chainId = Number(_chainId); mrkdwn += `*Shortfall on ${getNetworkName(chainId)}:*\n`; Object.entries(shortfallForChain).forEach(([token, { shortfall, balance, needed, deposits }]) => { - const { symbol, decimals } = this.clients.hubPoolClient.getTokenInfo(chainId, token); - const formatter = createFormatFunction(2, 4, false, decimals); + const { symbol, formatter } = this.formatAmount(chainId, token); let crossChainLog = ""; if (this.clients.inventoryClient.isInventoryManagementEnabled() && chainId !== 1) { - const l1Token = this.clients.hubPoolClient.getL1TokenInfoForL2Token(token, chainId); + // Shortfalls are mapped to deposit output tokens so look up output token in token symbol map. + const l1Token = this.clients.hubPoolClient.getL1TokenInfoForAddress(token, chainId); crossChainLog = "There is " + formatter( this.clients.inventoryClient.crossChainTransferClient - .getOutstandingCrossChainTransferAmount(this.relayerAddress, chainId, l1Token.address) + .getOutstandingCrossChainTransferAmount(this.relayerAddress, chainId, l1Token.address, token) + // TODO: Add in additional l2Token param here once we can specify it .toString() ) + ` inbound L1->L2 ${symbol} transfers. 
`; @@ -730,15 +847,18 @@ export class Relayer { }); } + private formatAmount( + chainId: number, + tokenAddress: string + ): { symbol: string; decimals: number; formatter: (amount: string) => string } { + const { symbol, decimals } = this.clients.hubPoolClient.getTokenInfoForAddress(tokenAddress, chainId); + return { symbol, decimals, formatter: createFormatFunction(2, 4, false, decimals) }; + } + private handleUnprofitableFill() { - const { hubPoolClient, profitClient } = this.clients; + const { profitClient } = this.clients; const unprofitableDeposits = profitClient.getUnprofitableFills(); - const formatAmount = (chainId: number, token: string, amount: BigNumber): { symbol: string; amount: string } => { - const { symbol, decimals } = hubPoolClient.getL1TokenInfoForL2Token(token, chainId); - return { symbol, amount: createFormatFunction(2, 4, false, decimals)(amount.toString()) }; - }; - let mrkdwn = ""; Object.keys(unprofitableDeposits).forEach((chainId) => { let depositMrkdwn = ""; @@ -752,17 +872,10 @@ export class Relayer { const { originChainId, destinationChainId, inputToken, outputToken, inputAmount, outputAmount } = deposit; const depositblockExplorerLink = blockExplorerLink(deposit.transactionHash, originChainId); - - const { symbol: inputSymbol, amount: formattedInputAmount } = formatAmount( - originChainId, - inputToken, - inputAmount - ); - const { symbol: outputSymbol, amount: formattedOutputAmount } = formatAmount( - destinationChainId, - outputToken, - outputAmount - ); + const { symbol: inputSymbol, formatter: inputFormatter } = this.formatAmount(originChainId, inputToken); + const formattedInputAmount = inputFormatter(inputAmount.toString()); + const { symbol: outputSymbol, formatter: outputFormatter } = this.formatAmount(destinationChainId, outputToken); + const formattedOutputAmount = outputFormatter(outputAmount.toString()); const { symbol: gasTokenSymbol, decimals: gasTokenDecimals } = profitClient.resolveGasToken(destinationChainId); const 
formattedGasCost = createFormatFunction(2, 10, false, gasTokenDecimals)(gasCost.toString()); @@ -828,7 +941,7 @@ export class Relayer { .div(deposit.inputAmount); const totalFeePct = formatFeePct(_totalFeePct); const { symbol: outputTokenSymbol, decimals: outputTokenDecimals } = - this.clients.hubPoolClient.getTokenInfoForDeposit(deposit); + this.clients.hubPoolClient.getTokenInfoForAddress(deposit.outputToken, deposit.destinationChainId); const _outputAmount = createFormatFunction(2, 4, false, outputTokenDecimals)(deposit.outputAmount.toString()); msg += ` and output ${_outputAmount} ${outputTokenSymbol}, with depositor ${depositor}.` + diff --git a/src/relayer/RelayerClientHelper.ts b/src/relayer/RelayerClientHelper.ts index 2ff40ad28..aba79e2fe 100644 --- a/src/relayer/RelayerClientHelper.ts +++ b/src/relayer/RelayerClientHelper.ts @@ -1,6 +1,4 @@ -import { ChildProcess } from "child_process"; -import { Contract } from "ethers"; -import { utils as sdkUtils } from "@across-protocol/sdk-v2"; +import { utils as sdkUtils } from "@across-protocol/sdk"; import winston from "winston"; import { AcrossApiClient, @@ -8,27 +6,20 @@ import { HubPoolClient, InventoryClient, ProfitClient, - IndexedSpokePoolClient, TokenClient, } from "../clients"; +import { IndexedSpokePoolClient, IndexerOpts } from "../clients/SpokePoolClient"; import { AdapterManager, CrossChainTransferClient } from "../clients/bridges"; import { - CONTRACT_ADDRESSES, Clients, constructClients, constructSpokePoolClientsWithLookback, + resolveSpokePoolActivationBlock, updateClients, updateSpokePoolClients, } from "../common"; import { SpokePoolClientsByChain } from "../interfaces"; -import { - getBlockForTimestamp, - getDeploymentBlockNumber, - getProvider, - getRedisCache, - Signer, - SpokePool, -} from "../utils"; +import { getBlockForTimestamp, getCurrentTime, getProvider, getRedisCache, Signer, SpokePool } from "../utils"; import { RelayerConfig } from "./RelayerConfig"; export interface RelayerClients 
extends Clients { @@ -43,34 +34,30 @@ async function indexedSpokePoolClient( baseSigner: Signer, hubPoolClient: HubPoolClient, chainId: number, - worker: ChildProcess + opts: IndexerOpts & { lookback: number; blockRange: number } ): Promise { const { logger } = hubPoolClient; // Set up Spoke signers and connect them to spoke pool contract objects. const signer = baseSigner.connect(await getProvider(chainId)); + const spokePoolAddr = hubPoolClient.getSpokePoolForBlock(chainId); const blockFinder = undefined; const redis = await getRedisCache(hubPoolClient.logger); - - const spokePoolAddr = hubPoolClient.getSpokePoolForBlock(chainId, hubPoolClient.latestBlockSearched); - const spokePool = new Contract(spokePoolAddr, SpokePool.abi, signer); - const spokePoolActivationBlock = hubPoolClient.getSpokePoolActivationBlock(chainId, spokePoolAddr); - const time = (await hubPoolClient.hubPool.provider.getBlock(spokePoolActivationBlock)).timestamp; - - // Improve BlockFinder efficiency by clamping its search space lower bound to the SpokePool deployment block. 
- const hints = { lowBlock: getDeploymentBlockNumber("SpokePool", chainId) }; - const registrationBlock = await getBlockForTimestamp(chainId, time, blockFinder, redis, hints); + const [activationBlock, fromBlock] = await Promise.all([ + resolveSpokePoolActivationBlock(chainId, hubPoolClient), + getBlockForTimestamp(chainId, getCurrentTime() - opts.lookback, blockFinder, redis), + ]); const spokePoolClient = new IndexedSpokePoolClient( logger, - spokePool, + SpokePool.connect(spokePoolAddr, signer), hubPoolClient, chainId, - registrationBlock, - worker + activationBlock, + { fromBlock, maxBlockLookBack: opts.blockRange }, + opts ); - spokePoolClient.init(); return spokePoolClient; } @@ -78,8 +65,7 @@ async function indexedSpokePoolClient( export async function constructRelayerClients( logger: winston.Logger, config: RelayerConfig, - baseSigner: Signer, - workers: { [chainId: number]: ChildProcess } + baseSigner: Signer ): Promise { const signerAddr = await baseSigner.getAddress(); // The relayer only uses the HubPoolClient to query repayments refunds for the latest validated @@ -102,10 +88,15 @@ export async function constructRelayerClients( let spokePoolClients: SpokePoolClientsByChain; if (config.externalIndexer) { spokePoolClients = Object.fromEntries( - await sdkUtils.mapAsync(enabledChains ?? configStoreClient.getEnabledChains(), async (chainId) => [ - chainId, - await indexedSpokePoolClient(baseSigner, hubPoolClient, chainId, workers[chainId]), - ]) + await sdkUtils.mapAsync(enabledChains ?? configStoreClient.getEnabledChains(), async (chainId) => { + const finality = config.minDepositConfirmations[chainId].at(0)?.minConfirmations ?? 
1024; + const opts = { + finality, + lookback: config.maxRelayerLookBack, + blockRange: config.maxBlockLookBack[chainId], + }; + return [chainId, await indexedSpokePoolClient(baseSigner, hubPoolClient, chainId, opts)]; + }) ); } else { spokePoolClients = await constructSpokePoolClientsWithLookback( @@ -148,10 +139,7 @@ export async function constructRelayerClients( ); await profitClient.update(); - // The relayer will originate cross chain rebalances from both its own EOA address and the atomic depositor address - // so we should track both for accurate cross-chain inventory management. - const atomicDepositor = CONTRACT_ADDRESSES[hubPoolClient.chainId]?.atomicDepositor; - const monitoredAddresses = [signerAddr, atomicDepositor?.address]; + const monitoredAddresses = [signerAddr]; const adapterManager = new AdapterManager( logger, spokePoolClients, @@ -197,7 +185,7 @@ export async function updateRelayerClients(clients: RelayerClients, config: Rela // TODO: the code below can be refined by grouping with promise.all. however you need to consider the inter // dependencies of the clients. some clients need to be updated before others. when doing this refactor consider // having a "first run" update and then a "normal" update that considers this. see previous implementation here - // https://github.com/across-protocol/relayer-v2/pull/37/files#r883371256 as a reference. + // https://github.com/across-protocol/relayer/pull/37/files#r883371256 as a reference. await updateSpokePoolClients(spokePoolClients, [ "V3FundsDeposited", "RequestedSpeedUpV3Deposit", @@ -210,18 +198,14 @@ export async function updateRelayerClients(clients: RelayerClients, config: Rela await clients.tokenClient.update(); // We can update the inventory client at the same time as checking for eth wrapping as these do not depend on each other. 
+ const inventoryChainIds = Object.values(spokePoolClients) + .filter(({ latestBlockSearched, deploymentBlock }) => latestBlockSearched > deploymentBlock) + .map(({ chainId }) => chainId); await Promise.all([ clients.acrossApiClient.update(config.ignoreLimits), - clients.inventoryClient.update(), + clients.inventoryClient.update(inventoryChainIds), clients.inventoryClient.wrapL2EthIfAboveThreshold(), clients.inventoryClient.setL1TokenApprovals(), + config.sendingRelaysEnabled ? clients.tokenClient.setOriginTokenApprovals() : Promise.resolve(), ]); - - // Update the token client after the inventory client has done its wrapping of L2 ETH to ensure latest WETH ballance. - // The token client needs route data, so wait for update before checking approvals. - clients.tokenClient.clearTokenData(); - await clients.tokenClient.update(); - if (config.sendingRelaysEnabled) { - await clients.tokenClient.setOriginTokenApprovals(); - } } diff --git a/src/relayer/RelayerConfig.ts b/src/relayer/RelayerConfig.ts index ffd7bd867..fe28d6d77 100644 --- a/src/relayer/RelayerConfig.ts +++ b/src/relayer/RelayerConfig.ts @@ -1,8 +1,26 @@ -import { typeguards } from "@across-protocol/sdk-v2"; -import { BigNumber, toBNWei, assert, isDefined, readFileSync, toBN, replaceAddressCase, ethers } from "../utils"; +import { utils as ethersUtils } from "ethers"; +import { typeguards } from "@across-protocol/sdk"; +import { + BigNumber, + bnUint256Max, + toBNWei, + assert, + getNetworkName, + isDefined, + readFileSync, + toBN, + replaceAddressCase, + ethers, + TOKEN_SYMBOLS_MAP, +} from "../utils"; import { CommonConfig, ProcessEnv } from "../common"; import * as Constants from "../common/Constants"; -import { InventoryConfig } from "../interfaces"; +import { InventoryConfig, TokenBalanceConfig, isAliasConfig } from "../interfaces/InventoryManagement"; + +type DepositConfirmationConfig = { + usdThreshold: BigNumber; + minConfirmations: number; +}; export class RelayerConfig extends CommonConfig { 
readonly externalIndexer: boolean; @@ -29,7 +47,7 @@ export class RelayerConfig extends CommonConfig { readonly slowDepositors: string[]; // Following distances in blocks to guarantee finality on each chain. readonly minDepositConfirmations: { - [threshold: number]: { [chainId: number]: number }; + [chainId: number]: DepositConfirmationConfig[]; }; // Set to false to skip querying max deposit limit from /limits Vercel API endpoint. Otherwise relayer will not // fill any deposit over the limit which is based on liquidReserves in the HubPool. @@ -95,69 +113,113 @@ export class RelayerConfig extends CommonConfig { if (Object.keys(this.inventoryConfig).length > 0) { this.inventoryConfig = replaceAddressCase(this.inventoryConfig); // Cast any non-address case addresses. - this.inventoryConfig.wrapEtherThreshold = this.inventoryConfig.wrapEtherThreshold - ? toBNWei(this.inventoryConfig.wrapEtherThreshold) - : toBNWei(1); // default to keeping 2 Eth on the target chains and wrapping the rest to WETH. - this.inventoryConfig.wrapEtherThresholdPerChain ??= {}; - this.inventoryConfig.wrapEtherTarget = this.inventoryConfig.wrapEtherTarget - ? toBNWei(this.inventoryConfig.wrapEtherTarget) - : this.inventoryConfig.wrapEtherThreshold; // default to wrapping ETH to threshold, same as target. - this.inventoryConfig.wrapEtherTargetPerChain ??= {}; + + const { inventoryConfig } = this; + + // Default to 1 Eth on the target chains and wrapping the rest to WETH. + inventoryConfig.wrapEtherThreshold = toBNWei(inventoryConfig.wrapEtherThreshold ?? 1); + + inventoryConfig.wrapEtherThresholdPerChain ??= {}; + inventoryConfig.wrapEtherTarget = inventoryConfig.wrapEtherTarget + ? toBNWei(inventoryConfig.wrapEtherTarget) + : inventoryConfig.wrapEtherThreshold; // default to wrapping ETH to threshold, same as target. 
+ + inventoryConfig.wrapEtherTargetPerChain ??= {}; assert( - this.inventoryConfig.wrapEtherThreshold.gte(this.inventoryConfig.wrapEtherTarget), - `default wrapEtherThreshold ${this.inventoryConfig.wrapEtherThreshold} must be >= default wrapEtherTarget ${this.inventoryConfig.wrapEtherTarget}` + inventoryConfig.wrapEtherThreshold.gte(inventoryConfig.wrapEtherTarget), + `default wrapEtherThreshold ${inventoryConfig.wrapEtherThreshold} must be >= default wrapEtherTarget ${inventoryConfig.wrapEtherTarget}` ); // Validate the per chain target and thresholds for wrapping ETH: - Object.keys(this.inventoryConfig.wrapEtherThresholdPerChain).forEach((chainId) => { - if (this.inventoryConfig.wrapEtherThresholdPerChain[chainId] !== undefined) { - this.inventoryConfig.wrapEtherThresholdPerChain[chainId] = toBNWei( - this.inventoryConfig.wrapEtherThresholdPerChain[chainId] - ); + const wrapThresholds = inventoryConfig.wrapEtherThresholdPerChain; + const wrapTargets = inventoryConfig.wrapEtherTargetPerChain; + Object.keys(inventoryConfig.wrapEtherThresholdPerChain).forEach((chainId) => { + if (wrapThresholds[chainId] !== undefined) { + wrapThresholds[chainId] = toBNWei(wrapThresholds[chainId]); // Promote to 18 decimals. } }); - Object.keys(this.inventoryConfig.wrapEtherTargetPerChain).forEach((chainId) => { - if (this.inventoryConfig.wrapEtherTargetPerChain[chainId] !== undefined) { - this.inventoryConfig.wrapEtherTargetPerChain[chainId] = toBNWei( - this.inventoryConfig.wrapEtherTargetPerChain[chainId] - ); + + Object.keys(inventoryConfig.wrapEtherTargetPerChain).forEach((chainId) => { + if (wrapTargets[chainId] !== undefined) { + wrapTargets[chainId] = toBNWei(wrapTargets[chainId]); // Promote to 18 decimals. + // Check newly set target against threshold - const threshold = - this.inventoryConfig.wrapEtherThresholdPerChain[chainId] ?? 
this.inventoryConfig.wrapEtherThreshold; - const target = this.inventoryConfig.wrapEtherTargetPerChain[chainId]; + const threshold = wrapThresholds[chainId] ?? inventoryConfig.wrapEtherThreshold; + const target = wrapTargets[chainId]; assert( threshold.gte(target), - `wrapEtherThresholdPerChain ${threshold.toString()} must be >= wrapEtherTargetPerChain ${target}` + `Chain ${chainId} wrapEtherThresholdPerChain ${threshold} must be >= wrapEtherTargetPerChain ${target}` ); } }); - Object.keys(this.inventoryConfig?.tokenConfig ?? {}).forEach((l1Token) => { - Object.keys(this.inventoryConfig.tokenConfig[l1Token]).forEach((chainId) => { - const { targetPct, thresholdPct, unwrapWethThreshold, unwrapWethTarget, targetOverageBuffer } = - this.inventoryConfig.tokenConfig[l1Token][chainId]; - assert( - targetPct !== undefined && thresholdPct !== undefined, - `Bad config. Must specify targetPct, thresholdPct for ${l1Token} on ${chainId}` - ); - assert( - toBN(thresholdPct).lte(toBN(targetPct)), - `Bad config. thresholdPct<=targetPct for ${l1Token} on ${chainId}` - ); - this.inventoryConfig.tokenConfig[l1Token][chainId].targetPct = toBNWei(targetPct).div(100); - this.inventoryConfig.tokenConfig[l1Token][chainId].thresholdPct = toBNWei(thresholdPct).div(100); - // Default to 150% the targetPct. targetOverageBuffer does not have to be defined so that no existing configs - // are broken. This is a reasonable default because it allows the relayer to be a bit more flexible in - // holding more tokens than the targetPct, but perhaps a better default is 100% - this.inventoryConfig.tokenConfig[l1Token][chainId].targetOverageBuffer = toBNWei( - targetOverageBuffer ?? 
"1.5" - ); + + const parseTokenConfig = ( + l1Token: string, + chainId: string, + rawTokenConfig: TokenBalanceConfig + ): TokenBalanceConfig => { + const { targetPct, thresholdPct, unwrapWethThreshold, unwrapWethTarget, targetOverageBuffer } = rawTokenConfig; + const tokenConfig: TokenBalanceConfig = { targetPct, thresholdPct, targetOverageBuffer }; + + assert( + targetPct !== undefined && thresholdPct !== undefined, + `Bad config. Must specify targetPct, thresholdPct for ${l1Token} on ${chainId}` + ); + assert( + toBN(thresholdPct).lte(toBN(targetPct)), + `Bad config. thresholdPct<=targetPct for ${l1Token} on ${chainId}` + ); + tokenConfig.targetPct = toBNWei(targetPct).div(100); + tokenConfig.thresholdPct = toBNWei(thresholdPct).div(100); + + // Default to 150% the targetPct. targetOverageBuffer does not have to be defined so that no existing configs + // are broken. This is a reasonable default because it allows the relayer to be a bit more flexible in + // holding more tokens than the targetPct, but perhaps a better default is 100% + tokenConfig.targetOverageBuffer = toBNWei(targetOverageBuffer ?? "1.5"); + + // For WETH, also consider any unwrap target/threshold. + if (l1Token === TOKEN_SYMBOLS_MAP.WETH.symbol) { if (unwrapWethThreshold !== undefined) { - this.inventoryConfig.tokenConfig[l1Token][chainId].unwrapWethThreshold = toBNWei(unwrapWethThreshold); + tokenConfig.unwrapWethThreshold = toBNWei(unwrapWethThreshold); } - this.inventoryConfig.tokenConfig[l1Token][chainId].unwrapWethTarget = unwrapWethTarget - ? toBNWei(unwrapWethTarget) - : toBNWei(2); - }); + tokenConfig.unwrapWethTarget = toBNWei(unwrapWethTarget ?? 2); + } + + return tokenConfig; + }; + + const rawTokenConfigs = inventoryConfig?.tokenConfig ?? {}; + const tokenConfigs = (inventoryConfig.tokenConfig = {}); + Object.keys(rawTokenConfigs).forEach((l1Token) => { + // If the l1Token is a symbol, resolve the correct address. + const effectiveL1Token = ethersUtils.isAddress(l1Token) + ? 
l1Token + : TOKEN_SYMBOLS_MAP[l1Token].addresses[this.hubPoolChainId]; + assert(effectiveL1Token !== undefined, `No token identified for ${l1Token}`); + + tokenConfigs[effectiveL1Token] ??= {}; + const hubTokenConfig = rawTokenConfigs[l1Token]; + + if (isAliasConfig(hubTokenConfig)) { + Object.keys(hubTokenConfig).forEach((symbol) => { + Object.keys(hubTokenConfig[symbol]).forEach((chainId) => { + const rawTokenConfig = hubTokenConfig[symbol][chainId]; + const effectiveSpokeToken = TOKEN_SYMBOLS_MAP[symbol].addresses[chainId]; + + tokenConfigs[effectiveL1Token][effectiveSpokeToken] ??= {}; + tokenConfigs[effectiveL1Token][effectiveSpokeToken][chainId] = parseTokenConfig( + l1Token, + chainId, + rawTokenConfig + ); + }); + }); + } else { + Object.keys(hubTokenConfig).forEach((chainId) => { + const rawTokenConfig = hubTokenConfig[chainId]; + tokenConfigs[effectiveL1Token][chainId] = parseTokenConfig(l1Token, chainId, rawTokenConfig); + }); + } }); } @@ -174,20 +236,45 @@ export class RelayerConfig extends CommonConfig { this.skipRebalancing = SKIP_REBALANCING === "true"; this.sendingSlowRelaysEnabled = SEND_SLOW_RELAYS === "true"; this.acceptInvalidFills = ACCEPT_INVALID_FILLS === "true"; - (this.minDepositConfirmations = MIN_DEPOSIT_CONFIRMATIONS + + const minDepositConfirmations = MIN_DEPOSIT_CONFIRMATIONS ? JSON.parse(MIN_DEPOSIT_CONFIRMATIONS) - : Constants.MIN_DEPOSIT_CONFIRMATIONS), - Object.keys(this.minDepositConfirmations).forEach((threshold) => { - Object.keys(this.minDepositConfirmations[threshold]).forEach((chainId) => { - const nBlocks: number = this.minDepositConfirmations[threshold][chainId]; + : Constants.MIN_DEPOSIT_CONFIRMATIONS; + + // Transform deposit confirmation requirements into an array of ascending + // deposit confirmations, sorted by the corresponding threshold in USD. 
+ this.minDepositConfirmations = {}; + Object.keys(minDepositConfirmations) + .map((_threshold) => { + const threshold = Number(_threshold); + assert(!isNaN(threshold) && threshold >= 0, `Invalid deposit confirmation threshold (${_threshold})`); + return Number(threshold); + }) + .sort((x, y) => x - y) + .forEach((usdThreshold) => { + const config = minDepositConfirmations[usdThreshold]; + + Object.entries(config).forEach(([chainId, _minConfirmations]) => { + const minConfirmations = Number(_minConfirmations); assert( - !isNaN(nBlocks) && nBlocks >= 0, - `Chain ${chainId} minimum deposit confirmations for "${threshold}" threshold missing or invalid (${nBlocks}).` + !isNaN(minConfirmations) && minConfirmations >= 0, + `${getNetworkName(chainId)} deposit confirmations for` + + ` ${usdThreshold} threshold missing or invalid (${_minConfirmations}).` ); + + this.minDepositConfirmations[chainId] ??= []; + this.minDepositConfirmations[chainId].push({ usdThreshold: toBNWei(usdThreshold), minConfirmations }); }); }); - // Force default thresholds in MDC config. - this.minDepositConfirmations["default"] = Constants.DEFAULT_MIN_DEPOSIT_CONFIRMATIONS; + + // Append default thresholds as a safe upper-bound. 
+ Object.keys(this.minDepositConfirmations).forEach((chainId) => + this.minDepositConfirmations[chainId].push({ + usdThreshold: bnUint256Max, + minConfirmations: Number.MAX_SAFE_INTEGER, + }) + ); + this.ignoreLimits = RELAYER_IGNORE_LIMITS === "true"; } } diff --git a/src/relayer/index.ts b/src/relayer/index.ts index bc09c5aa8..b6ee7ae60 100644 --- a/src/relayer/index.ts +++ b/src/relayer/index.ts @@ -1,6 +1,4 @@ -import assert from "assert"; -import { ChildProcess, spawn } from "child_process"; -import { utils as sdkUtils } from "@across-protocol/sdk-v2"; +import { utils as sdkUtils } from "@across-protocol/sdk"; import { config, delay, @@ -19,52 +17,6 @@ let logger: winston.Logger; const randomNumber = () => Math.floor(Math.random() * 1_000_000); -type IndexerOpts = { - lookback?: number; - blockRange?: number; -}; - -function startWorker(cmd: string, path: string, chainId: number, opts: IndexerOpts): ChildProcess { - const args = Object.entries(opts) - .map(([k, v]) => [`--${k}`, `${v}`]) - .flat(); - return spawn(cmd, [path, "--chainId", chainId.toString(), ...args], { - stdio: ["ignore", "inherit", "inherit", "ipc"], - }); -} - -function startWorkers(config: RelayerConfig): { [chainId: number]: ChildProcess } { - const sampleOpts = { lookback: config.maxRelayerLookBack }; - - const chainIds = sdkUtils.dedupArray([...config.relayerOriginChains, ...config.relayerDestinationChains]); - assert(chainIds.length > 0); // @todo: Fix to work with undefined chain IDs (default to the complete set). - - // Identify the lowest configured deposit confirmation threshold. - // Configure the indexer to relay any events that meet that threshold. 
- const mdcs = config.minDepositConfirmations; - const [depositThreshold] = Object.keys(config.minDepositConfirmations) - .filter((n) => !Number.isNaN(n)) - .sort((x, y) => Number(x) - Number(y)); - - return Object.fromEntries( - chainIds.map((chainId: number) => { - const opts = { - ...sampleOpts, - finality: mdcs[depositThreshold][chainId] ?? mdcs["default"][chainId] ?? 1024, - blockRange: config.maxRelayerLookBack[chainId] ?? 5_000, - }; - const chain = getNetworkName(chainId); - const child = startWorker("node", config.indexerPath, chainId, opts); - logger.debug({ - at: "Relayer#run", - message: `Spawned ${chain} SpokePool indexer.`, - args: child.spawnargs, - }); - return [chainId, child]; - }) - ); -} - export async function runRelayer(_logger: winston.Logger, baseSigner: Signer): Promise { const relayerRun = randomNumber(); const startTime = getCurrentTime(); @@ -72,11 +24,6 @@ export async function runRelayer(_logger: winston.Logger, baseSigner: Signer): P logger = _logger; const config = new RelayerConfig(process.env); - let workers: { [chainId: number]: ChildProcess }; - if (config.externalIndexer) { - workers = startWorkers(config); - } - const loop = config.pollingDelay > 0; let stop = !loop; process.on("SIGHUP", () => { @@ -88,10 +35,11 @@ export async function runRelayer(_logger: winston.Logger, baseSigner: Signer): P }); logger[startupLogLevel(config)]({ at: "Relayer#run", message: "Relayer started 🏃‍♂️", config, relayerRun }); - const relayerClients = await constructRelayerClients(logger, config, baseSigner, workers); + const relayerClients = await constructRelayerClients(logger, config, baseSigner); const relayer = new Relayer(await baseSigner.getAddress(), logger, relayerClients, config); let run = 1; + let txnReceipts: { [chainId: number]: Promise }; try { do { if (loop) { @@ -106,8 +54,11 @@ export async function runRelayer(_logger: winston.Logger, baseSigner: Signer): P await updateRelayerClients(relayerClients, config); if (!config.skipRelays) 
{ + // Since the above spoke pool updates are slow, refresh token client before sending rebalances now: + relayerClients.tokenClient.clearTokenData(); + await relayerClients.tokenClient.update(); const simulate = !config.sendingRelaysEnabled; - await relayer.checkForUnfilledDepositsAndFill(config.sendingSlowRelaysEnabled, simulate); + txnReceipts = await relayer.checkForUnfilledDepositsAndFill(config.sendingSlowRelaysEnabled, simulate); } // Unwrap WETH after filling deposits so we don't mess up slow fill logic, but before rebalancing @@ -115,6 +66,9 @@ export async function runRelayer(_logger: winston.Logger, baseSigner: Signer): P await relayerClients.inventoryClient.unwrapWeth(); if (!config.skipRebalancing) { + // Since the above spoke pool updates are slow, refresh token client before sending rebalances now: + relayerClients.tokenClient.clearTokenData(); + await relayerClients.tokenClient.update(); await relayerClients.inventoryClient.rebalanceInventoryIfNeeded(); } @@ -122,9 +76,8 @@ export async function runRelayer(_logger: winston.Logger, baseSigner: Signer): P relayerClients.profitClient.clearUnprofitableFills(); relayerClients.tokenClient.clearTokenShortfall(); - const tLoopStop = performance.now(); - const runTime = Math.round((tLoopStop - tLoopStart) / 1000); if (loop) { + const runTime = Math.round((performance.now() - tLoopStart) / 1000); logger.debug({ at: "Relayer#run", message: `Completed relayer execution loop ${run++} in ${runTime} seconds.`, @@ -140,13 +93,19 @@ export async function runRelayer(_logger: winston.Logger, baseSigner: Signer): P } } } while (!stop); - } finally { - if (config.externalIndexer) { - Object.entries(workers).forEach(([_chainId, worker]) => { - logger.debug({ at: "Relayer::runRelayer", message: `Cleaning up indexer for chainId ${_chainId}.` }); - worker.kill("SIGHUP"); - }); + + // Before exiting, wait for transaction submission to complete. 
+ for (const [chainId, submission] of Object.entries(txnReceipts)) { + const [result] = await Promise.allSettled([submission]); + if (sdkUtils.isPromiseRejected(result)) { + logger.warn({ + at: "Relayer#runRelayer", + message: `Failed transaction submission on ${getNetworkName(Number(chainId))}.`, + reason: result.reason, + }); + } } + } finally { await disconnectRedisClients(logger); } diff --git a/src/scripts/validateRunningBalances.ts b/src/scripts/validateRunningBalances.ts index cfebf6617..139992c43 100644 --- a/src/scripts/validateRunningBalances.ts +++ b/src/scripts/validateRunningBalances.ts @@ -44,13 +44,18 @@ import { getWidestPossibleExpectedBlockRange } from "../dataworker/PoolRebalance import { getBlockForChain, getEndBlockBuffers } from "../dataworker/DataworkerUtils"; import { ProposedRootBundle, SpokePoolClientsByChain, V3SlowFillLeaf } from "../interfaces"; import { CONTRACT_ADDRESSES, constructSpokePoolClientsWithStartBlocks, updateSpokePoolClients } from "../common"; -import { createConsoleTransport } from "@uma/financial-templates-lib"; +import { createConsoleTransport } from "@uma/logger"; config(); let logger: winston.Logger; const slowRootCache = {}; +const expectedExcesses: { [chainId: number]: { [token: string]: number } } = { + [10]: { ["USDC"]: 15.336508 }, // On May 4th, USDC was sent to the SpokePool here: https://optimistic.etherscan.io/tx/0x5f53293fe6a27ff9897d4dde445fd6aab46f841ca641befea48beef62014a549 + [42161]: { ["WBTC"]: 1.9988628 }, // On May 15th, WBTC slow fill was produced here that is not executed: https://etherscan.io/tx/0xe339869271cb4f558faedbf9beed6f5b5440d395367743e5f12b13a4c199bdd6 +}; + export async function runScript(_logger: winston.Logger, baseSigner: Signer): Promise { logger = _logger; @@ -367,14 +372,17 @@ export async function runScript(_logger: winston.Logger, baseSigner: Signer): Pr logger.debug({ at: "validateRunningBalances#index", message: "Historical excesses", + expectedExcesses, excesses, }); - const 
unexpectedExcess = Object.entries(excesses).some(([, tokenExcesses]) => { - return Object.entries(tokenExcesses).some(([, excesses]) => { + const unexpectedExcess = Object.entries(excesses).some(([chainId, tokenExcesses]) => { + return Object.entries(tokenExcesses).some(([l1Token, excesses]) => { // We only care about the latest excess, because sometimes excesses can appear in historical bundles // due to ordering of executing leaves. As long as the excess resets back to 0 eventually it is fine. const excess = Number(excesses[0]); - return excess > 0.05 || excess < -0.05; + // Subtract any expected excesses + const excessForChain = excess - (expectedExcesses[chainId]?.[l1Token] ?? 0); + return excessForChain > 0.05 || excessForChain < -0.05; }); }); if (unexpectedExcess) { diff --git a/src/utils/AddressUtils.ts b/src/utils/AddressUtils.ts index c30240641..2d59dd800 100644 --- a/src/utils/AddressUtils.ts +++ b/src/utils/AddressUtils.ts @@ -1,5 +1,5 @@ -import { TOKEN_SYMBOLS_MAP } from "@across-protocol/constants-v2"; -import { BigNumber, ethers } from "."; +import { CHAIN_IDs, TOKEN_SYMBOLS_MAP } from "@across-protocol/constants"; +import { BigNumber, ethers, isDefined } from "."; export function compareAddresses(addressA: string, addressB: string): 1 | -1 | 0 { // Convert address strings to BigNumbers and then sort numerical value of the BigNumber, which sorts the addresses @@ -15,10 +15,20 @@ export function compareAddresses(addressA: string, addressB: string): 1 | -1 | 0 } } -export function compareAddressesSimple(addressA: string, addressB: string): boolean { +export function compareAddressesSimple(addressA?: string, addressB?: string): boolean { + if (addressA === undefined || addressB === undefined) { + return false; + } return addressA.toLowerCase() === addressB.toLowerCase(); } +export function includesAddressSimple(address: string | undefined, list: string[]): boolean { + if (!isDefined(address)) { + return false; + } + return list.filter((listAddress) => 
compareAddressesSimple(address, listAddress)).length > 0; +} + /** * Match the token symbol for the given token address and chain ID. * @param tokenAddress The token address to resolve the symbol for. @@ -48,6 +58,8 @@ export function resolveTokenDecimals(tokenSymbol: string): number { /** * Resolves a list of token symbols for a list of token addresses and a chain ID. + * @dev This function is dangerous because multiple token addresses can map to the same token symbol + * so the output can be unexpected. * @param tokenAddresses The token addresses to resolve the symbols for. * @param chainId The chain ID to resolve the symbols for. * @returns The token symbols for the given token addresses and chain ID. Undefined symbols are filtered out. @@ -71,6 +83,34 @@ export function getTokenAddress(tokenAddress: string, chainId: number, targetCha return targetAddress; } +export function getTokenAddressWithCCTP( + l1Token: string, + hubChainId: number, + l2ChainId: number, + isNativeUsdc = false +): string { + // Base Case + if (hubChainId === l2ChainId) { + return l1Token; + } + if (compareAddressesSimple(l1Token, TOKEN_SYMBOLS_MAP.USDC.addresses[hubChainId])) { + const onBase = l2ChainId === CHAIN_IDs.BASE || l2ChainId === CHAIN_IDs.BASE_SEPOLIA; + return TOKEN_SYMBOLS_MAP[isNativeUsdc ? "USDC" : onBase ? "USDbC" : "USDC.e"].addresses[l2ChainId]; + } + return getTokenAddress(l1Token, hubChainId, l2ChainId); +} + +/** + * Get the USDC symbol for the given token address and chain ID. + * @param l2Token A Web3 token address (not case sensitive) + * @param chainId A chain Id to reference + * @returns Either USDC (if native) or USDbC/USDC.e (if bridged) or undefined if the token address is not recognized. 
+ */ +export function getUsdcSymbol(l2Token: string, chainId: number): string | undefined { + const compareToken = (token?: string) => isDefined(token) && compareAddressesSimple(l2Token, token); + return ["USDC", "USDbC", "USDC.e"].find((token) => compareToken(TOKEN_SYMBOLS_MAP[token]?.addresses?.[chainId])); +} + export function checkAddressChecksum(tokenAddress: string): boolean { return ethers.utils.getAddress(tokenAddress) === tokenAddress; } diff --git a/src/utils/BNUtils.ts b/src/utils/BNUtils.ts index 1c33d1dee..6fe5d8ad8 100644 --- a/src/utils/BNUtils.ts +++ b/src/utils/BNUtils.ts @@ -1,6 +1,6 @@ import { BigNumber } from "ethers"; -export function bnComparatorDescending(a: BigNumber, b: BigNumber) { +export function bnComparatorDescending(a: BigNumber, b: BigNumber): -1 | 0 | 1 { if (b.gt(a)) { return 1; } else if (a.gt(b)) { @@ -10,7 +10,7 @@ export function bnComparatorDescending(a: BigNumber, b: BigNumber) { } } -export function bnComparatorAscending(a: BigNumber, b: BigNumber) { +export function bnComparatorAscending(a: BigNumber, b: BigNumber): -1 | 0 | 1 { if (a.gt(b)) { return 1; } else if (b.gt(a)) { diff --git a/src/utils/BlockUtils.ts b/src/utils/BlockUtils.ts index 45203a099..5da81c8ed 100644 --- a/src/utils/BlockUtils.ts +++ b/src/utils/BlockUtils.ts @@ -1,4 +1,4 @@ -import { interfaces, utils } from "@across-protocol/sdk-v2"; +import { interfaces, utils } from "@across-protocol/sdk"; import { isDefined } from "./"; import { BlockFinder, BlockFinderHints } from "./SDKUtils"; import { getProvider } from "./ProviderUtils"; diff --git a/src/utils/CCTPUtils.ts b/src/utils/CCTPUtils.ts index 5892bb76c..77661e1ef 100644 --- a/src/utils/CCTPUtils.ts +++ b/src/utils/CCTPUtils.ts @@ -1,10 +1,13 @@ +import { utils } from "@across-protocol/sdk"; import { TransactionReceipt } from "@ethersproject/abstract-provider"; -import axios, { AxiosError } from "axios"; -import { ethers, BigNumber } from "ethers"; +import axios from "axios"; +import { BigNumber, 
ethers } from "ethers"; import { CONTRACT_ADDRESSES, chainIdsToCctpDomains } from "../common"; -import { isDefined } from "./TypeGuards"; -import { utils } from "@across-protocol/sdk-v2"; import { compareAddressesSimple } from "./AddressUtils"; +import { EventSearchConfig, paginatedEventQuery } from "./EventUtils"; +import { bnZero } from "./SDKUtils"; +import { isDefined } from "./TypeGuards"; +import { getProvider } from "./ProviderUtils"; export type DecodedCCTPMessage = { messageHash: string; @@ -15,20 +18,128 @@ export type DecodedCCTPMessage = { destinationDomain: number; attestation: string; sender: string; + nonce: number; + status: CCTPMessageStatus; }; +export type Attestation = { status: string; attestation: string }; +export type CCTPMessageStatus = "finalized" | "ready" | "pending"; + +/** + * Used to convert an ETH Address string to a 32-byte hex string. + * @param address The address to convert. + * @returns The 32-byte hex string representation of the address - required for CCTP messages. + */ +export function cctpAddressToBytes32(address: string): string { + return ethers.utils.hexZeroPad(address, 32); +} + +/** + * Used to convert a 32-byte hex string with padding to a standard ETH address. + * @param bytes32 The 32-byte hex string to convert. + * @returns The ETH address representation of the 32-byte hex string. + */ +export function cctpBytes32ToAddress(bytes32: string): string { + // Grab the last 20 bytes of the 32-byte hex string + return ethers.utils.getAddress(ethers.utils.hexDataSlice(bytes32, 12)); +} + +/** + * The CCTP Message Transmitter contract updates a local dictionary for each source domain / nonce it receives. It won't + * attempt to process a message if the nonce has been seen before. If the nonce has been used before, the message has + * been received and processed already. This function replicates the `function _hashSourceAndNonce(uint32 _source, uint64 _nonce)` function + * in the MessageTransmitter contract. 
+ * @link https://github.com/circlefin/evm-cctp-contracts/blob/817397db0a12963accc08ff86065491577bbc0e5/src/MessageTransmitter.sol#L279-L308
+ * @link https://github.com/circlefin/evm-cctp-contracts/blob/817397db0a12963accc08ff86065491577bbc0e5/src/MessageTransmitter.sol#L369-L381
+ * @param source The source domain
+ * @param nonce The nonce provided by the source transaction (DepositForBurn event)
+ * @returns The hash of the source and nonce following the hashing algorithm of the MessageTransmitter contract.
+ */
+export function hashCCTPSourceAndNonce(source: number, nonce: number): string {
+  // Encode and hash the values directly
+  return ethers.utils.keccak256(ethers.utils.solidityPack(["uint32", "uint64"], [source, nonce]));
+}
+
+/**
+ * Retrieves all outstanding CCTP bridge transfers for a given source -> destination chain, a source token address, and a from address.
+ * @param sourceTokenMessenger The CCTP TokenMessenger contract on the source chain. The "Bridge Contract" of CCTP
+ * @param destinationMessageTransmitter The CCTP MessageTransmitter contract on the destination chain. The "Message Handler Contract" of CCTP
+ * @param sourceSearchConfig The search configuration to use when querying the sourceTokenMessenger contract via `paginatedEventQuery`.
+ * @param sourceToken The token address of the token being transferred.
+ * @param sourceChainId The chainId of the source chain.
+ * @param destinationChainId The chainId of the destination chain.
+ * @param fromAddress The address that initiated the transfer.
+ * @returns A list of outstanding CCTP bridge transfers. These are transfers that have been initiated but not yet finalized on the destination chain.
+ * @dev Reference `hasCCTPMessageBeenProcessed` for more information on how the message is determined to be processed. 
+ */
+export async function retrieveOutstandingCCTPBridgeUSDCTransfers(
+  sourceTokenMessenger: ethers.Contract,
+  destinationMessageTransmitter: ethers.Contract,
+  sourceSearchConfig: EventSearchConfig,
+  sourceToken: string,
+  sourceChainId: number,
+  destinationChainId: number,
+  fromAddress: string
+): Promise {
+  const sourceDomain = chainIdsToCctpDomains[sourceChainId];
+  const targetDestinationDomain = chainIdsToCctpDomains[destinationChainId];
+
+  const sourceFilter = sourceTokenMessenger.filters.DepositForBurn(undefined, sourceToken, undefined, fromAddress);
+  const initializationTransactions = await paginatedEventQuery(sourceTokenMessenger, sourceFilter, sourceSearchConfig);
+
+  const outstandingTransactions = await Promise.all(
+    initializationTransactions.map(async (event) => {
+      const { nonce, destinationDomain } = event.args;
+      // Ensure that the destination domain matches the target destination domain so that we don't
+      // have any double counting of messages.
+      if (destinationDomain !== targetDestinationDomain) {
+        return undefined;
+      }
+      // Call into the destinationMessageTransmitter contract to determine if the message has been processed
+      // on the destination chain. We want to make sure the message **hasn't** been processed.
+      const isMessageProcessed = await hasCCTPMessageBeenProcessed(sourceDomain, nonce, destinationMessageTransmitter);
+      if (isMessageProcessed) {
+        return undefined;
+      }
+      return event;
+    })
+  );
+
+  return outstandingTransactions.filter(isDefined);
+}
+
+/**
+ * Calls into the CCTP MessageTransmitter contract and determines whether or not a message has been processed.
+ * @param sourceDomain The source domain of the message.
+ * @param nonce The nonce of the message.
+ * @param contract The CCTP MessageTransmitter contract to call.
+ * @returns Whether or not the message has been processed. 
+ */ +export async function hasCCTPMessageBeenProcessed( + sourceDomain: number, + nonce: number, + contract: ethers.Contract +): Promise { + const nonceHash = hashCCTPSourceAndNonce(sourceDomain, nonce); + const resultingCall: BigNumber = await contract.callStatic.usedNonces(nonceHash); + // If the resulting call is 1, the message has been processed. If it is 0, the message has not been processed. + return (resultingCall ?? bnZero).toNumber() === 1; +} + /** * Used to map a CCTP domain to a chain id. This is the inverse of chainIdsToCctpDomains. * Note: due to the nature of testnet/mainnet chain ids mapping to the same CCTP domain, we * actually have a mapping of CCTP Domain -> [chainId]. */ -const cctpDomainsToChainIds = Object.entries(chainIdsToCctpDomains).reduce((acc, [chainId, cctpDomain]) => { - if (!acc[cctpDomain]) { - acc[cctpDomain] = []; - } - acc[cctpDomain].push(Number(chainId)); - return acc; -}, {} as Record); +export function getCctpDomainsToChainIds(): Record { + return Object.entries(chainIdsToCctpDomains).reduce((acc, [chainId, cctpDomain]) => { + if (!acc[cctpDomain]) { + acc[cctpDomain] = []; + } + acc[cctpDomain].push(Number(chainId)); + return acc; + }, {}); +} /** * Resolves a list of TransactionReceipt objects into a list of DecodedCCTPMessage objects. 
Each transaction receipt @@ -77,6 +188,8 @@ async function _resolveCCTPRelatedTxns( compareAddressesSimple(l.address, CONTRACT_ADDRESSES[sourceChainId].cctpMessageTransmitter.address) ); + const cctpDomainsToChainIds = getCctpDomainsToChainIds(); + // We can resolve all of the logs in parallel and produce a flat list of DecodedCCTPMessage objects return ( ( @@ -88,17 +201,17 @@ async function _resolveCCTPRelatedTxns( const messageBytes = ethers.utils.defaultAbiCoder.decode(["bytes"], log.data)[0]; const messageBytesArray = ethers.utils.arrayify(messageBytes); - const sourceDomain = ethers.utils.hexlify(messageBytesArray.slice(4, 8)); // sourceDomain 4 bytes starting index 4 - const destinationDomain = ethers.utils.hexlify(messageBytesArray.slice(8, 12)); // destinationDomain 4 bytes starting index 8 - const nonce = ethers.utils.hexlify(messageBytesArray.slice(12, 20)); // nonce 8 bytes starting index 12 + const sourceDomain = Number(ethers.utils.hexlify(messageBytesArray.slice(4, 8))); // sourceDomain 4 bytes starting index 4 + const destinationDomain = Number(ethers.utils.hexlify(messageBytesArray.slice(8, 12))); // destinationDomain 4 bytes starting index 8 + const nonce = BigNumber.from(ethers.utils.hexlify(messageBytesArray.slice(12, 20))).toNumber(); // nonce 8 bytes starting index 12 const sender = ethers.utils.hexlify(messageBytesArray.slice(32, 52)); // sender 32 bytes starting index 20, but we only need the last 20 bytes so we can start our index at 32 const nonceHash = ethers.utils.solidityKeccak256(["uint32", "uint64"], [sourceDomain, nonce]); const messageHash = ethers.utils.keccak256(messageBytes); const amountSent = ethers.utils.hexlify(messageBytesArray.slice(184, 216)); // amount 32 bytes starting index 216 (idx 68 of body after idx 116 which ends the header) // Perform some extra steps to get the source and destination chain ids - const resolvedPossibleSourceChainIds = cctpDomainsToChainIds[Number(sourceDomain)]; - const 
resolvedPossibleDestinationChainIds = cctpDomainsToChainIds[Number(destinationDomain)]; + const resolvedPossibleSourceChainIds = cctpDomainsToChainIds[sourceDomain]; + const resolvedPossibleDestinationChainIds = cctpDomainsToChainIds[destinationDomain]; // Ensure that we're only processing CCTP messages that are both from the source chain and destined for the target destination chain if ( @@ -111,9 +224,20 @@ async function _resolveCCTPRelatedTxns( // Generate the attestation proof for the message. This is required to finalize the message. const attestation = await generateCCTPAttestationProof(messageHash, utils.chainIsProd(destinationChainId)); - // If we can't generate an attestation proof, we should return undefined - if (!attestation) { - return undefined; + let status: CCTPMessageStatus; + if (attestation.status === "pending_confirmations") { + status = "pending"; + } else { + // attestation proof is available so now check if its already been processed + const destinationProvider = await getProvider(destinationChainId); + const destinationMessageTransmitterContract = CONTRACT_ADDRESSES[destinationChainId].cctpMessageTransmitter; + const destinationMessageTransmitter = new ethers.Contract( + destinationMessageTransmitterContract.address, + destinationMessageTransmitterContract.abi, + destinationProvider + ); + const processed = await hasCCTPMessageBeenProcessed(sourceDomain, nonce, destinationMessageTransmitter); + status = processed ? "finalized" : "ready"; } return { @@ -122,9 +246,11 @@ async function _resolveCCTPRelatedTxns( messageBytes, nonceHash, amount: BigNumber.from(amountSent).toString(), - sourceDomain: Number(sourceDomain), - destinationDomain: Number(destinationDomain), - attestation, + sourceDomain: sourceDomain, + destinationDomain: destinationDomain, + attestation: attestation?.attestation, + status, + nonce, }; }) ) @@ -138,37 +264,14 @@ async function _resolveCCTPRelatedTxns( * Generates an attestation proof for a given message hash. 
This is required to finalize a CCTP message.
  * @param messageHash The message hash to generate an attestation proof for. This is generated by taking the keccak256 hash of the message bytes of the initial transaction log.
  * @param isMainnet Whether or not the attestation proof should be generated on mainnet. If this is false, the attestation proof will be generated on the sandbox environment.
- * @returns The attestation proof for the given message hash. This is a string of the form "0x".
- * @throws An error if the attestation proof cannot be generated. We wait a maximum of 10 seconds for the attestation to be generated. If it is not generated in that time, we throw an error.
+ * @returns The attestation status and proof for the given message hash. This is a string of the form "0x". If the status is pending_confirmations
+ * then the proof will be null according to the CCTP dev docs.
+ * @link https://developers.circle.com/stablecoins/reference/getattestation
  */
-async function generateCCTPAttestationProof(messageHash: string, isMainnet: boolean) {
-  let maxTries = 5;
-  let attestationResponse: { status: string; attestation: string } = { status: "pending", attestation: "" };
-  while (attestationResponse.status !== "complete" && maxTries-- > 0) {
-    try {
-      const httpResponse = await axios.get<{ status: string; attestation: string }>(
-        `https://iris-api${isMainnet ? "" : "-sandbox"}.circle.com/attestations/${messageHash}`
-      );
-      attestationResponse = httpResponse.data;
-      if (attestationResponse.status === "complete") {
-        break;
-      }
-      await new Promise((resolve) => setTimeout(resolve, 2_000));
-    } catch (e) {
-      if (e instanceof AxiosError && e.response?.status === 404) {
-        // Not enough time has passed for the attestation to be generated
-        // We should return and try again later
-        return undefined;
-      } else {
-        // An unknown error occurred. We should throw it up the stack
-        throw e;
-      }
-    }
-  }
-  // Attetestation was not able to be generated. 
We should throw an error - if (attestationResponse.status !== "complete") { - throw new Error("Failed to generate attestation proof"); - } - // Return the attestation proof since it was generated - return attestationResponse.attestation; +async function generateCCTPAttestationProof(messageHash: string, isMainnet: boolean): Promise { + const httpResponse = await axios.get( + `https://iris-api${isMainnet ? "" : "-sandbox"}.circle.com/attestations/${messageHash}` + ); + const attestationResponse = httpResponse.data; + return attestationResponse; } diff --git a/src/utils/CLIUtils.ts b/src/utils/CLIUtils.ts index 6253737fb..a4bc2efc7 100644 --- a/src/utils/CLIUtils.ts +++ b/src/utils/CLIUtils.ts @@ -1,6 +1,6 @@ import minimist from "minimist"; import { Signer } from "ethers"; -import { constants as sdkConsts } from "@across-protocol/sdk-v2"; +import { constants as sdkConsts } from "@across-protocol/sdk"; import { SignerOptions, getSigner } from "./SignerUtils"; import { isDefined } from "./TypeGuards"; diff --git a/src/utils/ContractUtils.ts b/src/utils/ContractUtils.ts index 7afe1218d..31bf09e60 100644 --- a/src/utils/ContractUtils.ts +++ b/src/utils/ContractUtils.ts @@ -1,6 +1,6 @@ import { getNetworkName, Contract, Signer, getDeployedAddress, getDeployedBlockNumber } from "."; -import * as typechain from "@across-protocol/contracts-v2"; // TODO: refactor once we've fixed export from contract repo +import * as typechain from "@across-protocol/contracts"; // TODO: refactor once we've fixed export from contract repo // Return an ethers contract instance for a deployed contract, imported from the Across-protocol contracts repo. 
export function getDeployedContract(contractName: string, networkId: number, signer?: Signer): Contract { diff --git a/src/utils/DepositUtils.ts b/src/utils/DepositUtils.ts index 60679dfce..daa1b56e4 100644 --- a/src/utils/DepositUtils.ts +++ b/src/utils/DepositUtils.ts @@ -1,4 +1,4 @@ -import { utils } from "@across-protocol/sdk-v2"; +import { utils } from "@across-protocol/sdk"; import { Fill, SlowFillRequest } from "../interfaces"; import { SpokePoolClient } from "../clients"; import { getRedisCache } from "./"; diff --git a/src/utils/EventUtils.ts b/src/utils/EventUtils.ts index 7e9c3c91a..d4ebb98e1 100644 --- a/src/utils/EventUtils.ts +++ b/src/utils/EventUtils.ts @@ -4,7 +4,7 @@ import { Event, utils as ethersUtils } from "ethers"; import { getNetworkName } from "./NetworkUtils"; import { dedupArray } from "./SDKUtils"; import { isDefined } from "./TypeGuards"; -import { utils as sdkUtils } from "@across-protocol/sdk-v2"; +import { utils as sdkUtils } from "@across-protocol/sdk"; export type EventSearchConfig = sdkUtils.EventSearchConfig; @@ -60,17 +60,19 @@ export function getUniqueLogIndex(events: { transactionHash: string }[]): number export class EventManager { public readonly chain: string; public readonly events: { [blockNumber: number]: (Event & { providers: string[] })[] } = {}; + public readonly finality: number; private blockNumber: number; constructor( private readonly logger: winston.Logger, public readonly chainId: number, - public readonly finality: number, + finality: number, public readonly quorum: number ) { this.chain = getNetworkName(chainId); this.blockNumber = 0; + this.finality = Math.max(finality, 1); } /** @@ -174,9 +176,20 @@ export class EventManager { // This is configurable and will almost always be less than chain finality guarantees. const finalised = blockNumber - this.finality; - // Collect the events that met quorum, stripping out the provider information. 
+ // Collect the events that met quorum, stripping out the provider information; drop any that didn't. + // This can be brittle when finality is low (i.e. 1). @todo: Support querying back over multiple blocks + // to account for RPC notification delays. const events = (this.events[finalised] ?? []) - .filter((event) => this.getEventQuorum(event) >= this.quorum) + .filter((event) => { + const eventQuorum = this.getEventQuorum(event); + if (this.quorum > eventQuorum) { + this.logger.debug({ + at: "EventManager::tick", + message: `Dropped ${this.chain} ${event.event} event due to insufficient quorum.`, + }); + } + return eventQuorum >= this.quorum; + }) .map(({ providers, ...event }) => event); // Flush the events that were just submitted. diff --git a/src/utils/ExecutionUtils.ts b/src/utils/ExecutionUtils.ts index 8f97b39b1..e4fc8c5b4 100644 --- a/src/utils/ExecutionUtils.ts +++ b/src/utils/ExecutionUtils.ts @@ -1,6 +1,6 @@ import { delay, winston } from "./"; -export function exit(code: number) { +export function exit(code: number): void { // eslint-disable-next-line no-process-exit process.exit(code); } @@ -12,7 +12,6 @@ export async function processEndPollingLoop( ): Promise { if (pollingDelay === 0) { logger.debug({ at: `${fileName}#index`, message: "End of serverless execution loop - terminating process" }); - await delay(5); // Add a small delay to ensure the transports have fully flushed upstream. 
return true; } diff --git a/src/utils/FillUtils.ts b/src/utils/FillUtils.ts index c9c24f166..3b7ecf017 100644 --- a/src/utils/FillUtils.ts +++ b/src/utils/FillUtils.ts @@ -1,7 +1,7 @@ -import { utils as sdkUtils } from "@across-protocol/sdk-v2"; +import { utils as sdkUtils } from "@across-protocol/sdk"; import { HubPoolClient } from "../clients"; import { Fill, FillStatus, SpokePoolClientsByChain, V3DepositWithBlock } from "../interfaces"; -import { bnZero, getNetworkError, getNetworkName, winston } from "../utils"; +import { bnZero } from "../utils"; import { getBlockRangeForChain } from "../dataworker/DataworkerUtils"; export function getRefundInformationFromFill( @@ -42,63 +42,49 @@ export function getRefundInformationFromFill( } export type RelayerUnfilledDeposit = { deposit: V3DepositWithBlock; - fillStatus: number; version: number; invalidFills: Fill[]; }; // @description Returns all unfilled deposits, indexed by destination chain. +// @param destinationChainId Chain ID to query outstanding deposits on. // @param spokePoolClients Mapping of chainIds to SpokePoolClient objects. -// @param configStoreClient ConfigStoreClient instance. -// @param depositLookBack Deposit lookback (in seconds) since SpokePoolClient time as at last update. +// @param hubPoolClient HubPoolClient instance. // @returns Array of unfilled deposits. 
-export async function getUnfilledDeposits( +export function getUnfilledDeposits( + destinationChainId: number, spokePoolClients: SpokePoolClientsByChain, hubPoolClient: HubPoolClient, - logger?: winston.Logger -): Promise<{ [chainId: number]: RelayerUnfilledDeposit[] }> { - const unfilledDeposits: { [chainId: number]: RelayerUnfilledDeposit[] } = {}; - const chainIds = Object.values(spokePoolClients) - .filter(({ isUpdated }) => isUpdated) - .map(({ chainId }) => chainId); + fillStatus: { [deposit: string]: number } = {} +): RelayerUnfilledDeposit[] { + const destinationClient = spokePoolClients[destinationChainId]; // Iterate over each chainId and check for unfilled deposits. - await sdkUtils.mapAsync(chainIds, async (destinationChainId: number) => { - const destinationClient = spokePoolClients[destinationChainId]; + const deposits = Object.values(spokePoolClients) + .filter(({ chainId, isUpdated }) => isUpdated && chainId !== destinationChainId) + .flatMap((spokePoolClient) => spokePoolClient.getDepositsForDestinationChain(destinationChainId)) + .filter((deposit) => { + const depositHash = spokePoolClients[deposit.originChainId].getDepositHash(deposit); + return (fillStatus[depositHash] ?? FillStatus.Unfilled) !== FillStatus.Filled; + }); - // For each destination chain, query each _other_ SpokePool for deposits within the lookback. - const deposits = chainIds - .filter((chainId) => chainId !== destinationChainId) - .map((originChainId) => spokePoolClients[originChainId].getDepositsForDestinationChain(destinationChainId)) - .flat(); - - // Resolve the latest fill status for each deposit and filter out any that are now filled. 
- let fillStatus: number[]; - try { - fillStatus = await sdkUtils.fillStatusArray(destinationClient.spokePool, deposits); - } catch (err) { - const chain = getNetworkName(destinationClient.chainId); - logger?.warn({ - at: "getUnfilledDeposits", - message: `Failed to resolve status of ${deposits.length} fills on on ${chain}, reverting to iterative pairing.`, - reason: getNetworkError(err), - }); - - // Fall back to matching fills against deposits and infer FillStatus from that. - fillStatus = deposits - .map((deposit) => destinationClient.getValidUnfilledAmountForDeposit(deposit)) - .map(({ unfilledAmount }) => (unfilledAmount.eq(bnZero) ? FillStatus.Filled : FillStatus.Unfilled)); - } - - unfilledDeposits[destinationChainId] = deposits - .map((deposit, idx) => ({ deposit, fillStatus: fillStatus[idx] })) - .filter(({ fillStatus }) => fillStatus !== FillStatus.Filled) - .map(({ deposit, fillStatus }) => { - const version = hubPoolClient.configStoreClient.getConfigStoreVersionForTimestamp(deposit.quoteTimestamp); - const { invalidFills } = destinationClient.getValidUnfilledAmountForDeposit(deposit); - return { deposit, version, fillStatus, invalidFills }; - }); - }); + return deposits + .map((deposit) => { + const version = hubPoolClient.configStoreClient.getConfigStoreVersionForTimestamp(deposit.quoteTimestamp); + const { unfilledAmount, invalidFills } = destinationClient.getValidUnfilledAmountForDeposit(deposit); + return { deposit, version, unfilledAmount, invalidFills }; + }) + .filter(({ unfilledAmount }) => unfilledAmount.gt(bnZero)); +} - return unfilledDeposits; +export function getAllUnfilledDeposits( + spokePoolClients: SpokePoolClientsByChain, + hubPoolClient: HubPoolClient +): Record { + return Object.fromEntries( + Object.values(spokePoolClients).map(({ chainId: destinationChainId }) => [ + destinationChainId, + getUnfilledDeposits(destinationChainId, spokePoolClients, hubPoolClient), + ]) + ); } diff --git a/src/utils/Help.ts b/src/utils/Help.ts index 
d05c6887b..f18dcdbc8 100644 --- a/src/utils/Help.ts +++ b/src/utils/Help.ts @@ -18,8 +18,8 @@ export function usage(badInput: string | undefined = undefined): boolean { } export function help(): void { - const botRepoUrl = "https://github.com/across-protocol/relayer-v2"; - const relayerDocsUrl = "https://docs.across.to/v2/developers/running-a-relayer"; + const botRepoUrl = "https://github.com/across-protocol/relayer"; + const relayerDocsUrl = "https://docs.across.to/relayers/running-a-relayer"; const helpStr = ` Across v2 Bot diff --git a/src/utils/MerkleTreeUtils.ts b/src/utils/MerkleTreeUtils.ts index 72384fb75..3b299d436 100644 --- a/src/utils/MerkleTreeUtils.ts +++ b/src/utils/MerkleTreeUtils.ts @@ -1,4 +1,4 @@ -import { MerkleTree, EMPTY_MERKLE_ROOT } from "@across-protocol/contracts-v2"; +import { MerkleTree, EMPTY_MERKLE_ROOT } from "@across-protocol/contracts"; import { PoolRebalanceLeaf, RelayerRefundLeaf, RelayerRefundLeafWithGroup, V3SlowFillLeaf } from "../interfaces"; import { getParamType, utils } from "."; diff --git a/src/utils/NetworkUtils.ts b/src/utils/NetworkUtils.ts index 2079d5615..0c4fd3290 100644 --- a/src/utils/NetworkUtils.ts +++ b/src/utils/NetworkUtils.ts @@ -1,4 +1,4 @@ -import { utils as sdkUtils } from "@across-protocol/sdk-v2"; +import { utils as sdkUtils } from "@across-protocol/sdk"; export const { getNetworkName } = sdkUtils; diff --git a/src/utils/ProviderUtils.ts b/src/utils/ProviderUtils.ts index be3e39428..1e9772735 100644 --- a/src/utils/ProviderUtils.ts +++ b/src/utils/ProviderUtils.ts @@ -14,7 +14,7 @@ import { } from "../common"; import { delay, getOriginFromURL, Logger } from "./"; import { compareArrayResultsWithIgnoredKeys, compareResultsAndFilterIgnoredKeys } from "./ObjectUtils"; -import { MAINNET_CHAIN_IDs } from "@across-protocol/constants-v2"; +import { MAINNET_CHAIN_IDs } from "@across-protocol/constants"; const logger = Logger; @@ -126,7 +126,7 @@ function createSendErrorWithMessage(message: string, sendError: 
any) { function compareRpcResults(method: string, rpcResultA: any, rpcResultB: any): boolean { if (method === "eth_getBlockByNumber") { // We've seen RPC's disagree on the miner field, for example when Polygon nodes updated software that - // led alchemy and quicknode to disagree on the the miner field's value. + // led alchemy and quicknode to disagree on the miner field's value. return compareResultsAndFilterIgnoredKeys( [ "miner", // polygon (sometimes) @@ -142,7 +142,12 @@ function compareRpcResults(method: string, rpcResultA: any, rpcResultB: any): bo // JSON RPC spec: https://ethereum.org/en/developers/docs/apis/json-rpc/#eth_getfilterchanges // Additional reference: https://github.com/ethers-io/ethers.js/issues/1721 // 2023-08-31 Added blockHash because of upstream zkSync provider disagreements. Consider removing later. - return compareArrayResultsWithIgnoredKeys(["transactionLogIndex"], rpcResultA, rpcResultB); + // 2024-05-07 Added l1BatchNumber and logType due to Alchemy. Consider removing later. + return compareArrayResultsWithIgnoredKeys( + ["transactionLogIndex", "l1BatchNumber", "logType"], + rpcResultA, + rpcResultB + ); } else { return lodash.isEqual(rpcResultA, rpcResultB); } @@ -574,6 +579,15 @@ export function getCachedProvider(chainId: number, redisEnabled = true): RetryPr return providerCache[getProviderCacheKey(chainId, redisEnabled)]; } +/** + * Return the env-defined quorum configured for `chainId`, or 1 if no quorum has been defined. + * @param chainId Chain ID to query for quorum. + * @returns Applicable quorum. + */ +export function getChainQuorum(chainId: number): number { + return Number(process.env[`NODE_QUORUM_${chainId}`] || process.env.NODE_QUORUM || "1"); +} + /** * @notice Returns retry provider for specified chain ID. Optimistically tries to instantiate the provider * with a redis client attached so that all RPC requests are cached. 
Will load the provider from an in memory @@ -590,7 +604,6 @@ export async function getProvider(chainId: number, logger?: winston.Logger, useC const { NODE_RETRIES, NODE_RETRY_DELAY, - NODE_QUORUM, NODE_TIMEOUT, NODE_MAX_CONCURRENCY, NODE_DISABLE_PROVIDER_CACHING, @@ -608,8 +621,7 @@ export async function getProvider(chainId: number, logger?: winston.Logger, useC // Default to a delay of 1 second between retries. const retryDelay = Number(process.env[`NODE_RETRY_DELAY_${chainId}`] || NODE_RETRY_DELAY || "1"); - // Default to a node quorum of 1 node. - const nodeQuorumThreshold = Number(process.env[`NODE_QUORUM_${chainId}`] || NODE_QUORUM || "1"); + const nodeQuorumThreshold = getChainQuorum(chainId); const nodeMaxConcurrency = Number(process.env[`NODE_MAX_CONCURRENCY_${chainId}`] || NODE_MAX_CONCURRENCY || "25"); @@ -710,7 +722,8 @@ export async function getProvider(chainId: number, logger?: winston.Logger, useC return provider; } -export function getWSProviders(chainId: number, quorum = 1): ethers.providers.WebSocketProvider[] { +export function getWSProviders(chainId: number, quorum?: number): ethers.providers.WebSocketProvider[] { + quorum ??= getChainQuorum(chainId); const urls = getNodeUrlList(chainId, quorum, "wss"); return urls.map((url) => new ethers.providers.WebSocketProvider(url)); } diff --git a/src/utils/RedisUtils.ts b/src/utils/RedisUtils.ts index ddad602b4..8e8a16d72 100644 --- a/src/utils/RedisUtils.ts +++ b/src/utils/RedisUtils.ts @@ -5,7 +5,7 @@ import winston from "winston"; import { Deposit, Fill, CachingMechanismInterface } from "../interfaces"; import dotenv from "dotenv"; import { RedisCache } from "../caching/RedisCache"; -import { constants } from "@across-protocol/sdk-v2"; +import { constants } from "@across-protocol/sdk"; dotenv.config(); const globalNamespace: string | undefined = process.env.GLOBAL_CACHE_NAMESPACE diff --git a/src/utils/RetryUtils.ts b/src/utils/RetryUtils.ts new file mode 100644 index 000000000..495fb2a3a --- /dev/null 
+++ b/src/utils/RetryUtils.ts @@ -0,0 +1,17 @@ +import { delay } from "./TimeUtils"; + +export function retryAsync( + fn: (...args: U) => Promise, + numRetries: number, + delayS: number, + ...args: U +): Promise { + let ret = fn(...args); + for (let i = 0; i < numRetries; i++) { + ret = ret.catch(async () => { + await delay(delayS); + return fn(...args); + }); + } + return ret; +} diff --git a/src/utils/SDKUtils.ts b/src/utils/SDKUtils.ts index 37b9c7806..5fd725ed0 100644 --- a/src/utils/SDKUtils.ts +++ b/src/utils/SDKUtils.ts @@ -1,4 +1,4 @@ -import * as sdk from "@across-protocol/sdk-v2"; +import * as sdk from "@across-protocol/sdk"; export class BlockFinder extends sdk.utils.BlockFinder {} export type BlockFinderHints = sdk.utils.BlockFinderHints; @@ -13,7 +13,10 @@ export const { bnUint256Max, chainIsOPStack, dedupArray, + fillStatusArray, fixedPointAdjustment, + forEachAsync, + mapAsync, toBN, bnToHex, toWei, diff --git a/src/utils/SignerUtils.ts b/src/utils/SignerUtils.ts index eb744602f..98205b53d 100644 --- a/src/utils/SignerUtils.ts +++ b/src/utils/SignerUtils.ts @@ -1,6 +1,6 @@ import { readFile } from "fs/promises"; import { constants as ethersConsts, VoidSigner } from "ethers"; -import { typeguards } from "@across-protocol/sdk-v2"; +import { typeguards } from "@across-protocol/sdk"; import { Signer, Wallet, retrieveGckmsKeys, getGckmsConfig, isDefined, assert } from "./"; import { ArweaveWalletJWKInterface, ArweaveWalletJWKInterfaceSS } from "../interfaces"; diff --git a/src/utils/TokenUtils.ts b/src/utils/TokenUtils.ts index 02aa2c197..f5f28b9d6 100644 --- a/src/utils/TokenUtils.ts +++ b/src/utils/TokenUtils.ts @@ -1,7 +1,8 @@ -import { constants, utils } from "@across-protocol/sdk-v2"; +import { constants, utils } from "@across-protocol/sdk"; import { CONTRACT_ADDRESSES } from "../common"; import { BigNumberish, utils as ethersUtils } from "ethers"; -import { CHAIN_IDs, TOKEN_SYMBOLS_MAP } from "@across-protocol/constants-v2"; +import { CHAIN_IDs, 
TOKEN_SYMBOLS_MAP } from "@across-protocol/constants"; +import { L1Token } from "../interfaces"; const { ZERO_ADDRESS } = constants; export const { fetchTokenInfo } = utils; @@ -16,6 +17,37 @@ export function getEthAddressForChain(chainId: number): string { return CONTRACT_ADDRESSES[chainId]?.eth?.address ?? ZERO_ADDRESS; } +export function getTokenInfo(l2TokenAddress: string, chainId: number): L1Token { + // @dev This might give false positives if tokens on different networks have the same address. I'm not sure how + // to get around this... + const tokenObject = Object.values(TOKEN_SYMBOLS_MAP).find(({ addresses }) => addresses[chainId] === l2TokenAddress); + if (!tokenObject) { + throw new Error( + `TokenUtils#getTokenInfo: Unable to resolve token in TOKEN_SYMBOLS_MAP for ${l2TokenAddress} on chain ${chainId}` + ); + } + return { + address: l2TokenAddress, + symbol: tokenObject.symbol, + decimals: tokenObject.decimals, + }; +} + +export function getL1TokenInfo(l2TokenAddress: string, chainId: number): L1Token { + const tokenObject = Object.values(TOKEN_SYMBOLS_MAP).find(({ addresses }) => addresses[chainId] === l2TokenAddress); + const l1TokenAddress = tokenObject?.addresses[CHAIN_IDs.MAINNET]; + if (!l1TokenAddress) { + throw new Error( + `TokenUtils#getL1TokenInfo: Unable to resolve l1 token address in TOKEN_SYMBOLS_MAP for L2 token ${l2TokenAddress} on chain ${chainId}` + ); + } + return { + address: l1TokenAddress, + symbol: tokenObject.symbol, + decimals: tokenObject.decimals, + }; +} + /** * Format the given amount of tokens to the correct number of decimals for the given token symbol. * @param symbol The token symbol to format the amount for. 
diff --git a/src/utils/TransactionUtils.ts b/src/utils/TransactionUtils.ts index 16703a8d4..0b138f607 100644 --- a/src/utils/TransactionUtils.ts +++ b/src/utils/TransactionUtils.ts @@ -1,4 +1,4 @@ -import { gasPriceOracle, typeguards, utils as sdkUtils } from "@across-protocol/sdk-v2"; +import { gasPriceOracle, typeguards, utils as sdkUtils } from "@across-protocol/sdk"; import { FeeData } from "@ethersproject/abstract-provider"; import dotenv from "dotenv"; import { AugmentedTransaction } from "../clients"; @@ -158,13 +158,13 @@ export async function getGasPrice( } // Handle chains with legacy pricing. - if (feeData.maxPriorityFeePerGas.eq(0)) { + if (feeData.maxPriorityFeePerGas.eq(bnZero)) { return { gasPrice: scaleByNumber(feeData.maxFeePerGas, priorityScaler) }; } // Default to EIP-1559 (type 2) pricing. return { - maxFeePerGas: scaleByNumber(feeData.maxFeePerGas, priorityScaler * maxFeePerGasScaler), + maxFeePerGas: scaleByNumber(feeData.maxFeePerGas, Math.max(priorityScaler * maxFeePerGasScaler, 1)), maxPriorityFeePerGas: scaleByNumber(feeData.maxPriorityFeePerGas, priorityScaler), }; } diff --git a/src/utils/TypeGuards.ts b/src/utils/TypeGuards.ts index c8bdfa2b1..cc57d74d5 100644 --- a/src/utils/TypeGuards.ts +++ b/src/utils/TypeGuards.ts @@ -1,4 +1,4 @@ -import { utils } from "@across-protocol/sdk-v2"; +import { utils } from "@across-protocol/sdk"; export const { isDefined, isPromiseFulfilled, isPromiseRejected } = utils; diff --git a/src/utils/fsUtils.ts b/src/utils/fsUtils.ts index 18b38b715..d910bcfd6 100644 --- a/src/utils/fsUtils.ts +++ b/src/utils/fsUtils.ts @@ -1,6 +1,6 @@ import * as fs from "fs/promises"; import { readFileSync as _readFileSync } from "node:fs"; -import { typeguards } from "@across-protocol/sdk-v2"; +import { typeguards } from "@across-protocol/sdk"; export function readFileSync(fileName: string): string { try { diff --git a/src/utils/index.ts b/src/utils/index.ts index 5c1b43259..70e204836 100644 --- a/src/utils/index.ts +++ 
b/src/utils/index.ts @@ -1,5 +1,5 @@ // Utils from other packages. -import { constants as sdkConstants } from "@across-protocol/sdk-v2"; +import { constants as sdkConstants } from "@across-protocol/sdk"; import { constants as ethersConstants } from "ethers"; import winston from "winston"; @@ -29,9 +29,9 @@ export type { Block, TransactionResponse, TransactionReceipt, Provider } from "@ export { config } from "dotenv"; export { replaceAddressCase } from "@uma/common"; -export { Logger } from "@uma/financial-templates-lib"; +export { Logger, waitForLogger } from "@uma/logger"; -export { CHAIN_IDs, TOKEN_SYMBOLS_MAP } from "@across-protocol/constants-v2"; +export { CHAIN_IDs, TOKEN_SYMBOLS_MAP, TOKEN_EQUIVALENCE_REMAPPING } from "@across-protocol/constants"; // TypeChain exports used in the bot. export { @@ -44,7 +44,7 @@ export { AcrossConfigStore__factory as AcrossConfigStore, PolygonTokenBridger__factory as PolygonTokenBridger, WETH9__factory as WETH9, -} from "@across-protocol/contracts-v2"; +} from "@across-protocol/contracts"; // Utils specifically for this bot. 
export * from "./SDKUtils"; @@ -74,3 +74,5 @@ export * from "./UmaUtils"; export * from "./TokenUtils"; export * from "./CLIUtils"; export * from "./BNUtils"; +export * from "./CCTPUtils"; +export * from "./RetryUtils"; diff --git a/tasks/integration-tests.ts b/tasks/integration-tests.ts index baf3ae9a8..d09cd1f01 100644 --- a/tasks/integration-tests.ts +++ b/tasks/integration-tests.ts @@ -1,7 +1,7 @@ /* eslint-disable @typescript-eslint/no-explicit-any */ import { task } from "hardhat/config"; import { getSigner, winston } from "../src/utils"; -import { SpyTransport, bigNumberFormatter } from "@uma/financial-templates-lib"; +import { SpyTransport, bigNumberFormatter } from "@uma/logger"; import { runDataworker } from "../src/dataworker"; import { runRelayer } from "../src/relayer"; import { runFinalizer } from "../src/finalizer"; diff --git a/test/AdapterManager.SendTokensCrossChain.ts b/test/AdapterManager.SendTokensCrossChain.ts index 183336538..357bda6c8 100644 --- a/test/AdapterManager.SendTokensCrossChain.ts +++ b/test/AdapterManager.SendTokensCrossChain.ts @@ -1,8 +1,15 @@ import * as zksync from "zksync-web3"; import { SpokePoolClient } from "../src/clients"; import { AdapterManager } from "../src/clients/bridges"; // Tested -import { CONTRACT_ADDRESSES } from "../src/common"; -import { bnToHex, getL2TokenAddresses, toBNWei, CHAIN_IDs, TOKEN_SYMBOLS_MAP } from "../src/utils"; +import { CONTRACT_ADDRESSES, chainIdsToCctpDomains } from "../src/common"; +import { + bnToHex, + getL2TokenAddresses, + toBNWei, + CHAIN_IDs, + TOKEN_SYMBOLS_MAP, + cctpAddressToBytes32, +} from "../src/utils"; import { MockConfigStoreClient, MockHubPoolClient } from "./mocks"; import { BigNumber, @@ -44,6 +51,9 @@ let l1ZkSyncBridge: FakeContract; // Base contracts let l1BaseBridge: FakeContract; +// CCTP L1 Contracts +let l1CCTPTokenMessager: FakeContract; + const enabledChainIds = [1, 10, 137, 288, 42161, 324, 8453]; const mainnetTokens = { @@ -52,6 +62,7 @@ const mainnetTokens = 
{ dai: TOKEN_SYMBOLS_MAP.DAI.addresses[CHAIN_IDs.MAINNET], wbtc: TOKEN_SYMBOLS_MAP.WBTC.addresses[CHAIN_IDs.MAINNET], snx: TOKEN_SYMBOLS_MAP.SNX.addresses[CHAIN_IDs.MAINNET], + bal: TOKEN_SYMBOLS_MAP.BAL.addresses[CHAIN_IDs.MAINNET], } as const; const addAttrib = (obj: unknown) => @@ -108,16 +119,49 @@ describe("AdapterManager: Send tokens cross-chain", async function () { }); it("Correctly sends tokens to chain: Optimism", async function () { const chainId = CHAIN_IDs.OPTIMISM; + // ERC20 tokens: - await adapterManager.sendTokenCrossChain(relayer.address, chainId, mainnetTokens.usdc, amountToSend); + await adapterManager.sendTokenCrossChain(relayer.address, chainId, mainnetTokens.bal, amountToSend); + expect(l1OptimismBridge.depositERC20).to.have.been.calledWith( + mainnetTokens.bal, // l1 token + getL2TokenAddresses(mainnetTokens.bal)[chainId], // l2 token + amountToSend, // amount + addAttrib(adapterManager.adapters[chainId]).l2Gas, // l2Gas + "0x" // data + ); + + await adapterManager.sendTokenCrossChain( + relayer.address, + chainId, + mainnetTokens.usdc, + amountToSend, + undefined, + TOKEN_SYMBOLS_MAP["USDC.e"].addresses[chainId] + ); expect(l1OptimismBridge.depositERC20).to.have.been.calledWith( mainnetTokens.usdc, // l1 token - getL2TokenAddresses(mainnetTokens.usdc)[chainId], // l2 token + TOKEN_SYMBOLS_MAP["USDC.e"].addresses[chainId], // l2 token amountToSend, // amount addAttrib(adapterManager.adapters[chainId]).l2Gas, // l2Gas "0x" // data ); + // CCTP tokens: + await adapterManager.sendTokenCrossChain( + relayer.address, + chainId, + mainnetTokens.usdc, + amountToSend, + undefined, + TOKEN_SYMBOLS_MAP.USDC.addresses[chainId] + ); + expect(l1CCTPTokenMessager.depositForBurn).to.have.been.calledWith( + amountToSend, // amount + chainIdsToCctpDomains[chainId], // destinationDomain + cctpAddressToBytes32(relayer.address).toLowerCase(), // recipient + mainnetTokens.usdc // token + ); + await adapterManager.sendTokenCrossChain(relayer.address, chainId, 
mainnetTokens.snx, amountToSend); expect(l1OptimismSnxBridge.depositTo).to.have.been.calledWith( relayer.address, // to @@ -147,14 +191,24 @@ describe("AdapterManager: Send tokens cross-chain", async function () { it("Correctly sends tokens to chain: Polygon", async function () { const chainId = CHAIN_IDs.POLYGON; - // ERC20 tokens: - await adapterManager.sendTokenCrossChain(relayer.address, chainId, mainnetTokens.usdc, amountToSend); - expect(l1PolygonRootChainManager.depositFor).to.have.been.calledWith( - relayer.address, // user - mainnetTokens.usdc, // root token - bnToHex(amountToSend) // deposit data. bytes encoding of the amount to send. + + // CCTP tokens: + await adapterManager.sendTokenCrossChain( + relayer.address, + chainId, + mainnetTokens.usdc, + amountToSend, + false, + TOKEN_SYMBOLS_MAP.USDC.addresses[chainId] + ); + expect(l1CCTPTokenMessager.depositForBurn).to.have.been.calledWith( + amountToSend, // amount + chainIdsToCctpDomains[chainId], // destinationDomain + cctpAddressToBytes32(relayer.address).toLowerCase(), // recipient + mainnetTokens.usdc // token ); + // ERC20 tokens: await adapterManager.sendTokenCrossChain(relayer.address, chainId, mainnetTokens.dai, amountToSend); expect(l1PolygonRootChainManager.depositFor).to.have.been.calledWith( relayer.address, // user @@ -179,16 +233,24 @@ describe("AdapterManager: Send tokens cross-chain", async function () { it("Correctly sends tokens to chain: Arbitrum", async function () { const chainId = CHAIN_IDs.ARBITRUM; - // ERC20 tokens: - await adapterManager.sendTokenCrossChain(relayer.address, chainId, mainnetTokens.usdc, amountToSend); - expect(l1ArbitrumBridge.outboundTransfer).to.have.been.calledWith( - mainnetTokens.usdc, // token - relayer.address, // to + + // CCTP tokens: + await adapterManager.sendTokenCrossChain( + relayer.address, + chainId, + mainnetTokens.usdc, + amountToSend, + false, + TOKEN_SYMBOLS_MAP.USDC.addresses[chainId] + ); + 
expect(l1CCTPTokenMessager.depositForBurn).to.have.been.calledWith( amountToSend, // amount - addAttrib(adapterManager.adapters[chainId]).l2GasLimit, // maxGas - addAttrib(adapterManager.adapters[chainId]).l2GasPrice, // gasPriceBid - addAttrib(adapterManager.adapters[chainId]).transactionSubmissionData // data + chainIdsToCctpDomains[chainId], // destinationDomain + cctpAddressToBytes32(relayer.address).toLowerCase(), // recipient + mainnetTokens.usdc // token ); + + // ERC20 tokens: await adapterManager.sendTokenCrossChain(relayer.address, chainId, mainnetTokens.wbtc, amountToSend); expect(l1ArbitrumBridge.outboundTransfer).to.have.been.calledWith( mainnetTokens.wbtc, // token @@ -223,7 +285,14 @@ describe("AdapterManager: Send tokens cross-chain", async function () { const chainId = CHAIN_IDs.ZK_SYNC; l1MailboxContract.l2TransactionBaseCost.returns(toBNWei("0.2")); // ERC20 tokens: - await adapterManager.sendTokenCrossChain(relayer.address, chainId, mainnetTokens.usdc, amountToSend); + await adapterManager.sendTokenCrossChain( + relayer.address, + chainId, + mainnetTokens.usdc, + amountToSend, + false, + TOKEN_SYMBOLS_MAP["USDC.e"].addresses[chainId] + ); expect(l1ZkSyncBridge.deposit).to.have.been.calledWith( relayer.address, // user mainnetTokens.usdc, // root token @@ -256,11 +325,43 @@ describe("AdapterManager: Send tokens cross-chain", async function () { }); it("Correctly sends tokens to chain: Base", async function () { const chainId = CHAIN_IDs.BASE; + // CCTP tokens: + await adapterManager.sendTokenCrossChain( + relayer.address, + chainId, + mainnetTokens.usdc, + amountToSend, + undefined, + TOKEN_SYMBOLS_MAP.USDC.addresses[chainId] + ); + expect(l1CCTPTokenMessager.depositForBurn).to.have.been.calledWith( + amountToSend, // amount + chainIdsToCctpDomains[chainId], // destinationDomain + cctpAddressToBytes32(relayer.address).toLowerCase(), // recipient + mainnetTokens.usdc // token + ); + // ERC20 tokens: - await 
adapterManager.sendTokenCrossChain(relayer.address, chainId, mainnetTokens.usdc, amountToSend); + await adapterManager.sendTokenCrossChain( + relayer.address, + chainId, + mainnetTokens.usdc, + amountToSend, + undefined, + TOKEN_SYMBOLS_MAP.USDbC.addresses[chainId] + ); expect(l1BaseBridge.depositERC20).to.have.been.calledWith( mainnetTokens.usdc, // l1 token - getL2TokenAddresses(mainnetTokens.usdc)[chainId], // l2 token + TOKEN_SYMBOLS_MAP.USDbC.addresses[chainId], // l2 token + amountToSend, // amount + addAttrib(adapterManager.adapters[chainId]).l2Gas, // l2Gas + "0x" // data + ); + + await adapterManager.sendTokenCrossChain(relayer.address, chainId, mainnetTokens.bal, amountToSend); + expect(l1BaseBridge.depositERC20).to.have.been.calledWith( + mainnetTokens.bal, // l1 token + getL2TokenAddresses(mainnetTokens.bal)[chainId], // l2 token amountToSend, // amount addAttrib(adapterManager.adapters[chainId]).l2Gas, // l2Gas "0x" // data @@ -336,6 +437,9 @@ async function constructChainSpecificFakes() { // Base contracts l1BaseBridge = await makeFake("ovmStandardBridge_8453", CONTRACT_ADDRESSES[1].ovmStandardBridge_8453.address); + + // CCTP contracts + l1CCTPTokenMessager = await makeFake("cctpTokenMessenger", CONTRACT_ADDRESSES[1].cctpTokenMessenger.address); } async function makeFake(contractName: string, address: string) { diff --git a/test/AdapterManager.getOutstandingCrossChainTokenTransferAmount.ts b/test/AdapterManager.getOutstandingCrossChainTokenTransferAmount.ts index 29f7fe49f..17593930a 100644 --- a/test/AdapterManager.getOutstandingCrossChainTokenTransferAmount.ts +++ b/test/AdapterManager.getOutstandingCrossChainTokenTransferAmount.ts @@ -21,14 +21,14 @@ class TestAdapter extends BaseAdapter { const deposits = amounts.map((amount) => { return { amount: toBN(amount) }; }); - this.l1DepositInitiatedEvents = { "0xmonitored": { token: deposits as unknown as DepositEvent[] } }; + this.l1DepositInitiatedEvents = { "0xmonitored": { token: { token: deposits 
as unknown as DepositEvent[] } } }; } public setFinalizationEvents(amounts: number[]) { const deposits = amounts.map((amount) => { return { amount: toBN(amount) }; }); - this.l2DepositFinalizedEvents = { "0xmonitored": { token: deposits as unknown as DepositEvent[] } }; + this.l2DepositFinalizedEvents = { "0xmonitored": { token: { token: deposits as unknown as DepositEvent[] } } }; } getOutstandingCrossChainTransfers(): Promise { @@ -80,6 +80,6 @@ describe("AdapterManager: Get outstanding cross chain token transfer amounts", a }); const expectOutstandingTransfersAmount = (transfers: OutstandingTransfers, amount: number) => { - const actualAmount = transfers["0xmonitored"]?.["token"]?.totalAmount || toBN(0); + const actualAmount = transfers["0xmonitored"]?.["token"]?.["token"]?.totalAmount || toBN(0); expect(actualAmount).to.eq(toBN(amount)); }; diff --git a/test/Dataworker.blockRangeUtils.ts b/test/Dataworker.blockRangeUtils.ts index fcb2904b1..395fae6ad 100644 --- a/test/Dataworker.blockRangeUtils.ts +++ b/test/Dataworker.blockRangeUtils.ts @@ -7,7 +7,7 @@ import { HubPoolClient, SpokePoolClient } from "../src/clients"; import { getWidestPossibleExpectedBlockRange } from "../src/dataworker/PoolRebalanceUtils"; import { originChainId } from "./constants"; import { blockRangesAreInvalidForSpokeClients, getEndBlockBuffers } from "../src/dataworker/DataworkerUtils"; -import { getDeployedBlockNumber } from "@across-protocol/contracts-v2"; +import { getDeployedBlockNumber } from "@across-protocol/contracts"; import { MockHubPoolClient, MockSpokePoolClient } from "./mocks"; import { getTimestampsForBundleEndBlocks } from "../src/utils/BlockUtils"; import { assert } from "../src/utils"; diff --git a/test/Dataworker.buildRoots.ts b/test/Dataworker.buildRoots.ts index 1443fad44..9dba449f7 100644 --- a/test/Dataworker.buildRoots.ts +++ b/test/Dataworker.buildRoots.ts @@ -1,4 +1,4 @@ -import { interfaces } from "@across-protocol/sdk-v2"; +import { interfaces } from 
"@across-protocol/sdk"; import { HubPoolClient, SpokePoolClient } from "../src/clients"; import { RelayerRefundLeaf, RunningBalances } from "../src/interfaces"; import { assert, bnZero, fixedPointAdjustment } from "../src/utils"; diff --git a/test/Dataworker.executePoolRebalances.ts b/test/Dataworker.executePoolRebalances.ts index 2dadbd7b5..5a1472e0c 100644 --- a/test/Dataworker.executePoolRebalances.ts +++ b/test/Dataworker.executePoolRebalances.ts @@ -1,5 +1,5 @@ import { HubPoolClient, MultiCallerClient, SpokePoolClient } from "../src/clients"; -import { bnZero, MAX_UINT_VAL, toBNWei } from "../src/utils"; +import { bnZero, getCurrentTime, MAX_UINT_VAL, toBNWei } from "../src/utils"; import { MAX_L1_TOKENS_PER_POOL_REBALANCE_LEAF, MAX_REFUNDS_PER_RELAYER_REFUND_LEAF, @@ -90,7 +90,7 @@ describe("Dataworker: Execute pool rebalances", async function () { // Execute queue and check that root bundle is pending: await l1Token_1.approve(hubPool.address, MAX_UINT_VAL); - await multiCallerClient.executeTransactionQueue(); + await multiCallerClient.executeTxnQueues(); // Advance time and execute leaves: await hubPool.setCurrentTime(Number(await hubPool.getCurrentTime()) + Number(await hubPool.liveness()) + 1); @@ -105,7 +105,7 @@ describe("Dataworker: Execute pool rebalances", async function () { // arbitrum gas fees, and 1 to update the exchangeRate to execute the destination chain leaf. // console.log(spy.getCall(-1)) expect(multiCallerClient.transactionCount()).to.equal(4); - await multiCallerClient.executeTransactionQueue(); + await multiCallerClient.executeTxnQueues(); // TEST 3: // Submit another root bundle proposal and check bundle block range. 
There should be no leaves in the new range @@ -129,6 +129,14 @@ describe("Dataworker: Execute pool rebalances", async function () { mockHubPoolClient = new MockHubPoolClient(hubPoolClient.logger, fakeHubPool, hubPoolClient.configStoreClient); mockHubPoolClient.setTokenInfoToReturn({ address: l1Token_1.address, decimals: 18, symbol: "TEST" }); dataworkerInstance.clients.hubPoolClient = mockHubPoolClient; + + // Sub in a dummy root bundle proposal for use in HubPoolClient update. + const zero = "0x0000000000000000000000000000000000000000000000000000000000000000"; + fakeHubPool.multicall.returns([ + hubPool.interface.encodeFunctionResult("getCurrentTime", [getCurrentTime().toString()]), + hubPool.interface.encodeFunctionResult("rootBundleProposal", [zero, zero, zero, 0, ZERO_ADDRESS, 0, 0]), + ]); + await updateAllClients(); }); describe("_updateExchangeRatesBeforeExecutingHubChainLeaves", function () { diff --git a/test/Dataworker.executeRelayerRefunds.ts b/test/Dataworker.executeRelayerRefunds.ts index 1a845fa03..a25a5a367 100644 --- a/test/Dataworker.executeRelayerRefunds.ts +++ b/test/Dataworker.executeRelayerRefunds.ts @@ -69,13 +69,13 @@ describe("Dataworker: Execute relayer refunds", async function () { await dataworkerInstance.proposeRootBundle(spokePoolClients); // Execute queue and check that root bundle is pending: - await multiCallerClient.executeTransactionQueue(); + await multiCallerClient.executeTxnQueues(); // Advance time and execute rebalance leaves: await hubPool.setCurrentTime(Number(await hubPool.getCurrentTime()) + Number(await hubPool.liveness()) + 1); await updateAllClients(); await dataworkerInstance.executePoolRebalanceLeaves(spokePoolClients, await getNewBalanceAllocator()); - await multiCallerClient.executeTransactionQueue(); + await multiCallerClient.executeTxnQueues(); // Manually relay the roots to spoke pools since adapter is a dummy and won't actually relay messages. 
await updateAllClients(); @@ -90,7 +90,7 @@ describe("Dataworker: Execute relayer refunds", async function () { // Note: without sending tokens, only one of the leaves will be executable. // This is the leaf with the deposit that is being pulled back to the hub pool. expect(multiCallerClient.transactionCount()).to.equal(1); - await multiCallerClient.executeTransactionQueue(); + await multiCallerClient.executeTxnQueues(); await updateAllClients(); @@ -101,7 +101,7 @@ describe("Dataworker: Execute relayer refunds", async function () { // The other transaction should now be enqueued. expect(multiCallerClient.transactionCount()).to.equal(1); - await multiCallerClient.executeTransactionQueue(); + await multiCallerClient.executeTxnQueues(); }); describe("Computing refunds for bundles", function () { let relayer: SignerWithAddress; @@ -132,7 +132,7 @@ describe("Dataworker: Execute relayer refunds", async function () { it("No validated bundle refunds", async function () { // Propose a bundle: await dataworkerInstance.proposeRootBundle(spokePoolClients); - await multiCallerClient.executeTransactionQueue(); + await multiCallerClient.executeTxnQueues(); await updateAllClients(); // No bundle is validated so no refunds. 
@@ -145,13 +145,13 @@ describe("Dataworker: Execute relayer refunds", async function () { await updateAllClients(); // Propose a bundle: await dataworkerInstance.proposeRootBundle(spokePoolClients); - await multiCallerClient.executeTransactionQueue(); + await multiCallerClient.executeTxnQueues(); // Advance time and execute leaves: await hubPool.setCurrentTime(Number(await hubPool.getCurrentTime()) + Number(await hubPool.liveness()) + 1); await updateAllClients(); await dataworkerInstance.executePoolRebalanceLeaves(spokePoolClients, await getNewBalanceAllocator()); - await multiCallerClient.executeTransactionQueue(); + await multiCallerClient.executeTxnQueues(); // Before relayer refund leaves are not executed, should have pending refunds: await updateAllClients(); @@ -183,7 +183,7 @@ describe("Dataworker: Execute relayer refunds", async function () { // Execute relayer refund leaves. Send funds to spoke pools to execute the leaves. await erc20_2.mint(spokePool_2.address, amountToDeposit); await dataworkerInstance.executeRelayerRefundLeaves(spokePoolClients, await getNewBalanceAllocator()); - await multiCallerClient.executeTransactionQueue(); + await multiCallerClient.executeTxnQueues(); // Should now have zero pending refunds await updateAllClients(); @@ -216,11 +216,11 @@ describe("Dataworker: Execute relayer refunds", async function () { // Validate another bundle: await dataworkerInstance.proposeRootBundle(spokePoolClients); - await multiCallerClient.executeTransactionQueue(); + await multiCallerClient.executeTxnQueues(); await hubPool.setCurrentTime(Number(await hubPool.getCurrentTime()) + Number(await hubPool.liveness()) + 1); await updateAllClients(); await dataworkerInstance.executePoolRebalanceLeaves(spokePoolClients, await getNewBalanceAllocator()); - await multiCallerClient.executeTransactionQueue(); + await multiCallerClient.executeTxnQueues(); await updateAllClients(); expect(hubPoolClient.getValidatedRootBundles().length).to.equal(2); @@ -243,7 +243,7 
@@ describe("Dataworker: Execute relayer refunds", async function () { // Propose a bundle: await dataworkerInstance.proposeRootBundle(spokePoolClients); - await multiCallerClient.executeTransactionQueue(); + await multiCallerClient.executeTxnQueues(); await updateAllClients(); // After proposal but before execution should show upcoming refund: @@ -260,7 +260,7 @@ describe("Dataworker: Execute relayer refunds", async function () { await hubPool.setCurrentTime(Number(await hubPool.getCurrentTime()) + Number(await hubPool.liveness()) + 1); await updateAllClients(); await dataworkerInstance.executePoolRebalanceLeaves(spokePoolClients, await getNewBalanceAllocator()); - await multiCallerClient.executeTransactionQueue(); + await multiCallerClient.executeTxnQueues(); // Should reset to no refunds in "next bundle", though these will show up in pending bundle. await updateAllClients(); diff --git a/test/Dataworker.executeSlowRelay.ts b/test/Dataworker.executeSlowRelay.ts index 325903854..b1d3a105f 100644 --- a/test/Dataworker.executeSlowRelay.ts +++ b/test/Dataworker.executeSlowRelay.ts @@ -11,7 +11,7 @@ import { } from "./constants"; import { setupDataworker } from "./fixtures/Dataworker.Fixture"; import { Contract, SignerWithAddress, depositV3, ethers, expect, fillV3, requestSlowFill } from "./utils"; -import { interfaces } from "@across-protocol/sdk-v2"; +import { interfaces } from "@across-protocol/sdk"; // Tested import { Dataworker } from "../src/dataworker/Dataworker"; @@ -68,7 +68,7 @@ describe("Dataworker: Execute slow relays", async function () { // Execute queue and check that root bundle is pending: expect(multiCallerClient.transactionCount()).to.equal(1); await l1Token_1.approve(hubPool.address, MAX_UINT_VAL); - await multiCallerClient.executeTransactionQueue(); + await multiCallerClient.executeTxnQueues(); const providers = { ...spokePoolClientsToProviders(spokePoolClients), @@ -79,7 +79,7 @@ describe("Dataworker: Execute slow relays", async function () { 
await hubPool.setCurrentTime(Number(await hubPool.getCurrentTime()) + Number(await hubPool.liveness()) + 1); await updateAllClients(); await dataworkerInstance.executePoolRebalanceLeaves(spokePoolClients, new BalanceAllocator(providers)); - await multiCallerClient.executeTransactionQueue(); + await multiCallerClient.executeTxnQueues(); // Manually relay the roots to spoke pools since adapter is a dummy and won't actually relay messages. await updateAllClients(); @@ -97,7 +97,7 @@ describe("Dataworker: Execute slow relays", async function () { await dataworkerInstance.executeSlowRelayLeaves(spokePoolClients, new BalanceAllocator(providers)); expect(multiCallerClient.transactionCount()).to.equal(1); - await multiCallerClient.executeTransactionQueue(); + await multiCallerClient.executeTxnQueues(); await updateAllClients(); const fills = spokePoolClients[destinationChainId].getFills(); @@ -126,7 +126,7 @@ describe("Dataworker: Execute slow relays", async function () { // Execute queue and check that root bundle is pending: expect(multiCallerClient.transactionCount()).to.equal(1); await l1Token_1.approve(hubPool.address, MAX_UINT_VAL); - await multiCallerClient.executeTransactionQueue(); + await multiCallerClient.executeTxnQueues(); const providers = { ...spokePoolClientsToProviders(spokePoolClients), @@ -137,7 +137,7 @@ describe("Dataworker: Execute slow relays", async function () { await hubPool.setCurrentTime(Number(await hubPool.getCurrentTime()) + Number(await hubPool.liveness()) + 1); await updateAllClients(); await dataworkerInstance.executePoolRebalanceLeaves(spokePoolClients, new BalanceAllocator(providers)); - await multiCallerClient.executeTransactionQueue(); + await multiCallerClient.executeTxnQueues(); // Manually relay the roots to spoke pools since adapter is a dummy and won't actually relay messages. 
await updateAllClients(); @@ -164,4 +164,58 @@ describe("Dataworker: Execute slow relays", async function () { await dataworkerInstance.executeSlowRelayLeaves(spokePoolClients, new BalanceAllocator(providers)); expect(multiCallerClient.transactionCount()).to.equal(0); }); + it("Ignores expired deposits", async function () { + await updateAllClients(); + + const deposit = await depositV3( + spokePool_1, + destinationChainId, + depositor, + erc20_1.address, + amountToDeposit, + erc20_2.address, + amountToDeposit + ); + await updateAllClients(); + await requestSlowFill(spokePool_2, depositor, deposit); + + await updateAllClients(); + + await dataworkerInstance.proposeRootBundle(spokePoolClients); + + // Execute queue and check that root bundle is pending: + expect(multiCallerClient.transactionCount()).to.equal(1); + await l1Token_1.approve(hubPool.address, MAX_UINT_VAL); + await multiCallerClient.executeTxnQueues(); + + const providers = { + ...spokePoolClientsToProviders(spokePoolClients), + [(await hubPool.provider.getNetwork()).chainId]: hubPool.provider, + }; + // Advance time and execute rebalance leaves: + await hubPool.setCurrentTime(Number(await hubPool.getCurrentTime()) + Number(await hubPool.liveness()) + 1); + await updateAllClients(); + await dataworkerInstance.executePoolRebalanceLeaves(spokePoolClients, new BalanceAllocator(providers)); + await multiCallerClient.executeTxnQueues(); + + // Manually relay the roots to spoke pools since adapter is a dummy and won't actually relay messages. + await updateAllClients(); + const validatedRootBundles = hubPoolClient.getValidatedRootBundles(); + for (const rootBundle of validatedRootBundles) { + await spokePool_2.relayRootBundle(rootBundle.relayerRefundRoot, rootBundle.slowRelayRoot); + } + await updateAllClients(); + + // Note: we need to manually supply the tokens since the L1 tokens won't be recognized in the spoke pool. 
+ // It should only require ~1/2 of the amount because there was a prev fill that provided the other half. + await erc20_2.mint(spokePool_2.address, amountToDeposit); + + await updateAllClients(); + + // Check that dataworker skips the slow fill once we're past the deadline. + await spokePool_2.setCurrentTime(deposit.fillDeadline + 1); + await updateAllClients(); + await dataworkerInstance.executeSlowRelayLeaves(spokePoolClients, new BalanceAllocator(providers)); + expect(multiCallerClient.transactionCount()).to.equal(0); + }); }); diff --git a/test/Dataworker.loadData.ts b/test/Dataworker.loadData.ts index ec707c88a..818dc83d3 100644 --- a/test/Dataworker.loadData.ts +++ b/test/Dataworker.loadData.ts @@ -21,10 +21,21 @@ import { } from "./utils"; import { Dataworker } from "../src/dataworker/Dataworker"; // Tested -import { getCurrentTime, toBN, Event, bnZero, toBNWei, fixedPointAdjustment, assert, ZERO_ADDRESS } from "../src/utils"; +import { + getCurrentTime, + toBN, + Event, + bnZero, + toBNWei, + fixedPointAdjustment, + assert, + ZERO_ADDRESS, + BigNumber, +} from "../src/utils"; import { MockHubPoolClient, MockSpokePoolClient } from "./mocks"; -import { interfaces, utils as sdkUtils } from "@across-protocol/sdk-v2"; +import { interfaces, utils as sdkUtils } from "@across-protocol/sdk"; import { cloneDeep } from "lodash"; +import { CombinedRefunds } from "../src/dataworker/DataworkerUtils"; let spokePool_1: Contract, erc20_1: Contract, spokePool_2: Contract, erc20_2: Contract; let l1Token_1: Contract; @@ -366,7 +377,7 @@ describe("Dataworker: Load data used in all functions", async function () { [originChainId]: spokePoolClient_1, [destinationChainId]: spokePoolClient_2, }); - expect(spyLogIncludes(spy, -2, "Located V3 deposit outside of SpokePoolClient's search range")).is.true; + expect(spyLogIncludes(spy, -4, "Located V3 deposit outside of SpokePoolClient's search range")).is.true; 
expect(data1.bundleFillsV3[repaymentChainId][l1Token_1.address].fills.length).to.equal(1); expect(data1.bundleDepositsV3).to.deep.equal({}); }); @@ -728,7 +739,7 @@ describe("Dataworker: Load data used in all functions", async function () { [originChainId]: spokePoolClient_1, [destinationChainId]: spokePoolClient_2, }); - expect(spyLogIncludes(spy, -2, "Located V3 deposit outside of SpokePoolClient's search range")).is.true; + expect(spyLogIncludes(spy, -4, "Located V3 deposit outside of SpokePoolClient's search range")).is.true; expect(data1.bundleSlowFillsV3[destinationChainId][erc20_2.address].length).to.equal(1); expect(data1.bundleDepositsV3).to.deep.equal({}); }); @@ -869,7 +880,7 @@ describe("Dataworker: Load data used in all functions", async function () { }); // Here we can see that the historical query for the deposit actually succeeds, but the deposit itself // was not one eligible to be slow filled. - expect(spyLogIncludes(spy, -2, "Located V3 deposit outside of SpokePoolClient's search range")).is.true; + expect(spyLogIncludes(spy, -4, "Located V3 deposit outside of SpokePoolClient's search range")).is.true; expect(data1.bundleSlowFillsV3).to.deep.equal({}); expect(data1.bundleDepositsV3).to.deep.equal({}); @@ -1188,30 +1199,55 @@ describe("Dataworker: Load data used in all functions", async function () { // Approximate refunds should count both fills await updateAllClients(); - const refunds = await bundleDataClient.getApproximateRefundsForBlockRange( + const refunds = bundleDataClient.getApproximateRefundsForBlockRange( [originChainId, destinationChainId], getDefaultBlockRange(5) ); - expect(refunds).to.deep.equal({ + const expectedRefunds = { [originChainId]: { [erc20_1.address]: { - [relayer.address]: amountToDeposit.mul(2), + [relayer.address]: BigNumber.from(amountToDeposit.mul(2)).toString(), }, }, - }); + }; + + // Convert refunds to have a nested string instead of BigNumber. 
It's three levels deep + // which is a bit ugly but it's the easiest way to compare the two objects that are having + // these BN issues. + const convertToNumericStrings = (data: CombinedRefunds) => + Object.entries(data).reduce( + (acc, [chainId, refunds]) => ({ + ...acc, + [chainId]: Object.entries(refunds).reduce( + (acc, [token, refunds]) => ({ + ...acc, + [token]: Object.entries(refunds).reduce( + (acc, [address, amount]) => ({ ...acc, [address]: amount.toString() }), + {} + ), + }), + {} + ), + }), + {} + ); + + expect(convertToNumericStrings(refunds)).to.deep.equal(expectedRefunds); // Send an invalid fill and check it is not included. await fillV3(spokePool_1, relayer, { ...deposit1, depositId: deposit1.depositId + 1 }, originChainId); await updateAllClients(); expect( - await bundleDataClient.getApproximateRefundsForBlockRange( - [originChainId, destinationChainId], - getDefaultBlockRange(5) + convertToNumericStrings( + bundleDataClient.getApproximateRefundsForBlockRange( + [originChainId, destinationChainId], + getDefaultBlockRange(5) + ) ) ).to.deep.equal({ [originChainId]: { [erc20_1.address]: { - [relayer.address]: amountToDeposit.mul(2), + [relayer.address]: amountToDeposit.mul(2).toString(), }, }, }); diff --git a/test/Dataworker.proposeRootBundle.ts b/test/Dataworker.proposeRootBundle.ts index 2a6a02aa2..08078c545 100644 --- a/test/Dataworker.proposeRootBundle.ts +++ b/test/Dataworker.proposeRootBundle.ts @@ -96,7 +96,7 @@ describe("Dataworker: Propose root bundle", async function () { // Execute queue and check that root bundle is pending: await l1Token_1.approve(hubPool.address, MAX_UINT_VAL); - await multiCallerClient.executeTransactionQueue(); + await multiCallerClient.executeTxnQueues(); await updateAllClients(); expect(hubPoolClient.hasPendingProposal()).to.equal(true); @@ -162,7 +162,7 @@ describe("Dataworker: Propose root bundle", async function () { 
expect(spy.getCall(-1).lastArg.slowRelayRoot).to.equal(expectedSlowRelayRefundRoot4.tree.getHexRoot()); // Execute queue and check that root bundle is pending: - await multiCallerClient.executeTransactionQueue(); + await multiCallerClient.executeTxnQueues(); await updateAllClients(); expect(hubPoolClient.hasPendingProposal()).to.equal(true); diff --git a/test/Dataworker.validateRootBundle.ts b/test/Dataworker.validateRootBundle.ts index 4dd492c13..1982e5804 100644 --- a/test/Dataworker.validateRootBundle.ts +++ b/test/Dataworker.validateRootBundle.ts @@ -98,7 +98,7 @@ describe("Dataworker: Validate pending root bundle", async function () { await dataworkerInstance.buildSlowRelayRoot(blockRange2, spokePoolClients); await dataworkerInstance.proposeRootBundle(spokePoolClients); await l1Token_1.approve(hubPool.address, MAX_UINT_VAL); - await multiCallerClient.executeTransactionQueue(); + await multiCallerClient.executeTxnQueues(); // Exit early if no pending bundle. There shouldn't be a bundle seen yet because we haven't passed enough blocks // beyond the block buffer. @@ -138,7 +138,7 @@ describe("Dataworker: Validate pending root bundle", async function () { } await updateAllClients(); await dataworkerInstance.proposeRootBundle(spokePoolClients); - await multiCallerClient.executeTransactionQueue(); + await multiCallerClient.executeTxnQueues(); // Constructs same roots as proposed root bundle await updateAllClients(); @@ -191,7 +191,7 @@ describe("Dataworker: Validate pending root bundle", async function () { expect(spy.getCall(-2).lastArg.message).to.equal( "A bundle end block is < expected start block, submitting dispute" ); - await multiCallerClient.executeTransactionQueue(); + await multiCallerClient.executeTxnQueues(); // Bundle range end blocks are above latest block but within buffer, should skip. 
await updateAllClients(); @@ -231,7 +231,7 @@ describe("Dataworker: Validate pending root bundle", async function () { expect(spy.getCall(-2).lastArg.message).to.equal( "A bundle end block is > latest block + buffer for its chain, submitting dispute" ); - await multiCallerClient.executeTransactionQueue(); + await multiCallerClient.executeTxnQueues(); // Bundle range length doesn't match expected chain ID list. await updateAllClients(); @@ -246,7 +246,7 @@ describe("Dataworker: Validate pending root bundle", async function () { await updateAllClients(); await dataworkerInstance.validatePendingRootBundle(spokePoolClients); expect(spy.getCall(-2).lastArg.message).to.equal("Unexpected bundle block range length, disputing"); - await multiCallerClient.executeTransactionQueue(); + await multiCallerClient.executeTxnQueues(); // PoolRebalance root is empty await updateAllClients(); @@ -260,7 +260,7 @@ describe("Dataworker: Validate pending root bundle", async function () { await updateAllClients(); await dataworkerInstance.validatePendingRootBundle(spokePoolClients); expect(spy.getCall(-2).lastArg.message).to.equal("Empty pool rebalance root, submitting dispute"); - await multiCallerClient.executeTransactionQueue(); + await multiCallerClient.executeTxnQueues(); // PoolRebalance leaf count is too high await updateAllClients(); @@ -275,7 +275,7 @@ describe("Dataworker: Validate pending root bundle", async function () { await updateAllClients(); await dataworkerInstance.validatePendingRootBundle(spokePoolClients); expect(spy.getCall(-2).lastArg.message).to.equal("Unexpected pool rebalance root, submitting dispute"); - await multiCallerClient.executeTransactionQueue(); + await multiCallerClient.executeTxnQueues(); // PoolRebalance root is off await updateAllClients(); @@ -289,7 +289,7 @@ describe("Dataworker: Validate pending root bundle", async function () { await updateAllClients(); await dataworkerInstance.validatePendingRootBundle(spokePoolClients); 
expect(spy.getCall(-2).lastArg.message).to.equal("Unexpected pool rebalance root, submitting dispute"); - await multiCallerClient.executeTransactionQueue(); + await multiCallerClient.executeTxnQueues(); // RelayerRefund root is off await updateAllClients(); @@ -303,7 +303,7 @@ describe("Dataworker: Validate pending root bundle", async function () { await updateAllClients(); await dataworkerInstance.validatePendingRootBundle(spokePoolClients); expect(spy.getCall(-2).lastArg.message).to.equal("Unexpected relayer refund root, submitting dispute"); - await multiCallerClient.executeTransactionQueue(); + await multiCallerClient.executeTxnQueues(); // SlowRelay root is off await updateAllClients(); @@ -317,7 +317,7 @@ describe("Dataworker: Validate pending root bundle", async function () { await updateAllClients(); await dataworkerInstance.validatePendingRootBundle(spokePoolClients); expect(spy.getCall(-2).lastArg.message).to.equal("Unexpected slow relay root, submitting dispute"); - await multiCallerClient.executeTransactionQueue(); + await multiCallerClient.executeTxnQueues(); }); it("Validates root bundle with large bundleEvaluationBlockNumbers", async function () { await updateAllClients(); diff --git a/test/EventManager.ts b/test/EventManager.ts index cb5847120..fc8a3a678 100644 --- a/test/EventManager.ts +++ b/test/EventManager.ts @@ -1,7 +1,7 @@ import { Event, providers, utils as ethersUtils } from "ethers"; import winston from "winston"; import { Result } from "@ethersproject/abi"; -import { CHAIN_IDs } from "@across-protocol/constants-v2"; +import { CHAIN_IDs } from "@across-protocol/constants"; import { EventManager } from "../src/utils"; import { createSpyLogger, expect, randomAddress } from "./utils"; diff --git a/test/InventoryClient.InventoryRebalance.ts b/test/InventoryClient.InventoryRebalance.ts index 97c686ff4..424a9ff4f 100644 --- a/test/InventoryClient.InventoryRebalance.ts +++ b/test/InventoryClient.InventoryRebalance.ts @@ -8,7 +8,6 @@ import { 
expect, hubPoolFixture, lastSpyLogIncludes, - randomAddress, sinon, smock, spyLogIncludes, @@ -21,7 +20,15 @@ import { ConfigStoreClient, InventoryClient } from "../src/clients"; // Tested import { CrossChainTransferClient } from "../src/clients/bridges"; import { InventoryConfig } from "../src/interfaces"; import { MockAdapterManager, MockBundleDataClient, MockHubPoolClient, MockTokenClient } from "./mocks/"; -import { ERC20 } from "../src/utils"; +import { + bnZero, + CHAIN_IDs, + createFormatFunction, + ERC20, + fixedPointAdjustment as fixedPoint, + getNetworkName, + TOKEN_SYMBOLS_MAP, +} from "../src/utils"; const toMegaWei = (num: string | number | BigNumber) => ethers.utils.parseUnits(num.toString(), 6); @@ -31,50 +38,58 @@ let owner: SignerWithAddress, spy: sinon.SinonSpy, spyLogger: winston.Logger; let inventoryClient: InventoryClient; // tested let crossChainTransferClient: CrossChainTransferClient; -const enabledChainIds = [1, 10, 137, 42161]; - -const mainnetWeth = "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2"; -const mainnetUsdc = "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48"; +const { MAINNET, OPTIMISM, POLYGON, BASE, ARBITRUM } = CHAIN_IDs; +const enabledChainIds = [MAINNET, OPTIMISM, POLYGON, BASE, ARBITRUM]; +const mainnetWeth = TOKEN_SYMBOLS_MAP.WETH.addresses[MAINNET]; +const mainnetUsdc = TOKEN_SYMBOLS_MAP.USDC.addresses[MAINNET]; let mainnetWethContract: FakeContract; let mainnetUsdcContract: FakeContract; // construct two mappings of chainId to token address. Set the l1 token address to the "real" token address. 
-const l2TokensForWeth = { 1: mainnetWeth }; -const l2TokensForUsdc = { 1: mainnetUsdc }; -enabledChainIds.slice(1).forEach((chainId) => { - l2TokensForWeth[chainId] = randomAddress(); - l2TokensForUsdc[chainId] = randomAddress(); -}); +const l2TokensForWeth = { [MAINNET]: mainnetWeth }; +const l2TokensForUsdc = { [MAINNET]: mainnetUsdc }; +enabledChainIds + .filter((chainId) => chainId !== MAINNET) + .forEach((chainId) => { + l2TokensForWeth[chainId] = TOKEN_SYMBOLS_MAP.WETH.addresses[chainId]; + l2TokensForUsdc[chainId] = TOKEN_SYMBOLS_MAP.USDC.addresses[chainId]; + }); // Configure target percentages as 80% mainnet, 10% optimism, 5% polygon and 5% Arbitrum. +const targetOverageBuffer = toWei(1); const inventoryConfig: InventoryConfig = { + wrapEtherTargetPerChain: {}, + wrapEtherTarget: toWei(1), + wrapEtherThresholdPerChain: {}, + wrapEtherThreshold: toWei(1), tokenConfig: { [mainnetWeth]: { - 10: { targetPct: toWei(0.12), thresholdPct: toWei(0.1) }, - 137: { targetPct: toWei(0.07), thresholdPct: toWei(0.05) }, - 42161: { targetPct: toWei(0.07), thresholdPct: toWei(0.05) }, + [OPTIMISM]: { targetPct: toWei(0.12), thresholdPct: toWei(0.1), targetOverageBuffer }, + [POLYGON]: { targetPct: toWei(0.07), thresholdPct: toWei(0.05), targetOverageBuffer }, + [BASE]: { targetPct: toWei(0.07), thresholdPct: toWei(0.05), targetOverageBuffer }, + [ARBITRUM]: { targetPct: toWei(0.07), thresholdPct: toWei(0.05), targetOverageBuffer }, }, - [mainnetUsdc]: { - 10: { targetPct: toWei(0.12), thresholdPct: toWei(0.1) }, - 137: { targetPct: toWei(0.07), thresholdPct: toWei(0.05) }, - 42161: { targetPct: toWei(0.07), thresholdPct: toWei(0.05) }, + [OPTIMISM]: { targetPct: toWei(0.12), thresholdPct: toWei(0.1), targetOverageBuffer }, + [POLYGON]: { targetPct: toWei(0.07), thresholdPct: toWei(0.05), targetOverageBuffer }, + [BASE]: { targetPct: toWei(0.07), thresholdPct: toWei(0.05), targetOverageBuffer }, + [ARBITRUM]: { targetPct: toWei(0.07), thresholdPct: toWei(0.05), 
targetOverageBuffer }, }, }, - wrapEtherThreshold: toWei(1), }; // Construct an initial distribution that keeps these values within the above thresholds. const initialAllocation = { - 1: { [mainnetWeth]: toWei(100), [mainnetUsdc]: toMegaWei(10000) }, // seed 100 WETH and 10000 USDC on Mainnet - 10: { [mainnetWeth]: toWei(20), [mainnetUsdc]: toMegaWei(2000) }, // seed 20 WETH and 2000 USDC on Optimism - 137: { [mainnetWeth]: toWei(10), [mainnetUsdc]: toMegaWei(1000) }, // seed 10 WETH and 1000 USDC on Polygon - 42161: { [mainnetWeth]: toWei(10), [mainnetUsdc]: toMegaWei(1000) }, // seed 10 WETH and 1000 USDC on Arbitrum + [MAINNET]: { [mainnetWeth]: toWei(100), [mainnetUsdc]: toMegaWei(10000) }, // seed 100 WETH and 10000 USDC + [OPTIMISM]: { [mainnetWeth]: toWei(20), [mainnetUsdc]: toMegaWei(2000) }, // seed 20 WETH and 2000 USDC + [POLYGON]: { [mainnetWeth]: toWei(10), [mainnetUsdc]: toMegaWei(1000) }, // seed 10 WETH and 1000 USDC + [BASE]: { [mainnetWeth]: toWei(10), [mainnetUsdc]: toMegaWei(1000) }, // seed 10 WETH and 1000 USDC + [ARBITRUM]: { [mainnetWeth]: toWei(10), [mainnetUsdc]: toMegaWei(1000) }, // seed 10 WETH and 1000 USDC }; -const initialWethTotal = toWei(140); // Sum over all 4 chains is 140 -const initialUsdcTotal = toMegaWei(14000); // Sum over all 4 chains is 14000 +const initialWethTotal = toWei(150); // Sum over all 5 chains is 150 +const initialUsdcTotal = toMegaWei(15000); // Sum over all 5 chains is 15000 const initialTotals = { [mainnetWeth]: initialWethTotal, [mainnetUsdc]: initialUsdcTotal }; describe("InventoryClient: Rebalancing inventory", async function () { @@ -89,6 +104,10 @@ describe("InventoryClient: Rebalancing inventory", async function () { await configStoreClient.update(); hubPoolClient = new MockHubPoolClient(spyLogger, hubPool, configStoreClient); + enabledChainIds.forEach((chainId) => { + hubPoolClient.mapTokenInfo(l2TokensForWeth[chainId], "WETH", 18); + hubPoolClient.mapTokenInfo(l2TokensForUsdc[chainId], "USDC", 6); + 
}); await hubPoolClient.update(); adapterManager = new MockAdapterManager(null, null, null, null); @@ -112,8 +131,8 @@ describe("InventoryClient: Rebalancing inventory", async function () { mainnetWethContract = await smock.fake(ERC20.abi, { address: mainnetWeth }); mainnetUsdcContract = await smock.fake(ERC20.abi, { address: mainnetUsdc }); - mainnetWethContract.balanceOf.whenCalledWith(owner.address).returns(initialAllocation[1][mainnetWeth]); - mainnetUsdcContract.balanceOf.whenCalledWith(owner.address).returns(initialAllocation[1][mainnetUsdc]); + mainnetWethContract.balanceOf.whenCalledWith(owner.address).returns(initialAllocation[MAINNET][mainnetWeth]); + mainnetUsdcContract.balanceOf.whenCalledWith(owner.address).returns(initialAllocation[MAINNET][mainnetUsdc]); seedMocks(initialAllocation); }); @@ -121,28 +140,25 @@ describe("InventoryClient: Rebalancing inventory", async function () { it("Accessors work as expected", async function () { expect(inventoryClient.getEnabledChains()).to.deep.equal(enabledChainIds); expect(inventoryClient.getL1Tokens()).to.deep.equal(Object.keys(inventoryConfig.tokenConfig)); - expect(inventoryClient.getEnabledL2Chains()).to.deep.equal([10, 137, 42161]); + expect(inventoryClient.getEnabledL2Chains()).to.deep.equal([OPTIMISM, POLYGON, BASE, ARBITRUM]); - expect(inventoryClient.getCumulativeBalance(mainnetWeth)).to.equal(initialWethTotal); - expect(inventoryClient.getCumulativeBalance(mainnetUsdc)).to.equal(initialUsdcTotal); + expect(inventoryClient.getCumulativeBalance(mainnetWeth).eq(initialWethTotal)).to.be.true; + expect(inventoryClient.getCumulativeBalance(mainnetUsdc).eq(initialUsdcTotal)).to.be.true; // Check the allocation matches to what is expected in the seed state of the mock. Check more complex matchers. 
const tokenDistribution = inventoryClient.getTokenDistributionPerL1Token(); for (const chainId of enabledChainIds) { for (const l1Token of inventoryClient.getL1Tokens()) { - expect(inventoryClient.getBalanceOnChainForL1Token(chainId, l1Token)).to.equal( - initialAllocation[chainId][l1Token] - ); + expect(inventoryClient.getBalanceOnChain(chainId, l1Token)).to.equal(initialAllocation[chainId][l1Token]); expect( - inventoryClient.crossChainTransferClient.getOutstandingCrossChainTransferAmount( - owner.address, - chainId, - l1Token - ) - ).to.equal(toBN(0)); // For now no cross-chain transfers + inventoryClient.crossChainTransferClient + .getOutstandingCrossChainTransferAmount(owner.address, chainId, l1Token) + .eq(bnZero) + ).to.be.true; // For now no cross-chain transfers const expectedShare = initialAllocation[chainId][l1Token].mul(toWei(1)).div(initialTotals[l1Token]); - expect(tokenDistribution[l1Token][chainId]).to.equal(expectedShare); + const l2Token = (l1Token === mainnetWeth ? l2TokensForWeth : l2TokensForUsdc)[chainId]; + expect(tokenDistribution[l1Token][chainId][l2Token]).to.equal(expectedShare); } } }); @@ -155,54 +171,53 @@ describe("InventoryClient: Rebalancing inventory", async function () { expect(lastSpyLogIncludes(spy, "No rebalances required")).to.be.true; // Now, simulate the re-allocation of funds. Say that the USDC on arbitrum is half used up. This will leave arbitrum - // with 500 USDC, giving a percentage of 500/14000 = 0.035. This is below the threshold of 0.5 so we should see + // with 500 USDC, giving a percentage of 500/15000 = 0.035. This is below the threshold of 0.5 so we should see // a re-balance executed in size of the target allocation + overshoot percentage. 
- const initialBalance = initialAllocation[42161][mainnetUsdc]; - expect(tokenClient.getBalance(42161, l2TokensForUsdc[42161])).to.equal(initialBalance); + const initialBalance = initialAllocation[ARBITRUM][mainnetUsdc]; + expect(tokenClient.getBalance(ARBITRUM, l2TokensForUsdc[ARBITRUM]).eq(initialBalance)).to.be.true; const withdrawAmount = toMegaWei(500); - tokenClient.decrementLocalBalance(42161, l2TokensForUsdc[42161], withdrawAmount); - expect(tokenClient.getBalance(42161, l2TokensForUsdc[42161])).to.equal(withdrawAmount); + tokenClient.decrementLocalBalance(ARBITRUM, l2TokensForUsdc[ARBITRUM], withdrawAmount); + expect(tokenClient.getBalance(ARBITRUM, l2TokensForUsdc[ARBITRUM]).eq(initialBalance.sub(withdrawAmount))).to.be + .true; // The allocation of this should now be below the threshold of 5% so the inventory client should instruct a rebalance. const expectedAlloc = withdrawAmount.mul(toWei(1)).div(initialUsdcTotal.sub(withdrawAmount)); - expect(inventoryClient.getCurrentAllocationPct(mainnetUsdc, 42161)).to.equal(expectedAlloc); + expect(inventoryClient.getCurrentAllocationPct(mainnetUsdc, ARBITRUM).eq(expectedAlloc)).to.be.true; // Execute rebalance. Check logs and enqueued transaction in Adapter manager. Given the total amount over all chains // and the amount still on arbitrum we would expect the module to instruct the relayer to send over: - // (0.05 + 0.02) * (14000 - 500) - 500 = 445. Note the -500 component is there as arbitrum already has 500. our left + // (0.05 + 0.02) * (15000 - 500) - 500 = 515. Note the -500 component is there as arbitrum already has 500 remaining // post previous relay. 
- const expectedBridgedAmount = toMegaWei(445); + const expectedBridgedAmount = toMegaWei(515); await inventoryClient.update(); await inventoryClient.rebalanceInventoryIfNeeded(); expect(lastSpyLogIncludes(spy, "Executed Inventory rebalances")).to.be.true; expect(lastSpyLogIncludes(spy, "Rebalances sent to Arbitrum")).to.be.true; - expect(lastSpyLogIncludes(spy, "445.00 USDC rebalanced")).to.be.true; // cast to formatting expected by client. + expect(lastSpyLogIncludes(spy, "515.00 USDC rebalanced")).to.be.true; // cast to formatting expected by client. expect(lastSpyLogIncludes(spy, "This meets target allocation of 7.00%")).to.be.true; // config from client. // The mock adapter manager should have been called with the expected transaction. - expect(adapterManager.tokensSentCrossChain[42161][mainnetUsdc].amount).to.equal(expectedBridgedAmount); + expect(adapterManager.tokensSentCrossChain[ARBITRUM][mainnetUsdc].amount.eq(expectedBridgedAmount)).to.be.true; // Now, mock these funds having entered the canonical bridge. - adapterManager.setMockedOutstandingCrossChainTransfers(42161, owner.address, mainnetUsdc, expectedBridgedAmount); + adapterManager.setMockedOutstandingCrossChainTransfers(ARBITRUM, owner.address, mainnetUsdc, expectedBridgedAmount); // Now that funds are "in the bridge" re-running the rebalance should not execute any transactions. await inventoryClient.update(); await inventoryClient.rebalanceInventoryIfNeeded(); expect(lastSpyLogIncludes(spy, "No rebalances required")).to.be.true; - expect(spyLogIncludes(spy, -2, '"outstandingTransfers":"445.00"')).to.be.true; + expect(spyLogIncludes(spy, -2, '"outstandingTransfers":"515.00"')).to.be.true; // Now mock that funds have finished coming over the bridge and check behavior is as expected. - adapterManager.setMockedOutstandingCrossChainTransfers(42161, owner.address, mainnetUsdc, toBN(0)); // zero the transfer. mock conclusion. 
- // Balance after the relay concludes should be initial - withdrawn + bridged as 1000-500+445=945 - const expectedPostRelayBalance = initialBalance.sub(withdrawAmount).add(expectedBridgedAmount); - tokenClient.setTokenData(42161, l2TokensForUsdc[42161], expectedPostRelayBalance, toBN(0)); + adapterManager.setMockedOutstandingCrossChainTransfers(ARBITRUM, owner.address, mainnetUsdc, bnZero); // zero the transfer. mock conclusion. await inventoryClient.update(); await inventoryClient.rebalanceInventoryIfNeeded(); expect(lastSpyLogIncludes(spy, "No rebalances required")).to.be.true; - // We should see a log for chain 42161 that shows the actual balance after the relay concluded and the share. - // actual balance should be listed above at 945. share should be 945/(13500) =0.7 (initial total - withdrawAmount). - expect(spyLogIncludes(spy, -2, '"42161":{"actualBalanceOnChain":"945.00"')).to.be.true; + // We should see a log for Arbitrum that shows the actual balance after the relay concluded and the share. The + // actual balance should be listed above at 500 with 515 outstanding. share should be 1015/(14500) = 0.07 (initial total - withdrawAmount). + expect(spyLogIncludes(spy, -2, '"actualBalanceOnChain":"500.00"')).to.be.true; + expect(spyLogIncludes(spy, -2, '"outstandingTransfers":"515.00"')).to.be.true; expect(spyLogIncludes(spy, -2, '"proRataShare":"7.00%"')).to.be.true; }); @@ -211,62 +226,66 @@ describe("InventoryClient: Rebalancing inventory", async function () { await inventoryClient.update(); await inventoryClient.rebalanceInventoryIfNeeded(); - expect(tokenClient.getBalance(137, l2TokensForWeth[137])).to.equal(toWei(10)); // Starting balance. + expect(tokenClient.getBalance(POLYGON, l2TokensForWeth[POLYGON]).eq(toWei(10))).to.be.true; // Starting balance. // Construct a token shortfall of 18. 
const shortfallAmount = toWei(18); - tokenClient.setTokenShortFallData(137, l2TokensForWeth[137], [6969], shortfallAmount); + tokenClient.setTokenShortFallData(POLYGON, l2TokensForWeth[POLYGON], [6969], shortfallAmount); await inventoryClient.update(); // If we now consider how much should be sent over the bridge. The spoke pool, considering the shortfall, has an - // allocation of -5.7%. The target is, however, 5% of the total supply. factoring in the overshoot parameter we - // should see a transfer of 5 + 2 - (-5.7)=12.714% of total inventory. This should be an amount of 0.127*140=17.79. - const expectedBridgedAmount = toBN("17799999999999999880"); + // allocation of -5.3%. The target is, however, 5% of the total supply. factoring in the overshoot parameter we + // should see a transfer of 5 + 2 - (-5.3)=12.3% of total inventory. This should be an amount of 0.1233*150=18.49. + const expectedBridgedAmount = toBN("18499999999999999950"); await inventoryClient.rebalanceInventoryIfNeeded(); expect(lastSpyLogIncludes(spy, "Executed Inventory rebalances")).to.be.true; expect(lastSpyLogIncludes(spy, "Rebalances sent to Polygon")).to.be.true; - expect(lastSpyLogIncludes(spy, "17.79 WETH rebalanced")).to.be.true; // expected bridge amount rounded for logs. + expect(lastSpyLogIncludes(spy, "18.49 WETH rebalanced")).to.be.true; // expected bridge amount rounded for logs. expect(lastSpyLogIncludes(spy, "This meets target allocation of 7.00%")).to.be.true; // config from client. // Note that there should be some additional state updates that we should check. In particular the token balance // on L1 should have been decremented by the amount sent over the bridge and the Inventory client should be tracking // the cross-chain transfers. 
- expect(tokenClient.getBalance(1, mainnetWeth)).to.equal(toWei(100).sub(expectedBridgedAmount)); + expect(tokenClient.getBalance(MAINNET, mainnetWeth).eq(toWei(100).sub(expectedBridgedAmount))).to.be.true; expect( - inventoryClient.crossChainTransferClient.getOutstandingCrossChainTransferAmount(owner.address, 137, mainnetWeth) + inventoryClient.crossChainTransferClient.getOutstandingCrossChainTransferAmount( + owner.address, + POLYGON, + mainnetWeth + ) ).to.equal(expectedBridgedAmount); // The mock adapter manager should have been called with the expected transaction. - expect(adapterManager.tokensSentCrossChain[137][mainnetWeth].amount).to.equal(expectedBridgedAmount); + expect(adapterManager.tokensSentCrossChain[POLYGON][mainnetWeth].amount.eq(expectedBridgedAmount)).to.be.true; // Now, mock these funds having entered the canonical bridge. - adapterManager.setMockedOutstandingCrossChainTransfers(137, owner.address, mainnetWeth, expectedBridgedAmount); + adapterManager.setMockedOutstandingCrossChainTransfers(POLYGON, owner.address, mainnetWeth, expectedBridgedAmount); // Now that funds are "in the bridge" re-running the rebalance should not execute any transactions as the util // should consider the funds in transit as part of the balance and therefore should not send more. await inventoryClient.update(); await inventoryClient.rebalanceInventoryIfNeeded(); expect(lastSpyLogIncludes(spy, "No rebalances required")).to.be.true; - expect(spyLogIncludes(spy, -2, '"outstandingTransfers":"17.79"')).to.be.true; + expect(spyLogIncludes(spy, -2, '"outstandingTransfers":"18.49"')).to.be.true; expect(spyLogIncludes(spy, -2, '"actualBalanceOnChain":"10.00"')).to.be.true; - expect(spyLogIncludes(spy, -2, '"virtualBalanceOnChain":"27.79"')).to.be.true; + expect(spyLogIncludes(spy, -2, '"virtualBalanceOnChain":"28.49"')).to.be.true; // Now mock that funds have finished coming over the bridge and check behavior is as expected. // Zero the transfer. mock conclusion. 
- adapterManager.setMockedOutstandingCrossChainTransfers(137, owner.address, mainnetWeth, toBN(0)); + adapterManager.setMockedOutstandingCrossChainTransfers(POLYGON, owner.address, mainnetWeth, bnZero); // Balance after the relay concludes should be initial + bridged amount as 10+17.9=27.9 const expectedPostRelayBalance = toWei(10).add(expectedBridgedAmount); - tokenClient.setTokenData(137, l2TokensForWeth[137], expectedPostRelayBalance, toBN(0)); + tokenClient.setTokenData(POLYGON, l2TokensForWeth[POLYGON], expectedPostRelayBalance, bnZero); // The token shortfall should now no longer be an issue. This means we can fill the relay of 18 size now. - tokenClient.setTokenShortFallData(137, l2TokensForWeth[137], [6969], toBN(0)); - tokenClient.decrementLocalBalance(137, l2TokensForWeth[137], shortfallAmount); // mock the relay actually filling. + tokenClient.setTokenShortFallData(POLYGON, l2TokensForWeth[POLYGON], [6969], bnZero); + tokenClient.decrementLocalBalance(POLYGON, l2TokensForWeth[POLYGON], shortfallAmount); // mock the relay actually filling. await inventoryClient.update(); await inventoryClient.rebalanceInventoryIfNeeded(); expect(lastSpyLogIncludes(spy, "No rebalances required")).to.be.true; - // We should see a log for chain 42161 that shows the actual balance after the relay concluded and the share. + // We should see a log for chain Arbitrum that shows the actual balance after the relay concluded and the share. // actual balance should be listed above at 945. share should be 945/(13500) =0.7 (initial total - withdrawAmount). - // expect(spyLogIncludes(spy, -2, `"42161":{"actualBalanceOnChain":"945.00"`)).to.be.true; + // expect(spyLogIncludes(spy, -2, `"${ARBITRUM}":{"actualBalanceOnChain":"945.00"`)).to.be.true; // expect(spyLogIncludes(spy, -2, `"proRataShare":"7.00%"`)).to.be.true; }); @@ -277,38 +296,229 @@ describe("InventoryClient: Rebalancing inventory", async function () { // Now, simulate the re-allocation of funds. 
Say that the USDC on arbitrum is half used up. This will leave arbitrum // with 500 USDC, giving a percentage of 500/14000 = 0.035. This is below the threshold of 0.5 so we should see // a re-balance executed in size of the target allocation + overshoot percentage. - const initialBalance = initialAllocation[42161][mainnetUsdc]; - expect(tokenClient.getBalance(42161, l2TokensForUsdc[42161])).to.equal(initialBalance); + const initialBalance = initialAllocation[ARBITRUM][mainnetUsdc]; + expect(tokenClient.getBalance(ARBITRUM, l2TokensForUsdc[ARBITRUM])).to.equal(initialBalance); const withdrawAmount = toMegaWei(500); - tokenClient.decrementLocalBalance(42161, l2TokensForUsdc[42161], withdrawAmount); - expect(tokenClient.getBalance(42161, l2TokensForUsdc[42161])).to.equal(withdrawAmount); + tokenClient.decrementLocalBalance(ARBITRUM, l2TokensForUsdc[ARBITRUM], withdrawAmount); + expect(tokenClient.getBalance(ARBITRUM, l2TokensForUsdc[ARBITRUM])).to.equal(withdrawAmount); // The allocation of this should now be below the threshold of 5% so the inventory client should instruct a rebalance. const expectedAlloc = withdrawAmount.mul(toWei(1)).div(initialUsdcTotal.sub(withdrawAmount)); - expect(inventoryClient.getCurrentAllocationPct(mainnetUsdc, 42161)).to.equal(expectedAlloc); + expect(inventoryClient.getCurrentAllocationPct(mainnetUsdc, ARBITRUM)).to.equal(expectedAlloc); // Set USDC balance to be lower than expected. mainnetUsdcContract.balanceOf .whenCalledWith(owner.address) - .returns(initialAllocation[1][mainnetUsdc].sub(toMegaWei(1))); + .returns(initialAllocation[MAINNET][mainnetUsdc].sub(toMegaWei(1))); await inventoryClient.rebalanceInventoryIfNeeded(); - expect(spyLogIncludes(spy, -2, "Token balance on Ethereum changed")).to.be.true; + expect(spyLogIncludes(spy, -2, "Token balance on mainnet changed")).to.be.true; // Reset and check again. 
- mainnetUsdcContract.balanceOf.whenCalledWith(owner.address).returns(initialAllocation[1][mainnetUsdc]); + mainnetUsdcContract.balanceOf.whenCalledWith(owner.address).returns(initialAllocation[MAINNET][mainnetUsdc]); await inventoryClient.rebalanceInventoryIfNeeded(); expect(lastSpyLogIncludes(spy, "Executed Inventory rebalances")).to.be.true; }); + + describe("Remote chain token mappings", async function () { + const nativeUSDC = TOKEN_SYMBOLS_MAP.USDC.addresses; + const bridgedUSDC = { ...TOKEN_SYMBOLS_MAP["USDC.e"].addresses, ...TOKEN_SYMBOLS_MAP["USDbC"].addresses }; + const usdcConfig = { + [nativeUSDC[OPTIMISM]]: { + [OPTIMISM]: { targetPct: toWei(0.12), thresholdPct: toWei(0.1), targetOverageBuffer }, + }, + [nativeUSDC[POLYGON]]: { + [POLYGON]: { targetPct: toWei(0.07), thresholdPct: toWei(0.05), targetOverageBuffer }, + }, + [nativeUSDC[BASE]]: { + [BASE]: { targetPct: toWei(0.07), thresholdPct: toWei(0.05), targetOverageBuffer }, + }, + [nativeUSDC[ARBITRUM]]: { + [ARBITRUM]: { targetPct: toWei(0.07), thresholdPct: toWei(0.05), targetOverageBuffer }, + }, + [bridgedUSDC[OPTIMISM]]: { + [OPTIMISM]: { targetPct: toWei(0.12), thresholdPct: toWei(0.1), targetOverageBuffer }, + }, + [bridgedUSDC[POLYGON]]: { + [POLYGON]: { targetPct: toWei(0.07), thresholdPct: toWei(0.05), targetOverageBuffer }, + }, + [bridgedUSDC[BASE]]: { + [BASE]: { targetPct: toWei(0.07), thresholdPct: toWei(0.05), targetOverageBuffer }, + }, + [bridgedUSDC[ARBITRUM]]: { + [ARBITRUM]: { targetPct: toWei(0.07), thresholdPct: toWei(0.05), targetOverageBuffer }, + }, + }; + + beforeEach(async function () { + // Sub in a nested USDC config for the existing USDC single-token config. + inventoryConfig.tokenConfig[mainnetUsdc] = usdcConfig; + + enabledChainIds.forEach((chainId) => { + hubPoolClient.mapTokenInfo(nativeUSDC[chainId], "USDC", 6); + }); + }); + + it("Correctly resolves 1:many token mappings", async function () { + // Caller must specify l2Token for 1:many mappings. 
+ expect(() => inventoryClient.getTokenConfig(mainnetUsdc, BASE)).to.throw(); + + enabledChainIds + .filter((chainId) => chainId !== MAINNET) + .forEach((chainId) => { + const config = inventoryClient.getTokenConfig(mainnetUsdc, chainId, bridgedUSDC[chainId]); + expect(config).to.exist; + + const expectedConfig = inventoryConfig.tokenConfig[mainnetUsdc][bridgedUSDC[chainId]][chainId]; + expect(expectedConfig).to.exist; + expect(config).to.deep.equal(expectedConfig); + }); + }); + + it("Correctly isolates 1:many token balances", async function () { + enabledChainIds + .filter((chainId) => chainId !== MAINNET) + .forEach((chainId) => { + // Non-zero native USDC balance, zero bridged balance. + const nativeBalance = inventoryClient.getBalanceOnChain(chainId, mainnetUsdc, nativeUSDC[chainId]); + expect(nativeBalance.gt(bnZero)).to.be.true; + + let bridgedBalance = inventoryClient.getBalanceOnChain(chainId, mainnetUsdc, bridgedUSDC[chainId]); + expect(bridgedBalance.eq(bnZero)).to.be.true; + + // Add bridged balance. + tokenClient.setTokenData(chainId, bridgedUSDC[chainId], nativeBalance); + + // Native balance should now match bridged balance. 
+ bridgedBalance = inventoryClient.getBalanceOnChain(chainId, mainnetUsdc, bridgedUSDC[chainId]); + expect(nativeBalance.eq(bridgedBalance)).to.be.true; + }); + }); + + it("Correctly sums 1:many token balances", async function () { + enabledChainIds + .filter((chainId) => chainId !== MAINNET) + .forEach((chainId) => { + const bridgedBalance = inventoryClient.getBalanceOnChain(chainId, mainnetUsdc, bridgedUSDC[chainId]); + expect(bridgedBalance.eq(bnZero)).to.be.true; + + const nativeBalance = inventoryClient.getBalanceOnChain(chainId, mainnetUsdc, nativeUSDC[chainId]); + expect(nativeBalance.gt(bnZero)).to.be.true; + + const cumulativeBalance = inventoryClient.getCumulativeBalance(mainnetUsdc); + expect(cumulativeBalance.eq(initialUsdcTotal)).to.be.true; + + tokenClient.setTokenData(chainId, bridgedUSDC[chainId], nativeBalance); + + const newBalance = inventoryClient.getCumulativeBalance(mainnetUsdc); + expect(newBalance.eq(initialUsdcTotal.add(nativeBalance))).to.be.true; + + // Revert to 0 balance for bridged USDC. + tokenClient.setTokenData(chainId, bridgedUSDC[chainId], bnZero); + }); + }); + + it("Correctly tracks 1:many token distributions", async function () { + enabledChainIds + .filter((chainId) => chainId !== MAINNET) + .forEach((chainId) => { + // Total USDC across all chains. + let cumulativeBalance = inventoryClient.getCumulativeBalance(mainnetUsdc); + expect(cumulativeBalance.gt(bnZero)).to.be.true; + expect(cumulativeBalance.eq(initialUsdcTotal)).to.be.true; + + // The initial allocation is all native USDC, 0 bridged. 
+ const nativeAllocation = inventoryClient.getCurrentAllocationPct(mainnetUsdc, chainId, nativeUSDC[chainId]); + expect(nativeAllocation.gt(bnZero)).to.be.true; + let balance = inventoryClient.getBalanceOnChain(chainId, mainnetUsdc, nativeUSDC[chainId]); + expect(nativeAllocation.eq(balance.mul(fixedPoint).div(cumulativeBalance))).to.be.true; + + let bridgedAllocation = inventoryClient.getCurrentAllocationPct(mainnetUsdc, chainId, bridgedUSDC[chainId]); + expect(bridgedAllocation.eq(bnZero)).to.be.true; + + balance = inventoryClient.getBalanceOnChain(chainId, mainnetUsdc, bridgedUSDC[chainId]); + expect(balance.eq(bnZero)).to.be.true; + + // Add bridged USDC, same amount as native USDC. + balance = inventoryClient.getBalanceOnChain(chainId, mainnetUsdc, nativeUSDC[chainId]); + tokenClient.setTokenData(chainId, bridgedUSDC[chainId], balance); + expect(inventoryClient.getBalanceOnChain(chainId, mainnetUsdc, bridgedUSDC[chainId]).eq(balance)).to.be.true; + expect(bridgedAllocation.eq(bnZero)).to.be.true; + + // Bridged USDC allocation should now be non-zero. + bridgedAllocation = inventoryClient.getCurrentAllocationPct(mainnetUsdc, chainId, bridgedUSDC[chainId]); + expect(bridgedAllocation.gt(bnZero)).to.be.true; + + expect(inventoryClient.getCumulativeBalance(mainnetUsdc).gt(cumulativeBalance)).to.be.true; + cumulativeBalance = inventoryClient.getCumulativeBalance(mainnetUsdc); + expect(cumulativeBalance.gt(initialUsdcTotal)).to.be.true; + + // Return bridged USDC balance to 0 for next loop. + tokenClient.setTokenData(chainId, bridgedUSDC[chainId], bnZero); + }); + }); + + it("Correctly rebalances mainnet USDC into non-repayment USDC", async function () { + // Unset all bridged USDC allocations. 
+ for (const chainId of [OPTIMISM, POLYGON, BASE, ARBITRUM]) { + const l2Token = bridgedUSDC[chainId]; + hubPoolClient.mapTokenInfo(l2Token, "USDC.e", 6); + delete inventoryConfig.tokenConfig[mainnetUsdc][l2Token]; + } + + await inventoryClient.update(); + await inventoryClient.rebalanceInventoryIfNeeded(); + expect(lastSpyLogIncludes(spy, "No rebalances required")).to.be.true; + + const cumulativeUSDC = inventoryClient.getCumulativeBalance(mainnetUsdc); + const targetPct = toWei(0.1); + const thresholdPct = toWei(0.05); + const expectedRebalance = cumulativeUSDC.mul(targetPct).div(fixedPoint); + const { decimals } = TOKEN_SYMBOLS_MAP.USDC; + const formatter = createFormatFunction(2, 4, false, decimals); + const formattedAmount = formatter(expectedRebalance.toString()); + + let virtualMainnetBalance = initialAllocation[MAINNET][mainnetUsdc]; + + for (const chainId of [OPTIMISM, POLYGON, BASE, ARBITRUM]) { + const chain = getNetworkName(chainId); + await inventoryClient.update(); + const l2Token = bridgedUSDC[chainId]; + + // Apply a new target balance for bridged USDC. + inventoryConfig.tokenConfig[mainnetUsdc][l2Token] = { + [chainId]: { targetPct, thresholdPct, targetOverageBuffer }, + }; + + await inventoryClient.update(); + await inventoryClient.rebalanceInventoryIfNeeded(); + expect(lastSpyLogIncludes(spy, `Rebalances sent to ${chain}`)).to.be.true; + expect(lastSpyLogIncludes(spy, `${formattedAmount} USDC.e rebalanced`)).to.be.true; + expect(lastSpyLogIncludes(spy, "This meets target allocation of 10.00%")).to.be.true; // config from client. + + // Decrement the mainnet USDC balance to simulate the rebalance. + virtualMainnetBalance = virtualMainnetBalance.sub(expectedRebalance); + mainnetUsdcContract.balanceOf.whenCalledWith(owner.address).returns(virtualMainnetBalance); + + // The mock adapter manager should have been called with the expected transaction. 
+ expect(adapterManager.tokensSentCrossChain[chainId][mainnetUsdc].amount.eq(expectedRebalance)).to.be.true; + + await inventoryClient.update(); + await inventoryClient.rebalanceInventoryIfNeeded(); + expect(lastSpyLogIncludes(spy, "No rebalances required")).to.be.true; + expect(spyLogIncludes(spy, -2, `"outstandingTransfers":"${formattedAmount}"`)).to.be.true; + } + }); + }); }); function seedMocks(seedBalances: { [chainId: string]: { [token: string]: BigNumber } }) { hubPoolClient.addL1Token({ address: mainnetWeth, decimals: 18, symbol: "WETH" }); hubPoolClient.addL1Token({ address: mainnetUsdc, decimals: 6, symbol: "USDC" }); enabledChainIds.forEach((chainId) => { - adapterManager.setMockedOutstandingCrossChainTransfers(chainId, owner.address, mainnetWeth, toBN(0)); - adapterManager.setMockedOutstandingCrossChainTransfers(chainId, owner.address, mainnetUsdc, toBN(0)); - tokenClient.setTokenData(chainId, l2TokensForWeth[chainId], seedBalances[chainId][mainnetWeth], toBN(0)); - tokenClient.setTokenData(chainId, l2TokensForUsdc[chainId], seedBalances[chainId][mainnetUsdc], toBN(0)); + adapterManager.setMockedOutstandingCrossChainTransfers(chainId, owner.address, mainnetWeth, bnZero); + adapterManager.setMockedOutstandingCrossChainTransfers(chainId, owner.address, mainnetUsdc, bnZero); + tokenClient.setTokenData(chainId, l2TokensForWeth[chainId], seedBalances[chainId][mainnetWeth], bnZero); + tokenClient.setTokenData(chainId, l2TokensForUsdc[chainId], seedBalances[chainId][mainnetUsdc], bnZero); hubPoolClient.setTokenMapping(mainnetWeth, chainId, l2TokensForWeth[chainId]); hubPoolClient.setTokenMapping(mainnetUsdc, chainId, l2TokensForUsdc[chainId]); }); diff --git a/test/InventoryClient.RefundChain.ts b/test/InventoryClient.RefundChain.ts index e60da3ea9..9e5ba632d 100644 --- a/test/InventoryClient.RefundChain.ts +++ b/test/InventoryClient.RefundChain.ts @@ -9,18 +9,17 @@ import { expect, hubPoolFixture, lastSpyLogIncludes, - randomAddress, sinon, - toBN, toBNWei, 
toWei, winston, + spyLogIncludes, } from "./utils"; import { ConfigStoreClient, InventoryClient } from "../src/clients"; // Tested import { CrossChainTransferClient } from "../src/clients/bridges"; import { V3Deposit, InventoryConfig } from "../src/interfaces"; -import { ZERO_ADDRESS, bnZero, getNetworkName, TOKEN_SYMBOLS_MAP } from "../src/utils"; +import { CHAIN_IDs, ZERO_ADDRESS, bnZero, getNetworkName, TOKEN_SYMBOLS_MAP } from "../src/utils"; import { MockAdapterManager, MockBundleDataClient, @@ -30,9 +29,10 @@ import { } from "./mocks"; describe("InventoryClient: Refund chain selection", async function () { - const enabledChainIds = [1, 10, 137, 42161]; - const mainnetWeth = TOKEN_SYMBOLS_MAP.WETH.addresses[1]; - const mainnetUsdc = TOKEN_SYMBOLS_MAP.USDC.addresses[1]; + const { MAINNET, OPTIMISM, POLYGON, ARBITRUM } = CHAIN_IDs; + const enabledChainIds = [MAINNET, OPTIMISM, POLYGON, ARBITRUM]; + const mainnetWeth = TOKEN_SYMBOLS_MAP.WETH.addresses[MAINNET]; + const mainnetUsdc = TOKEN_SYMBOLS_MAP.USDC.addresses[MAINNET]; let hubPoolClient: MockHubPoolClient, adapterManager: MockAdapterManager, tokenClient: MockTokenClient; let bundleDataClient: MockBundleDataClient; @@ -42,46 +42,51 @@ describe("InventoryClient: Refund chain selection", async function () { let crossChainTransferClient: CrossChainTransferClient; // construct two mappings of chainId to token address. Set the l1 token address to the "real" token address. 
- const l2TokensForWeth = { 1: mainnetWeth }; - const l2TokensForUsdc = { 1: mainnetUsdc }; - enabledChainIds.slice(1).forEach((chainId) => { - l2TokensForWeth[chainId] = randomAddress(); - l2TokensForUsdc[chainId] = randomAddress(); - }); + const l2TokensForWeth = { [MAINNET]: mainnetWeth }; + const l2TokensForUsdc = { [MAINNET]: mainnetUsdc }; + enabledChainIds + .filter((chainId) => chainId !== MAINNET) + .forEach((chainId) => { + l2TokensForWeth[chainId] = TOKEN_SYMBOLS_MAP.WETH.addresses[chainId]; + l2TokensForUsdc[chainId] = TOKEN_SYMBOLS_MAP["USDC.e"].addresses[chainId]; + }); const toMegaWei = (num: string | number | BigNumber) => ethers.utils.parseUnits(num.toString(), 6); // Configure thresholds percentages as 10% optimism, 5% polygon and 5% Arbitrum with a target being threshold +2%. + const targetOverageBuffer = toWei(1); const inventoryConfig: InventoryConfig = { + wrapEtherTargetPerChain: {}, + wrapEtherTarget: toWei(1), + wrapEtherThresholdPerChain: {}, + wrapEtherThreshold: toWei(1), tokenConfig: { [mainnetWeth]: { - 10: { targetPct: toWei(0.12), thresholdPct: toWei(0.1) }, - 137: { targetPct: toWei(0.07), thresholdPct: toWei(0.05) }, - 42161: { targetPct: toWei(0.07), thresholdPct: toWei(0.05) }, + [OPTIMISM]: { targetPct: toWei(0.12), thresholdPct: toWei(0.1), targetOverageBuffer }, + [POLYGON]: { targetPct: toWei(0.07), thresholdPct: toWei(0.05), targetOverageBuffer }, + [ARBITRUM]: { targetPct: toWei(0.07), thresholdPct: toWei(0.05), targetOverageBuffer }, }, - [mainnetUsdc]: { - 10: { targetPct: toWei(0.12), thresholdPct: toWei(0.1) }, - 137: { targetPct: toWei(0.07), thresholdPct: toWei(0.05) }, - 42161: { targetPct: toWei(0.07), thresholdPct: toWei(0.05) }, + [OPTIMISM]: { targetPct: toWei(0.12), thresholdPct: toWei(0.1), targetOverageBuffer }, + [POLYGON]: { targetPct: toWei(0.07), thresholdPct: toWei(0.05), targetOverageBuffer }, + [ARBITRUM]: { targetPct: toWei(0.07), thresholdPct: toWei(0.05), targetOverageBuffer }, }, }, - 
wrapEtherThreshold: toWei(1), }; // Construct an initial distribution that keeps these values within the above thresholds. const initialAllocation = { - 1: { [mainnetWeth]: toWei(100), [mainnetUsdc]: toMegaWei(10000) }, // seed 100 WETH and 10000 USDC on Mainnet - 10: { [mainnetWeth]: toWei(20), [mainnetUsdc]: toMegaWei(2000) }, // seed 20 WETH and 2000 USDC on Optimism - 137: { [mainnetWeth]: toWei(10), [mainnetUsdc]: toMegaWei(1000) }, // seed 10 WETH and 1000 USDC on Polygon - 42161: { [mainnetWeth]: toWei(10), [mainnetUsdc]: toMegaWei(1000) }, // seed 10 WETH and 1000 USDC on Arbitrum + [MAINNET]: { [mainnetWeth]: toWei(100), [mainnetUsdc]: toMegaWei(10000) }, // seed 100 WETH and 10000 USDC on Mainnet + [OPTIMISM]: { [mainnetWeth]: toWei(20), [mainnetUsdc]: toMegaWei(2000) }, // seed 20 WETH and 2000 USDC on Optimism + [POLYGON]: { [mainnetWeth]: toWei(10), [mainnetUsdc]: toMegaWei(1000) }, // seed 10 WETH and 1000 USDC on Polygon + [ARBITRUM]: { [mainnetWeth]: toWei(10), [mainnetUsdc]: toMegaWei(1000) }, // seed 10 WETH and 1000 USDC on Arbitrum }; const seedMocks = (seedBalances: { [chainId: string]: { [token: string]: BigNumber } }) => { hubPoolClient.addL1Token({ address: mainnetWeth, decimals: 18, symbol: "WETH" }); hubPoolClient.addL1Token({ address: mainnetUsdc, decimals: 6, symbol: "USDC" }); enabledChainIds.forEach((chainId) => { - adapterManager.setMockedOutstandingCrossChainTransfers(chainId, owner.address, mainnetWeth, toBN(0)); - adapterManager.setMockedOutstandingCrossChainTransfers(chainId, owner.address, mainnetUsdc, toBN(0)); + adapterManager.setMockedOutstandingCrossChainTransfers(chainId, owner.address, mainnetWeth, bnZero); + adapterManager.setMockedOutstandingCrossChainTransfers(chainId, owner.address, mainnetUsdc, bnZero); tokenClient.setTokenData(chainId, l2TokensForWeth[chainId], seedBalances[chainId][mainnetWeth]); tokenClient.setTokenData(chainId, l2TokensForUsdc[chainId], seedBalances[chainId][mainnetUsdc]); 
hubPoolClient.setTokenMapping(mainnetWeth, chainId, l2TokensForWeth[chainId]); @@ -134,13 +139,13 @@ describe("InventoryClient: Refund chain selection", async function () { const inputAmount = toBNWei(1); sampleDepositData = { depositId: 0, - originChainId: 1, - destinationChainId: 10, + originChainId: MAINNET, + destinationChainId: OPTIMISM, depositor: owner.address, recipient: owner.address, inputToken: mainnetWeth, inputAmount, - outputToken: l2TokensForWeth[10], + outputToken: l2TokensForWeth[OPTIMISM], outputAmount: inputAmount, message: "0x", quoteTimestamp: hubPoolClient.currentTime!, @@ -156,7 +161,7 @@ describe("InventoryClient: Refund chain selection", async function () { // above the threshold of 12 and so the bot should choose to be refunded on L1. sampleDepositData.inputAmount = toWei(1); sampleDepositData.outputAmount = await computeOutputAmount(sampleDepositData); - expect(await inventoryClient.determineRefundChainId(sampleDepositData)).to.equal(1); + expect(await inventoryClient.determineRefundChainId(sampleDepositData)).to.deep.equal([MAINNET]); expect(lastSpyLogIncludes(spy, 'expectedPostRelayAllocation":"136690647482014388"')).to.be.true; // (20-1)/(140-1)=0.136 // Now consider a case where the relayer is filling a marginally larger relay of size 5 WETH. Now the post relay @@ -164,7 +169,7 @@ describe("InventoryClient: Refund chain selection", async function () { // choose to refund on the L2. sampleDepositData.inputAmount = toWei(5); sampleDepositData.outputAmount = await computeOutputAmount(sampleDepositData); - expect(await inventoryClient.determineRefundChainId(sampleDepositData)).to.equal(10); + expect(await inventoryClient.determineRefundChainId(sampleDepositData)).to.deep.equal([OPTIMISM, MAINNET]); expect(lastSpyLogIncludes(spy, 'expectedPostRelayAllocation":"111111111111111111"')).to.be.true; // (20-5)/(140-5)=0.11 // Now consider a bigger relay that should force refunds on the L2 chain. Set the relay size to 10 WETH. 
now post @@ -172,7 +177,7 @@ describe("InventoryClient: Refund chain selection", async function () { // set the refund on L2. sampleDepositData.inputAmount = toWei(10); sampleDepositData.outputAmount = await computeOutputAmount(sampleDepositData); - expect(await inventoryClient.determineRefundChainId(sampleDepositData)).to.equal(10); + expect(await inventoryClient.determineRefundChainId(sampleDepositData)).to.deep.equal([OPTIMISM, MAINNET]); expect(lastSpyLogIncludes(spy, 'expectedPostRelayAllocation":"76923076923076923"')).to.be.true; // (20-10)/(140-10)=0.076 }); @@ -181,13 +186,13 @@ describe("InventoryClient: Refund chain selection", async function () { // fictitious relay that exceeds all outstanding liquidity on the target chain(Arbitrum) of 15 Weth (target only) // has 10 WETH in it. const largeRelayAmount = toWei(15); - tokenClient.setTokenShortFallData(42161, l2TokensForWeth[42161], [6969], largeRelayAmount); // Mock the shortfall. + tokenClient.setTokenShortFallData(ARBITRUM, l2TokensForWeth[ARBITRUM], [6969], largeRelayAmount); // Mock the shortfall. // The expected cross chain transfer amount is (0.05+0.02-(10-15)/140)*140=14.8 // Mock the cross-chain transfer // leaving L1 to go to arbitrum by adding it to the mock cross chain transfers and removing from l1 balance. const bridgedAmount = toWei(14.8); - adapterManager.setMockedOutstandingCrossChainTransfers(42161, owner.address, mainnetWeth, bridgedAmount); + adapterManager.setMockedOutstandingCrossChainTransfers(ARBITRUM, owner.address, mainnetWeth, bridgedAmount); await inventoryClient.update(); - tokenClient.setTokenData(1, mainnetWeth, initialAllocation[1][mainnetWeth].sub(bridgedAmount)); + tokenClient.setTokenData(MAINNET, mainnetWeth, initialAllocation[MAINNET][mainnetWeth].sub(bridgedAmount)); // Now, consider that the bot is run while these funds for the above deposit are in the canonical bridge and cant // be filled yet. When it runs it picks up a relay that it can do, of size 1.69 WETH. 
Each part of the computation @@ -207,11 +212,11 @@ describe("InventoryClient: Refund chain selection", async function () { // the buffer then refund on L1. if it is below the threshold then refund on the target chain. As this number is // is below the buffer plus the threshold then the bot should refund on L2. - sampleDepositData.destinationChainId = 42161; - sampleDepositData.outputToken = l2TokensForWeth[42161]; + sampleDepositData.destinationChainId = ARBITRUM; + sampleDepositData.outputToken = l2TokensForWeth[ARBITRUM]; sampleDepositData.inputAmount = toWei(1.69); sampleDepositData.outputAmount = await computeOutputAmount(sampleDepositData); - expect(await inventoryClient.determineRefundChainId(sampleDepositData)).to.equal(42161); + expect(await inventoryClient.determineRefundChainId(sampleDepositData)).to.deep.equal([ARBITRUM, MAINNET]); expect(lastSpyLogIncludes(spy, 'chainShortfall":"15000000000000000000"')).to.be.true; expect(lastSpyLogIncludes(spy, 'chainVirtualBalance":"24800000000000000000"')).to.be.true; // (10+14.8)=24.8 @@ -229,7 +234,7 @@ describe("InventoryClient: Refund chain selection", async function () { // relay allocation is 4.8/120 = 0.04. This is below the threshold of 0.05 so the bot should refund on the target. sampleDepositData.inputAmount = toWei(5); sampleDepositData.outputAmount = await computeOutputAmount(sampleDepositData); - expect(await inventoryClient.determineRefundChainId(sampleDepositData)).to.equal(42161); + expect(await inventoryClient.determineRefundChainId(sampleDepositData)).to.deep.equal([ARBITRUM, MAINNET]); // Check only the final step in the computation. expect(lastSpyLogIncludes(spy, 'expectedPostRelayAllocation":"40000000000000000"')).to.be.true; // 4.8/120 = 0.04 @@ -239,8 +244,12 @@ describe("InventoryClient: Refund chain selection", async function () { // chain virtual balance with shortfall post relay is 9.8 - 5 + 10 = 14.8. cumulative virtual balance with shortfall // post relay is 125 - 5 + 10 = 130. 
Expected post relay allocation is 14.8/130 = 0.11. This is above the threshold // of 0.05 so the bot should refund on L1. - tokenClient.setTokenData(42161, l2TokensForWeth[42161], initialAllocation[42161][mainnetWeth].add(toWei(10))); - expect(await inventoryClient.determineRefundChainId(sampleDepositData)).to.equal(1); + tokenClient.setTokenData( + ARBITRUM, + l2TokensForWeth[ARBITRUM], + initialAllocation[ARBITRUM][mainnetWeth].add(toWei(10)) + ); + expect(await inventoryClient.determineRefundChainId(sampleDepositData)).to.deep.equal([MAINNET]); }); it("Correctly decides where to refund based on upcoming refunds", async function () { @@ -252,31 +261,32 @@ describe("InventoryClient: Refund chain selection", async function () { sampleDepositData.inputAmount = toWei(5); sampleDepositData.outputAmount = await computeOutputAmount(sampleDepositData); bundleDataClient.setReturnedPendingBundleRefunds({ - 1: createRefunds(owner.address, toWei(5), mainnetWeth), - 10: createRefunds(owner.address, toWei(5), l2TokensForWeth[10]), + [MAINNET]: createRefunds(owner.address, toWei(5), mainnetWeth), + [OPTIMISM]: createRefunds(owner.address, toWei(5), l2TokensForWeth[OPTIMISM]), }); bundleDataClient.setReturnedNextBundleRefunds({ - 10: createRefunds(owner.address, toWei(5), l2TokensForWeth[10]), + [OPTIMISM]: createRefunds(owner.address, toWei(5), l2TokensForWeth[OPTIMISM]), }); // We need HubPoolClient.l2TokenEnabledForL1Token() to return true for a given // L1 token and destination chain ID, otherwise it won't be counted in upcoming // refunds. 
hubPoolClient.setEnableAllL2Tokens(true); - expect(await inventoryClient.determineRefundChainId(sampleDepositData)).to.equal(1); + expect(await inventoryClient.determineRefundChainId(sampleDepositData)).to.deep.equal([MAINNET]); expect(lastSpyLogIncludes(spy, 'expectedPostRelayAllocation":"166666666666666666"')).to.be.true; // (20-5)/(140-5)=0.11 // If we set this to false in this test, the destination chain will be default used since the refund data // will be ignored. hubPoolClient.setEnableAllL2Tokens(false); - expect(await inventoryClient.determineRefundChainId(sampleDepositData)).to.equal(10); + expect(await inventoryClient.determineRefundChainId(sampleDepositData)).to.deep.equal([OPTIMISM, MAINNET]); }); it("Correctly throws when Deposit tokens are not equivalent", async function () { sampleDepositData.inputAmount = toWei(5); sampleDepositData.outputAmount = await computeOutputAmount(sampleDepositData); - expect(await inventoryClient.determineRefundChainId(sampleDepositData)).to.equal( - sampleDepositData.destinationChainId - ); + expect(await inventoryClient.determineRefundChainId(sampleDepositData)).to.deep.equal([ + sampleDepositData.destinationChainId, + 1, + ]); sampleDepositData.outputToken = ZERO_ADDRESS; const srcChain = getNetworkName(sampleDepositData.originChainId); @@ -305,9 +315,9 @@ describe("InventoryClient: Refund chain selection", async function () { false, // simMode false // prioritizeUtilization ); - expect(await _inventoryClient.determineRefundChainId(sampleDepositData)).to.equal( - sampleDepositData.destinationChainId - ); + expect(await _inventoryClient.determineRefundChainId(sampleDepositData)).to.deep.equal([ + sampleDepositData.destinationChainId, + ]); }); it("includes origin, destination in repayment chain list", async function () { const possibleRepaymentChains = inventoryClient.getPossibleRepaymentChainIds(sampleDepositData); @@ -323,13 +333,13 @@ describe("InventoryClient: Refund chain selection", async function () { const 
inputAmount = toBNWei(1); sampleDepositData = { depositId: 0, - originChainId: 137, - destinationChainId: 10, + originChainId: POLYGON, + destinationChainId: OPTIMISM, depositor: owner.address, recipient: owner.address, - inputToken: l2TokensForWeth[137], + inputToken: l2TokensForWeth[POLYGON], inputAmount, - outputToken: l2TokensForWeth[10], + outputToken: l2TokensForWeth[OPTIMISM], outputAmount: inputAmount, message: "0x", quoteTimestamp: hubPoolClient.currentTime!, @@ -340,7 +350,7 @@ describe("InventoryClient: Refund chain selection", async function () { }); it("Both origin and destination chain allocations are below target", async function () { // Set Polygon allocation lower than target: - tokenClient.setTokenData(137, l2TokensForWeth[137], toWei(9)); + tokenClient.setTokenData(POLYGON, l2TokensForWeth[POLYGON], toWei(9)); // Post relay allocations: // Optimism (destination chain): (20-5)/(139-5)=11.1% < 12% @@ -348,8 +358,12 @@ describe("InventoryClient: Refund chain selection", async function () { // Relayer should choose to refund on destination over origin if both are under allocated sampleDepositData.inputAmount = toWei(5); sampleDepositData.outputAmount = await computeOutputAmount(sampleDepositData); - expect(await inventoryClient.determineRefundChainId(sampleDepositData)).to.equal(10); - expect(lastSpyLogIncludes(spy, 'expectedPostRelayAllocation":"111940298507462686"')).to.be.true; + expect(await inventoryClient.determineRefundChainId(sampleDepositData)).to.deep.equal([ + OPTIMISM, + POLYGON, + MAINNET, + ]); + expect(spyLogIncludes(spy, -2, 'expectedPostRelayAllocation":"111940298507462686"')).to.be.true; }); it("Origin chain allocation does not depend on subtracting from numerator", async function () { // Post relay allocation does not subtract anything from chain virtual balance, unlike @@ -357,8 +371,8 @@ describe("InventoryClient: Refund chain selection", async function () { // Set Polygon allocation just higher than target. 
This is set so that any subtractions // from the numerator would break this test. - tokenClient.setTokenData(137, l2TokensForWeth[137], toWei(10)); - tokenClient.setTokenData(10, l2TokensForWeth[10], toWei(30)); + tokenClient.setTokenData(POLYGON, l2TokensForWeth[POLYGON], toWei(10)); + tokenClient.setTokenData(OPTIMISM, l2TokensForWeth[OPTIMISM], toWei(30)); // Post relay allocations: // Optimism (destination chain): (30-10)/(150-10)=14.3% > 12% @@ -366,14 +380,14 @@ describe("InventoryClient: Refund chain selection", async function () { // Relayer should default to hub chain. sampleDepositData.inputAmount = toWei(10); sampleDepositData.outputAmount = await computeOutputAmount(sampleDepositData); - expect(await inventoryClient.determineRefundChainId(sampleDepositData)).to.equal(1); + expect(await inventoryClient.determineRefundChainId(sampleDepositData)).to.deep.equal([MAINNET]); expect(lastSpyLogIncludes(spy, 'expectedPostRelayAllocation":"71428571428571428"')).to.be.true; }); it("Origin allocation is below target", async function () { // Set Polygon allocation lower than target: - tokenClient.setTokenData(137, l2TokensForWeth[137], toWei(5)); + tokenClient.setTokenData(POLYGON, l2TokensForWeth[POLYGON], toWei(5)); // Set Optimism allocation higher than target: - tokenClient.setTokenData(10, l2TokensForWeth[10], toWei(30)); + tokenClient.setTokenData(OPTIMISM, l2TokensForWeth[OPTIMISM], toWei(30)); // Post relay allocations: // Optimism (destination chain): (30-5)/(150-5)=17.2% > 12% @@ -381,14 +395,14 @@ describe("InventoryClient: Refund chain selection", async function () { // Relayer should choose to refund origin since destination isn't an option. 
sampleDepositData.inputAmount = toWei(5); sampleDepositData.outputAmount = await computeOutputAmount(sampleDepositData); - expect(await inventoryClient.determineRefundChainId(sampleDepositData)).to.equal(137); + expect(await inventoryClient.determineRefundChainId(sampleDepositData)).to.deep.equal([POLYGON, MAINNET]); expect(lastSpyLogIncludes(spy, 'expectedPostRelayAllocation":"35714285714285714"')).to.be.true; }); it("Origin allocation depends on outstanding transfers", async function () { // Set Polygon allocation lower than target: - tokenClient.setTokenData(137, l2TokensForWeth[137], toWei(5)); + tokenClient.setTokenData(POLYGON, l2TokensForWeth[POLYGON], toWei(5)); // Set Optimism allocation higher than target: - tokenClient.setTokenData(10, l2TokensForWeth[10], toWei(30)); + tokenClient.setTokenData(OPTIMISM, l2TokensForWeth[OPTIMISM], toWei(30)); // Post relay allocations: // Optimism (destination chain): (30-5)/(150-5)=17.2% > 12% @@ -396,40 +410,40 @@ describe("InventoryClient: Refund chain selection", async function () { // Relayer should choose to refund origin since destination isn't an option. sampleDepositData.inputAmount = toWei(5); sampleDepositData.outputAmount = await computeOutputAmount(sampleDepositData); - expect(await inventoryClient.determineRefundChainId(sampleDepositData)).to.equal(137); + expect(await inventoryClient.determineRefundChainId(sampleDepositData)).to.deep.equal([POLYGON, MAINNET]); // Now add outstanding transfers to Polygon that make the allocation above the target. Note that this // increases cumulative balance a bit. 
- adapterManager.setMockedOutstandingCrossChainTransfers(137, owner.address, mainnetWeth, toWei(10)); + adapterManager.setMockedOutstandingCrossChainTransfers(POLYGON, owner.address, mainnetWeth, toWei(10)); await inventoryClient.update(); // Post relay allocations: // Optimism (destination chain): (30-5)/(160-5)=16.1% > 12% // Polygon (origin chain): (15)/(160-5)=9.6% > 7% // Relayer should now default to hub chain. - expect(await inventoryClient.determineRefundChainId(sampleDepositData)).to.equal(1); + expect(await inventoryClient.determineRefundChainId(sampleDepositData)).to.deep.equal([MAINNET]); expect(lastSpyLogIncludes(spy, 'expectedPostRelayAllocation":"100000000000000000"')).to.be.true; }); it("Origin allocation depends on short falls", async function () { // Set Polygon allocation lower than target: - tokenClient.setTokenData(137, l2TokensForWeth[137], toWei(5)); + tokenClient.setTokenData(POLYGON, l2TokensForWeth[POLYGON], toWei(5)); // Set Optimism allocation higher than target: - tokenClient.setTokenData(10, l2TokensForWeth[10], toWei(30)); + tokenClient.setTokenData(OPTIMISM, l2TokensForWeth[OPTIMISM], toWei(30)); // Shortfalls are subtracted from both numerator and denominator. - tokenClient.setTokenShortFallData(137, l2TokensForWeth[137], [6969], toWei(5)); // Mock the shortfall. + tokenClient.setTokenShortFallData(POLYGON, l2TokensForWeth[POLYGON], [6969], toWei(5)); // Mock the shortfall. 
// Post relay allocations: // Optimism (destination chain): (25-5)/(145-5)=14.3% > 12% // Polygon (origin chain): (0)/(145-5)=0% < 7% // Relayer should still use origin chain - expect(await inventoryClient.determineRefundChainId(sampleDepositData)).to.equal(137); + expect(await inventoryClient.determineRefundChainId(sampleDepositData)).to.deep.equal([POLYGON, MAINNET]); expect(lastSpyLogIncludes(spy, 'expectedPostRelayAllocation":"0"')).to.be.true; // (20-5)/(140-5)=0.11 }); it("Origin allocation depends on upcoming refunds", async function () { // Set Polygon allocation lower than target: - tokenClient.setTokenData(137, l2TokensForWeth[137], toWei(5)); + tokenClient.setTokenData(POLYGON, l2TokensForWeth[POLYGON], toWei(5)); // Set Optimism allocation higher than target: - tokenClient.setTokenData(10, l2TokensForWeth[10], toWei(30)); + tokenClient.setTokenData(OPTIMISM, l2TokensForWeth[OPTIMISM], toWei(30)); // Post relay allocations: // Optimism (destination chain): (30-5)/(150-5)=17.2% > 12% @@ -439,7 +453,7 @@ describe("InventoryClient: Refund chain selection", async function () { sampleDepositData.outputAmount = await computeOutputAmount(sampleDepositData); bundleDataClient.setReturnedPendingBundleRefunds({ - 137: createRefunds(owner.address, toWei(5), l2TokensForWeth[137]), + [POLYGON]: createRefunds(owner.address, toWei(5), l2TokensForWeth[POLYGON]), }); // We need HubPoolClient.l2TokenEnabledForL1Token() to return true for a given // L1 token and destination chain ID, otherwise it won't be counted in upcoming @@ -450,12 +464,12 @@ describe("InventoryClient: Refund chain selection", async function () { // Optimism (destination chain): (30-5)/(155-5)=16.7% > 12% // Polygon (origin chain): (10)/(155-5)=6.7% > 7% // Relayer should still pick origin chain but compute a different allocation. 
- expect(await inventoryClient.determineRefundChainId(sampleDepositData)).to.equal(137); + expect(await inventoryClient.determineRefundChainId(sampleDepositData)).to.deep.equal([POLYGON, MAINNET]); expect(lastSpyLogIncludes(spy, 'expectedPostRelayAllocation":"68965517241379310"')).to.be.true; }); it("includes origin, destination and hub chain in repayment chain list", async function () { const possibleRepaymentChains = inventoryClient.getPossibleRepaymentChainIds(sampleDepositData); - [sampleDepositData.originChainId, sampleDepositData.destinationChainId, 1].forEach((chainId) => { + [sampleDepositData.originChainId, sampleDepositData.destinationChainId, MAINNET].forEach((chainId) => { expect(possibleRepaymentChains).to.include(chainId); }); expect(possibleRepaymentChains.length).to.equal(3); @@ -469,8 +483,8 @@ describe("InventoryClient: Refund chain selection", async function () { // as possible repayment chains. hubPoolClient.setEnableAllL2Tokens(true); excessRunningBalances = { - [10]: toWei("0.1"), - [42161]: toWei("0.2"), + [OPTIMISM]: toWei("0.1"), + [ARBITRUM]: toWei("0.2"), }; // Fill in rest of slow withdrawal chains with 0 excess since we won't test them. 
inventoryClient.getSlowWithdrawalRepaymentChains(mainnetWeth).forEach((chainId) => { @@ -493,13 +507,13 @@ describe("InventoryClient: Refund chain selection", async function () { const inputAmount = toBNWei(1); sampleDepositData = { depositId: 0, - originChainId: 137, - destinationChainId: 1, + originChainId: POLYGON, + destinationChainId: MAINNET, depositor: owner.address, recipient: owner.address, - inputToken: l2TokensForWeth[137], + inputToken: l2TokensForWeth[POLYGON], inputAmount, - outputToken: l2TokensForWeth[1], + outputToken: l2TokensForWeth[MAINNET], outputAmount: inputAmount, message: "0x", quoteTimestamp: hubPoolClient.currentTime!, @@ -511,13 +525,22 @@ describe("InventoryClient: Refund chain selection", async function () { it("selects slow withdrawal chain with excess running balance and under relayer allocation", async function () { // Initial allocations are all under allocated so the first slow withdrawal chain should be selected since it has // the highest overage. - expect(await inventoryClient.determineRefundChainId(sampleDepositData)).to.equal(42161); - - // If we instead drop the excess on 42161 to 0, then we should take repayment on + expect(await inventoryClient.determineRefundChainId(sampleDepositData)).to.deep.equal([ + ARBITRUM, + OPTIMISM, + POLYGON, + MAINNET, + ]); + + // If we instead drop the excess on Arbitrum to 0, then we should take repayment on // the next slow withdrawal chain. 
- excessRunningBalances[42161] = toWei("0"); + excessRunningBalances[ARBITRUM] = toWei("0"); (inventoryClient as MockInventoryClient).setExcessRunningBalances(mainnetWeth, excessRunningBalances); - expect(await inventoryClient.determineRefundChainId(sampleDepositData)).to.equal(10); + expect(await inventoryClient.determineRefundChainId(sampleDepositData)).to.deep.equal([ + OPTIMISM, + POLYGON, + MAINNET, + ]); }); it("includes slow withdrawal chains in possible repayment chain list", async function () { const possibleRepaymentChains = inventoryClient.getPossibleRepaymentChainIds(sampleDepositData); @@ -530,4 +553,86 @@ describe("InventoryClient: Refund chain selection", async function () { expect(possibleRepaymentChains.length).to.equal(4); }); }); + + describe("In-protocol swap", async function () { + const nativeUSDC = TOKEN_SYMBOLS_MAP.USDC.addresses; + const bridgedUSDC = { ...TOKEN_SYMBOLS_MAP["USDC.e"].addresses, ...TOKEN_SYMBOLS_MAP["USDbC"].addresses }; + + beforeEach(async function () { + // Sub in a nested USDC config for the existing USDC config. 
+ const usdcConfig = { + [nativeUSDC[OPTIMISM]]: { + [OPTIMISM]: { targetPct: toWei(0.12), thresholdPct: toWei(0.1), targetOverageBuffer }, + }, + [nativeUSDC[POLYGON]]: { + [POLYGON]: { targetPct: toWei(0.07), thresholdPct: toWei(0.05), targetOverageBuffer }, + }, + [nativeUSDC[ARBITRUM]]: { + [ARBITRUM]: { targetPct: toWei(0.07), thresholdPct: toWei(0.05), targetOverageBuffer }, + }, + [bridgedUSDC[OPTIMISM]]: { + [OPTIMISM]: { targetPct: toWei(0.12), thresholdPct: toWei(0.1), targetOverageBuffer }, + }, + [bridgedUSDC[POLYGON]]: { + [POLYGON]: { targetPct: toWei(0.07), thresholdPct: toWei(0.05), targetOverageBuffer }, + }, + [bridgedUSDC[ARBITRUM]]: { + [ARBITRUM]: { targetPct: toWei(0.07), thresholdPct: toWei(0.05), targetOverageBuffer }, + }, + }; + inventoryConfig.tokenConfig[mainnetUsdc] = usdcConfig; + + const inputAmount = toMegaWei(100); + sampleDepositData = { + depositId: 0, + originChainId: ARBITRUM, + destinationChainId: OPTIMISM, + depositor: owner.address, + recipient: owner.address, + inputToken: nativeUSDC[ARBITRUM], + inputAmount, + outputToken: bridgedUSDC[OPTIMISM], + outputAmount: inputAmount, + message: "0x", + quoteTimestamp: hubPoolClient.currentTime!, + fillDeadline: 0, + exclusivityDeadline: 0, + exclusiveRelayer: ZERO_ADDRESS, + }; + }); + + it("outputToken is not supported as a repaymentToken", async function () { + // Verify that there is no native USDC anywhere. The relayer is responsible for ensuring that it can make the fill. + enabledChainIds + .filter((chainId) => chainId !== MAINNET) + .forEach((chainId) => expect(tokenClient.getBalance(chainId, nativeUSDC[chainId]).eq(bnZero)).to.be.true); + + // All chains are at target balance; cumulative balance will go down but repaymentToken balances on all chains are unaffected. 
+ expect(await inventoryClient.determineRefundChainId(sampleDepositData, mainnetUsdc)).to.deep.equal([MAINNET]); + expect(lastSpyLogIncludes(spy, 'expectedPostRelayAllocation":"71942446043165467"')).to.be.true; // (10000-0)/(14000-100)=0.71942 + + // Even when the output amount is equal to the destination's entire balance, take repayment on mainnet. + sampleDepositData.outputAmount = inventoryClient.getBalanceOnChain(OPTIMISM, mainnetUsdc); + expect(await inventoryClient.determineRefundChainId(sampleDepositData, mainnetUsdc)).to.deep.equal([MAINNET]); + expect(lastSpyLogIncludes(spy, 'expectedPostRelayAllocation":"83333333333333333"')).to.be.true; // (10000-0)/(14000-2000)=0.8333 + + // Drop the relayer's repaymentToken balance on Optimism. Repayment chain should now be Optimism. + let balance = tokenClient.getBalance(OPTIMISM, bridgedUSDC[OPTIMISM]); + tokenClient.setTokenData(OPTIMISM, bridgedUSDC[OPTIMISM], bnZero); + expect(await inventoryClient.determineRefundChainId(sampleDepositData, mainnetUsdc)).to.deep.equal([ + OPTIMISM, + MAINNET, + ]); + + // Restore the Optimism balance and drop the Arbitrum balance. Repayment chain should now be Arbitrum. 
+ tokenClient.setTokenData(OPTIMISM, bridgedUSDC[OPTIMISM], balance); + + balance = tokenClient.getBalance(ARBITRUM, bridgedUSDC[ARBITRUM]); + tokenClient.setTokenData(ARBITRUM, bridgedUSDC[ARBITRUM], bnZero); + expect(await inventoryClient.determineRefundChainId(sampleDepositData, mainnetUsdc)).to.deep.equal([ + ARBITRUM, + MAINNET, + ]); + }); + }); }); diff --git a/test/Monitor.ts b/test/Monitor.ts index f8631f521..32202350f 100644 --- a/test/Monitor.ts +++ b/test/Monitor.ts @@ -10,19 +10,14 @@ import { import { CrossChainTransferClient } from "../src/clients/bridges"; import { spokePoolClientsToProviders } from "../src/common"; import { Dataworker } from "../src/dataworker/Dataworker"; -import { BalanceType, V3DepositWithBlock } from "../src/interfaces"; -import { - ALL_CHAINS_NAME, - Monitor, - REBALANCE_FINALIZE_GRACE_PERIOD, - UNKNOWN_TRANSFERS_NAME, -} from "../src/monitor/Monitor"; +import { BalanceType, L1Token, V3DepositWithBlock } from "../src/interfaces"; +import { ALL_CHAINS_NAME, Monitor, REBALANCE_FINALIZE_GRACE_PERIOD } from "../src/monitor/Monitor"; import { MonitorConfig } from "../src/monitor/MonitorConfig"; import { MAX_UINT_VAL, getNetworkName, toBN } from "../src/utils"; import * as constants from "./constants"; import { amountToDeposit, destinationChainId, mockTreeRoot, originChainId, repaymentChainId } from "./constants"; import { setupDataworker } from "./fixtures/Dataworker.Fixture"; -import { MockAdapterManager } from "./mocks"; +import { MockAdapterManager, SimpleMockHubPoolClient } from "./mocks"; import { BigNumber, Contract, @@ -36,6 +31,20 @@ import { toBNWei, } from "./utils"; +type TokenMap = { [l2TokenAddress: string]: L1Token }; + +class TestMonitor extends Monitor { + private overriddenTokenMap: { [chainId: number]: TokenMap } = {}; + + setL2ToL1TokenMap(chainId: number, map: TokenMap): void { + this.overriddenTokenMap[chainId] = map; + } + // Override internal function that calls into externally defined and hard-coded 
TOKEN_SYMBOLS_MAP. + protected getL2ToL1TokenMap(l1Tokens: L1Token[], chainId): TokenMap { + return this.overriddenTokenMap[chainId] ?? super.getL2ToL1TokenMap(l1Tokens, chainId); + } +} + describe("Monitor", async function () { const TEST_NETWORK_NAMES = ["Hardhat1", "Hardhat2", "unknown", ALL_CHAINS_NAME]; let l1Token: Contract, l2Token: Contract, erc20_2: Contract; @@ -79,6 +88,8 @@ describe("Monitor", async function () { }; beforeEach(async function () { + let _hubPoolClient: HubPoolClient; + let _updateAllClients: () => Promise; ({ configStoreClient, hubPool, @@ -90,10 +101,10 @@ describe("Monitor", async function () { l1Token_1: l1Token, spokePool_1, spokePool_2, - hubPoolClient, + hubPoolClient: _hubPoolClient, spokePoolClients, multiCallerClient, - updateAllClients, + updateAllClients: _updateAllClients, } = await setupDataworker( ethers, constants.MAX_REFUNDS_PER_RELAYER_REFUND_LEAF, @@ -101,6 +112,23 @@ describe("Monitor", async function () { 0 )); + // Use a mock hub pool client for these tests so we can hardcode the L1TokenInfo for arbitrary tokens. 
+ hubPoolClient = new SimpleMockHubPoolClient( + spyLogger, + hubPool, + configStoreClient, + _hubPoolClient.deploymentBlock, + _hubPoolClient.chainId + ); + updateAllClients = async () => { + await _updateAllClients(); + await hubPoolClient.update(); + }; + + [l2Token.address, erc20_2.address, l1Token.address].forEach((token) => + (hubPoolClient as SimpleMockHubPoolClient).mapTokenInfo(token, "L1Token1") + ); + defaultMonitorEnvVars = { STARTING_BLOCK_NUMBER: "0", ENDING_BLOCK_NUMBER: "100", @@ -140,7 +168,7 @@ describe("Monitor", async function () { adapterManager = new MockAdapterManager(null, null, null, null); adapterManager.setSupportedChains(chainIds); crossChainTransferClient = new CrossChainTransferClient(spyLogger, chainIds, adapterManager); - monitorInstance = new Monitor(spyLogger, monitorConfig, { + monitorInstance = new TestMonitor(spyLogger, monitorConfig, { bundleDataClient, configStoreClient, multiCallerClient, @@ -149,7 +177,27 @@ describe("Monitor", async function () { tokenTransferClient, crossChainTransferClient, }); - + (monitorInstance as TestMonitor).setL2ToL1TokenMap(originChainId, { + [l2Token.address]: { + symbol: "L1Token1", + address: l1Token.address, + decimals: 18, + }, + }); + (monitorInstance as TestMonitor).setL2ToL1TokenMap(destinationChainId, { + [erc20_2.address]: { + symbol: "L1Token1", + address: l1Token.address, + decimals: 18, + }, + }); + (monitorInstance as TestMonitor).setL2ToL1TokenMap(hubPoolClient.chainId, { + [l1Token.address]: { + symbol: "L1Token1", + address: l1Token.address, + decimals: 18, + }, + }); await updateAllClients(); }); @@ -214,7 +262,7 @@ describe("Monitor", async function () { // Have the data worker propose a new bundle. 
await dataworkerInstance.proposeRootBundle(spokePoolClients); await l1Token.approve(hubPool.address, MAX_UINT_VAL); - await multiCallerClient.executeTransactionQueue(); + await multiCallerClient.executeTxnQueues(); // While the new bundle is still pending, refunds are in the "next" category await updateAllClients(); @@ -261,7 +309,7 @@ describe("Monitor", async function () { }; await monitorInstance.update(); await dataworkerInstance.executeRelayerRefundLeaves(spokePoolClients, new BalanceAllocator(providers)); - await multiCallerClient.executeTransactionQueue(); + await multiCallerClient.executeTxnQueues(); // Now, pending refunds should be 0. await monitorInstance.update(); @@ -278,6 +326,7 @@ describe("Monitor", async function () { crossChainTransferClient.increaseOutstandingTransfer( depositor.address, l1Token.address, + l2Token.address, toBN(5), destinationChainId ); @@ -306,7 +355,7 @@ describe("Monitor", async function () { // Have the data worker propose a new bundle. await dataworkerInstance.proposeRootBundle(spokePoolClients); await l1Token.approve(hubPool.address, MAX_UINT_VAL); - await multiCallerClient.executeTransactionQueue(); + await multiCallerClient.executeTxnQueues(); // Execute pool rebalance leaves. 
await executeBundle(hubPool); @@ -319,7 +368,8 @@ describe("Monitor", async function () { originChainId, spokePool_1.address, l1Token.address, - toBN(5) + toBN(5), + l2Token.address ); await updateAllClients(); await monitorInstance.update(); @@ -355,20 +405,6 @@ describe("Monitor", async function () { expect(log.lastArg.mrkdwn).to.contains("100.00"); }); - it("Monitor should report unknown transfers", async function () { - await l2Token.connect(depositor).transfer(dataworker.address, 1); - - await monitorInstance.update(); - const reports = monitorInstance.initializeBalanceReports( - monitorInstance.monitorConfig.monitoredRelayers, - monitorInstance.clients.hubPoolClient.getL1Tokens(), - [UNKNOWN_TRANSFERS_NAME] - ); - monitorInstance.updateUnknownTransfers(reports); - - expect(lastSpyLogIncludes(spy, `Transfers that are not fills for relayer ${depositor.address} 🦨`)).to.be.true; - }); - it("Monitor should send token refills", async function () { const refillConfig = [ { @@ -407,7 +443,7 @@ describe("Monitor", async function () { await _monitor.refillBalances(); expect(multiCallerClient.transactionCount()).to.equal(1); - await multiCallerClient.executeTransactionQueue(); + await multiCallerClient.executeTxnQueues(); expect(await spokePool_1.provider.getBalance(spokePool_1.address)).to.equal(toBNWei("2")); }); diff --git a/test/MultiCallerClient.ts b/test/MultiCallerClient.ts index 69f032445..1ce18fd1f 100644 --- a/test/MultiCallerClient.ts +++ b/test/MultiCallerClient.ts @@ -1,4 +1,4 @@ -import { utils as sdkUtils } from "@across-protocol/sdk-v2"; +import { utils as sdkUtils } from "@across-protocol/sdk"; import { AugmentedTransaction, knownRevertReasons, @@ -266,8 +266,8 @@ describe("MultiCallerClient", async function () { expect(multiCaller.transactionCount()).to.equal(nTxns * 2 * chainIds.length); // Note: Half of the txns should be consolidated into a single multicall txn. 
- const results: string[] = await multiCaller.executeTransactionQueue(); - expect(results.length).to.equal(fail ? 0 : (nTxns + 1) * chainIds.length); + const results = await multiCaller.executeTxnQueues(); + expect(Object.values(results).flat().length).to.equal(fail ? 0 : (nTxns + 1) * chainIds.length); } }); diff --git a/test/ProfitClient.ConsiderProfitability.ts b/test/ProfitClient.ConsiderProfitability.ts index 0efdab6bd..35ab22c50 100644 --- a/test/ProfitClient.ConsiderProfitability.ts +++ b/test/ProfitClient.ConsiderProfitability.ts @@ -1,6 +1,6 @@ import { assert } from "chai"; import { random } from "lodash"; -import { constants as sdkConstants, utils as sdkUtils } from "@across-protocol/sdk-v2"; +import { constants as sdkConstants, utils as sdkUtils } from "@across-protocol/sdk"; import { ConfigStoreClient, FillProfit, SpokePoolClient } from "../src/clients"; import { V3Deposit } from "../src/interfaces"; import { @@ -319,7 +319,7 @@ describe("ProfitClient: Consider relay profit", () => { outputAmount, }; hubPoolClient.setTokenMapping(token.address, deposit.originChainId, deposit.inputToken); - hubPoolClient.setTokenMapping(token.address, deposit.destinationChainId, deposit.outputToken); + hubPoolClient.mapTokenInfo(deposit.outputToken, token.symbol, token.decimals); const tokenPriceUsd = profitClient.getPriceOfToken(token.symbol); // Normalise any tokens with <18 decimals to 18 decimals. 
@@ -359,7 +359,7 @@ describe("ProfitClient: Consider relay profit", () => { netRelayerFeeUsd: formatEther(expected.netRelayerFeeUsd), }); - const { profitable } = await profitClient.isFillProfitable(deposit, lpFeePct, token); + const { profitable } = await profitClient.isFillProfitable(deposit, lpFeePct, token, destinationChainId); expect(profitable).to.equal(expected.profitable); } } @@ -395,7 +395,7 @@ describe("ProfitClient: Consider relay profit", () => { outputAmount, }; hubPoolClient.setTokenMapping(token.address, deposit.originChainId, deposit.inputToken); - hubPoolClient.setTokenMapping(token.address, deposit.destinationChainId, deposit.outputToken); + hubPoolClient.mapTokenInfo(deposit.outputToken, token.symbol, token.decimals); const tokenPriceUsd = profitClient.getPriceOfToken(token.symbol); // Normalise any tokens with <18 decimals to 18 decimals. @@ -429,7 +429,12 @@ describe("ProfitClient: Consider relay profit", () => { netRelayerFeeUsd: formatEther(expected.netRelayerFeeUsd), }); - const { profitable } = await profitClient.isFillProfitable(deposit, effectiveLpFeePct, token); + const { profitable } = await profitClient.isFillProfitable( + deposit, + effectiveLpFeePct, + token, + destinationChainId + ); expect(profitable).to.equal(expected.profitable); } } @@ -478,7 +483,7 @@ describe("ProfitClient: Consider relay profit", () => { const deposit = { ...v3DepositTemplate }; const l1Token = tokens.WETH; hubPoolClient.setTokenMapping(l1Token.address, originChainId, deposit.inputToken); - hubPoolClient.setTokenMapping(l1Token.address, destinationChainId, deposit.outputToken); + hubPoolClient.mapTokenInfo(deposit.outputToken, l1Token.symbol, l1Token.decimals); randomiseGasCost(destinationChainId); const outputTokenPriceUsd = profitClient.getPriceOfToken(l1Token.symbol); @@ -497,7 +502,7 @@ describe("ProfitClient: Consider relay profit", () => { const deposit = { ...v3DepositTemplate, updatedOutputAmount }; hubPoolClient.setTokenMapping(tokens.WETH.address, 
originChainId, deposit.inputToken); - hubPoolClient.setTokenMapping(tokens.WETH.address, destinationChainId, deposit.outputToken); + hubPoolClient.mapTokenInfo(deposit.outputToken, tokens.WETH.symbol, tokens.WETH.decimals); const outputTokenPriceUsd = profitClient.getPriceOfToken(tokens.WETH.symbol); let expectedOutputAmountUsd = deposit.outputAmount.mul(outputTokenPriceUsd).div(fixedPoint); diff --git a/test/ProfitClient.PriceRetrieval.ts b/test/ProfitClient.PriceRetrieval.ts index 3c259e57b..b85fbb29b 100644 --- a/test/ProfitClient.PriceRetrieval.ts +++ b/test/ProfitClient.PriceRetrieval.ts @@ -63,4 +63,11 @@ describe("ProfitClient: Price Retrieval", async () => { await profitClient.update(); ["ETH", "MATIC"].forEach((gasToken) => expect(profitClient.resolveTokenAddress(gasToken)).to.not.be.undefined); }); + + it("Remaps token symbols to equivalent token symbols", async () => { + await profitClient.update(); + ["USDbC", "USDC.e"].forEach((unknownL1Token) => + expect(profitClient.resolveTokenAddress(unknownL1Token)).to.equal(TOKEN_SYMBOLS_MAP["USDC"].addresses[1]) + ); + }); }); diff --git a/test/Relayer.BasicFill.ts b/test/Relayer.BasicFill.ts index 8909d387b..0ca3d3b8b 100644 --- a/test/Relayer.BasicFill.ts +++ b/test/Relayer.BasicFill.ts @@ -1,7 +1,7 @@ -import { clients, constants, utils as sdkUtils } from "@across-protocol/sdk-v2"; +import { clients, constants, utils as sdkUtils } from "@across-protocol/sdk"; import { AcrossApiClient, ConfigStoreClient, MultiCallerClient, TokenClient } from "../src/clients"; import { CONFIG_STORE_VERSION } from "../src/common"; -import { bnOne, getNetworkName, getUnfilledDeposits } from "../src/utils"; +import { bnOne, bnUint256Max, getNetworkName, getAllUnfilledDeposits } from "../src/utils"; import { Relayer } from "../src/relayer/Relayer"; import { RelayerConfig } from "../src/relayer/RelayerConfig"; // Tested import { @@ -13,7 +13,7 @@ import { destinationChainId, repaymentChainId, } from "./constants"; -import { 
MockConfigStoreClient, MockInventoryClient, MockProfitClient } from "./mocks"; +import { MockConfigStoreClient, MockInventoryClient, MockProfitClient, SimpleMockHubPoolClient } from "./mocks"; import { MockedMultiCallerClient } from "./mocks/MockMultiCallerClient"; import { BigNumber, @@ -40,6 +40,7 @@ import { } from "./utils"; describe("Relayer: Check for Unfilled Deposits and Fill", async function () { + const [srcChain, dstChain] = [getNetworkName(originChainId), getNetworkName(destinationChainId)]; const { EMPTY_MESSAGE } = constants; const { fixedPointAdjustment: fixedPoint } = sdkUtils; @@ -103,7 +104,7 @@ describe("Relayer: Check for Unfilled Deposits and Fill", async function () { ) as unknown as ConfigStoreClient; await configStoreClient.update(); - hubPoolClient = new clients.HubPoolClient(spyLogger, hubPool, configStoreClient); + hubPoolClient = new SimpleMockHubPoolClient(spyLogger, hubPool, configStoreClient); await hubPoolClient.update(); multiCallerClient = new MockedMultiCallerClient(spyLogger); @@ -166,6 +167,8 @@ describe("Relayer: Check for Unfilled Deposits and Fill", async function () { await setupTokensForWallet(spokePool_2, depositor, [erc20_2], weth, 10); await setupTokensForWallet(spokePool_1, relayer, [erc20_1, erc20_2], weth, 10); await setupTokensForWallet(spokePool_2, relayer, [erc20_1, erc20_2], weth, 10); + (hubPoolClient as SimpleMockHubPoolClient).mapTokenInfo(erc20_1.address, await l1Token.symbol()); + (hubPoolClient as SimpleMockHubPoolClient).mapTokenInfo(erc20_2.address, await l1Token.symbol()); await l1Token.approve(hubPool.address, amountToLp); await hubPool.addLiquidity(l1Token.address, amountToLp); @@ -202,7 +205,7 @@ describe("Relayer: Check for Unfilled Deposits and Fill", async function () { await updateAllClients(); let txnReceipts = await relayerInstance.checkForUnfilledDepositsAndFill(); - expect(txnReceipts[destinationChainId].length).to.equal(1); + expect((await txnReceipts[destinationChainId]).length).to.equal(1); 
expect(lastSpyLogIncludes(spy, "Filled v3 deposit")).to.be.true; await Promise.all([spokePoolClient_1.update(), spokePoolClient_2.update(), hubPoolClient.update()]); @@ -217,7 +220,9 @@ describe("Relayer: Check for Unfilled Deposits and Fill", async function () { // Re-run the execution loop and validate that no additional relays are sent. multiCallerClient.clearTransactionQueue(); txnReceipts = await relayerInstance.checkForUnfilledDepositsAndFill(); - Object.values(txnReceipts).forEach((receipts) => expect(receipts.length).to.equal(0)); + for (const receipts of Object.values(txnReceipts)) { + expect((await receipts).length).to.equal(0); + } expect(lastSpyLogIncludes(spy, "0 unfilled deposits")).to.be.true; }); @@ -226,13 +231,15 @@ describe("Relayer: Check for Unfilled Deposits and Fill", async function () { await updateAllClients(); let txnReceipts = await relayerInstance.checkForUnfilledDepositsAndFill(); - expect(txnReceipts[destinationChainId].length).to.equal(1); + expect((await txnReceipts[destinationChainId]).length).to.equal(1); expect(lastSpyLogIncludes(spy, "Filled v3 deposit")).to.be.true; // The first fill is still pending but if we rerun the relayer loop, it shouldn't try to fill a second time. 
await Promise.all([spokePoolClient_1.update(), spokePoolClient_2.update(), hubPoolClient.update()]); txnReceipts = await relayerInstance.checkForUnfilledDepositsAndFill(); - Object.values(txnReceipts).forEach((receipts) => expect(receipts.length).to.equal(0)); + for (const receipts of Object.values(txnReceipts)) { + expect((await receipts).length).to.equal(0); + } }); it("Queries the latest onchain fill status for all deposits", async function () { @@ -246,28 +253,33 @@ describe("Relayer: Check for Unfilled Deposits and Fill", async function () { outputAmount ); await updateAllClients(); - let unfilledDeposits = await getUnfilledDeposits(spokePoolClients, hubPoolClient); + let unfilledDeposits = getAllUnfilledDeposits(spokePoolClients, hubPoolClient); expect(Object.values(unfilledDeposits).flat().length).to.equal(1); // Run the relayer in simulation mode so it doesn't fill the relay. const simulate = true; let txnReceipts = await relayerInstance.checkForUnfilledDepositsAndFill(false, simulate); - Object.values(txnReceipts).forEach((receipts) => expect(receipts.length).to.equal(0)); + for (const receipts of Object.values(txnReceipts)) { + expect((await receipts).length).to.equal(0); + } expect(spyLogIncludes(spy, -2, "Filled v3 deposit")).is.true; // Verify that the deposit is still unfilled (relayer didn't execute it). - unfilledDeposits = await getUnfilledDeposits(spokePoolClients, hubPoolClient); + unfilledDeposits = getAllUnfilledDeposits(spokePoolClients, hubPoolClient); expect(Object.values(unfilledDeposits).flat().length).to.equal(1); // Fill the deposit and immediately check for unfilled deposits (without SpokePoolClient update). + // There will still be 1 unfilled deposit because the SpokePoolClient has not been updated. 
await fillV3Relay(spokePool_2, deposit, relayer); - unfilledDeposits = await getUnfilledDeposits(spokePoolClients, hubPoolClient); - expect(Object.values(unfilledDeposits).flat().length).to.equal(0); + unfilledDeposits = getAllUnfilledDeposits(spokePoolClients, hubPoolClient); + expect(Object.values(unfilledDeposits).flat().length).to.equal(1); // Verify that the relayer now sees that the deposit has been filled. txnReceipts = await relayerInstance.checkForUnfilledDepositsAndFill(); - Object.values(txnReceipts).forEach((receipts) => expect(receipts.length).to.equal(0)); - expect(lastSpyLogIncludes(spy, "0 unfilled deposits")).to.be.true; + for (const receipts of Object.values(txnReceipts)) { + expect((await receipts).length).to.equal(0); + } + expect(spyLogIncludes(spy, -2, "1 unfilled deposits found")).to.be.true; }); it("Respects configured relayer routes", async function () { @@ -305,10 +317,9 @@ describe("Relayer: Check for Unfilled Deposits and Fill", async function () { await depositV3(spokePool_1, destinationChainId, depositor, inputToken, inputAmount, outputToken, outputAmount); await updateAllClients(); const txnReceipts = await relayerInstance.checkForUnfilledDepositsAndFill(); - Object.values(txnReceipts).forEach((receipts) => expect(receipts.length).to.equal(0)); - expect( - spy.getCalls().find(({ lastArg }) => lastArg.message.includes("Skipping deposit from or to disabled chains")) - ).to.not.be.undefined; + for (const receipts of Object.values(txnReceipts)) { + expect((await receipts).length).to.equal(0); + } }); it("Correctly validates self-relays", async function () { @@ -330,7 +341,7 @@ describe("Relayer: Check for Unfilled Deposits and Fill", async function () { const [expectedLog, expectedReceipts] = selfRelay ? 
["Filled v3 deposit", 1] : ["Not relaying unprofitable deposit", 0]; - expect(txnReceipts[destinationChainId].length).to.equal(expectedReceipts); + expect((await txnReceipts[destinationChainId]).length).to.equal(expectedReceipts); expect(lastSpyLogIncludes(spy, expectedLog)).to.be.true; } }); @@ -345,7 +356,9 @@ describe("Relayer: Check for Unfilled Deposits and Fill", async function () { }); await spokePool_2.setCurrentTime(fillDeadline); const txnReceipts = await relayerInstance.checkForUnfilledDepositsAndFill(); - Object.values(txnReceipts).forEach((receipts) => expect(receipts.length).to.equal(0)); + for (const receipts of Object.values(txnReceipts)) { + expect((await receipts).length).to.equal(0); + } expect(multiCallerClient.transactionCount()).to.equal(0); }); @@ -370,26 +383,26 @@ describe("Relayer: Check for Unfilled Deposits and Fill", async function () { await updateAllClients(); let txnReceipts = await relayerInstance.checkForUnfilledDepositsAndFill(); - expect(txnReceipts[destinationChainId].length).to.equal(1); + expect((await txnReceipts[destinationChainId]).length).to.equal(1); expect(lastSpyLogIncludes(spy, "Filled v3 deposit")).to.be.true; txnReceipts = await relayerInstance.checkForUnfilledDepositsAndFill(); - expect(txnReceipts[destinationChainId].length).to.equal(0); - expect(lastSpyLogIncludes(spy, "0 unfilled deposits")).to.be.true; + expect((await txnReceipts[destinationChainId]).length).to.equal(0); + expect(spyLogIncludes(spy, -2, "1 unfilled deposits found")).to.be.true; await spokePool_2.setCurrentTime(exclusivityDeadline + 1); await updateAllClients(); // Relayer can unconditionally fill after the exclusivityDeadline. 
txnReceipts = await relayerInstance.checkForUnfilledDepositsAndFill(); - expect(txnReceipts[destinationChainId].length).to.equal(1); + expect((await txnReceipts[destinationChainId]).length).to.equal(1); expect(lastSpyLogIncludes(spy, "Filled v3 deposit")).to.be.true; }); it("Ignores deposits older than min deposit confirmation threshold", async function () { await depositV3(spokePool_1, destinationChainId, depositor, inputToken, inputAmount, outputToken, outputAmount); - // Set MDC such that the deposit is is ignored. The profit client will return a fill USD amount of $0, + // Set MDC such that the deposit is ignored. The profit client will return a fill USD amount of $0, // so we need to set the MDC for the `0` threshold to be large enough such that the deposit would be ignored. relayerInstance = new Relayer( relayer.address, @@ -407,7 +420,7 @@ describe("Relayer: Check for Unfilled Deposits and Fill", async function () { { relayerTokens: [], minDepositConfirmations: { - default: { [originChainId]: 10 }, // This needs to be set large enough such that the deposit is ignored. 
+ [originChainId]: [{ usdThreshold: bnUint256Max, minConfirmations: 3 }], }, sendingRelaysEnabled: true, } as unknown as RelayerConfig @@ -415,8 +428,11 @@ describe("Relayer: Check for Unfilled Deposits and Fill", async function () { await updateAllClients(); const txnReceipts = await relayerInstance.checkForUnfilledDepositsAndFill(); - Object.values(txnReceipts).forEach((receipts) => expect(receipts.length).to.equal(0)); - expect(lastSpyLogIncludes(spy, "due to insufficient deposit confirmations")).to.be.true; + for (const receipts of Object.values(txnReceipts)) { + expect((await receipts).length).to.equal(0); + } + expect(spyLogIncludes(spy, -2, "due to insufficient deposit confirmations.")).to.be.true; + expect(lastSpyLogIncludes(spy, "0 unfilled deposits found.")).to.be.true; }); it("Ignores deposits with quote times in future", async function () { @@ -434,13 +450,15 @@ describe("Relayer: Check for Unfilled Deposits and Fill", async function () { await updateAllClients(); hubPoolClient.currentTime = quoteTimestamp - 1; let txnReceipts = await relayerInstance.checkForUnfilledDepositsAndFill(); - Object.values(txnReceipts).forEach((receipts) => expect(receipts.length).to.equal(0)); + for (const receipts of Object.values(txnReceipts)) { + expect((await receipts).length).to.equal(0); + } expect(lastSpyLogIncludes(spy, "0 unfilled deposits")).to.be.true; // If we reset the timestamp, the relayer will fill the deposit: hubPoolClient.currentTime = quoteTimestamp; txnReceipts = await relayerInstance.checkForUnfilledDepositsAndFill(); - expect(txnReceipts[destinationChainId].length).to.equal(1); + expect((await txnReceipts[destinationChainId]).length).to.equal(1); expect(lastSpyLogIncludes(spy, "Filled v3 deposit")).to.be.true; }); @@ -465,7 +483,9 @@ describe("Relayer: Check for Unfilled Deposits and Fill", async function () { await updateAllClients(); const txnReceipts = await relayerInstance.checkForUnfilledDepositsAndFill(); - 
Object.values(txnReceipts).forEach((receipts) => expect(receipts.length).to.equal(0)); + for (const receipts of Object.values(txnReceipts)) { + expect((await receipts).length).to.equal(0); + } // Dynamic fill simulation fails in test, so the deposit will // appear as unprofitable when message filling is enabled. @@ -482,12 +502,14 @@ describe("Relayer: Check for Unfilled Deposits and Fill", async function () { await updateAllClients(); const txnReceipts = await relayerInstance.checkForUnfilledDepositsAndFill(); - Object.values(txnReceipts).forEach((receipts) => expect(receipts.length).to.equal(0)); - const [origin, destination] = [getNetworkName(originChainId), getNetworkName(destinationChainId)]; + for (const receipts of Object.values(txnReceipts)) { + expect((await receipts).length).to.equal(0); + } + expect( spy .getCalls() - .find(({ lastArg }) => lastArg.message.includes(`Ignoring ${origin} deposit destined for ${destination}.`)) + .find(({ lastArg }) => lastArg.message.includes(`Ignoring ${srcChain} deposit destined for ${dstChain}.`)) ).to.not.be.undefined; }); @@ -526,13 +548,16 @@ describe("Relayer: Check for Unfilled Deposits and Fill", async function () { await updateAllClients(); const txnReceipts = await relayerInstance.checkForUnfilledDepositsAndFill(); if (update.ignored) { - Object.values(txnReceipts).forEach((receipts) => expect(receipts.length).to.equal(0)); + for (const receipts of Object.values(txnReceipts)) { + expect((await receipts).length).to.equal(0); + } + expect( spy.getCalls().find(({ lastArg }) => lastArg.message.includes("Skipping fill for deposit with message")) ).to.not.be.undefined; } else { // Now speed up deposit again with a higher fee and a message of 0x. This should be filled. 
- expect(txnReceipts[destinationChainId].length).to.equal(1); + expect((await txnReceipts[destinationChainId]).length).to.equal(1); expect(lastSpyLogIncludes(spy, "Filled v3 deposit")).to.be.true; await spokePoolClient_2.update(); @@ -559,7 +584,9 @@ describe("Relayer: Check for Unfilled Deposits and Fill", async function () { multiCallerClient.clearTransactionQueue(); await Promise.all([spokePoolClient_1.update(), spokePoolClient_2.update(), hubPoolClient.update()]); const txnReceipts = await relayerInstance.checkForUnfilledDepositsAndFill(); - Object.values(txnReceipts).forEach((receipts) => expect(receipts.length).to.equal(0)); + for (const receipts of Object.values(txnReceipts)) { + expect((await receipts).length).to.equal(0); + } expect(lastSpyLogIncludes(spy, "0 unfilled deposits")).to.be.true; }); @@ -587,7 +614,9 @@ describe("Relayer: Check for Unfilled Deposits and Fill", async function () { await updateAllClients(); let txnReceipts = await relayerInstance.checkForUnfilledDepositsAndFill(); - Object.values(txnReceipts).forEach((receipts) => expect(receipts.length).to.equal(0)); + for (const receipts of Object.values(txnReceipts)) { + expect((await receipts).length).to.equal(0); + } expect(spy.getCalls().find(({ lastArg }) => lastArg.message.includes("Skipping fill for deposit with message"))) .to.not.be.undefined; @@ -602,7 +631,7 @@ describe("Relayer: Check for Unfilled Deposits and Fill", async function () { await updateAllClients(); txnReceipts = await relayerInstance.checkForUnfilledDepositsAndFill(); - expect(txnReceipts[destinationChainId].length).to.equal(1); + expect((await txnReceipts[destinationChainId]).length).to.equal(1); expect(lastSpyLogIncludes(spy, "Filled v3 deposit")).to.be.true; }); }); diff --git a/test/Relayer.IndexedSpokePoolClient.ts b/test/Relayer.IndexedSpokePoolClient.ts index a5925e491..35ae3d936 100644 --- a/test/Relayer.IndexedSpokePoolClient.ts +++ b/test/Relayer.IndexedSpokePoolClient.ts @@ -1,13 +1,12 @@ import { Contract, 
Event, providers, utils as ethersUtils } from "ethers"; import winston from "winston"; import { Result } from "@ethersproject/abi"; -import { CHAIN_IDs } from "@across-protocol/constants-v2"; -import { constants, utils as sdkUtils } from "@across-protocol/sdk-v2"; -import * as utils from "../scripts/utils"; +import { CHAIN_IDs } from "@across-protocol/constants"; +import { constants, utils as sdkUtils } from "@across-protocol/sdk"; import { IndexedSpokePoolClient } from "../src/clients"; -import { mangleEventArgs, sortEventsAscending, sortEventsAscendingInPlace } from "../src/utils"; +import { EventSearchConfig, mangleEventArgs, sortEventsAscending, sortEventsAscendingInPlace } from "../src/utils"; import { SpokePoolClientMessage } from "../src/clients/SpokePoolClient"; -import { assertPromiseError, createSpyLogger, expect, randomAddress } from "./utils"; +import { assertPromiseError, createSpyLogger, deploySpokePoolWithToken, expect, randomAddress } from "./utils"; type Block = providers.Block; type TransactionReceipt = providers.TransactionReceipt; @@ -18,6 +17,10 @@ class MockIndexedSpokePoolClient extends IndexedSpokePoolClient { override indexerUpdate(rawMessage: unknown): void { super.indexerUpdate(rawMessage); } + + override startWorker(): void { + return; + } } describe("IndexedSpokePoolClient: Update", async function () { @@ -121,9 +124,19 @@ describe("IndexedSpokePoolClient: Update", async function () { }; beforeEach(async function () { + let deploymentBlock: number; ({ spyLogger: logger } = createSpyLogger()); - spokePool = await utils.getSpokePoolContract(chainId); - spokePoolClient = new MockIndexedSpokePoolClient(logger, spokePool, null, chainId, 0); + ({ spokePool, deploymentBlock } = await deploySpokePoolWithToken(chainId, 1_000_000)); + const eventSearchConfig: EventSearchConfig | undefined = undefined; + spokePoolClient = new MockIndexedSpokePoolClient( + logger, + spokePool, + null, + chainId, + deploymentBlock, + eventSearchConfig, + {} + ); 
depositId = 1; currentTime = Math.round(Date.now() / 1000); oldestTime = currentTime - 7200; diff --git a/test/Relayer.SlowFill.ts b/test/Relayer.SlowFill.ts index 8303346a5..5602ddcde 100644 --- a/test/Relayer.SlowFill.ts +++ b/test/Relayer.SlowFill.ts @@ -17,7 +17,7 @@ import { destinationChainId, repaymentChainId, } from "./constants"; -import { MockInventoryClient } from "./mocks"; +import { MockInventoryClient, SimpleMockHubPoolClient } from "./mocks"; import { Contract, SignerWithAddress, @@ -102,7 +102,7 @@ describe("Relayer: Initiates slow fill requests", async function () { configStoreClient = new ConfigStoreClient(spyLogger, configStore, { fromBlock: 0 }, CONFIG_STORE_VERSION); await configStoreClient.update(); - hubPoolClient = new HubPoolClient(spyLogger, hubPool, configStoreClient); + hubPoolClient = new SimpleMockHubPoolClient(spyLogger, hubPool, configStoreClient); await hubPoolClient.update(); multiCallerClient = new MockedMultiCallerClient(spyLogger); // leave out the gasEstimator for now. 
@@ -158,7 +158,6 @@ describe("Relayer: Initiates slow fill requests", async function () { { relayerTokens: [], slowDepositors: [], - relayerDestinationChains: [], minDepositConfirmations: defaultMinDepositConfirmations, } as unknown as RelayerConfig ); @@ -169,6 +168,8 @@ describe("Relayer: Initiates slow fill requests", async function () { await setupTokensForWallet(spokePool_2, depositor, [erc20_2], weth, 10); await setupTokensForWallet(spokePool_1, relayer, [erc20_1, erc20_2], weth, 10); await setupTokensForWallet(spokePool_2, relayer, [erc20_1, erc20_2], weth, 10); + (hubPoolClient as SimpleMockHubPoolClient).mapTokenInfo(erc20_1.address, await l1Token.symbol()); + (hubPoolClient as SimpleMockHubPoolClient).mapTokenInfo(erc20_2.address, await l1Token.symbol()); await l1Token.approve(hubPool.address, amountToLp); await hubPool.addLiquidity(l1Token.address, amountToLp); @@ -204,9 +205,14 @@ describe("Relayer: Initiates slow fill requests", async function () { expect(deposit).to.exist; await updateAllClients(); - await relayerInstance.checkForUnfilledDepositsAndFill(); - expect(spyLogIncludes(spy, -2, "Requested slow fill for deposit.")).to.be.true; - expect(lastSpyLogIncludes(spy, "Insufficient balance to fill all deposits")).to.be.true; + const _txnReceipts = await relayerInstance.checkForUnfilledDepositsAndFill(); + const txnHashes = await _txnReceipts[destinationChainId]; + expect(txnHashes.length).to.equal(1); + const txn = await spokePool_1.provider.getTransaction(txnHashes[0]); + const { name: method } = spokePool_1.interface.parseTransaction(txn); + expect(method).to.equal("requestV3SlowFill"); + expect(spyLogIncludes(spy, -5, "Insufficient balance to fill all deposits")).to.be.true; + expect(lastSpyLogIncludes(spy, "Requested slow fill for deposit.")).to.be.true; // Verify that the slowFill request was received by the destination SpokePoolClient. 
await Promise.all([spokePoolClient_1.update(), spokePoolClient_2.update(), hubPoolClient.update()]); @@ -218,7 +224,10 @@ describe("Relayer: Initiates slow fill requests", async function () { getRelayDataHash(deposit, deposit.destinationChainId) ); - await relayerInstance.checkForUnfilledDepositsAndFill(); + const txnReceipts = await relayerInstance.checkForUnfilledDepositsAndFill(); + for (const receipts of Object.values(txnReceipts)) { + expect((await receipts).length).to.equal(0); + } expect(lastSpyLogIncludes(spy, "Insufficient balance to fill all deposits")).to.be.true; }); }); diff --git a/test/Relayer.TokenShortfall.ts b/test/Relayer.TokenShortfall.ts index 5734c593b..feb28c60f 100644 --- a/test/Relayer.TokenShortfall.ts +++ b/test/Relayer.TokenShortfall.ts @@ -18,7 +18,7 @@ import { destinationChainId, repaymentChainId, } from "./constants"; -import { MockInventoryClient, MockProfitClient } from "./mocks"; +import { MockInventoryClient, MockProfitClient, SimpleMockHubPoolClient } from "./mocks"; import { MockCrossChainTransferClient } from "./mocks/MockCrossChainTransferClient"; import { MockedMultiCallerClient } from "./mocks/MockMultiCallerClient"; import { @@ -108,7 +108,8 @@ describe("Relayer: Token balance shortfall", async function () { configStoreClient = new ConfigStoreClient(spyLogger, configStore, { fromBlock: 0 }, CONFIG_STORE_VERSION); await configStoreClient.update(); - hubPoolClient = new HubPoolClient(spyLogger, hubPool, configStoreClient); + hubPoolClient = new SimpleMockHubPoolClient(spyLogger, hubPool, configStoreClient); + await hubPoolClient.update(); multiCallerClient = new MockedMultiCallerClient(spyLogger); // leave out the gasEstimator for now. 
@@ -160,7 +161,6 @@ describe("Relayer: Token balance shortfall", async function () { { relayerTokens: [], slowDepositors: [], - relayerDestinationChains: [], minDepositConfirmations: defaultMinDepositConfirmations, } as unknown as RelayerConfig ); @@ -182,6 +182,8 @@ describe("Relayer: Token balance shortfall", async function () { inputToken = erc20_1.address; outputToken = erc20_2.address; + (hubPoolClient as SimpleMockHubPoolClient).mapTokenInfo(erc20_1.address, await l1Token.symbol()); + (hubPoolClient as SimpleMockHubPoolClient).mapTokenInfo(erc20_2.address, await l1Token.symbol()); inputTokenDecimals = await erc20_1.decimals(); // Standard deposit outputAmount is 100 tokens. Work backwards to inputAmount to simplify the shortfall math. @@ -218,10 +220,16 @@ describe("Relayer: Token balance shortfall", async function () { // Other relays should not be filled. await erc20_2.mint(relayer.address, toBN(60).mul(bn10.pow(inputTokenDecimals))); await updateAllClients(); - await relayerInstance.checkForUnfilledDepositsAndFill(noSlowRelays); - expect(spyLogIncludes(spy, -2, "Relayed depositId 0")).to.be.true; - expect(lastSpyLogIncludes(spy, `${await l1Token.symbol()} cumulative shortfall of 190.00`)).to.be.true; - expect(lastSpyLogIncludes(spy, "blocking deposits: 1,2")).to.be.true; + const txnReceipts = await relayerInstance.checkForUnfilledDepositsAndFill(noSlowRelays); + const txnHashes = await txnReceipts[destinationChainId]; + expect(txnHashes.length).to.equal(1); + const txn = await spokePool_1.provider.getTransaction(txnHashes[0]); + const { name: method, args } = spokePool_1.interface.parseTransaction(txn); + expect(method).to.equal("fillV3Relay"); + expect(args[0].depositId).to.equal(0); // depositId 0 + + expect(spyLogIncludes(spy, -5, `${await l1Token.symbol()} cumulative shortfall of 190.00`)).to.be.true; + expect(spyLogIncludes(spy, -5, "blocking deposits: 1,2")).to.be.true; }); it("Produces expected logs based on insufficient multiple token balance", 
async function () { diff --git a/test/Relayer.UnfilledDeposits.ts b/test/Relayer.UnfilledDeposits.ts index 85bb258a1..6910f0a89 100644 --- a/test/Relayer.UnfilledDeposits.ts +++ b/test/Relayer.UnfilledDeposits.ts @@ -1,7 +1,14 @@ -import * as contracts from "@across-protocol/contracts-v2/dist/test-utils"; -import { ExpandedERC20__factory as ERC20 } from "@across-protocol/contracts-v2"; -import { clients, utils as sdkUtils } from "@across-protocol/sdk-v2"; -import { AcrossApiClient, ConfigStoreClient, MultiCallerClient, TokenClient } from "../src/clients"; +import * as contracts from "@across-protocol/contracts/dist/test-utils"; +import { ExpandedERC20__factory as ERC20 } from "@across-protocol/contracts"; +import { utils as sdkUtils } from "@across-protocol/sdk"; +import { + AcrossApiClient, + ConfigStoreClient, + HubPoolClient, + MultiCallerClient, + SpokePoolClient, + TokenClient, +} from "../src/clients"; import { DepositWithBlock, FillStatus } from "../src/interfaces"; import { CHAIN_ID_TEST_LIST, @@ -34,7 +41,7 @@ import { // Tested import { Relayer } from "../src/relayer/Relayer"; import { RelayerConfig } from "../src/relayer/RelayerConfig"; -import { RelayerUnfilledDeposit, getUnfilledDeposits, utf8ToHex } from "../src/utils"; +import { RelayerUnfilledDeposit, getAllUnfilledDeposits, getUnfilledDeposits, utf8ToHex } from "../src/utils"; describe("Relayer: Unfilled Deposits", async function () { const { bnOne } = sdkUtils; @@ -43,8 +50,9 @@ describe("Relayer: Unfilled Deposits", async function () { let hubPool: Contract, l1Token: Contract, configStore: Contract; let owner: SignerWithAddress, depositor: SignerWithAddress, relayer: SignerWithAddress; - let spokePoolClient_1: clients.SpokePoolClient, spokePoolClient_2: clients.SpokePoolClient; - let configStoreClient: MockConfigStoreClient, hubPoolClient: clients.HubPoolClient; + let spokePoolClient_1: SpokePoolClient, spokePoolClient_2: SpokePoolClient; + let configStoreClient: MockConfigStoreClient, 
hubPoolClient: HubPoolClient; + let spokePoolClients: Record; let multiCallerClient: MultiCallerClient, tokenClient: TokenClient; let profitClient: MockProfitClient; let spokePool1DeploymentBlock: number, spokePool2DeploymentBlock: number; @@ -53,7 +61,7 @@ describe("Relayer: Unfilled Deposits", async function () { let unfilledDeposits: RelayerUnfilledDeposit[] = []; let inputAmount: BigNumber, outputAmount: BigNumber; - let _getUnfilledDeposits: () => Promise; + let _getAllUnfilledDeposits: () => RelayerUnfilledDeposit[]; const { spy, spyLogger } = createSpyLogger(); const updateAllClients = async () => { @@ -90,17 +98,17 @@ describe("Relayer: Unfilled Deposits", async function () { configStoreClient = new MockConfigStoreClient(spyLogger, configStore, undefined, undefined, CHAIN_ID_TEST_LIST); await configStoreClient.update(); - hubPoolClient = new clients.HubPoolClient(spyLogger, hubPool, configStoreClient); + hubPoolClient = new HubPoolClient(spyLogger, hubPool, configStoreClient); await hubPoolClient.update(); - spokePoolClient_1 = new clients.SpokePoolClient( + spokePoolClient_1 = new SpokePoolClient( spyLogger, spokePool_1, hubPoolClient, originChainId, spokePool1DeploymentBlock ); - spokePoolClient_2 = new clients.SpokePoolClient( + spokePoolClient_2 = new SpokePoolClient( spyLogger, spokePool_2, hubPoolClient, @@ -109,7 +117,7 @@ describe("Relayer: Unfilled Deposits", async function () { { fromBlock: 0, toBlock: undefined, maxBlockLookBack: 0 } ); - const spokePoolClients = { [originChainId]: spokePoolClient_1, [destinationChainId]: spokePoolClient_2 }; + spokePoolClients = { [originChainId]: spokePoolClient_1, [destinationChainId]: spokePoolClient_2 }; multiCallerClient = new MockedMultiCallerClient(spyLogger); tokenClient = new TokenClient(spyLogger, relayer.address, spokePoolClients, hubPoolClient); profitClient = new MockProfitClient(spyLogger, hubPoolClient, spokePoolClients, []); @@ -131,7 +139,6 @@ describe("Relayer: Unfilled Deposits", async 
function () { }, { relayerTokens: [], - relayerDestinationChains: [], minDepositConfirmations: defaultMinDepositConfirmations, acceptInvalidFills: false, } as unknown as RelayerConfig @@ -156,8 +163,8 @@ describe("Relayer: Unfilled Deposits", async function () { await Promise.all([spokePool_1, spokePool_2].map((spokePool) => spokePool.setCurrentTime(currentTime))); await updateAllClients(); - _getUnfilledDeposits = async (): Promise => - Object.values(await getUnfilledDeposits(relayerInstance.clients.spokePoolClients, hubPoolClient)).flat(); + _getAllUnfilledDeposits = (): RelayerUnfilledDeposit[] => + Object.values(getAllUnfilledDeposits(relayerInstance.clients.spokePoolClients, hubPoolClient)).flat(); unfilledDeposits = []; const tokenBalance = await erc20_1.balanceOf(depositor.address); @@ -179,7 +186,7 @@ describe("Relayer: Unfilled Deposits", async function () { ); await updateAllClients(); - unfilledDeposits = await _getUnfilledDeposits(); + unfilledDeposits = _getAllUnfilledDeposits(); expect(unfilledDeposits) .excludingEvery(["realizedLpFeePct", "quoteBlockNumber"]) .to.deep.equal( @@ -187,7 +194,47 @@ describe("Relayer: Unfilled Deposits", async function () { .sort((a, b) => (a.destinationChainId > b.destinationChainId ? 1 : -1)) .map((deposit) => ({ deposit, - fillStatus: FillStatus.Unfilled, + unfilledAmount: deposit.outputAmount, + invalidFills: [], + version: configStoreClient.configStoreVersion, + })) + ); + }); + + it("Correctly uses input fill status", async function () { + const deposits: DepositWithBlock[] = []; + for (let i = 0; i < 5; ++i) { + const deposit = await depositV3( + spokePool_1, + destinationChainId, + depositor, + erc20_1.address, + inputAmount, + erc20_2.address, + outputAmount + ); + deposits.push(deposit); + } + await updateAllClients(); + + // Take the 2nd last deposit and mark it filled. 
+ expect(deposits.length > 2).to.be.true; + const filledDeposit = deposits.at(-2); + expect(filledDeposit).to.exist; + + const depositHash = spokePoolClient_1.getDepositHash(filledDeposit!); + const { fillStatus } = relayerInstance; + fillStatus[depositHash] = FillStatus.Filled; + + unfilledDeposits = getUnfilledDeposits(destinationChainId, spokePoolClients, hubPoolClient, fillStatus); + expect(unfilledDeposits) + .excludingEvery(["realizedLpFeePct", "quoteBlockNumber"]) + .to.deep.equal( + deposits + .filter(({ depositId }) => depositId !== filledDeposit!.depositId) + .map((deposit) => ({ + deposit, + unfilledAmount: deposit.outputAmount, invalidFills: [], version: configStoreClient.configStoreVersion, })) @@ -213,13 +260,13 @@ describe("Relayer: Unfilled Deposits", async function () { // The deposit should show up as unfilled, since the fill was incorrectly applied to the wrong deposit. await updateAllClients(); - unfilledDeposits = await _getUnfilledDeposits(); + unfilledDeposits = _getAllUnfilledDeposits(); expect(unfilledDeposits) .excludingEvery(["realizedLpFeePct", "quoteBlockNumber"]) .to.deep.equal([ { deposit, - fillStatus: FillStatus.Unfilled, + unfilledAmount: deposit.outputAmount, invalidFills: [invalidFill], version: configStoreClient.configStoreVersion, }, @@ -279,7 +326,7 @@ describe("Relayer: Unfilled Deposits", async function () { } await spokePoolClient_1.update(); - unfilledDeposits = await _getUnfilledDeposits(); + unfilledDeposits = _getAllUnfilledDeposits(); // Expect both unfilled deposits. The SpokePool contract guarantees // that the quoteTimestamp can't be ahead of SpokePool time. 
@@ -306,7 +353,7 @@ describe("Relayer: Unfilled Deposits", async function () { ); await updateAllClients(); - unfilledDeposits = await _getUnfilledDeposits(); + unfilledDeposits = _getAllUnfilledDeposits(); expect(unfilledDeposits.length).to.equal(1); expect(sdkUtils.getRelayDataHash(unfilledDeposits[0].deposit, destinationChainId)).to.equal( sdkUtils.getRelayDataHash(deposit, deposit.destinationChainId) @@ -315,7 +362,7 @@ describe("Relayer: Unfilled Deposits", async function () { await fillV3Relay(spokePool_2, deposit, relayer); await updateAllClients(); - unfilledDeposits = await _getUnfilledDeposits(); + unfilledDeposits = _getAllUnfilledDeposits(); expect(unfilledDeposits.length).to.equal(0); // Speed up deposit, and check that unfilled amount is still the same. @@ -341,7 +388,7 @@ describe("Relayer: Unfilled Deposits", async function () { ); await updateAllClients(); - unfilledDeposits = await _getUnfilledDeposits(); + unfilledDeposits = _getAllUnfilledDeposits(); expect(unfilledDeposits.length).to.equal(0); }); @@ -448,7 +495,7 @@ describe("Relayer: Unfilled Deposits", async function () { ); // Make a fill with a different outputAmount. This fill should be - // considered invalid and getUnfilledDeposits should log it. + // considered invalid and getAllUnfilledDeposits should log it. const invalidFill = await fillV3Relay( spokePool_2, { ...deposit, outputAmount: deposit.outputAmount.sub(bnOne) }, @@ -457,13 +504,13 @@ describe("Relayer: Unfilled Deposits", async function () { await updateAllClients(); // getUnfilledDeposit still returns the deposit as unfilled but with the invalid fill. 
- unfilledDeposits = await _getUnfilledDeposits(); + unfilledDeposits = _getAllUnfilledDeposits(); expect(unfilledDeposits) .excludingEvery(["realizedLpFeePct", "quoteBlockNumber"]) .to.deep.equal([ { deposit, - fillStatus: FillStatus.Unfilled, + unfilledAmount: deposit.outputAmount, invalidFills: [invalidFill], version: configStoreClient.configStoreVersion, }, @@ -504,7 +551,7 @@ describe("Relayer: Unfilled Deposits", async function () { ); await updateAllClients(); - const unfilledDeposits = await _getUnfilledDeposits(); + const unfilledDeposits = _getAllUnfilledDeposits(); expect(unfilledDeposits.length).to.equal(1); expect(unfilledDeposits[0].version).to.equal(highVersion); diff --git a/test/RetryUtils.ts b/test/RetryUtils.ts new file mode 100644 index 000000000..41c2088ee --- /dev/null +++ b/test/RetryUtils.ts @@ -0,0 +1,56 @@ +import { assertPromiseError, expect } from "./utils"; + +// Tested +import { delay, retryAsync } from "../src/utils"; + +const expectedErrorMsg = "async error"; +const expectedReturnValue = 1; + +let ERROR_COUNTER = 0; + +// This function will throw based on the value of an external counter variable, this way +// we can use it to test a function that fails intermittently. If `errorIndex` is > +// to ERROR_COUNTER, then `expectedErrorMsg` will be thrown. +async function incrementCounterThrowError(errorIndex: number, returnValue = expectedReturnValue): Promise { + const oldCounter = ERROR_COUNTER; + ERROR_COUNTER++; + await delay(0); + if (errorIndex > oldCounter) { + throw new Error(expectedErrorMsg); + } else { + return Promise.resolve(returnValue); + } +} + +describe("RetryUtils", async function () { + beforeEach(async function () { + ERROR_COUNTER = 0; + }); + it("retryAsync", async function () { + // Succeeds first time, runs one loop. 
+ expect(await retryAsync(() => incrementCounterThrowError(ERROR_COUNTER), 3, 1)).to.equal(expectedReturnValue); + expect(ERROR_COUNTER).to.equal(1); + + // Fails every time, should throw error, runs four loops, one for first try, and then three retries. + await assertPromiseError( + retryAsync(() => incrementCounterThrowError(ERROR_COUNTER + 1), 3, 1), + expectedErrorMsg + ); + expect(ERROR_COUNTER).to.equal(5); + + // Fails first time only, runs two loops, one for first failure, and then retries + // successfully: + const errorCounter = ERROR_COUNTER; + expect(await retryAsync(() => incrementCounterThrowError(errorCounter + 1), 3, 1)).to.equal(expectedReturnValue); + expect(ERROR_COUNTER).to.equal(7); + + // Delays 1s per retry, retries three times for four more iterations. + const timerStart = performance.now(); + await assertPromiseError( + retryAsync(() => incrementCounterThrowError(ERROR_COUNTER + 1), 3, 1), + expectedErrorMsg + ); + expect(ERROR_COUNTER).to.equal(11); + expect(performance.now() - timerStart).to.be.greaterThan(3000); + }); +}); diff --git a/test/TokenClient.BalanceAlowance.ts b/test/TokenClient.BalanceAlowance.ts index 5f7004fc4..2a4d7e711 100644 --- a/test/TokenClient.BalanceAlowance.ts +++ b/test/TokenClient.BalanceAlowance.ts @@ -1,4 +1,4 @@ -import { ConfigStoreClient, SpokePoolClient, TokenClient } from "../src/clients"; // Tested +import { ConfigStoreClient, SpokePoolClient, TokenClient, TokenDataType } from "../src/clients"; // Tested import { originChainId, destinationChainId, ZERO_ADDRESS } from "./constants"; import { MockHubPoolClient } from "./mocks"; import { @@ -10,6 +10,7 @@ import { deploySpokePoolWithToken, ethers, expect, + toBN, toBNWei, winston, } from "./utils"; @@ -103,6 +104,15 @@ describe("TokenClient: Balance and Allowance", async function () { }); it("Fetches all associated balances", async function () { + const alignTokenData = (data: TokenDataType): TokenDataType => + Object.entries(data).reduce((acc, [chainId, 
tokenData]) => { + acc[chainId] = Object.entries(tokenData).reduce((acc, [token, { balance, allowance }]) => { + acc[token] = { balance: toBN(balance), allowance: toBN(allowance) }; + return acc; + }, {}); + return acc; + }, {}); + await updateAllClients(); const expectedData = { [originChainId]: { @@ -115,7 +125,7 @@ describe("TokenClient: Balance and Allowance", async function () { }, }; const tokenData = tokenClient.getAllTokenData(); - expect(tokenData).to.deep.equal(expectedData); + expect(alignTokenData(tokenData)).to.deep.equal(expectedData); // Check some balance/allowances directly. expect(tokenClient.getBalance(originChainId, erc20_1.address)).to.equal(toBNWei(0)); @@ -139,7 +149,7 @@ describe("TokenClient: Balance and Allowance", async function () { }, }; const tokenData1 = tokenClient.getAllTokenData(); - expect(tokenData1).to.deep.equal(expectedData1); + expect(alignTokenData(tokenData1)).to.deep.equal(expectedData1); expect(tokenClient.getBalance(originChainId, erc20_1.address)).to.equal(toBNWei(42069)); expect(tokenClient.getBalance(destinationChainId, weth_2.address)).to.equal(toBNWei(1337)); diff --git a/test/constants.ts b/test/constants.ts index 55215826d..f33e82065 100644 --- a/test/constants.ts +++ b/test/constants.ts @@ -9,8 +9,8 @@ import { repaymentChainId, refundProposalLiveness, randomAddress, -} from "@across-protocol/contracts-v2/dist/test-utils"; -import { toWei, ZERO_ADDRESS } from "../src/utils"; +} from "@across-protocol/contracts/dist/test-utils"; +import { bnUint256Max, toWei, ZERO_ADDRESS } from "../src/utils"; export { amountToDeposit, @@ -71,5 +71,6 @@ export const IMPOSSIBLE_BLOCK_RANGE = DEFAULT_BLOCK_RANGE_FOR_CHAIN.map((range) export const baseSpeedUpString = "ACROSS-V2-FEE-1.0"; export const defaultMinDepositConfirmations = { - default: { [originChainId]: 0, [destinationChainId]: 0 }, + [originChainId]: [{ usdThreshold: bnUint256Max, minConfirmations: 0 }], + [destinationChainId]: [{ usdThreshold: bnUint256Max, 
minConfirmations: 0 }], }; diff --git a/test/cross-chain-adapters/Linea.ts b/test/cross-chain-adapters/Linea.ts new file mode 100644 index 000000000..55de999fe --- /dev/null +++ b/test/cross-chain-adapters/Linea.ts @@ -0,0 +1,236 @@ +import { CHAIN_IDs, TOKEN_SYMBOLS_MAP } from "@across-protocol/constants"; +import { SpokePoolClient } from "../../src/clients"; +import { LineaAdapter } from "../../src/clients/bridges/LineaAdapter"; +import { ethers, getContractFactory, Contract, randomAddress, expect, createRandomBytes32, toBN } from "../utils"; +import { utils } from "@across-protocol/sdk"; +import { ZERO_ADDRESS } from "@uma/common"; +import { CONTRACT_ADDRESSES } from "../../src/common"; + +describe("Cross Chain Adapter: Linea", async function () { + let adapter: LineaAdapter; + let monitoredEoa: string; + let l1Token, l1USDCToken, l1WETHToken: string; + + let wethBridgeContract: Contract; + let usdcBridgeContract: Contract; + let erc20BridgeContract: Contract; + let searchConfig: utils.EventSearchConfig; + + beforeEach(async function () { + searchConfig = { + fromBlock: 0, + toBlock: 1_000_000, + }; + const [deployer] = await ethers.getSigners(); + + monitoredEoa = randomAddress(); + l1Token = TOKEN_SYMBOLS_MAP.WBTC.addresses[CHAIN_IDs.MAINNET]; + l1USDCToken = TOKEN_SYMBOLS_MAP.USDC.addresses[CHAIN_IDs.MAINNET]; + l1WETHToken = TOKEN_SYMBOLS_MAP.WETH.addresses[CHAIN_IDs.MAINNET]; + + const spokePool = await (await getContractFactory("MockSpokePool", deployer)).deploy(ZERO_ADDRESS); + + const l2SpokePoolClient = new SpokePoolClient(null, spokePool, null, CHAIN_IDs.LINEA, 0, { + fromBlock: 0, + }); + const l1SpokePoolClient = new SpokePoolClient(null, spokePool, null, CHAIN_IDs.MAINNET, 0, { + fromBlock: 0, + }); + adapter = new LineaAdapter( + null, + { + [CHAIN_IDs.LINEA]: l2SpokePoolClient, + [CHAIN_IDs.MAINNET]: l1SpokePoolClient, + }, // Don't need spoke pool clients for this test + [] // monitored address doesn't matter for this test since we inject it into 
the function + ); + + wethBridgeContract = await (await getContractFactory("LineaWethBridge", deployer)).deploy(); + usdcBridgeContract = await (await getContractFactory("LineaUsdcBridge", deployer)).deploy(); + erc20BridgeContract = await (await getContractFactory("LineaERC20Bridge", deployer)).deploy(); + }); + + describe("WETH", function () { + it("Get L1 initiated events", async function () { + // Emit events: + // - some with monitored address as sender + // - some with monitored address as recipient + // Function should return only events with recipient equal + // to monitored address and value greater than 0 + await wethBridgeContract.emitMessageSent(randomAddress(), monitoredEoa, 0); + await wethBridgeContract.emitMessageSent(monitoredEoa, randomAddress(), 0); + await wethBridgeContract.emitMessageSent(randomAddress(), monitoredEoa, 1); + await wethBridgeContract.emitMessageSent(monitoredEoa, randomAddress(), 1); + const result = await adapter.getWethDepositInitiatedEvents(wethBridgeContract, monitoredEoa, searchConfig); + expect(result.length).to.equal(1); + expect(result[0].args._to).to.equal(monitoredEoa); + expect(result[0].args._value).to.equal(1); + }); + it("Get L2 finalized events", async function () { + // Function should return only finalized events that match + // on message hash. 
+ const messageHash = createRandomBytes32(); + const otherMessageHash = createRandomBytes32(); + await wethBridgeContract.emitMessageClaimed(messageHash); + await wethBridgeContract.emitMessageClaimed(otherMessageHash); + const result = await adapter.getWethDepositFinalizedEvents(wethBridgeContract, [messageHash], searchConfig); + expect(result.length).to.equal(1); + expect(result[0].args._messageHash).to.equal(messageHash); + }); + it("Matches L1 and L2 events", async function () { + const messageHash = createRandomBytes32(); + await wethBridgeContract.emitMessageSentWithMessageHash(randomAddress(), monitoredEoa, 1, messageHash); + await wethBridgeContract.emitMessageClaimed(messageHash); + const l1Events = await adapter.getWethDepositInitiatedEvents(wethBridgeContract, monitoredEoa, searchConfig); + const l2Events = await adapter.getWethDepositFinalizedEvents(wethBridgeContract, [messageHash], searchConfig); + + let outstandingTransfers = {}; + + // 1. If l1 and l2 events pair off, outstanding transfers will be empty + adapter.matchWethDepositEvents(l1Events, l2Events, outstandingTransfers, monitoredEoa, l1WETHToken); + expect(outstandingTransfers).to.deep.equal({}); + + // 2. If finalized event is missing, there will be an outstanding transfer. 
+ outstandingTransfers = {}; + adapter.matchWethDepositEvents(l1Events, [], outstandingTransfers, monitoredEoa, l1WETHToken); + expect( + outstandingTransfers[monitoredEoa][l1WETHToken][TOKEN_SYMBOLS_MAP.WETH.addresses[CHAIN_IDs.LINEA]] + ).to.deep.equal({ + totalAmount: toBN(1), + depositTxHashes: l1Events.map((e) => e.transactionHash), + }); + }); + }); + describe("USDC", function () { + it("Get L1 initiated events", async function () { + await usdcBridgeContract.emitDeposited(randomAddress(), monitoredEoa); + await usdcBridgeContract.emitDeposited(monitoredEoa, randomAddress()); + const result = await adapter.getUsdcDepositInitiatedEvents(usdcBridgeContract, monitoredEoa, searchConfig); + expect(result.length).to.equal(1); + expect(result[0].args.to).to.equal(monitoredEoa); + }); + it("Get L2 finalized events", async function () { + await usdcBridgeContract.emitReceivedFromOtherLayer(randomAddress()); + await usdcBridgeContract.emitReceivedFromOtherLayer(monitoredEoa); + const result = await adapter.getUsdcDepositFinalizedEvents(usdcBridgeContract, monitoredEoa, searchConfig); + expect(result.length).to.equal(1); + expect(result[0].args.recipient).to.equal(monitoredEoa); + }); + it("Matches L1 and L2 events", async function () { + await usdcBridgeContract.emitDeposited(randomAddress(), monitoredEoa); + await usdcBridgeContract.emitReceivedFromOtherLayer(monitoredEoa); + const l1Events = await adapter.getUsdcDepositInitiatedEvents(usdcBridgeContract, monitoredEoa, searchConfig); + const l2Events = await adapter.getUsdcDepositFinalizedEvents(usdcBridgeContract, monitoredEoa, searchConfig); + + let outstandingTransfers = {}; + + // 1. If l1 and l2 events pair off, outstanding transfers will be empty + adapter.matchUsdcDepositEvents(l1Events, l2Events, outstandingTransfers, monitoredEoa, l1USDCToken); + expect(outstandingTransfers).to.deep.equal({}); + + // 2. If finalized event is missing, there will be an outstanding transfer. 
+ outstandingTransfers = {}; + adapter.matchUsdcDepositEvents(l1Events, [], outstandingTransfers, monitoredEoa, l1USDCToken); + expect( + outstandingTransfers[monitoredEoa][l1USDCToken][TOKEN_SYMBOLS_MAP["USDC.e"].addresses[CHAIN_IDs.LINEA]] + ).to.deep.equal({ + totalAmount: toBN(0), + depositTxHashes: l1Events.map((e) => e.transactionHash), + }); + }); + }); + describe("ERC20", function () { + it("Get L1 initiated events", async function () { + await erc20BridgeContract.emitBridgingInitiated(randomAddress(), monitoredEoa, l1Token); + await erc20BridgeContract.emitBridgingInitiated(monitoredEoa, randomAddress(), l1Token); + await erc20BridgeContract.emitBridgingInitiated(randomAddress(), monitoredEoa, randomAddress()); + const result = await adapter.getErc20DepositInitiatedEvents( + erc20BridgeContract, + monitoredEoa, + l1Token, + searchConfig + ); + expect(result.length).to.equal(1); + expect(result[0].args.recipient).to.equal(monitoredEoa); + expect(result[0].args.token).to.equal(l1Token); + }); + it("Get L2 finalized events", async function () { + // Should return only event + await erc20BridgeContract.emitBridgingFinalized(l1Token, monitoredEoa); + await erc20BridgeContract.emitBridgingFinalized(randomAddress(), monitoredEoa); + await erc20BridgeContract.emitBridgingFinalized(l1Token, randomAddress()); + const result = await adapter.getErc20DepositFinalizedEvents( + erc20BridgeContract, + monitoredEoa, + l1Token, + searchConfig + ); + expect(result.length).to.equal(1); + expect(result[0].args.recipient).to.equal(monitoredEoa); + expect(result[0].args.nativeToken).to.equal(l1Token); + }); + it("Matches L1 and L2 events", async function () { + await erc20BridgeContract.emitBridgingInitiated(randomAddress(), monitoredEoa, l1Token); + await erc20BridgeContract.emitBridgingFinalized(l1Token, monitoredEoa); + const l1Events = await adapter.getErc20DepositInitiatedEvents( + erc20BridgeContract, + monitoredEoa, + l1Token, + searchConfig + ); + const l2Events = await 
adapter.getErc20DepositFinalizedEvents( + erc20BridgeContract, + monitoredEoa, + l1Token, + searchConfig + ); + + let outstandingTransfers = {}; + + // 1. If l1 and l2 events pair off, outstanding transfers will be empty + adapter.matchErc20DepositEvents(l1Events, l2Events, outstandingTransfers, monitoredEoa, l1Token); + expect(outstandingTransfers).to.deep.equal({}); + + // 2. If finalized event is missing, there will be an outstanding transfer. + outstandingTransfers = {}; + adapter.matchErc20DepositEvents(l1Events, [], outstandingTransfers, monitoredEoa, l1Token); + expect( + outstandingTransfers[monitoredEoa][l1Token][TOKEN_SYMBOLS_MAP.WBTC.addresses[CHAIN_IDs.LINEA]] + ).to.deep.equal({ + totalAmount: toBN(0), + depositTxHashes: l1Events.map((e) => e.transactionHash), + }); + }); + }); + + it("getL1MessageService", async function () { + const l1MessageService = adapter.getL1MessageService(); + expect(l1MessageService).to.not.be.undefined; + expect(l1MessageService.address) === CONTRACT_ADDRESSES[CHAIN_IDs.MAINNET]["lineaMessageService"].address; + }); + it("getL2MessageService", async function () { + const l2MessageService = adapter.getL2MessageService(); + expect(l2MessageService).to.not.be.undefined; + expect(l2MessageService.address) === CONTRACT_ADDRESSES[CHAIN_IDs.LINEA]["l2MessageService"].address; + }); + it("getL1Bridge: USDC", async function () { + const bridge = adapter.getL1Bridge(TOKEN_SYMBOLS_MAP.USDC.addresses[this.hubChainId]); + expect(bridge).to.not.be.undefined; + expect(bridge.address) === CONTRACT_ADDRESSES[CHAIN_IDs.MAINNET]["lineaL1UsdcBridge"].address; + }); + it("getL2Bridge: USDC", async function () { + const bridge = adapter.getL2Bridge(TOKEN_SYMBOLS_MAP.USDC.addresses[this.hubChainId]); + expect(bridge).to.not.be.undefined; + expect(bridge.address) === CONTRACT_ADDRESSES[CHAIN_IDs.LINEA]["lineaL2UsdcBridge"].address; + }); + it("getL1Bridge: ERC20", async function () { + const bridge = 
adapter.getL1Bridge(TOKEN_SYMBOLS_MAP.WBTC.addresses[this.hubChainId]); + expect(bridge).to.not.be.undefined; + expect(bridge.address) === CONTRACT_ADDRESSES[CHAIN_IDs.MAINNET]["lineaL1TokenBridge"].address; + }); + it("getL2Bridge: ERC20", async function () { + const bridge = adapter.getL2Bridge(TOKEN_SYMBOLS_MAP.WBTC.addresses[this.hubChainId]); + expect(bridge).to.not.be.undefined; + expect(bridge.address) === CONTRACT_ADDRESSES[CHAIN_IDs.LINEA]["lineaL2TokenBridge"].address; + }); +}); diff --git a/test/cross-chain-adapters/OpStack.ts b/test/cross-chain-adapters/OpStack.ts new file mode 100644 index 000000000..e0de7019f --- /dev/null +++ b/test/cross-chain-adapters/OpStack.ts @@ -0,0 +1,90 @@ +import { CHAIN_IDs, TOKEN_SYMBOLS_MAP } from "@across-protocol/constants"; +import { ethers, getContractFactory, Contract, randomAddress, expect } from "../utils"; +import { utils } from "@across-protocol/sdk"; +import { CONTRACT_ADDRESSES } from "../../src/common"; +import { WethBridge } from "../../src/clients/bridges/op-stack/WethBridge"; +import { Event, Signer } from "ethers"; + +describe("Cross Chain Adapter: OP Stack", async function () { + let monitoredEoa: string; + let atomicDepositorAddress: string; + let monitoredEoaAccount: Signer; + + let wethBridge: WethBridge; + let wethBridgeContract: Contract; + let wethContract: Contract; + let searchConfig: utils.EventSearchConfig; + + beforeEach(async function () { + searchConfig = { + fromBlock: 0, + toBlock: 1_000_000, + }; + [monitoredEoaAccount] = await ethers.getSigners(); + + monitoredEoa = monitoredEoaAccount.address; + atomicDepositorAddress = CONTRACT_ADDRESSES[CHAIN_IDs.MAINNET].atomicDepositor.address; + + wethBridge = new WethBridge(CHAIN_IDs.OPTIMISM, CHAIN_IDs.MAINNET, monitoredEoaAccount, monitoredEoaAccount); + + wethBridgeContract = await (await getContractFactory("OpStackWethBridge", monitoredEoaAccount)).deploy(); + wethContract = await (await getContractFactory("WETH9", 
monitoredEoaAccount)).deploy(); + }); + + describe("WETH", function () { + it("Get L1 initiated events for EOA", async function () { + // For EOA's only returns transfers originating from atomic depositor address and recipient + // is the filtered address. + await wethBridgeContract.emitDepositInitiated(monitoredEoa, randomAddress(), 1); + await wethBridgeContract.emitDepositInitiated(randomAddress(), monitoredEoa, 1); + await wethBridgeContract.emitDepositInitiated(atomicDepositorAddress, randomAddress(), 1); + await wethBridgeContract.emitDepositInitiated(atomicDepositorAddress, monitoredEoa, 1); + const result = ( + await wethBridge.queryL1BridgeInitiationEvents( + wethContract.address, + monitoredEoa, + searchConfig, + wethBridgeContract + ) + )[TOKEN_SYMBOLS_MAP.WETH.addresses[CHAIN_IDs.OPTIMISM]]; + expect(result.length).to.equal(1); + expect(result[0].args._from).to.equal(atomicDepositorAddress); + expect(result[0].args._to).to.equal(monitoredEoa); + expect(result[0].args._amount).to.equal(1); + }); + // TODO: Add unit tests when from address is contract but need to change the providers such that we can + // pretend we are monitoring the hub pool contract. + it("Get L2 finalized events for EOA", async function () { + // Counts only finalized events preceding a WETH wrap event. + // For EOA's, weth transfer from address should be atomic depositor address + await wethBridgeContract.emitDepositFinalized(atomicDepositorAddress, monitoredEoa, 1); + const convertResults = (result: Record) => + result[TOKEN_SYMBOLS_MAP.WETH.addresses[CHAIN_IDs.OPTIMISM]]; + const emptyResult = convertResults( + await wethBridge.queryL2BridgeFinalizationEvents( + wethContract.address, + monitoredEoa, + searchConfig, + wethBridgeContract, + wethContract + ) + ); + expect(emptyResult.length).to.equal(0); + + // Mine Deposit event now. 
+ await wethContract.connect(monitoredEoaAccount).deposit({ value: 0 }); + const result = convertResults( + await wethBridge.queryL2BridgeFinalizationEvents( + wethContract.address, + monitoredEoa, + searchConfig, + wethBridgeContract, + wethContract + ) + ); + expect(result.length).to.equal(1); + expect(result[0].args._from).to.equal(atomicDepositorAddress); + expect(result[0].args._to).to.equal(monitoredEoa); + }); + }); +}); diff --git a/test/fixtures/Dataworker.Fixture.ts b/test/fixtures/Dataworker.Fixture.ts index edeae906c..f65bccfd7 100644 --- a/test/fixtures/Dataworker.Fixture.ts +++ b/test/fixtures/Dataworker.Fixture.ts @@ -30,7 +30,7 @@ import { BundleDataClient, TokenClient } from "../../src/clients"; import { DataworkerClients } from "../../src/dataworker/DataworkerClientHelper"; import { MockConfigStoreClient, MockedMultiCallerClient } from "../mocks"; import { EthersTestLibrary } from "../types"; -import { clients as sdkClients } from "@across-protocol/sdk-v2"; +import { clients as sdkClients } from "@across-protocol/sdk"; async function _constructSpokePoolClientsWithLookback( spokePools: Contract[], diff --git a/test/fixtures/UmaEcosystemFixture.ts b/test/fixtures/UmaEcosystemFixture.ts index 35dc6d10e..aae1bcb0d 100644 --- a/test/fixtures/UmaEcosystemFixture.ts +++ b/test/fixtures/UmaEcosystemFixture.ts @@ -1,7 +1,7 @@ -import * as utils from "@across-protocol/contracts-v2/dist/test-utils"; +import * as utils from "@across-protocol/contracts/dist/test-utils"; import { Contract } from "ethers"; import { SignerWithAddress } from "@nomiclabs/hardhat-ethers/signers"; -import { utf8ToHex, identifier, refundProposalLiveness } from "@across-protocol/contracts-v2/dist/test-utils"; +import { utf8ToHex, identifier, refundProposalLiveness } from "@across-protocol/contracts/dist/test-utils"; export async function setupUmaEcosystem(owner: SignerWithAddress): Promise<{ timer: Contract; diff --git a/test/mocks/MockAdapterManager.ts 
b/test/mocks/MockAdapterManager.ts index ef24f28e2..c8edf0878 100644 --- a/test/mocks/MockAdapterManager.ts +++ b/test/mocks/MockAdapterManager.ts @@ -1,5 +1,5 @@ import { AdapterManager } from "../../src/clients/bridges"; -import { BigNumber, TransactionResponse } from "../../src/utils"; +import { BigNumber, TransactionResponse, getTokenAddressWithCCTP } from "../../src/utils"; import { createRandomBytes32 } from "../utils"; import { OutstandingTransfers } from "../../src/interfaces"; @@ -40,14 +40,25 @@ export class MockAdapterManager extends AdapterManager { return this.mockedOutstandingCrossChainTransfers[chainId]; } - setMockedOutstandingCrossChainTransfers(chainId: number, address: string, l1Token: string, amount: BigNumber): void { - if (!this.mockedOutstandingCrossChainTransfers[chainId]) { - this.mockedOutstandingCrossChainTransfers[chainId] = {}; - } + setMockedOutstandingCrossChainTransfers( + chainId: number, + address: string, + l1Token: string, + amount: BigNumber, + l2Token?: string + ): void { + this.mockedOutstandingCrossChainTransfers[chainId] ??= {}; + const transfers = this.mockedOutstandingCrossChainTransfers[chainId]; - if (!transfers[address]) { - transfers[address] = {}; - } - transfers[address][l1Token] = { totalAmount: amount, depositTxHashes: [] }; + + transfers[address] ??= {}; + transfers[address][l1Token] ??= {}; + + l2Token ??= getTokenAddressWithCCTP(l1Token, 1, chainId, false); + + transfers[address][l1Token][l2Token] = { + totalAmount: amount, + depositTxHashes: [], + }; } } diff --git a/test/mocks/MockConfigStoreClient.ts b/test/mocks/MockConfigStoreClient.ts index 52ca9743c..b41233dd9 100644 --- a/test/mocks/MockConfigStoreClient.ts +++ b/test/mocks/MockConfigStoreClient.ts @@ -1,4 +1,4 @@ -import { clients } from "@across-protocol/sdk-v2"; +import { clients } from "@across-protocol/sdk"; import { EventSearchConfig, MakeOptional, winston } from "../../src/utils"; import { Contract } from "../utils"; import { CHAIN_ID_TEST_LIST } 
from "../constants"; diff --git a/test/mocks/MockHubPoolClient.ts b/test/mocks/MockHubPoolClient.ts index 06f04d486..d7f64b27f 100644 --- a/test/mocks/MockHubPoolClient.ts +++ b/test/mocks/MockHubPoolClient.ts @@ -1,12 +1,14 @@ -import { clients } from "@across-protocol/sdk-v2"; +import { clients } from "@across-protocol/sdk"; import { Contract, winston, BigNumber } from "../utils"; -import { ConfigStoreClient } from "../../src/clients"; +import { ConfigStoreClient, HubPoolClient } from "../../src/clients"; import { MockConfigStoreClient } from "./MockConfigStoreClient"; +import { L1Token } from "../../src/interfaces"; // Adds functions to MockHubPoolClient to facilitate Dataworker unit testing. export class MockHubPoolClient extends clients.mocks.MockHubPoolClient { public latestBundleEndBlocks: { [chainId: number]: number } = {}; public enableAllL2Tokens: boolean | undefined; + private tokenInfoMap: { [tokenAddress: string]: L1Token } = {}; constructor( logger: winston.Logger, @@ -32,6 +34,27 @@ export class MockHubPoolClient extends clients.mocks.MockHubPoolClient { this.lpTokens[l1Token] = { lastLpFeeUpdate, liquidReserves }; } + mapTokenInfo(token: string, symbol: string, decimals?: number): void { + this.tokenInfoMap[token] = { + symbol, + address: token, + decimals: decimals ?? 18, + }; + } + + getTokenInfoForAddress(token: string): L1Token { + // If output token is mapped manually to a symbol in the symbol map, + // use that info. 
+ if (this.tokenInfoMap[token]) { + return this.tokenInfoMap[token]; + } + return { + symbol: token, + address: token, + decimals: 18, + }; + } + setEnableAllL2Tokens(enableAllL2Tokens: boolean): void { this.enableAllL2Tokens = enableAllL2Tokens; } @@ -43,3 +66,24 @@ export class MockHubPoolClient extends clients.mocks.MockHubPoolClient { return this.enableAllL2Tokens; } } + +export class SimpleMockHubPoolClient extends HubPoolClient { + private tokenInfoMap: { [tokenAddress: string]: L1Token } = {}; + + mapTokenInfo(token: string, symbol: string, decimals = 18): void { + this.tokenInfoMap[token] = { + symbol, + address: token, + decimals, + }; + } + + getTokenInfoForAddress(token: string, chainId: number): L1Token { + // If output token is mapped manually to a symbol in the symbol map, + // use that info. + if (this.tokenInfoMap[token]) { + return this.tokenInfoMap[token]; + } + return super.getTokenInfoForAddress(token, chainId); + } +} diff --git a/test/mocks/MockInventoryClient.ts b/test/mocks/MockInventoryClient.ts index 0916da06c..a2333df2a 100644 --- a/test/mocks/MockInventoryClient.ts +++ b/test/mocks/MockInventoryClient.ts @@ -33,8 +33,8 @@ export class MockInventoryClient extends InventoryClient { } // eslint-disable-next-line @typescript-eslint/no-unused-vars - async determineRefundChainId(_deposit: Deposit): Promise { - return this.inventoryConfig === null ? 1 : super.determineRefundChainId(_deposit); + override async determineRefundChainId(_deposit: Deposit): Promise { + return this.inventoryConfig === null ? 
[1] : super.determineRefundChainId(_deposit); } setExcessRunningBalances(l1Token: string, balances: { [chainId: number]: BigNumber }): void { @@ -53,7 +53,7 @@ export class MockInventoryClient extends InventoryClient { this.possibleRebalances = []; } - getPossibleRebalances(): Rebalance[] { + override getPossibleRebalances(): Rebalance[] { return this.possibleRebalances; } @@ -61,7 +61,7 @@ export class MockInventoryClient extends InventoryClient { this.balanceOnChain = newBalance; } - getBalanceOnChainForL1Token(): BigNumber { + override getBalanceOnChain(): BigNumber { return this.balanceOnChain; } } diff --git a/test/mocks/MockProfitClient.ts b/test/mocks/MockProfitClient.ts index e49e259e1..e20a31a21 100644 --- a/test/mocks/MockProfitClient.ts +++ b/test/mocks/MockProfitClient.ts @@ -1,4 +1,4 @@ -import { utils as sdkUtils } from "@across-protocol/sdk-v2"; +import { utils as sdkUtils } from "@across-protocol/sdk"; import { ProfitClient } from "../../src/clients"; import { SpokePoolClientsByChain } from "../../src/interfaces"; import { bnOne, isDefined, TOKEN_SYMBOLS_MAP } from "../../src/utils"; diff --git a/test/mocks/MockSpokePoolClient.ts b/test/mocks/MockSpokePoolClient.ts index 4e61c5eba..260625eda 100644 --- a/test/mocks/MockSpokePoolClient.ts +++ b/test/mocks/MockSpokePoolClient.ts @@ -1,4 +1,4 @@ -import { clients } from "@across-protocol/sdk-v2"; +import { clients } from "@across-protocol/sdk"; export class MockSpokePoolClient extends clients.mocks.MockSpokePoolClient { public maxFillDeadlineOverride?: number; public oldestBlockTimestampOverride?: number; diff --git a/test/types/index.ts b/test/types/index.ts index a8b6e47a1..17797f4fb 100644 --- a/test/types/index.ts +++ b/test/types/index.ts @@ -1,7 +1,7 @@ import { HardhatEthersHelpers } from "@nomiclabs/hardhat-ethers/types"; import type { ethers } from "ethers"; import winston from "winston"; -import * as utils from "@across-protocol/contracts-v2/dist/test-utils"; +import * as utils from 
"@across-protocol/contracts/dist/test-utils"; import { sinon } from "../utils"; export type EthersTestLibrary = typeof ethers & HardhatEthersHelpers; diff --git a/test/utils/SpokePoolUtils.ts b/test/utils/SpokePoolUtils.ts index 7447c52fe..445c55dda 100644 --- a/test/utils/SpokePoolUtils.ts +++ b/test/utils/SpokePoolUtils.ts @@ -1,5 +1,5 @@ import { Contract, bnZero } from "../../src/utils"; -import { interfaces } from "@across-protocol/sdk-v2"; +import { interfaces } from "@across-protocol/sdk"; import { repaymentChainId } from "../constants"; import { SlowFillRequestWithBlock } from "../../src/interfaces"; import { SignerWithAddress } from "./utils"; diff --git a/test/utils/UBAUtils.ts b/test/utils/UBAUtils.ts index a1f6b8494..6b954890d 100644 --- a/test/utils/UBAUtils.ts +++ b/test/utils/UBAUtils.ts @@ -1,4 +1,4 @@ -import { clients, constants } from "@across-protocol/sdk-v2"; +import { clients, constants } from "@across-protocol/sdk"; /** * This is a helper function to generate an array of empty objects that are typed as SpokePoolClients. 
diff --git a/test/utils/index.ts b/test/utils/index.ts index ae87b741d..19bbc0bae 100644 --- a/test/utils/index.ts +++ b/test/utils/index.ts @@ -1,5 +1,5 @@ -export * as contracts from "@across-protocol/contracts-v2/dist/test-utils"; -export * as uma from "@uma/financial-templates-lib"; +export * as contracts from "@across-protocol/contracts/dist/test-utils"; +export * as uma from "@uma/logger"; export * from "./utils"; export * from "./BlockchainUtils"; diff --git a/test/utils/utils.ts b/test/utils/utils.ts index f10b86df3..171457aeb 100644 --- a/test/utils/utils.ts +++ b/test/utils/utils.ts @@ -1,8 +1,8 @@ -import * as utils from "@across-protocol/contracts-v2/dist/test-utils"; +import * as utils from "@across-protocol/contracts/dist/test-utils"; import { TokenRolesEnum } from "@uma/common"; -import { SpyTransport, bigNumberFormatter } from "@uma/financial-templates-lib"; -import { AcrossConfigStore, FakeContract } from "@across-protocol/contracts-v2"; -import { constants, utils as sdkUtils } from "@across-protocol/sdk-v2"; +import { SpyTransport, bigNumberFormatter } from "@uma/logger"; +import { AcrossConfigStore, FakeContract } from "@across-protocol/contracts"; +import { constants, utils as sdkUtils } from "@across-protocol/sdk"; import { BigNumber, Contract, providers } from "ethers"; import chai, { assert, expect } from "chai"; import chaiExclude from "chai-exclude"; @@ -10,7 +10,7 @@ import sinon from "sinon"; import winston from "winston"; import { GLOBAL_CONFIG_STORE_KEYS } from "../../src/clients"; import { V3Deposit, V3DepositWithBlock, V3FillWithBlock, V3SlowFillLeaf } from "../../src/interfaces"; -import { isDefined, toBN, toBNWei, toWei, utf8ToHex, ZERO_ADDRESS } from "../../src/utils"; +import { isDefined, spreadEvent, toBN, toBNWei, toWei, utf8ToHex, ZERO_ADDRESS } from "../../src/utils"; import { DEFAULT_BLOCK_RANGE_FOR_CHAIN, MAX_L1_TOKENS_PER_POOL_REBALANCE_LEAF, @@ -18,7 +18,6 @@ import { sampleRateModel, } from "../constants"; import { 
SpokePoolDeploymentResult, SpyLoggerResult } from "../types"; -import { CombinedRefunds } from "../../src/dataworker/DataworkerUtils"; export { SpyTransport, @@ -27,7 +26,7 @@ export { lastSpyLogLevel, spyLogIncludes, spyLogLevel, -} from "@uma/financial-templates-lib"; +} from "@uma/logger"; export { MAX_SAFE_ALLOWANCE, MAX_UINT_VAL } from "../../src/utils"; export const { ethers, @@ -290,55 +289,41 @@ export async function depositV3( const exclusivityDeadline = opts.exclusivityDeadline ?? 0; const exclusiveRelayer = opts.exclusiveRelayer ?? ZERO_ADDRESS; - await spokePool - .connect(signer) - .depositV3( - depositor, - recipient, - inputToken, - outputToken, - inputAmount, - outputAmount, - destinationChainId, - exclusiveRelayer, - quoteTimestamp, - fillDeadline, - exclusivityDeadline, - message - ); - - const [events, originChainId] = await Promise.all([ - spokePool.queryFilter(spokePool.filters.V3FundsDeposited()), + const [originChainId, txnResponse] = await Promise.all([ spokePool.chainId(), + spokePool + .connect(signer) + .depositV3( + depositor, + recipient, + inputToken, + outputToken, + inputAmount, + outputAmount, + destinationChainId, + exclusiveRelayer, + quoteTimestamp, + fillDeadline, + exclusivityDeadline, + message + ), ]); + const txnReceipt = await txnResponse.wait(); - const lastEvent = events.at(-1); - let args = lastEvent?.args; - assert.exists(args); - args = args!; // tsc coersion - - const { blockNumber, transactionHash, transactionIndex, logIndex } = lastEvent!; + const _topic = "V3FundsDeposited"; + const topic = spokePool.interface.getEventTopic(_topic); + const eventLog = txnReceipt.logs.find(({ topics: [eventTopic] }) => eventTopic === topic); + const { args } = spokePool.interface.parseLog(eventLog); + const { blockNumber, transactionHash, transactionIndex } = txnReceipt; + const { logIndex } = eventLog; return { - depositId: args.depositId, originChainId: Number(originChainId), - destinationChainId: Number(args.destinationChainId), 
- depositor: args.depositor, - recipient: args.recipient, - inputToken: args.inputToken, - inputAmount: args.inputAmount, - outputToken: args.outputToken, - outputAmount: args.outputAmount, - quoteTimestamp: args.quoteTimestamp, - message: args.message, - fillDeadline: args.fillDeadline, - exclusivityDeadline: args.exclusivityDeadline, - exclusiveRelayer: args.exclusiveRelayer, - quoteBlockNumber: 0, // @todo blockNumber, transactionHash, transactionIndex, logIndex, + ...spreadEvent(args), }; } @@ -476,10 +461,14 @@ export function getDefaultBlockRange(toBlockOffset: number): number[][] { return DEFAULT_BLOCK_RANGE_FOR_CHAIN.map((range) => [range[0], range[1] + toBlockOffset]); } -export function createRefunds(address: string, refundAmount: BigNumber, token: string): CombinedRefunds { +export function createRefunds( + outputToken: string, + refundAmount: BigNumber, + repaymentToken: string +): { [repaymentToken: string]: { [outputToken: string]: BigNumber } } { return { - [token]: { - [address]: refundAmount, + [repaymentToken]: { + [outputToken]: refundAmount, }, }; } diff --git a/yarn.lock b/yarn.lock index dae62fef0..693465f9a 100644 --- a/yarn.lock +++ b/yarn.lock @@ -11,47 +11,26 @@ "@uma/common" "^2.17.0" hardhat "^2.9.3" -"@across-protocol/constants-v2@1.0.19": - version "1.0.19" - resolved "https://registry.yarnpkg.com/@across-protocol/constants-v2/-/constants-v2-1.0.19.tgz#398d64f590bb0ebea6c4241acac2af24af4ab4e1" - integrity sha512-v12qvWxuN37HLSCg+LahzGIShakR4/+l7K6GtVEIRg/dj/HTUtGzgpB4Uyx6nYkA6ffAKcxJaUK600JovNhkEA== - -"@across-protocol/constants-v2@^1.0.14": - version "1.0.14" - resolved "https://registry.yarnpkg.com/@across-protocol/constants-v2/-/constants-v2-1.0.14.tgz#2eb6624c306db3f184293d8abb023d2354abadce" - integrity sha512-7C8hyH/7aDh8AOd5DLQrtcey6Ip4Q6o8FmSiDrBnxcD/aVeH+L8cZhh/hRMLm8CA5Olx4usEarfEfZ2rN0AZfg== - -"@across-protocol/constants-v2@^1.0.19": - version "1.0.20" - resolved 
"https://registry.yarnpkg.com/@across-protocol/constants-v2/-/constants-v2-1.0.20.tgz#d28fb3d3be8514db51c214d92bee954d15e5f06a" - integrity sha512-DYb48kaAzv7t4/FVi4a5KYGNxJSG2rOY2mSznuWDI4n4mezM1wTTtQzK3un15tcwkTlIeO13CaDeABgR6jIVvg== - -"@across-protocol/contracts-v2@2.5.4": - version "2.5.4" - resolved "https://registry.yarnpkg.com/@across-protocol/contracts-v2/-/contracts-v2-2.5.4.tgz#7e1b6ff26d159abdad3a0ac51991f09df0f33f74" - integrity sha512-LdHN2XQIrzj3CyzVlYY69ppeLxohwjwzsAaRtZ7nbB/HsVwUuJJoCjoUAV+ePGjqKhxN2lojFFlni7uMnSUeRw== - dependencies: - "@across-protocol/constants-v2" "^1.0.14" - "@defi-wonderland/smock" "^2.3.4" - "@eth-optimism/contracts" "^0.5.40" - "@ethersproject/abstract-provider" "5.7.0" - "@ethersproject/abstract-signer" "5.7.0" - "@ethersproject/bignumber" "5.7.0" - "@openzeppelin/contracts" "4.9.6" - "@openzeppelin/contracts-upgradeable" "4.9.6" - "@scroll-tech/contracts" "^0.1.0" - "@uma/common" "^2.34.0" - "@uma/contracts-node" "^0.4.17" - "@uma/core" "^2.56.0" - axios "^1.6.2" - zksync-web3 "^0.14.3" +"@across-protocol/constants@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@across-protocol/constants/-/constants-3.0.0.tgz#0f5cbcaf40d43babbecb322d91a689b49c5536a0" + integrity sha512-iJz1EDdYr+GRgHRZwcAw2G06ZvV9VOW6GQ846X7G/dd8wVQej18v1Fpx6th4bcSba1j7jR5Hfi+/zqSDoxyLmw== + +"@across-protocol/contracts@^0.1.4": + version "0.1.4" + resolved "https://registry.yarnpkg.com/@across-protocol/contracts/-/contracts-0.1.4.tgz#64b3d91e639d2bb120ea94ddef3d160967047fa5" + integrity sha512-y9FVRSFdPgEdGWBcf8rUmmzdYhzGdy0752HwpaAFtMJ1pn+HFgNaI0EZc/UudMKIPOkk+/BxPIHYPy7tKad5/A== + dependencies: + "@eth-optimism/contracts" "^0.5.5" + "@openzeppelin/contracts" "4.1.0" + "@uma/core" "^2.18.0" -"@across-protocol/contracts-v2@2.5.6": - version "2.5.6" - resolved "https://registry.yarnpkg.com/@across-protocol/contracts-v2/-/contracts-v2-2.5.6.tgz#3734e03ca42b81e8a878e6d88a5c5ddcb5a8e9ca" - integrity 
sha512-A3bmYubISoH/KpaOKYNJ4/SF2f/VLdfo94+/9P1zNDL9bq/S/1KeKq8SU7jjPIJ9JEAqIUk5HhNgrQjYx/1ZkQ== +"@across-protocol/contracts@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@across-protocol/contracts/-/contracts-3.0.0.tgz#2897823d51d1fb8bd4292e9d05500c4d824c3c08" + integrity sha512-4rZTWR8gJAKecdfbhX31HHL5QgRuKJFL7epNEiPfHODFDivAfCltUkY/ozxf0EUnl2u3DrfrEhc6HeOS2lXNHw== dependencies: - "@across-protocol/constants-v2" "^1.0.19" + "@across-protocol/constants" "^3.0.0" "@defi-wonderland/smock" "^2.3.4" "@eth-optimism/contracts" "^0.5.40" "@ethersproject/abstract-provider" "5.7.0" @@ -66,24 +45,15 @@ axios "^1.6.2" zksync-web3 "^0.14.3" -"@across-protocol/contracts@^0.1.4": - version "0.1.4" - resolved "https://registry.yarnpkg.com/@across-protocol/contracts/-/contracts-0.1.4.tgz#64b3d91e639d2bb120ea94ddef3d160967047fa5" - integrity sha512-y9FVRSFdPgEdGWBcf8rUmmzdYhzGdy0752HwpaAFtMJ1pn+HFgNaI0EZc/UudMKIPOkk+/BxPIHYPy7tKad5/A== - dependencies: - "@eth-optimism/contracts" "^0.5.5" - "@openzeppelin/contracts" "4.1.0" - "@uma/core" "^2.18.0" - -"@across-protocol/sdk-v2@0.23.8": - version "0.23.8" - resolved "https://registry.yarnpkg.com/@across-protocol/sdk-v2/-/sdk-v2-0.23.8.tgz#55fcadda3fc1d2ccbe5d76b75a88cd1acf44921e" - integrity sha512-IN+nVmneclHDuyMnnBGA33JHjbq+oPfGNopiAl8ngs9rfCMHH0I3rDU2r4vXA0G08wwwjSiU+usfGVCRJKJY8A== +"@across-protocol/sdk@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@across-protocol/sdk/-/sdk-3.0.0.tgz#f261f9bd24ed30a23f2969f03dfbc551d417867d" + integrity sha512-ZLZlobj96pHI7dXSdhfAj5BDIDP7xryu0BIJnNbkmYeinc8bNG5BkrxKJdjUb8a6VDnexXdGH5dkO8ljr1i+/Q== dependencies: "@across-protocol/across-token" "^1.0.0" - "@across-protocol/constants-v2" "^1.0.19" - "@across-protocol/contracts-v2" "2.5.6" - "@eth-optimism/sdk" "^3.2.2" + "@across-protocol/constants" "^3.0.0" + "@across-protocol/contracts" "^3.0.0" + "@eth-optimism/sdk" "^3.3.1" "@pinata/sdk" "^2.1.0" "@types/mocha" "^10.0.1" "@uma/sdk" "^0.34.1" @@ -272,7 
+242,7 @@ babel-plugin-polyfill-regenerator "^0.3.0" semver "^6.3.0" -"@babel/runtime@7.20.6", "@babel/runtime@^7.15.4", "@babel/runtime@^7.4.4", "@babel/runtime@^7.5.5": +"@babel/runtime@7.20.6", "@babel/runtime@^7.4.4", "@babel/runtime@^7.5.5": version "7.20.6" resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.20.6.tgz#facf4879bfed9b5326326273a64220f099b0fce3" integrity sha512-Q+8MqP7TiHMWzSfwiJwXCjyf4GYA4Dgw3emg/7xmwsdLJOZUp+nMqcOwOzzYheuM1rhDu8FSj2l0aoMygEuXuA== @@ -532,11 +502,6 @@ minimatch "^3.0.4" strip-json-comments "^3.1.1" -"@eth-optimism/contracts-bedrock@0.17.1": - version "0.17.1" - resolved "https://registry.yarnpkg.com/@eth-optimism/contracts-bedrock/-/contracts-bedrock-0.17.1.tgz#729b1dc53ec23d02ea9e68181f994955129f7415" - integrity sha512-Hc5peN5PM8kzl9dzqSD5jv6ED3QliO1DF0dXLRJxfrXR7/rmEeyuAYESUwUM0gdJZjkwRYiS5m230BI6bQmnlw== - "@eth-optimism/contracts@0.6.0": version "0.6.0" resolved "https://registry.yarnpkg.com/@eth-optimism/contracts/-/contracts-0.6.0.tgz#15ae76222a9b4d958a550cafb1960923af613a31" @@ -577,10 +542,10 @@ bufio "^1.0.7" chai "^4.3.4" -"@eth-optimism/core-utils@0.13.1": - version "0.13.1" - resolved "https://registry.yarnpkg.com/@eth-optimism/core-utils/-/core-utils-0.13.1.tgz#f15ec207a629c9bbf1a10425c1b4a4c0be544755" - integrity sha512-1FvzbUmCEy9zSKPG1QWg2VfA2Cy90xBA9Wkp11lXXrz91zUPCNCNSRTujXWYIC86ketNsZp7p4njSf6lTycHCw== +"@eth-optimism/core-utils@0.13.2": + version "0.13.2" + resolved "https://registry.yarnpkg.com/@eth-optimism/core-utils/-/core-utils-0.13.2.tgz#c0187c3abf6d86dad039edf04ff81299253214fe" + integrity sha512-u7TOKm1RxH1V5zw7dHmfy91bOuEAZU68LT/9vJPkuWEjaTl+BgvPDRDTurjzclHzN0GbWdcpOqPZg4ftjkJGaw== dependencies: "@ethersproject/abi" "^5.7.0" "@ethersproject/abstract-provider" "^5.7.0" @@ -593,7 +558,7 @@ "@ethersproject/properties" "^5.7.0" "@ethersproject/rlp" "^5.7.0" "@ethersproject/web" "^5.7.1" - chai "^4.3.9" + chai "^4.3.10" ethers "^5.7.2" node-fetch "^2.6.7" @@ -610,14 +575,13 @@ ethers 
"^5.5.4" lodash "^4.17.21" -"@eth-optimism/sdk@^3.2.2": - version "3.2.2" - resolved "https://registry.yarnpkg.com/@eth-optimism/sdk/-/sdk-3.2.2.tgz#732c2d6fde96a25303b3c5b39b3b3ed1f913d9aa" - integrity sha512-P8YXAlh2lun0KZlwrw4FqmK4kNIoOOzI816XXhfkW3nMVADGRAru3TKSM74MgmEuyGiHrA9EoPRq1WLqUX4B0w== +"@eth-optimism/sdk@^3.3.1": + version "3.3.1" + resolved "https://registry.yarnpkg.com/@eth-optimism/sdk/-/sdk-3.3.1.tgz#f72b6f93b58e2a2943f10aca3be91dfc23d9839f" + integrity sha512-zf8qL+KwYWUUwvdcjF1HpBxgKSt5wsKr8oa6jwqUhdPkQHUtVK5SRKtqXqYplnAgKtxDQYwlK512GU16odEl1w== dependencies: "@eth-optimism/contracts" "0.6.0" - "@eth-optimism/contracts-bedrock" "0.17.1" - "@eth-optimism/core-utils" "0.13.1" + "@eth-optimism/core-utils" "0.13.2" lodash "^4.17.21" merkletreejs "^0.3.11" rlp "^2.2.7" @@ -820,7 +784,7 @@ "@ethersproject/logger" "^5.7.0" "@ethersproject/properties" "^5.7.0" -"@ethersproject/address@5.7.0", "@ethersproject/address@^5.0.0-beta.134", "@ethersproject/address@^5.0.2", "@ethersproject/address@^5.0.4", "@ethersproject/address@^5.0.8", "@ethersproject/address@^5.4.0", "@ethersproject/address@^5.7.0": +"@ethersproject/address@5.7.0", "@ethersproject/address@^5.0.2", "@ethersproject/address@^5.0.4", "@ethersproject/address@^5.0.8", "@ethersproject/address@^5.4.0", "@ethersproject/address@^5.7.0": version "5.7.0" resolved "https://registry.yarnpkg.com/@ethersproject/address/-/address-5.7.0.tgz#19b56c4d74a3b0a46bfdbb6cfcc0a153fc697f37" integrity sha512-9wYhYt7aghVGo758POM5nqcOMaE168Q6aRLJZwUmiqSrAungkG74gSSeKEIR7ukixesdRZGPgVqme6vmxs1fkA== @@ -846,7 +810,7 @@ "@ethersproject/bytes" "^5.7.0" "@ethersproject/properties" "^5.7.0" -"@ethersproject/bignumber@5.7.0", "@ethersproject/bignumber@^5.0.5", "@ethersproject/bignumber@^5.0.7", "@ethersproject/bignumber@^5.1.1", "@ethersproject/bignumber@^5.4.1", "@ethersproject/bignumber@^5.4.2", "@ethersproject/bignumber@^5.7.0": +"@ethersproject/bignumber@5.7.0", "@ethersproject/bignumber@^5.0.5", 
"@ethersproject/bignumber@^5.0.7", "@ethersproject/bignumber@^5.1.1", "@ethersproject/bignumber@^5.4.1", "@ethersproject/bignumber@^5.7.0": version "5.7.0" resolved "https://registry.yarnpkg.com/@ethersproject/bignumber/-/bignumber-5.7.0.tgz#e2f03837f268ba655ffba03a57853e18a18dc9c2" integrity sha512-n1CAdIHRWjSucQO3MC1zPSVgV/6dy/fjL9pMrPP9peL+QxEg9wOsVqwD4+818B6LUEtaXzVHQiuivzRoxPxUGw== @@ -869,7 +833,7 @@ dependencies: "@ethersproject/bignumber" "^5.7.0" -"@ethersproject/contracts@5.7.0", "@ethersproject/contracts@^5.0.0-beta.143", "@ethersproject/contracts@^5.4.1", "@ethersproject/contracts@^5.7.0": +"@ethersproject/contracts@5.7.0", "@ethersproject/contracts@^5.4.1", "@ethersproject/contracts@^5.7.0": version "5.7.0" resolved "https://registry.yarnpkg.com/@ethersproject/contracts/-/contracts-5.7.0.tgz#c305e775abd07e48aa590e1a877ed5c316f8bd1e" integrity sha512-5GJbzEU3X+d33CdfPhcyS+z8MzsTrBGk/sc+G+59+tPa9yFkl6HQ9D6L0QMgNTA9q8dT0XKxxkyp883XsQvbbg== @@ -950,7 +914,7 @@ resolved "https://registry.yarnpkg.com/@ethersproject/logger/-/logger-5.7.0.tgz#6ce9ae168e74fecf287be17062b590852c311892" integrity sha512-0odtFdXu/XHtjQXJYA3u9G0G8btm0ND5Cu8M7i5vhEcE8/HmF4Lbdqanwyv4uQTr2tx6b7fQRmgLrsnpQlmnig== -"@ethersproject/networks@5.7.1", "@ethersproject/networks@^5.0.0-beta.135", "@ethersproject/networks@^5.7.0": +"@ethersproject/networks@5.7.1", "@ethersproject/networks@^5.7.0": version "5.7.1" resolved "https://registry.yarnpkg.com/@ethersproject/networks/-/networks-5.7.1.tgz#118e1a981d757d45ccea6bb58d9fd3d9db14ead6" integrity sha512-n/MufjFYv3yFcUyfhnXotyDlNdFb7onmkSy8aQERi2PjNcnWQ66xXxa3XlS8nCcA8aJKJjIIMNJTC7tu80GwpQ== @@ -972,7 +936,7 @@ dependencies: "@ethersproject/logger" "^5.7.0" -"@ethersproject/providers@5.7.2", "@ethersproject/providers@^5.0.0-beta.153", "@ethersproject/providers@^5.4.2", "@ethersproject/providers@^5.4.4", "@ethersproject/providers@^5.5.3", "@ethersproject/providers@^5.7.0", "@ethersproject/providers@^5.7.1", "@ethersproject/providers@^5.7.2": 
+"@ethersproject/providers@5.7.2", "@ethersproject/providers@^5.4.2", "@ethersproject/providers@^5.4.4", "@ethersproject/providers@^5.5.3", "@ethersproject/providers@^5.7.0", "@ethersproject/providers@^5.7.1", "@ethersproject/providers@^5.7.2": version "5.7.2" resolved "https://registry.yarnpkg.com/@ethersproject/providers/-/providers-5.7.2.tgz#f8b1a4f275d7ce58cf0a2eec222269a08beb18cb" integrity sha512-g34EWZ1WWAVgr4aptGlVBF8mhl3VWjv+8hoAnzStu8Ah22VHBsuGzP17eb6xDVRzw895G4W7vvx60lFFur/1Rg== @@ -1035,7 +999,7 @@ elliptic "6.5.4" hash.js "1.1.7" -"@ethersproject/solidity@5.7.0", "@ethersproject/solidity@^5.0.0-beta.131", "@ethersproject/solidity@^5.4.0", "@ethersproject/solidity@^5.7.0": +"@ethersproject/solidity@5.7.0", "@ethersproject/solidity@^5.4.0", "@ethersproject/solidity@^5.7.0": version "5.7.0" resolved "https://registry.yarnpkg.com/@ethersproject/solidity/-/solidity-5.7.0.tgz#5e9c911d8a2acce2a5ebb48a5e2e0af20b631cb8" integrity sha512-HmabMd2Dt/raavyaGukF4XxizWKhKQ24DoLtdNbBmNKUOPqwjsKQSdV9GQtj9CBEea9DlzETlVER1gYeXXBGaA== @@ -2261,6 +2225,11 @@ resolved "https://registry.yarnpkg.com/@redis/bloom/-/bloom-1.0.2.tgz#42b82ec399a92db05e29fffcdfd9235a5fc15cdf" integrity sha512-EBw7Ag1hPgFzdznK2PBblc1kdlj5B5Cw3XwI9/oG7tSn85/HKy3X9xHy/8tm/eNXJYHLXHJL/pkwBpFMVVefkw== +"@redis/bloom@1.2.0": + version "1.2.0" + resolved "https://registry.yarnpkg.com/@redis/bloom/-/bloom-1.2.0.tgz#d3fd6d3c0af3ef92f26767b56414a370c7b63b71" + integrity sha512-HG2DFjYKbpNmVXsa0keLHp/3leGJz1mjh09f2RLGGLQZzSHpkmZWuwJbAvo3QcRY8p80m5+ZdXZdYOSBLlp7Cg== + "@redis/client@1.1.0": version "1.1.0" resolved "https://registry.yarnpkg.com/@redis/client/-/client-1.1.0.tgz#e52a85aee802796ceb14bf27daf9550f51f238b8" @@ -2270,26 +2239,55 @@ generic-pool "3.8.2" yallist "4.0.0" +"@redis/client@1.5.14": + version "1.5.14" + resolved "https://registry.yarnpkg.com/@redis/client/-/client-1.5.14.tgz#1107893464d092f140d77c468b018a6ed306a180" + integrity 
sha512-YGn0GqsRBFUQxklhY7v562VMOP0DcmlrHHs3IV1mFE3cbxe31IITUkqhBcIhVSI/2JqtWAJXg5mjV4aU+zD0HA== + dependencies: + cluster-key-slot "1.1.2" + generic-pool "3.9.0" + yallist "4.0.0" + "@redis/graph@1.0.1": version "1.0.1" resolved "https://registry.yarnpkg.com/@redis/graph/-/graph-1.0.1.tgz#eabc58ba99cd70d0c907169c02b55497e4ec8a99" integrity sha512-oDE4myMCJOCVKYMygEMWuriBgqlS5FqdWerikMoJxzmmTUErnTRRgmIDa2VcgytACZMFqpAOWDzops4DOlnkfQ== +"@redis/graph@1.1.1": + version "1.1.1" + resolved "https://registry.yarnpkg.com/@redis/graph/-/graph-1.1.1.tgz#8c10df2df7f7d02741866751764031a957a170ea" + integrity sha512-FEMTcTHZozZciLRl6GiiIB4zGm5z5F3F6a6FZCyrfxdKOhFlGkiAqlexWMBzCi4DcRoyiOsuLfW+cjlGWyExOw== + "@redis/json@1.0.3": version "1.0.3" resolved "https://registry.yarnpkg.com/@redis/json/-/json-1.0.3.tgz#a13fde1d22ebff0ae2805cd8e1e70522b08ea866" integrity sha512-4X0Qv0BzD9Zlb0edkUoau5c1bInWSICqXAGrpwEltkncUwcxJIGEcVryZhLgb0p/3PkKaLIWkjhHRtLe9yiA7Q== +"@redis/json@1.0.6": + version "1.0.6" + resolved "https://registry.yarnpkg.com/@redis/json/-/json-1.0.6.tgz#b7a7725bbb907765d84c99d55eac3fcf772e180e" + integrity sha512-rcZO3bfQbm2zPRpqo82XbW8zg4G/w4W3tI7X8Mqleq9goQjAGLL7q/1n1ZX4dXEAmORVZ4s1+uKLaUOg7LrUhw== + "@redis/search@1.0.6": version "1.0.6" resolved "https://registry.yarnpkg.com/@redis/search/-/search-1.0.6.tgz#53d7451c2783f011ebc48ec4c2891264e0b22f10" integrity sha512-pP+ZQRis5P21SD6fjyCeLcQdps+LuTzp2wdUbzxEmNhleighDDTD5ck8+cYof+WLec4csZX7ks+BuoMw0RaZrA== +"@redis/search@1.1.6": + version "1.1.6" + resolved "https://registry.yarnpkg.com/@redis/search/-/search-1.1.6.tgz#33bcdd791d9ed88ab6910243a355d85a7fedf756" + integrity sha512-mZXCxbTYKBQ3M2lZnEddwEAks0Kc7nauire8q20oA0oA/LoA+E/b5Y5KZn232ztPb1FkIGqo12vh3Lf+Vw5iTw== + "@redis/time-series@1.0.3": version "1.0.3" resolved "https://registry.yarnpkg.com/@redis/time-series/-/time-series-1.0.3.tgz#4cfca8e564228c0bddcdf4418cba60c20b224ac4" integrity 
sha512-OFp0q4SGrTH0Mruf6oFsHGea58u8vS/iI5+NpYdicaM+7BgqBZH8FFvNZ8rYYLrUO/QRqMq72NpXmxLVNcdmjA== +"@redis/time-series@1.0.5": + version "1.0.5" + resolved "https://registry.yarnpkg.com/@redis/time-series/-/time-series-1.0.5.tgz#a6d70ef7a0e71e083ea09b967df0a0ed742bc6ad" + integrity sha512-IFjIgTusQym2B5IZJG3XKr5llka7ey84fw/NOYqESP5WUfQs9zz1ww/9+qoz4ka/S6KcGBodzlCeZ5UImKbscg== + "@resolver-engine/core@^0.3.3": version "0.3.3" resolved "https://registry.yarnpkg.com/@resolver-engine/core/-/core-0.3.3.tgz#590f77d85d45bc7ecc4e06c654f41345db6ca967" @@ -3273,30 +3271,20 @@ "@uniswap/v3-core" "^1.0.0-rc.2" "@uniswap/v3-periphery" "^1.0.0-beta.23" -"@uma/financial-templates-lib@^2.34.1": - version "2.34.1" - resolved "https://registry.yarnpkg.com/@uma/financial-templates-lib/-/financial-templates-lib-2.34.1.tgz#ba0385ab5640651380f461388eafa2e91e1d2009" - integrity sha512-Te4fJAE5S3lhL2iPWZriixiIYuK/OzCzcZ9Mu4MUtcODlCKaDE9SFFbk8lqfewUTtVwMPWO8FI6+3v5LwTf7hA== +"@uma/logger@1.2.0": + version "1.2.0" + resolved "https://registry.yarnpkg.com/@uma/logger/-/logger-1.2.0.tgz#3a271c9a169d0eb5bc50a0b2975fd6a04a61be48" + integrity sha512-5gAjeaUcYX8jZ4c3qYF24hzoukJwXtbTgZhaYBD/5Wdfd+u7a9XjzEk+za8pjGE7ndGxzh1iq/y/0O43OM/6ag== dependencies: - "@ethersproject/bignumber" "^5.4.2" "@google-cloud/logging-winston" "^4.1.1" "@google-cloud/trace-agent" "^5.1.6" "@pagerduty/pdjs" "^2.2.4" - "@uma/common" "^2.34.0" - "@uma/contracts-node" "^0.4.18" - "@uma/sdk" "^0.34.3" - "@uniswap/sdk" "^2.0.5" - bluebird "^3.7.2" bn.js "^4.11.9" discord.js "^14.11.0" dotenv "^9.0.0" - lodash "^4.17.20" - mathjs "^9.2.0" minimist "^1.2.0" - moment "^2.29.1" - moment-timezone "^0.5.33" - node-fetch "^2.6.0" node-pagerduty "^1.2.0" + redis "^4.6.13" superstruct "^1.0.3" web3 "^1.6.0" winston "^3.2.1" @@ -3316,7 +3304,7 @@ mocha "^8.3.0" node-fetch "^2.6.1" -"@uma/sdk@^0.34.1", "@uma/sdk@^0.34.3": +"@uma/sdk@^0.34.1": version "0.34.3" resolved 
"https://registry.yarnpkg.com/@uma/sdk/-/sdk-0.34.3.tgz#cd358e11df02abcf163703d94d4c5f448220b999" integrity sha512-1DqzkculvR5qlRv0R1by9F/RJfMWgFgQa4nn4W2pWhyTvhsTi7/9ZFDbRgm5iU9xRHnWWZiEQE89SIZ8Vl+UiQ== @@ -3351,30 +3339,12 @@ resolved "https://registry.yarnpkg.com/@uniswap/lib/-/lib-4.0.1-alpha.tgz#2881008e55f075344675b3bca93f020b028fbd02" integrity sha512-f6UIliwBbRsgVLxIaBANF6w09tYqc6Y/qXdsrbEmXHyFA7ILiKrIwRFXe1yOg8M3cksgVsO9N7yuL2DdCGQKBA== -"@uniswap/sdk@^2.0.5": - version "2.1.0" - resolved "https://registry.yarnpkg.com/@uniswap/sdk/-/sdk-2.1.0.tgz#b71abcc7b69099a71960d38ab5f9266411ff94db" - integrity sha512-pqPllIlHtH4mhnKj581MW1oNVZxwlv0NIVnLTUoMqygxoWTjdddHL1JoEEWQKpf+RYh4BbSFGClnljNKmJqTkg== - dependencies: - "@ethersproject/address" "^5.0.0-beta.134" - "@ethersproject/contracts" "^5.0.0-beta.143" - "@ethersproject/networks" "^5.0.0-beta.135" - "@ethersproject/providers" "^5.0.0-beta.153" - "@ethersproject/solidity" "^5.0.0-beta.131" - "@uniswap/v2-core" "^1.0.0" - big.js "^5.2.2" - decimal.js-light "^2.5.0" - jsbi "^3.1.1" - tiny-invariant "^1.1.0" - tiny-warning "^1.0.3" - toformat "^2.0.0" - "@uniswap/v2-core@1.0.0": version "1.0.0" resolved "https://registry.yarnpkg.com/@uniswap/v2-core/-/v2-core-1.0.0.tgz#e0fab91a7d53e8cafb5326ae4ca18351116b0844" integrity sha512-BJiXrBGnN8mti7saW49MXwxDBRFiWemGetE58q8zgfnPPzQKq55ADltEILqOt6VFZ22kVeVKbF8gVd8aY3l7pA== -"@uniswap/v2-core@1.0.1", "@uniswap/v2-core@^1.0.0": +"@uniswap/v2-core@1.0.1": version "1.0.1" resolved "https://registry.yarnpkg.com/@uniswap/v2-core/-/v2-core-1.0.1.tgz#af8f508bf183204779938969e2e54043e147d425" integrity sha512-MtybtkUPSyysqLY2U210NBDeCHX+ltHt3oADGdjqoThZaFRDKwM6k1Nb3F0A3hk5hwuQvytFWhrWHOEq6nVJ8Q== @@ -4013,10 +3983,10 @@ axios@^0.27.2: follow-redirects "^1.14.9" form-data "^4.0.0" -axios@^1.6.0: - version "1.6.0" - resolved "https://registry.yarnpkg.com/axios/-/axios-1.6.0.tgz#f1e5292f26b2fd5c2e66876adc5b06cdbd7d2102" - integrity 
sha512-EZ1DYihju9pwVB+jg67ogm+Tmqc6JmhamRN6I4Zt8DfZu5lbcQGw3ozH9lFejSJgs/ibaef3A9PMXPLeefFGJg== +axios@^1.6.1: + version "1.6.1" + resolved "https://registry.yarnpkg.com/axios/-/axios-1.6.1.tgz#76550d644bf0a2d469a01f9244db6753208397d7" + integrity sha512-vfBmhDpKafglh0EldBEbVuoe7DyAavGSLWhuSm5ZSEKQnHhBf0xAAwybbNH1IkrJNGnS/VG4I5yxig1pCEXE4g== dependencies: follow-redirects "^1.15.0" form-data "^4.0.0" @@ -4114,11 +4084,6 @@ big-number@^2.0.0: resolved "https://registry.yarnpkg.com/big-number/-/big-number-2.0.0.tgz#98548eda9393b445791670a213aed6f6dcd66ee3" integrity sha512-C67Su0g+XsmXADX/UM9L/+xSbqqwq0D/qGJs2ky6Noy2FDuCZnC38ZSXODiaBvqWma2VYRZEXgm4H74PS6tCDg== -big.js@^5.2.2: - version "5.2.2" - resolved "https://registry.yarnpkg.com/big.js/-/big.js-5.2.2.tgz#65f0af382f578bcdc742bd9c281e9cb2d7768328" - integrity sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ== - big.js@^6.0.3: version "6.1.1" resolved "https://registry.yarnpkg.com/big.js/-/big.js-6.1.1.tgz#63b35b19dc9775c94991ee5db7694880655d5537" @@ -4691,7 +4656,7 @@ chai@^4.3.0, chai@^4.3.4, chai@^4.3.7: pathval "^1.1.1" type-detect "^4.0.5" -chai@^4.3.9: +chai@^4.3.10: version "4.4.1" resolved "https://registry.yarnpkg.com/chai/-/chai-4.4.1.tgz#3603fa6eba35425b0f2ac91a009fe924106e50d1" integrity sha512-13sOfMv2+DWduEU+/xbun3LScLoqN17nBeTLUsmDfKdoiC1fr0n9PU4guu4AhRcOVFk/sW8LyZWHuhWtQZiF+g== @@ -5043,6 +5008,11 @@ cluster-key-slot@1.1.0: resolved "https://registry.yarnpkg.com/cluster-key-slot/-/cluster-key-slot-1.1.0.tgz#30474b2a981fb12172695833052bc0d01336d10d" integrity sha512-2Nii8p3RwAPiFwsnZvukotvow2rIHM+yQ6ZcBXGHdniadkYGZYiGmkHJIbZPIV9nfv7m/U1IPMVVcAhoWFeklw== +cluster-key-slot@1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/cluster-key-slot/-/cluster-key-slot-1.1.2.tgz#88ddaa46906e303b5de30d3153b7d9fe0a0c19ac" + integrity sha512-RMr0FhtfXemyinomL4hrWcYJxmX6deFdCxpJzhDttxgO1+bcCnkk+9drydLVDmAMG7NE6aN/fl4F7ucU/90gAA== + cmd-shim@^4.0.1: version 
"4.1.0" resolved "https://registry.yarnpkg.com/cmd-shim/-/cmd-shim-4.1.0.tgz#b3a904a6743e9fede4148c6f3800bf2a08135bdd" @@ -5188,11 +5158,6 @@ compare-versions@^5.0.0: resolved "https://registry.yarnpkg.com/compare-versions/-/compare-versions-5.0.3.tgz#a9b34fea217472650ef4a2651d905f42c28ebfd7" integrity sha512-4UZlZP8Z99MGEY+Ovg/uJxJuvoXuN4M6B3hKaiackiHrgzQFEe3diJi1mf1PNHbFujM7FvLrK2bpgIaImbtZ1A== -complex.js@^2.0.15: - version "2.1.1" - resolved "https://registry.yarnpkg.com/complex.js/-/complex.js-2.1.1.tgz#0675dac8e464ec431fb2ab7d30f41d889fb25c31" - integrity sha512-8njCHOTtFFLtegk6zQo0kkVX1rngygb/KQI6z1qZxlFI3scluC+LVTCFbrkWjBv4vvLlbQ9t88IPMC6k95VTTg== - compressible@^2.0.12: version "2.0.18" resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.18.tgz#af53cca6b070d4c3c0750fbd77286a6d7cc46fba" @@ -5539,11 +5504,6 @@ decamelize@^4.0.0: resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-4.0.0.tgz#aa472d7bf660eb15f3494efd531cab7f2a709837" integrity sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ== -decimal.js-light@^2.5.0: - version "2.5.1" - resolved "https://registry.yarnpkg.com/decimal.js-light/-/decimal.js-light-2.5.1.tgz#134fd32508f19e208f4fb2f8dac0d2626a867934" - integrity sha512-qIMFpTMZmny+MMIitAB6D7iVPEorVw6YQRWkvarTkT4tBeSLLiHzcwj6q0MmYSFCiVpiqPJTJEYIrpcPzVEIvg== - decimal.js@^10.2.1, decimal.js@^10.3.1: version "10.3.1" resolved "https://registry.yarnpkg.com/decimal.js/-/decimal.js-10.3.1.tgz#d8c3a444a9c6774ba60ca6ad7261c3a94fd5e783" @@ -6159,11 +6119,6 @@ escape-html@~1.0.3: resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" integrity sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg= -escape-latex@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/escape-latex/-/escape-latex-1.2.0.tgz#07c03818cf7dac250cce517f4fda1b001ef2bca1" - integrity 
sha512-nV5aVWW1K0wEiUIEdZ4erkGGH8mDxGyxSeqPzRNtWP7ataw+/olFObw7hujFWlVjNsaDFw5VZ5NzVSIqRgfTiw== - escape-string-regexp@1.0.5, escape-string-regexp@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" @@ -7246,11 +7201,6 @@ fp-ts@^1.0.0: resolved "https://registry.yarnpkg.com/fp-ts/-/fp-ts-1.19.5.tgz#3da865e585dfa1fdfd51785417357ac50afc520a" integrity sha512-wDNqTimnzs8QqpldiId9OavWK2NptormjXnRJTQecNjzwfyp6P/8s/zG8e4h3ja3oqkKaY72UlTjQYt/1yXf9A== -fraction.js@^4.1.1: - version "4.2.0" - resolved "https://registry.yarnpkg.com/fraction.js/-/fraction.js-4.2.0.tgz#448e5109a313a3527f5a3ab2119ec4cf0e0e2950" - integrity sha512-MhLuK+2gUcnZe8ZHlaaINnQLl0xRIGRfcGk2yl8xoQAfHrSsL3rYu6FCmBdkdbhc9EPlwyGHewaRsvwRMJtAlA== - fresh@0.5.2: version "0.5.2" resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" @@ -7488,6 +7438,11 @@ generic-pool@3.8.2: resolved "https://registry.yarnpkg.com/generic-pool/-/generic-pool-3.8.2.tgz#aab4f280adb522fdfbdc5e5b64d718d3683f04e9" integrity sha512-nGToKy6p3PAbYQ7p1UlWl6vSPwfwU6TMSWK7TTu+WUY4ZjyZQGniGGt2oNVvyNSpyZYSB43zMXVLcBm08MTMkg== +generic-pool@3.9.0: + version "3.9.0" + resolved "https://registry.yarnpkg.com/generic-pool/-/generic-pool-3.9.0.tgz#36f4a678e963f4fdb8707eab050823abc4e8f5e4" + integrity sha512-hymDOu5B53XvN4QT9dBmZxPX4CWhBPPLguTZ9MMFeFa/Kg0xWVfylOVNlJji/E7yTZWFd/q9GO5TxDLq156D7g== + get-caller-file@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-1.0.3.tgz#f978fa4c90d1dfe7ff2d6beda2a515e713bdcf4a" @@ -9122,11 +9077,6 @@ jackspeak@^2.3.5: optionalDependencies: "@pkgjs/parseargs" "^0.11.0" -javascript-natural-sort@^0.7.1: - version "0.7.1" - resolved "https://registry.yarnpkg.com/javascript-natural-sort/-/javascript-natural-sort-0.7.1.tgz#f9e2303d4507f6d74355a73664d1440fb5a0ef59" - integrity sha1-+eIwPUUH9tdDVac2ZNFED7Wg71k= - 
js-cookie@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/js-cookie/-/js-cookie-2.2.1.tgz#69e106dc5d5806894562902aa5baec3744e9b2b8" @@ -9189,11 +9139,6 @@ js2xmlparser@^4.0.2: dependencies: xmlcreate "^2.0.4" -jsbi@^3.1.1: - version "3.2.5" - resolved "https://registry.yarnpkg.com/jsbi/-/jsbi-3.2.5.tgz#b37bb90e0e5c2814c1c2a1bcd8c729888a2e37d6" - integrity sha512-aBE4n43IPvjaddScbvWRA2YlTzKEynHzu7MqOyTipdHucf/VxS63ViCjxYRg86M8Rxwbt/GfzHl1kKERkt45fQ== - jsbn@~0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" @@ -9892,7 +9837,7 @@ lodash.uniqby@^4.7.0: resolved "https://registry.yarnpkg.com/lodash.uniqby/-/lodash.uniqby-4.7.0.tgz#d99c07a669e9e6d24e1362dfe266c67616af1302" integrity sha1-2ZwHpmnp5tJOE2Lf4mbGdhavEwI= -lodash@^4.13.1, lodash@^4.17.11, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.19, lodash@^4.17.20, lodash@^4.17.21: +lodash@^4.13.1, lodash@^4.17.11, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.19, lodash@^4.17.21: version "4.17.21" resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== @@ -10090,21 +10035,6 @@ match-all@^1.2.6: resolved "https://registry.yarnpkg.com/match-all/-/match-all-1.2.6.tgz#66d276ad6b49655551e63d3a6ee53e8be0566f8d" integrity sha512-0EESkXiTkWzrQQntBu2uzKvLu6vVkUGz40nGPbSZuegcfE5UuSzNjLaIu76zJWuaT/2I3Z/8M06OlUOZLGwLlQ== -mathjs@^9.2.0: - version "9.5.2" - resolved "https://registry.yarnpkg.com/mathjs/-/mathjs-9.5.2.tgz#e0f3279320dc6f49e45d99c4fcdd8b52231f0462" - integrity sha512-c0erTq0GP503/Ch2OtDOAn50GIOsuxTMjmE00NI/vKJFSWrDaQHRjx6ai+16xYv70yBSnnpUgHZGNf9FR9IwmA== - dependencies: - "@babel/runtime" "^7.15.4" - complex.js "^2.0.15" - decimal.js "^10.3.1" - escape-latex "^1.2.0" - fraction.js "^4.1.1" - javascript-natural-sort "^0.7.1" - seedrandom "^3.0.5" - tiny-emitter "^2.1.0" - 
typed-function "^2.0.0" - mcl-wasm@^0.7.1: version "0.7.9" resolved "https://registry.yarnpkg.com/mcl-wasm/-/mcl-wasm-0.7.9.tgz#c1588ce90042a8700c3b60e40efb339fc07ab87f" @@ -10608,14 +10538,7 @@ module-error@^1.0.1, module-error@^1.0.2: resolved "https://registry.yarnpkg.com/module-error/-/module-error-1.0.2.tgz#8d1a48897ca883f47a45816d4fb3e3c6ba404d86" integrity sha512-0yuvsqSCv8LbaOKhnsQ/T5JhyFlCYLPXK3U2sgV10zoKQwzs/MyfuQUOZQ1V/6OCOJsK/TRgNVrPuPDqtdMFtA== -moment-timezone@^0.5.33: - version "0.5.37" - resolved "https://registry.yarnpkg.com/moment-timezone/-/moment-timezone-0.5.37.tgz#adf97f719c4e458fdb12e2b4e87b8bec9f4eef1e" - integrity sha512-uEDzDNFhfaywRl+vwXxffjjq1q0Vzr+fcQpQ1bU0kbzorfS7zVtZnCnGc8mhWmF39d4g4YriF6kwA75mJKE/Zg== - dependencies: - moment ">= 2.9.0" - -"moment@>= 2.9.0", moment@^2.24.0, moment@^2.29.1: +moment@^2.24.0: version "2.29.4" resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.4.tgz#3dbe052889fe7c1b2ed966fcb3a77328964ef108" integrity sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w== @@ -12558,6 +12481,18 @@ recursive-readdir@^2.2.2: "@redis/search" "1.0.6" "@redis/time-series" "1.0.3" +redis@^4.6.13: + version "4.6.13" + resolved "https://registry.yarnpkg.com/redis/-/redis-4.6.13.tgz#e247267c5f3ba35ab8277b57343d3a56acb2f0a6" + integrity sha512-MHgkS4B+sPjCXpf+HfdetBwbRz6vCtsceTmw1pHNYJAsYxrfpOP6dz+piJWGos8wqG7qb3vj/Rrc5qOlmInUuA== + dependencies: + "@redis/bloom" "1.2.0" + "@redis/client" "1.5.14" + "@redis/graph" "1.1.1" + "@redis/json" "1.0.6" + "@redis/search" "1.1.6" + "@redis/time-series" "1.0.5" + reduce-flatten@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/reduce-flatten/-/reduce-flatten-2.0.0.tgz#734fd84e65f375d7ca4465c69798c25c9d10ae27" @@ -12995,7 +12930,7 @@ secp256k1@4.0.3, secp256k1@^4.0.0, secp256k1@^4.0.1: node-addon-api "^2.0.0" node-gyp-build "^4.2.0" -seedrandom@3.0.5, seedrandom@^3.0.5: +seedrandom@3.0.5: version "3.0.5" resolved 
"https://registry.yarnpkg.com/seedrandom/-/seedrandom-3.0.5.tgz#54edc85c95222525b0c7a6f6b3543d8e0b3aa0a7" integrity sha512-8OwmbklUNzwezjGInmZ+2clQmExQPvomqjL7LFqOYqtmuxRgQYqOD3mHaU+MvZn5FLUeVxVfQjwLZW/n/JFuqg== @@ -13991,26 +13926,11 @@ timeout-abort-controller@^1.1.1: abort-controller "^3.0.0" retimer "^2.0.0" -tiny-emitter@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/tiny-emitter/-/tiny-emitter-2.1.0.tgz#1d1a56edfc51c43e863cbb5382a72330e3555423" - integrity sha512-NB6Dk1A9xgQPMoGqC5CVXn123gWyte215ONT5Pp5a0yt4nlEoO1ZWeCwpncaekPHXO60i47ihFnZPiRPjRMq4Q== - -tiny-invariant@^1.1.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/tiny-invariant/-/tiny-invariant-1.2.0.tgz#a1141f86b672a9148c72e978a19a73b9b94a15a9" - integrity sha512-1Uhn/aqw5C6RI4KejVeTg6mIS7IqxnLJ8Mv2tV5rTc0qWobay7pDUz6Wi392Cnc8ak1H0F2cjoRzb2/AW4+Fvg== - tiny-relative-date@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/tiny-relative-date/-/tiny-relative-date-1.3.0.tgz#fa08aad501ed730f31cc043181d995c39a935e07" integrity sha512-MOQHpzllWxDCHHaDno30hhLfbouoYlOI8YlMNtvKe1zXbjEVhbcEovQxvZrPvtiYW630GQDoMMarCnjfyfHA+A== -tiny-warning@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/tiny-warning/-/tiny-warning-1.0.3.tgz#94a30db453df4c643d0fd566060d60a875d84754" - integrity sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA== - title-case@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/title-case/-/title-case-2.1.1.tgz#3e127216da58d2bc5becf137ab91dae3a7cd8faa" @@ -14050,11 +13970,6 @@ to-regex-range@^5.0.1: dependencies: is-number "^7.0.0" -toformat@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/toformat/-/toformat-2.0.0.tgz#7a043fd2dfbe9021a4e36e508835ba32056739d8" - integrity sha512-03SWBVop6nU8bpyZCx7SodpYznbZF5R4ljwNLBcTQzKOD9xuihRo/psX58llS1BMFhhAI08H3luot5GoXJz2pQ== - toidentifier@1.0.1: version "1.0.1" resolved 
"https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35" @@ -14290,11 +14205,6 @@ typed-array-length@^1.0.4: for-each "^0.3.3" is-typed-array "^1.1.9" -typed-function@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/typed-function/-/typed-function-2.1.0.tgz#ded6f8a442ba8749ff3fe75bc41419c8d46ccc3f" - integrity sha512-bctQIOqx2iVbWGDGPWwIm18QScpu2XRmkC19D8rQGFsjKSgteq/o1hTZvIG/wuDq8fanpBDrLkLq+aEN/6y5XQ== - typedarray-to-buffer@^3.1.5: version "3.1.5" resolved "https://registry.yarnpkg.com/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz#a97ee7a9ff42691b9f783ff1bc5112fe3fca9080"