diff --git a/script/HdpExecutionStore.s.sol b/script/HdpExecutionStore.s.sol
deleted file mode 100644
index 19b6736..0000000
--- a/script/HdpExecutionStore.s.sol
+++ /dev/null
@@ -1,27 +0,0 @@
-// SPDX-License-Identifier: MIT
-pragma solidity ^0.8.0;
-
-import {Script} from "forge-std/Script.sol";
-import {console2} from "forge-std/console2.sol";
-
-import {IAggregatorsFactory} from "../src/interfaces/IAggregatorsFactory.sol";
-import {IFactsRegistry} from "../src/interfaces/IFactsRegistry.sol";
-import {HdpExecutionStore} from "../src/HdpExecutionStore.sol";
-
-contract HdpExecutionStoreDeployer is Script {
-    function run() external {
-        uint256 deployerPrivateKey = vm.envUint("PRIV_KEY");
-        vm.startBroadcast(deployerPrivateKey);
-
-        IFactsRegistry factsRegistry = IFactsRegistry(vm.envAddress("FACTS_REGISTRY_ADDRESS"));
-        IAggregatorsFactory aggregatorsFactory = IAggregatorsFactory(vm.envAddress("SHARP_AGGREGATORS_FACTORY"));
-
-        // Deploy the HdpExecutionStore
-        HdpExecutionStore hdpExecutionStore =
-            new HdpExecutionStore(factsRegistry, aggregatorsFactory, vm.envBytes32("HDP_PROGRAM_HASH"));
-
-        console2.log("HdpExecutionStore deployed at: ", address(hdpExecutionStore));
-
-        vm.stopBroadcast();
-    }
-}
diff --git a/script/HdpLocal.s.sol b/script/HdpLocal.s.sol
deleted file mode 100644
index 45a4b48..0000000
--- a/script/HdpLocal.s.sol
+++ /dev/null
@@ -1,32 +0,0 @@
-// SPDX-License-Identifier: MIT
-pragma solidity ^0.8.20;
-
-import {Script} from "forge-std/Script.sol";
-import {console2} from "forge-std/console2.sol";
-import {IFactsRegistry} from "../src/interfaces/IFactsRegistry.sol";
-import {IAggregatorsFactory} from "../src/interfaces/IAggregatorsFactory.sol";
-import {MockedSharpFactsRegistry} from "../src/MockedSharpFactsRegistry.sol";
-import {HdpExecutionStore} from "../src/HdpExecutionStore.sol";
-
-contract HdpLocalDeployer is Script {
-    function run() external {
-        uint256 deployerPrivateKey = vm.envUint("PRIV_KEY");
-        vm.startBroadcast(deployerPrivateKey);
-        bytes32 salt = "1337";
-
-        // Deploy the FactRegistry to the local network
-        MockedSharpFactsRegistry factsRegistry = new MockedSharpFactsRegistry{salt: salt}();
-        address factsRegistryAddress = address(factsRegistry);
-
-        IFactsRegistry iFactsRegistry = IFactsRegistry(address(factsRegistry));
-
-        IAggregatorsFactory aggregatorsFactory = IAggregatorsFactory(vm.envAddress("SHARP_AGGREGATORS_FACTORY"));
-
-        // Deploy the HdpExecutionStore
-        HdpExecutionStore hdpExecutionStore =
-            new HdpExecutionStore{salt: salt}(iFactsRegistry, aggregatorsFactory, vm.envBytes32("HDP_PROGRAM_HASH"));
-        console2.log("MockedSharpFactsRegistry: ", factsRegistryAddress);
-        console2.log("HdpExecutionStore: ", address(hdpExecutionStore));
-        vm.stopBroadcast();
-    }
-}
diff --git a/src/HdpExecutionStore.sol b/src/HdpExecutionStore.sol
index 942b2ac..b53133f 100644
--- a/src/HdpExecutionStore.sol
+++ b/src/HdpExecutionStore.sol
@@ -2,16 +2,14 @@
 pragma solidity ^0.8.4;
 
 import {MerkleProof} from "openzeppelin-contracts/contracts/utils/cryptography/MerkleProof.sol";
-import {AccessControl} from "openzeppelin-contracts/contracts/access/AccessControl.sol";
+import {UUPSUpgradeable} from "@openzeppelin/contracts-upgradeable/proxy/utils/UUPSUpgradeable.sol";
+import {OwnableUpgradeable} from "@openzeppelin/contracts-upgradeable/access/OwnableUpgradeable.sol";
 
 import {IFactsRegistry} from "./interfaces/IFactsRegistry.sol";
 import {ISharpFactsAggregator} from "./interfaces/ISharpFactsAggregator.sol";
 import {IAggregatorsFactory} from "./interfaces/IAggregatorsFactory.sol";
"./interfaces/IAggregatorsFactory.sol"; import {BlockSampledDatalake, BlockSampledDatalakeCodecs} from "./datatypes/datalake/BlockSampledDatalakeCodecs.sol"; -import { - TransactionsInBlockDatalake, - TransactionsInBlockDatalakeCodecs -} from "./datatypes/datalake/TransactionsInBlockDatalakeCodecs.sol"; +import {TransactionsInBlockDatalake, TransactionsInBlockDatalakeCodecs} from "./datatypes/datalake/TransactionsInBlockDatalakeCodecs.sol"; import {ComputationalTask, ComputationalTaskCodecs} from "./datatypes/datalake/ComputeCodecs.sol"; import {ModuleTask, ModuleCodecs} from "./datatypes/module/ModuleCodecs.sol"; @@ -29,7 +27,7 @@ error NotFinalized(); /// @title HdpExecutionStore /// @author Herodotus Dev Ltd /// @notice A contract to store the execution results of HDP tasks -contract HdpExecutionStore is AccessControl { +contract HdpExecutionStore is UUPSUpgradeable, OwnableUpgradeable { using MerkleProof for bytes32[]; using BlockSampledDatalakeCodecs for BlockSampledDatalake; using TransactionsInBlockDatalakeCodecs for TransactionsInBlockDatalake; @@ -53,64 +51,84 @@ contract HdpExecutionStore is AccessControl { event MmrRootCached(uint256 mmrId, uint256 mmrSize, bytes32 mmrRoot); /// @notice emitted when a new task with block sampled datalake is scheduled - event TaskWithBlockSampledDatalakeScheduled(BlockSampledDatalake datalake, ComputationalTask task); + event TaskWithBlockSampledDatalakeScheduled( + BlockSampledDatalake datalake, + ComputationalTask task + ); /// @notice emitted when a new task with transactions in block datalake is scheduled - event TaskWithTransactionsInBlockDatalakeScheduled(TransactionsInBlockDatalake datalake, ComputationalTask task); + event TaskWithTransactionsInBlockDatalakeScheduled( + TransactionsInBlockDatalake datalake, + ComputationalTask task + ); /// @notice emitted when a new module task is scheduled event ModuleTaskScheduled(ModuleTask moduleTask); - /// @notice constant representing role of operator - bytes32 public constant OPERATOR_ROLE = keccak256("OPERATOR_ROLE"); - /// @notice constant representing the pedersen hash of the Cairo HDP program bytes32 public PROGRAM_HASH; /// @notice interface to the facts registry of SHARP - IFactsRegistry public immutable SHARP_FACTS_REGISTRY; + IFactsRegistry public SHARP_FACTS_REGISTRY; - /// @notice immutable representing the chain id - uint256 public immutable CHAIN_ID; + /// @notice representing the chain id + uint256 public CHAIN_ID; /// @notice interface to the aggregators factory - IAggregatorsFactory public immutable AGGREGATORS_FACTORY; + IAggregatorsFactory public AGGREGATORS_FACTORY; /// @notice mapping of task result hash => task mapping(bytes32 => TaskResult) public cachedTasksResult; /// @notice mapping of chain id => mmr id => mmr size => mmr root - mapping(uint256 => mapping(uint256 => mapping(uint256 => bytes32))) public cachedMMRsRoots; + mapping(uint256 => mapping(uint256 => mapping(uint256 => bytes32))) + public cachedMMRsRoots; + + function initialize( + IFactsRegistry factsRegistry, + IAggregatorsFactory aggregatorsFactory, + bytes32 programHash + ) public initializer { + __Ownable_init(msg.sender); + __UUPSUpgradeable_init(); - constructor(IFactsRegistry factsRegistry, IAggregatorsFactory aggregatorsFactory, bytes32 programHash) { SHARP_FACTS_REGISTRY = factsRegistry; AGGREGATORS_FACTORY = aggregatorsFactory; PROGRAM_HASH = programHash; CHAIN_ID = block.chainid; - - _setRoleAdmin(OPERATOR_ROLE, OPERATOR_ROLE); - _grantRole(OPERATOR_ROLE, _msgSender()); } - /// @notice Reverts if the 
-    modifier onlyOperator() {
-        require(hasRole(OPERATOR_ROLE, _msgSender()), "Ownable: caller is not the owner");
-        _;
-    }
+    /// @dev Allows the owner to authorize a new implementation
+    function _authorizeUpgrade(
+        address newImplementation
+    ) internal override onlyOwner {}
 
     /// @notice Set the program hash for the HDP program
-    function setProgramHash(bytes32 programHash) external onlyOperator {
+    function setProgramHash(bytes32 programHash) external onlyOwner {
         PROGRAM_HASH = programHash;
     }
 
+    function getProgramHash() external view returns (bytes32) {
+        return PROGRAM_HASH;
+    }
+
     /// @notice Caches the MMR root for a given MMR id
     /// @notice Get MMR size and root from the aggregator and cache it
     function cacheMmrRoot(uint256 mmrId) public {
-        ISharpFactsAggregator aggregator = AGGREGATORS_FACTORY.aggregatorsById(mmrId);
-        ISharpFactsAggregator.AggregatorState memory aggregatorState = aggregator.aggregatorState();
-        cachedMMRsRoots[CHAIN_ID][mmrId][aggregatorState.mmrSize] = aggregatorState.poseidonMmrRoot;
-
-        emit MmrRootCached(mmrId, aggregatorState.mmrSize, aggregatorState.poseidonMmrRoot);
+        ISharpFactsAggregator aggregator = AGGREGATORS_FACTORY.aggregatorsById(
+            mmrId
+        );
+        ISharpFactsAggregator.AggregatorState
+            memory aggregatorState = aggregator.aggregatorState();
+        cachedMMRsRoots[CHAIN_ID][mmrId][
+            aggregatorState.mmrSize
+        ] = aggregatorState.poseidonMmrRoot;
+
+        emit MmrRootCached(
+            mmrId,
+            aggregatorState.mmrSize,
+            aggregatorState.poseidonMmrRoot
+        );
     }
 
     /// @notice Requests the execution of a task with a block sampled datalake
@@ -129,9 +147,15 @@
         }
 
         // Store the task result
-        cachedTasksResult[taskCommitment] = TaskResult({status: TaskStatus.SCHEDULED, result: ""});
-
-        emit TaskWithBlockSampledDatalakeScheduled(blockSampledDatalake, computationalTask);
+        cachedTasksResult[taskCommitment] = TaskResult({
+            status: TaskStatus.SCHEDULED,
+            result: ""
+        });
+
+        emit TaskWithBlockSampledDatalakeScheduled(
+            blockSampledDatalake,
+            computationalTask
+        );
     }
 
     /// @notice Requests the execution of a task with a transactions in block datalake
@@ -150,14 +174,22 @@
         }
 
         // Store the task result
-        cachedTasksResult[taskCommitment] = TaskResult({status: TaskStatus.SCHEDULED, result: ""});
-
-        emit TaskWithTransactionsInBlockDatalakeScheduled(transactionsInBlockDatalake, computationalTask);
+        cachedTasksResult[taskCommitment] = TaskResult({
+            status: TaskStatus.SCHEDULED,
+            result: ""
+        });
+
+        emit TaskWithTransactionsInBlockDatalakeScheduled(
+            transactionsInBlockDatalake,
+            computationalTask
+        );
     }
 
     /// @notice Requests the execution of a task with a module
     /// @param moduleTask module task
-    function requestExecutionOfModuleTask(ModuleTask calldata moduleTask) external {
+    function requestExecutionOfModuleTask(
+        ModuleTask calldata moduleTask
+    ) external {
         bytes32 taskCommitment = moduleTask.commit();
 
         // Ensure task is not already scheduled
@@ -166,7 +198,10 @@
         }
 
         // Store the task result
-        cachedTasksResult[taskCommitment] = TaskResult({status: TaskStatus.SCHEDULED, result: ""});
+        cachedTasksResult[taskCommitment] = TaskResult({
+            status: TaskStatus.SCHEDULED,
+            result: ""
+        });
 
         emit ModuleTaskScheduled(moduleTask);
     }
 
@@ -218,7 +253,9 @@
         bytes32 programOutputHash = keccak256(abi.encodePacked(programOutput));
 
         // Compute GPS fact hash
-        bytes32 gpsFactHash = keccak256(abi.encode(PROGRAM_HASH, programOutputHash));
+        bytes32 gpsFactHash = keccak256(
+            abi.encode(PROGRAM_HASH, programOutputHash)
+        );
 
         // Ensure GPS fact is registered
         if (!SHARP_FACTS_REGISTRY.isValid(gpsFactHash)) {
@@ -232,42 +269,63 @@
             bytes32[] memory resultInclusionProof = resultsInclusionProofs[i];
 
             // Convert the low and high 128 bits to a single 256 bit value
-            bytes32 resultMerkleRoot = bytes32((resultMerkleRootHigh << 128) | resultMerkleRootLow);
-            bytes32 taskMerkleRoot = bytes32((taskMerkleRootHigh << 128) | taskMerkleRootLow);
+            bytes32 resultMerkleRoot = bytes32(
+                (resultMerkleRootHigh << 128) | resultMerkleRootLow
+            );
+            bytes32 taskMerkleRoot = bytes32(
+                (taskMerkleRootHigh << 128) | taskMerkleRootLow
+            );
 
             // Compute the Merkle leaf of the task
             bytes32 taskCommitment = taskCommitments[i];
             bytes32 taskMerkleLeaf = standardLeafHash(taskCommitment);
 
             // Ensure that the task is included in the batch, by verifying the Merkle proof
-            bool isVerifiedTask = taskInclusionProof.verify(taskMerkleRoot, taskMerkleLeaf);
+            bool isVerifiedTask = taskInclusionProof.verify(
+                taskMerkleRoot,
+                taskMerkleLeaf
+            );
 
             if (!isVerifiedTask) {
                 revert NotInBatch();
             }
 
             // Compute the Merkle leaf of the task result
-            bytes32 taskResultCommitment = keccak256(abi.encode(taskCommitment, computationalTaskResult));
-            bytes32 taskResultMerkleLeaf = standardLeafHash(taskResultCommitment);
+            bytes32 taskResultCommitment = keccak256(
+                abi.encode(taskCommitment, computationalTaskResult)
+            );
+            bytes32 taskResultMerkleLeaf = standardLeafHash(
+                taskResultCommitment
+            );
 
             // Ensure that the task result is included in the batch, by verifying the Merkle proof
-            bool isVerifiedResult = resultInclusionProof.verify(resultMerkleRoot, taskResultMerkleLeaf);
+            bool isVerifiedResult = resultInclusionProof.verify(
+                resultMerkleRoot,
+                taskResultMerkleLeaf
+            );
 
             if (!isVerifiedResult) {
                 revert NotInBatch();
             }
 
             // Store the task result
-            cachedTasksResult[taskCommitment] =
-                TaskResult({status: TaskStatus.FINALIZED, result: computationalTaskResult});
+            cachedTasksResult[taskCommitment] = TaskResult({
+                status: TaskStatus.FINALIZED,
+                result: computationalTaskResult
+            });
         }
     }
 
     /// @notice Load MMR root from cache with given mmrId and mmrSize
-    function loadMmrRoot(uint256 mmrId, uint256 mmrSize) public view returns (bytes32) {
+    function loadMmrRoot(
+        uint256 mmrId,
+        uint256 mmrSize
+    ) public view returns (bytes32) {
         return cachedMMRsRoots[CHAIN_ID][mmrId][mmrSize];
     }
 
     /// @notice Returns the result of a finalized task
-    function getFinalizedTaskResult(bytes32 taskCommitment) external view returns (bytes32) {
+    function getFinalizedTaskResult(
+        bytes32 taskCommitment
+    ) external view returns (bytes32) {
         // Ensure task is finalized
         if (cachedTasksResult[taskCommitment].status != TaskStatus.FINALIZED) {
             revert NotFinalized();
@@ -276,7 +334,9 @@
     }
 
     /// @notice Returns the status of a task
-    function getTaskStatus(bytes32 taskCommitment) external view returns (TaskStatus) {
+    function getTaskStatus(
+        bytes32 taskCommitment
+    ) external view returns (TaskStatus) {
         return cachedTasksResult[taskCommitment].status;
     }
 
diff --git a/test/BasicHdpExecutionStore.t.sol b/test/BasicHdpExecutionStore.t.sol
index 3cb5711..e0d6b3c 100644
--- a/test/BasicHdpExecutionStore.t.sol
+++ b/test/BasicHdpExecutionStore.t.sol
@@ -2,6 +2,7 @@
 pragma solidity ^0.8.4;
 
 import {Test} from "forge-std/Test.sol";
+import {ERC1967Proxy} from "openzeppelin-contracts/contracts/proxy/ERC1967/ERC1967Proxy.sol";
"openzeppelin-contracts/contracts/proxy/ERC1967/ERC1967Proxy.sol"; import {HdpExecutionStore} from "../src/HdpExecutionStore.sol"; import {IFactsRegistry} from "../src/interfaces/IFactsRegistry.sol"; import {IAggregatorsFactory} from "../src/interfaces/IAggregatorsFactory.sol"; @@ -18,7 +19,10 @@ contract MockFactsRegistry is IFactsRegistry { contract MockAggregatorsFactory is IAggregatorsFactory { mapping(uint256 => ISharpFactsAggregator) public aggregatorsById; - function createAggregator(uint256 id, ISharpFactsAggregator aggregator) external { + function createAggregator( + uint256 id, + ISharpFactsAggregator aggregator + ) external { aggregatorsById[id] = aggregator; } } @@ -33,16 +37,18 @@ contract MockSharpFactsAggregator is ISharpFactsAggregator { } function aggregatorState() external view returns (AggregatorState memory) { - return AggregatorState({ - poseidonMmrRoot: usedMmrRoot, - keccakMmrRoot: bytes32(0), - mmrSize: usedMmrSize, - continuableParentHash: bytes32(0) - }); + return + AggregatorState({ + poseidonMmrRoot: usedMmrRoot, + keccakMmrRoot: bytes32(0), + mmrSize: usedMmrSize, + continuableParentHash: bytes32(0) + }); } } contract HdpExecutionStoreTest is Test { + ERC1967Proxy public proxy; HdpExecutionStore private hdp; IFactsRegistry private factsRegistry; IAggregatorsFactory private aggregatorsFactory; @@ -57,9 +63,23 @@ contract HdpExecutionStoreTest is Test { aggregatorsFactory = new MockAggregatorsFactory(); bytes32 oldPrgramHash = bytes32(uint256(1)); - hdp = new HdpExecutionStore(factsRegistry, aggregatorsFactory, oldPrgramHash); + hdp = new HdpExecutionStore(); + proxy = new ERC1967Proxy( + address(hdp), + abi.encodeCall( + hdp.initialize, + (factsRegistry, aggregatorsFactory, oldPrgramHash) + ) + ); - assertEq(hdp.PROGRAM_HASH(), oldPrgramHash); + emit log_bytes( + abi.encodeCall( + hdp.initialize, + (factsRegistry, aggregatorsFactory, oldPrgramHash) + ) + ); + + assertEq(hdp.getProgramHash(), oldPrgramHash); bytes32 newProgramHash = bytes32(uint256(2)); hdp.setProgramHash(newProgramHash); diff --git a/test/BlockSampledHdpExecutionStore.t.sol b/test/BlockSampledHdpExecutionStore.t.sol index d2e90fc..8d0ab3b 100644 --- a/test/BlockSampledHdpExecutionStore.t.sol +++ b/test/BlockSampledHdpExecutionStore.t.sol @@ -2,10 +2,9 @@ pragma solidity ^0.8.4; import {Test} from "forge-std/Test.sol"; +import {ERC1967Proxy} from "openzeppelin-contracts/contracts/proxy/ERC1967/ERC1967Proxy.sol"; import {HdpExecutionStore} from "../src/HdpExecutionStore.sol"; -import { - BlockSampledDatalake, BlockSampledDatalakeCodecs -} from "../src/datatypes/datalake/BlockSampledDatalakeCodecs.sol"; +import {BlockSampledDatalake, BlockSampledDatalakeCodecs} from "../src/datatypes/datalake/BlockSampledDatalakeCodecs.sol"; import {ComputationalTask, ComputationalTaskCodecs} from "../src/datatypes/datalake/ComputeCodecs.sol"; import {AggregateFn, Operator} from "../src/datatypes/datalake/ComputeCodecs.sol"; import {IFactsRegistry} from "../src/interfaces/IFactsRegistry.sol"; @@ -24,7 +23,10 @@ contract MockFactsRegistry is IFactsRegistry { contract MockAggregatorsFactory is IAggregatorsFactory { mapping(uint256 => ISharpFactsAggregator) public aggregatorsById; - function createAggregator(uint256 id, ISharpFactsAggregator aggregator) external { + function createAggregator( + uint256 id, + ISharpFactsAggregator aggregator + ) external { aggregatorsById[id] = aggregator; } } @@ -39,12 +41,13 @@ contract MockSharpFactsAggregator is ISharpFactsAggregator { } function aggregatorState() external view 
-        return AggregatorState({
-            poseidonMmrRoot: usedMmrRoot,
-            keccakMmrRoot: bytes32(0),
-            mmrSize: usedMmrSize,
-            continuableParentHash: bytes32(0)
-        });
+        return
+            AggregatorState({
+                poseidonMmrRoot: usedMmrRoot,
+                keccakMmrRoot: bytes32(0),
+                mmrSize: usedMmrSize,
+                continuableParentHash: bytes32(0)
+            });
     }
 }
 
@@ -54,6 +57,7 @@ contract HdpExecutionStoreTest is Test {
 
     address public proverAddress = address(12);
 
+    ERC1967Proxy public proxy;
     HdpExecutionStore private hdp;
     IFactsRegistry private factsRegistry;
     IAggregatorsFactory private aggregatorsFactory;
@@ -73,19 +77,22 @@ contract HdpExecutionStoreTest is Test {
 
     // !! If want to fetch different input, modify helpers/target/bs_cached_input.json && helpers/target/bs_cached_output.json
     // !! And construct corresponding BlockSampledDatalake and ComputationalTask here
-    BlockSampledDatalake datalake = BlockSampledDatalake({
-        chainId: 11155111,
-        blockRangeStart: 5858987,
-        blockRangeEnd: 5858997,
-        increment: 2,
-        sampledProperty: BlockSampledDatalakeCodecs.encodeSampledPropertyForHeaderProp(uint8(18))
-    });
-
-    ComputationalTask computationalTask = ComputationalTask({
-        aggregateFnId: AggregateFn.SLR,
-        operatorId: Operator.NONE,
-        valueToCompare: uint256(10000000)
-    });
+    BlockSampledDatalake datalake =
+        BlockSampledDatalake({
+            chainId: 11155111,
+            blockRangeStart: 5858987,
+            blockRangeEnd: 5858997,
+            increment: 2,
+            sampledProperty: BlockSampledDatalakeCodecs
+                .encodeSampledPropertyForHeaderProp(uint8(18))
+        });
+
+    ComputationalTask computationalTask =
+        ComputationalTask({
+            aggregateFnId: AggregateFn.SLR,
+            operatorId: Operator.NONE,
+            valueToCompare: uint256(10000000)
+        });
 
     function setUp() public {
         vm.chainId(11155111);
@@ -96,7 +103,23 @@
 
         // Get program hash from compiled Cairo program
        programHash = _getProgramHash();
-        hdp = new HdpExecutionStore(factsRegistry, aggregatorsFactory, programHash);
+        hdp = new HdpExecutionStore();
+        proxy = new ERC1967Proxy(
+            address(hdp),
+            abi.encodeCall(
+                hdp.initialize,
+                (factsRegistry, aggregatorsFactory, programHash)
+            )
+        );
+        // Interact through the proxy: the implementation's own storage is never initialized
+        hdp = HdpExecutionStore(address(proxy));
+
+        emit log_bytes(
+            abi.encodeCall(
+                hdp.initialize,
+                (factsRegistry, aggregatorsFactory, programHash)
+            )
+        );
 
         // Parse from input file
         (
@@ -112,22 +135,32 @@
         ) = _fetchCairoInput();
 
         bytes32 computedDatalakeCommitment = datalake.commit();
-        bytes32 computedTaskCommitment = computationalTask.commit(computedDatalakeCommitment);
+        bytes32 computedTaskCommitment = computationalTask.commit(
+            computedDatalakeCommitment
+        );
 
         assertEq(fetchedTasksCommitments[0], computedTaskCommitment);
 
         // Mock SHARP facts aggregator
-        sharpFactsAggregator = new MockSharpFactsAggregator(fetchedMmrRoots[0], fetchedMmrSizes[0]);
+        sharpFactsAggregator = new MockSharpFactsAggregator(
+            fetchedMmrRoots[0],
+            fetchedMmrSizes[0]
+        );
 
         // Create mock SHARP facts aggregator
-        aggregatorsFactory.createAggregator(fetchedMmrIds[0], sharpFactsAggregator);
+        aggregatorsFactory.createAggregator(
+            fetchedMmrIds[0],
+            sharpFactsAggregator
+        );
     }
 
     function testHdpExecutionFlow() public {
-        (uint256 taskRootLow, uint256 taskRootHigh) = Uint256Splitter.split128(uint256(bytes32(fetchedTasksMerkleRoot)));
+        (uint256 taskRootLow, uint256 taskRootHigh) = Uint256Splitter.split128(
+            uint256(bytes32(fetchedTasksMerkleRoot))
+        );
 
-        (uint256 resultRootLow, uint256 resultRootHigh) =
-            Uint256Splitter.split128(uint256(bytes32(fetchedResultsMerkleRoot)));
+        (uint256 resultRootLow, uint256 resultRootHigh) = Uint256Splitter
+            .split128(uint256(bytes32(fetchedResultsMerkleRoot)));
 
         // Cache MMR root
         for (uint256 i = 0; i < fetchedMmrIds.length; i++) {
@@ -158,11 +191,18 @@
         );
 
         // Check if the task state is FINALIZED
-        HdpExecutionStore.TaskStatus taskStatusAfter = hdp.getTaskStatus(fetchedTasksCommitments[0]);
-        assertEq(uint256(taskStatusAfter), uint256(HdpExecutionStore.TaskStatus.FINALIZED));
+        HdpExecutionStore.TaskStatus taskStatusAfter = hdp.getTaskStatus(
+            fetchedTasksCommitments[0]
+        );
+        assertEq(
+            uint256(taskStatusAfter),
+            uint256(HdpExecutionStore.TaskStatus.FINALIZED)
+        );
 
         // Check if the task result is stored
-        bytes32 taskResult = hdp.getFinalizedTaskResult(fetchedTasksCommitments[0]);
+        bytes32 taskResult = hdp.getFinalizedTaskResult(
+            fetchedTasksCommitments[0]
+        );
 
         assertEq(taskResult, fetchedResults[0]);
     }
@@ -177,7 +217,9 @@ contract HdpExecutionStoreTest is Test {
         return abi.decode(abiEncoded, (bytes32));
     }
 
-    function bytesToString(bytes memory _data) public pure returns (string memory) {
+    function bytesToString(
+        bytes memory _data
+    ) public pure returns (string memory) {
         bytes memory buffer = new bytes(_data.length);
         for (uint256 i = 0; i < _data.length; i++) {
             bytes1 b = _data[i];
@@ -232,7 +274,17 @@ contract HdpExecutionStoreTest is Test {
                 taskResults
             ) = abi.decode(
                 abiEncoded,
-                (uint256[], uint256[], bytes32[], bytes32, bytes32, bytes32[][], bytes32[][], bytes32[], bytes32[])
+                (
+                    uint256[],
+                    uint256[],
+                    bytes32[],
+                    bytes32,
+                    bytes32,
+                    bytes32[][],
+                    bytes32[][],
+                    bytes32[],
+                    bytes32[]
+                )
             );
     }
 }
diff --git a/test/ModuleHdpExecutionStore.t.sol b/test/ModuleHdpExecutionStore.t.sol
index 6c8191b..382897e 100644
--- a/test/ModuleHdpExecutionStore.t.sol
+++ b/test/ModuleHdpExecutionStore.t.sol
@@ -2,6 +2,7 @@
 pragma solidity ^0.8.4;
 
 import {Test} from "forge-std/Test.sol";
+import {ERC1967Proxy} from "openzeppelin-contracts/contracts/proxy/ERC1967/ERC1967Proxy.sol";
 import {HdpExecutionStore} from "../src/HdpExecutionStore.sol";
 import {ModuleTask, ModuleCodecs} from "../src/datatypes/module/ModuleCodecs.sol";
 import {IFactsRegistry} from "../src/interfaces/IFactsRegistry.sol";
@@ -20,7 +21,10 @@ contract MockFactsRegistry is IFactsRegistry {
 contract MockAggregatorsFactory is IAggregatorsFactory {
     mapping(uint256 => ISharpFactsAggregator) public aggregatorsById;
 
-    function createAggregator(uint256 id, ISharpFactsAggregator aggregator) external {
+    function createAggregator(
+        uint256 id,
+        ISharpFactsAggregator aggregator
+    ) external {
         aggregatorsById[id] = aggregator;
     }
 }
@@ -35,12 +39,13 @@ contract MockSharpFactsAggregator is ISharpFactsAggregator {
     }
 
     function aggregatorState() external view returns (AggregatorState memory) {
-        return AggregatorState({
-            poseidonMmrRoot: usedMmrRoot,
-            keccakMmrRoot: bytes32(0),
-            mmrSize: usedMmrSize,
-            continuableParentHash: bytes32(0)
-        });
+        return
+            AggregatorState({
+                poseidonMmrRoot: usedMmrRoot,
+                keccakMmrRoot: bytes32(0),
+                mmrSize: usedMmrSize,
+                continuableParentHash: bytes32(0)
+            });
     }
 }
 
@@ -49,6 +54,7 @@ contract HdpExecutionStoreTest is Test {
 
     address public proverAddress = address(12);
 
+    ERC1967Proxy public proxy;
     HdpExecutionStore private hdp;
     IFactsRegistry private factsRegistry;
     IAggregatorsFactory private aggregatorsFactory;
@@ -73,12 +79,26 @@ contract HdpExecutionStoreTest is Test {
         // !! And construct corresponding BlockSampledDatalake and ComputationalTask here
         bytes32[] memory moduleInputs = new bytes32[](2);
         moduleInputs[0] = bytes32(uint256(5382820));
-        assertEq(moduleInputs[0], bytes32(0x00000000000000000000000000000000000000000000000000000000005222a4));
-        moduleInputs[1] = bytes32(uint256(113007187165825507614120510246167695609561346261));
-        assertEq(moduleInputs[1], bytes32(0x00000000000000000000000013cb6ae34a13a0977f4d7101ebc24b87bb23f0d5));
+        assertEq(
+            moduleInputs[0],
+            bytes32(
+                0x00000000000000000000000000000000000000000000000000000000005222a4
+            )
+        );
+        moduleInputs[1] = bytes32(
+            uint256(113007187165825507614120510246167695609561346261)
+        );
+        assertEq(
+            moduleInputs[1],
+            bytes32(
+                0x00000000000000000000000013cb6ae34a13a0977f4d7101ebc24b87bb23f0d5
+            )
+        );
 
         ModuleTask memory moduleTask = ModuleTask({
-            programHash: bytes32(0x064041a339b1edd10de83cf031cfa938645450f971d2527c90d4c2ce68d7d412),
+            programHash: bytes32(
+                0x064041a339b1edd10de83cf031cfa938645450f971d2527c90d4c2ce68d7d412
+            ),
             inputs: moduleInputs
         });
 
@@ -97,7 +117,23 @@
 
         // Get program hash from compiled Cairo program
         programHash = _getProgramHash();
-        hdp = new HdpExecutionStore(factsRegistry, aggregatorsFactory, programHash);
+        hdp = new HdpExecutionStore();
+        proxy = new ERC1967Proxy(
+            address(hdp),
+            abi.encodeCall(
+                hdp.initialize,
+                (factsRegistry, aggregatorsFactory, programHash)
+            )
+        );
+        // Interact through the proxy: the implementation's own storage is never initialized
+        hdp = HdpExecutionStore(address(proxy));
+
+        emit log_bytes(
+            abi.encodeCall(
+                hdp.initialize,
+                (factsRegistry, aggregatorsFactory, programHash)
+            )
+        );
 
         // Parse from input file
         (
@@ -117,17 +153,25 @@ contract HdpExecutionStoreTest is Test {
         assertEq(fetchedTasksCommitments[0], moduleTaskCommitment);
 
         // Mock SHARP facts aggregator
-        sharpFactsAggregator = new MockSharpFactsAggregator(fetchedMmrRoots[0], fetchedMmrSizes[0]);
+        sharpFactsAggregator = new MockSharpFactsAggregator(
+            fetchedMmrRoots[0],
+            fetchedMmrSizes[0]
+        );
 
         // Create mock SHARP facts aggregator
-        aggregatorsFactory.createAggregator(fetchedMmrIds[0], sharpFactsAggregator);
+        aggregatorsFactory.createAggregator(
+            fetchedMmrIds[0],
+            sharpFactsAggregator
+        );
     }
 
     function testHdpExecutionFlow() public {
-        (uint256 taskRootLow, uint256 taskRootHigh) = Uint256Splitter.split128(uint256(bytes32(fetchedTasksMerkleRoot)));
+        (uint256 taskRootLow, uint256 taskRootHigh) = Uint256Splitter.split128(
+            uint256(bytes32(fetchedTasksMerkleRoot))
+        );
 
-        (uint256 resultRootLow, uint256 resultRootHigh) =
-            Uint256Splitter.split128(uint256(bytes32(fetchedResultsMerkleRoot)));
+        (uint256 resultRootLow, uint256 resultRootHigh) = Uint256Splitter
+            .split128(uint256(bytes32(fetchedResultsMerkleRoot)));
 
         // Cache MMR root
         for (uint256 i = 0; i < fetchedMmrIds.length; i++) {
@@ -158,11 +202,18 @@
         );
 
         // Check if the task state is FINALIZED
-        HdpExecutionStore.TaskStatus taskStatusAfter = hdp.getTaskStatus(fetchedTasksCommitments[0]);
-        assertEq(uint256(taskStatusAfter), uint256(HdpExecutionStore.TaskStatus.FINALIZED));
+        HdpExecutionStore.TaskStatus taskStatusAfter = hdp.getTaskStatus(
+            fetchedTasksCommitments[0]
+        );
+        assertEq(
+            uint256(taskStatusAfter),
+            uint256(HdpExecutionStore.TaskStatus.FINALIZED)
+        );
 
         // Check if the task result is stored
-        bytes32 taskResult = hdp.getFinalizedTaskResult(fetchedTasksCommitments[0]);
+        bytes32 taskResult = hdp.getFinalizedTaskResult(
+            fetchedTasksCommitments[0]
+        );
 
         assertEq(taskResult, fetchedResults[0]);
     }
@@ -177,7 +228,10 @@ contract HdpExecutionStoreTest is Test {
         return abi.decode(abiEncoded, (bytes32));
     }
 
-    function _callPreprocessCli(bytes memory encodedTask, bytes memory encodedDatalake) internal {
+    function _callPreprocessCli(
+        bytes memory encodedTask,
+        bytes memory encodedDatalake
+    ) internal {
         string[] memory inputs = new string[](4);
         inputs[0] = "node";
         inputs[1] = "./helpers/fetch_cairo_input.js";
@@ -186,7 +240,9 @@
         vm.ffi(inputs);
     }
 
-    function bytesToString(bytes memory _data) public pure returns (string memory) {
+    function bytesToString(
+        bytes memory _data
+    ) public pure returns (string memory) {
         bytes memory buffer = new bytes(_data.length);
         for (uint256 i = 0; i < _data.length; i++) {
             bytes1 b = _data[i];
@@ -241,7 +297,17 @@ contract HdpExecutionStoreTest is Test {
                 taskResults
             ) = abi.decode(
                 abiEncoded,
-                (uint256[], uint256[], bytes32[], bytes32, bytes32, bytes32[][], bytes32[][], bytes32[], bytes32[])
+                (
+                    uint256[],
+                    uint256[],
+                    bytes32[],
+                    bytes32,
+                    bytes32,
+                    bytes32[][],
+                    bytes32[][],
+                    bytes32[],
+                    bytes32[]
+                )
             );
     }
 }
diff --git a/test/TransactionsInBlockHdpExecutionStore.t.sol b/test/TransactionsInBlockHdpExecutionStore.t.sol
index b8ab0fb..7c18512 100644
--- a/test/TransactionsInBlockHdpExecutionStore.t.sol
+++ b/test/TransactionsInBlockHdpExecutionStore.t.sol
@@ -2,11 +2,9 @@
 pragma solidity ^0.8.4;
 
 import {Test} from "forge-std/Test.sol";
+import {ERC1967Proxy} from "openzeppelin-contracts/contracts/proxy/ERC1967/ERC1967Proxy.sol";
 import {HdpExecutionStore} from "../src/HdpExecutionStore.sol";
-import {
-    TransactionsInBlockDatalake,
-    TransactionsInBlockDatalakeCodecs
-} from "../src/datatypes/datalake/TransactionsInBlockDatalakeCodecs.sol";
+import {TransactionsInBlockDatalake, TransactionsInBlockDatalakeCodecs} from "../src/datatypes/datalake/TransactionsInBlockDatalakeCodecs.sol";
 import {ComputationalTask, ComputationalTaskCodecs} from "../src/datatypes/datalake/ComputeCodecs.sol";
 import {AggregateFn, Operator} from "../src/datatypes/datalake/ComputeCodecs.sol";
 import {IFactsRegistry} from "../src/interfaces/IFactsRegistry.sol";
@@ -25,7 +23,10 @@ contract MockFactsRegistry is IFactsRegistry {
 contract MockAggregatorsFactory is IAggregatorsFactory {
     mapping(uint256 => ISharpFactsAggregator) public aggregatorsById;
 
-    function createAggregator(uint256 id, ISharpFactsAggregator aggregator) external {
+    function createAggregator(
+        uint256 id,
+        ISharpFactsAggregator aggregator
+    ) external {
         aggregatorsById[id] = aggregator;
     }
 }
@@ -40,12 +41,13 @@ contract MockSharpFactsAggregator is ISharpFactsAggregator {
     }
 
     function aggregatorState() external view returns (AggregatorState memory) {
-        return AggregatorState({
-            poseidonMmrRoot: usedMmrRoot,
-            keccakMmrRoot: bytes32(0),
-            mmrSize: usedMmrSize,
-            continuableParentHash: bytes32(0)
-        });
+        return
+            AggregatorState({
+                poseidonMmrRoot: usedMmrRoot,
+                keccakMmrRoot: bytes32(0),
+                mmrSize: usedMmrSize,
+                continuableParentHash: bytes32(0)
+            });
     }
 }
 
@@ -55,6 +57,7 @@ contract HdpExecutionStoreTest is Test {
 
     address public proverAddress = address(12);
 
+    ERC1967Proxy public proxy;
     HdpExecutionStore private hdp;
     IFactsRegistry private factsRegistry;
     IAggregatorsFactory private aggregatorsFactory;
@@ -74,18 +77,24 @@ contract HdpExecutionStoreTest is Test {
 
     // !! If want to fetch different input, modify helpers/target/tx_cached_input.json && helpers/target/tx_cached_output.json
     // !! And construct corresponding TransactionsInBlockDatalake and ComputationalTask here
-    TransactionsInBlockDatalake datalake = TransactionsInBlockDatalake({
-        chainId: 11155111,
-        targetBlock: uint256(5605816),
-        startIndex: uint256(12),
-        endIndex: uint256(53),
-        increment: uint256(1),
-        includedTypes: uint256(0x00000101),
-        sampledProperty: TransactionsInBlockDatalakeCodecs.encodeSampledPropertyFortxReceipt(uint8(0))
-    });
+    TransactionsInBlockDatalake datalake =
+        TransactionsInBlockDatalake({
+            chainId: 11155111,
+            targetBlock: uint256(5605816),
+            startIndex: uint256(12),
+            endIndex: uint256(53),
+            increment: uint256(1),
+            includedTypes: uint256(0x00000101),
+            sampledProperty: TransactionsInBlockDatalakeCodecs
+                .encodeSampledPropertyFortxReceipt(uint8(0))
+        });
 
     ComputationalTask computationalTask =
-        ComputationalTask({aggregateFnId: AggregateFn.SLR, operatorId: Operator.NONE, valueToCompare: uint256(50)});
+        ComputationalTask({
+            aggregateFnId: AggregateFn.SLR,
+            operatorId: Operator.NONE,
+            valueToCompare: uint256(50)
+        });
 
     function setUp() public {
         vm.chainId(11155111);
@@ -96,7 +105,23 @@
 
         // Get program hash from compiled Cairo program
         programHash = _getProgramHash();
-        hdp = new HdpExecutionStore(factsRegistry, aggregatorsFactory, programHash);
+        hdp = new HdpExecutionStore();
+        proxy = new ERC1967Proxy(
+            address(hdp),
+            abi.encodeCall(
+                hdp.initialize,
+                (factsRegistry, aggregatorsFactory, programHash)
+            )
+        );
+        // Interact through the proxy: the implementation's own storage is never initialized
+        hdp = HdpExecutionStore(address(proxy));
+
+        emit log_bytes(
+            abi.encodeCall(
+                hdp.initialize,
+                (factsRegistry, aggregatorsFactory, programHash)
+            )
+        );
 
         // Parse from input file
         (
@@ -112,22 +137,32 @@ contract HdpExecutionStoreTest is Test {
         ) = _fetchCairoInput();
 
         bytes32 computedDatalakeCommitment = datalake.commit();
-        bytes32 computedTaskCommitment = computationalTask.commit(computedDatalakeCommitment);
+        bytes32 computedTaskCommitment = computationalTask.commit(
+            computedDatalakeCommitment
+        );
 
         assertEq(fetchedTasksCommitments[0], computedTaskCommitment);
 
         // Mock SHARP facts aggregator
-        sharpFactsAggregator = new MockSharpFactsAggregator(fetchedMmrRoots[0], fetchedMmrSizes[0]);
+        sharpFactsAggregator = new MockSharpFactsAggregator(
+            fetchedMmrRoots[0],
+            fetchedMmrSizes[0]
+        );
 
         // Create mock SHARP facts aggregator
-        aggregatorsFactory.createAggregator(fetchedMmrIds[0], sharpFactsAggregator);
+        aggregatorsFactory.createAggregator(
+            fetchedMmrIds[0],
+            sharpFactsAggregator
+        );
     }
 
     function testHdpExecutionFlow() public {
-        (uint256 taskRootLow, uint256 taskRootHigh) = Uint256Splitter.split128(uint256(bytes32(fetchedTasksMerkleRoot)));
+        (uint256 taskRootLow, uint256 taskRootHigh) = Uint256Splitter.split128(
+            uint256(bytes32(fetchedTasksMerkleRoot))
+        );
 
-        (uint256 resultRootLow, uint256 resultRootHigh) =
-            Uint256Splitter.split128(uint256(bytes32(fetchedResultsMerkleRoot)));
+        (uint256 resultRootLow, uint256 resultRootHigh) = Uint256Splitter
+            .split128(uint256(bytes32(fetchedResultsMerkleRoot)));
 
         // Cache MMR roots
         for (uint256 i = 0; i < fetchedMmrIds.length; i++) {
@@ -159,11 +194,18 @@
         );
 
         // Check if the task state is FINALIZED
-        HdpExecutionStore.TaskStatus taskStatusAfter = hdp.getTaskStatus(fetchedTasksCommitments[0]);
-        assertEq(uint256(taskStatusAfter), uint256(HdpExecutionStore.TaskStatus.FINALIZED));
+        HdpExecutionStore.TaskStatus taskStatusAfter = hdp.getTaskStatus(
+            fetchedTasksCommitments[0]
+        );
+        assertEq(
+            uint256(taskStatusAfter),
+            uint256(HdpExecutionStore.TaskStatus.FINALIZED)
+        );
 
         // Check if the task result is stored
-        bytes32 taskResult = hdp.getFinalizedTaskResult(fetchedTasksCommitments[0]);
+        bytes32 taskResult = hdp.getFinalizedTaskResult(
+            fetchedTasksCommitments[0]
+        );
 
         assertEq(taskResult, fetchedResults[0]);
     }
@@ -178,7 +220,9 @@ contract HdpExecutionStoreTest is Test {
         return abi.decode(abiEncoded, (bytes32));
     }
 
-    function bytesToString(bytes memory _data) public pure returns (string memory) {
+    function bytesToString(
+        bytes memory _data
+    ) public pure returns (string memory) {
         bytes memory buffer = new bytes(_data.length);
         for (uint256 i = 0; i < _data.length; i++) {
             bytes1 b = _data[i];
@@ -233,7 +277,17 @@ contract HdpExecutionStoreTest is Test {
                 taskResults
             ) = abi.decode(
                 abiEncoded,
-                (uint256[], uint256[], bytes32[], bytes32, bytes32, bytes32[][], bytes32[][], bytes32[], bytes32[])
+                (
+                    uint256[],
+                    uint256[],
+                    bytes32[],
+                    bytes32,
+                    bytes32,
+                    bytes32[][],
+                    bytes32[][],
+                    bytes32[],
+                    bytes32[]
+                )
             );
     }
 }
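Note, not part of the patch above: the change deletes both Foundry deployment scripts without adding a UUPS-aware replacement. Below is a minimal sketch of what such a script could look like under the new proxy setup. The contract name HdpExecutionStoreUupsDeployer is illustrative, the environment variable names are reused from the deleted HdpExecutionStore.s.sol, and the ERC1967Proxy import path assumes the same remapping the tests use. Independently of deployment, consider giving the implementation a constructor that calls _disableInitializers() (the standard OpenZeppelin guard) so the bare implementation contract cannot be initialized by a third party.

// SPDX-License-Identifier: MIT
pragma solidity ^0.8.20;

import {Script} from "forge-std/Script.sol";
import {console2} from "forge-std/console2.sol";
import {ERC1967Proxy} from "openzeppelin-contracts/contracts/proxy/ERC1967/ERC1967Proxy.sol";

import {IAggregatorsFactory} from "../src/interfaces/IAggregatorsFactory.sol";
import {IFactsRegistry} from "../src/interfaces/IFactsRegistry.sol";
import {HdpExecutionStore} from "../src/HdpExecutionStore.sol";

// Hypothetical replacement for the deleted HdpExecutionStoreDeployer:
// deploys the implementation, then an ERC1967 proxy that runs
// initialize(...) as its construction calldata.
contract HdpExecutionStoreUupsDeployer is Script {
    function run() external {
        uint256 deployerPrivateKey = vm.envUint("PRIV_KEY");
        vm.startBroadcast(deployerPrivateKey);

        IFactsRegistry factsRegistry = IFactsRegistry(vm.envAddress("FACTS_REGISTRY_ADDRESS"));
        IAggregatorsFactory aggregatorsFactory = IAggregatorsFactory(vm.envAddress("SHARP_AGGREGATORS_FACTORY"));

        // Deploy the implementation; its own storage stays uninitialized
        HdpExecutionStore implementation = new HdpExecutionStore();

        // Deploy the proxy and initialize it in the same transaction,
        // leaving no window in which someone else can call initialize
        ERC1967Proxy proxy = new ERC1967Proxy(
            address(implementation),
            abi.encodeCall(
                implementation.initialize,
                (factsRegistry, aggregatorsFactory, vm.envBytes32("HDP_PROGRAM_HASH"))
            )
        );

        console2.log("HdpExecutionStore implementation: ", address(implementation));
        console2.log("HdpExecutionStore proxy: ", address(proxy));

        vm.stopBroadcast();
    }
}

All interactions (setProgramHash, cacheMmrRoot, task scheduling) would then target the proxy address; the owner is the broadcasting deployer via __Ownable_init(msg.sender), and later upgrades go through upgradeToAndCall on the proxy, gated by _authorizeUpgrade's onlyOwner.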