diff --git a/test/BlockSampledHdpExecutionStore.t.sol b/test/BlockSampledHdpExecutionStore.t.sol
index a69cbb6..d2e90fc 100644
--- a/test/BlockSampledHdpExecutionStore.t.sol
+++ b/test/BlockSampledHdpExecutionStore.t.sol
@@ -3,7 +3,9 @@ pragma solidity ^0.8.4;
 
 import {Test} from "forge-std/Test.sol";
 import {HdpExecutionStore} from "../src/HdpExecutionStore.sol";
-import {BlockSampledDatalake, BlockSampledDatalakeCodecs} from "../src/datatypes/datalake/BlockSampledDatalakeCodecs.sol";
+import {
+    BlockSampledDatalake, BlockSampledDatalakeCodecs
+} from "../src/datatypes/datalake/BlockSampledDatalakeCodecs.sol";
 import {ComputationalTask, ComputationalTaskCodecs} from "../src/datatypes/datalake/ComputeCodecs.sol";
 import {AggregateFn, Operator} from "../src/datatypes/datalake/ComputeCodecs.sol";
 import {IFactsRegistry} from "../src/interfaces/IFactsRegistry.sol";
@@ -22,10 +24,7 @@ contract MockFactsRegistry is IFactsRegistry {
 contract MockAggregatorsFactory is IAggregatorsFactory {
     mapping(uint256 => ISharpFactsAggregator) public aggregatorsById;
 
-    function createAggregator(
-        uint256 id,
-        ISharpFactsAggregator aggregator
-    ) external {
+    function createAggregator(uint256 id, ISharpFactsAggregator aggregator) external {
         aggregatorsById[id] = aggregator;
     }
 }
@@ -40,13 +39,12 @@ contract MockSharpFactsAggregator is ISharpFactsAggregator {
     }
 
     function aggregatorState() external view returns (AggregatorState memory) {
-        return
-            AggregatorState({
-                poseidonMmrRoot: usedMmrRoot,
-                keccakMmrRoot: bytes32(0),
-                mmrSize: usedMmrSize,
-                continuableParentHash: bytes32(0)
-            });
+        return AggregatorState({
+            poseidonMmrRoot: usedMmrRoot,
+            keccakMmrRoot: bytes32(0),
+            mmrSize: usedMmrSize,
+            continuableParentHash: bytes32(0)
+        });
     }
 }
 
@@ -75,22 +73,19 @@ contract HdpExecutionStoreTest is Test {
 
     // !! If want to fetch different input, modify helpers/target/bs_cached_input.json && helpers/target/bs_cached_output.json
     // !! And construct corresponding BlockSampledDatalake and ComputationalTask here
-    BlockSampledDatalake datalake =
-        BlockSampledDatalake({
-            chainId: 11155111,
-            blockRangeStart: 5858987,
-            blockRangeEnd: 5858997,
-            increment: 2,
-            sampledProperty: BlockSampledDatalakeCodecs
-                .encodeSampledPropertyForHeaderProp(uint8(18))
-        });
-
-    ComputationalTask computationalTask =
-        ComputationalTask({
-            aggregateFnId: AggregateFn.SLR,
-            operatorId: Operator.NONE,
-            valueToCompare: uint256(10000000)
-        });
+    BlockSampledDatalake datalake = BlockSampledDatalake({
+        chainId: 11155111,
+        blockRangeStart: 5858987,
+        blockRangeEnd: 5858997,
+        increment: 2,
+        sampledProperty: BlockSampledDatalakeCodecs.encodeSampledPropertyForHeaderProp(uint8(18))
+    });
+
+    ComputationalTask computationalTask = ComputationalTask({
+        aggregateFnId: AggregateFn.SLR,
+        operatorId: Operator.NONE,
+        valueToCompare: uint256(10000000)
+    });
 
     function setUp() public {
         vm.chainId(11155111);
@@ -101,11 +96,7 @@ contract HdpExecutionStoreTest is Test {
         // Get program hash from compiled Cairo program
         programHash = _getProgramHash();
 
-        hdp = new HdpExecutionStore(
-            factsRegistry,
-            aggregatorsFactory,
-            programHash
-        );
+        hdp = new HdpExecutionStore(factsRegistry, aggregatorsFactory, programHash);
 
         // Parse from input file
         (
@@ -121,32 +112,22 @@ contract HdpExecutionStoreTest is Test {
         ) = _fetchCairoInput();
 
         bytes32 computedDatalakeCommitment = datalake.commit();
-        bytes32 computedTaskCommitment = computationalTask.commit(
-            computedDatalakeCommitment
-        );
+        bytes32 computedTaskCommitment = computationalTask.commit(computedDatalakeCommitment);
 
         assertEq(fetchedTasksCommitments[0], computedTaskCommitment);
 
         // Mock SHARP facts aggregator
-        sharpFactsAggregator = new MockSharpFactsAggregator(
-            fetchedMmrRoots[0],
-            fetchedMmrSizes[0]
-        );
+        sharpFactsAggregator = new MockSharpFactsAggregator(fetchedMmrRoots[0], fetchedMmrSizes[0]);
 
         // Create mock SHARP facts aggregator
-        aggregatorsFactory.createAggregator(
-            fetchedMmrIds[0],
-            sharpFactsAggregator
-        );
+        aggregatorsFactory.createAggregator(fetchedMmrIds[0], sharpFactsAggregator);
     }
 
     function testHdpExecutionFlow() public {
-        (uint256 taskRootLow, uint256 taskRootHigh) = Uint256Splitter.split128(
-            uint256(bytes32(fetchedTasksMerkleRoot))
-        );
+        (uint256 taskRootLow, uint256 taskRootHigh) = Uint256Splitter.split128(uint256(bytes32(fetchedTasksMerkleRoot)));
 
-        (uint256 resultRootLow, uint256 resultRootHigh) = Uint256Splitter
-            .split128(uint256(bytes32(fetchedResultsMerkleRoot)));
+        (uint256 resultRootLow, uint256 resultRootHigh) =
+            Uint256Splitter.split128(uint256(bytes32(fetchedResultsMerkleRoot)));
 
         // Cache MMR root
         for (uint256 i = 0; i < fetchedMmrIds.length; i++) {
@@ -177,18 +158,11 @@ contract HdpExecutionStoreTest is Test {
         );
 
         // Check if the task state is FINALIZED
-        HdpExecutionStore.TaskStatus taskStatusAfter = hdp.getTaskStatus(
-            fetchedTasksCommitments[0]
-        );
-        assertEq(
-            uint256(taskStatusAfter),
-            uint256(HdpExecutionStore.TaskStatus.FINALIZED)
-        );
+        HdpExecutionStore.TaskStatus taskStatusAfter = hdp.getTaskStatus(fetchedTasksCommitments[0]);
+        assertEq(uint256(taskStatusAfter), uint256(HdpExecutionStore.TaskStatus.FINALIZED));
 
         // Check if the task result is stored
-        bytes32 taskResult = hdp.getFinalizedTaskResult(
-            fetchedTasksCommitments[0]
-        );
+        bytes32 taskResult = hdp.getFinalizedTaskResult(fetchedTasksCommitments[0]);
         assertEq(taskResult, fetchedResults[0]);
     }
 
@@ -203,9 +177,7 @@ contract HdpExecutionStoreTest is Test {
         return abi.decode(abiEncoded, (bytes32));
     }
 
-    function bytesToString(
-        bytes memory _data
-    ) public pure returns (string memory) {
+    function bytesToString(bytes memory _data) public pure returns (string memory) {
         bytes memory buffer = new bytes(_data.length);
         for (uint256 i = 0; i < _data.length; i++) {
             bytes1 b = _data[i];
@@ -260,17 +232,7 @@ contract HdpExecutionStoreTest is Test {
             taskResults
         ) = abi.decode(
             abiEncoded,
-            (
-                uint256[],
-                uint256[],
-                bytes32[],
-                bytes32,
-                bytes32,
-                bytes32[][],
-                bytes32[][],
-                bytes32[],
-                bytes32[]
-            )
+            (uint256[], uint256[], bytes32[], bytes32, bytes32, bytes32[][], bytes32[][], bytes32[], bytes32[])
         );
     }
 }
diff --git a/test/ModuleHdpExecutionStore.t.sol b/test/ModuleHdpExecutionStore.t.sol
index 680150e..6c8191b 100644
--- a/test/ModuleHdpExecutionStore.t.sol
+++ b/test/ModuleHdpExecutionStore.t.sol
@@ -20,10 +20,7 @@ contract MockFactsRegistry is IFactsRegistry {
 contract MockAggregatorsFactory is IAggregatorsFactory {
     mapping(uint256 => ISharpFactsAggregator) public aggregatorsById;
 
-    function createAggregator(
-        uint256 id,
-        ISharpFactsAggregator aggregator
-    ) external {
+    function createAggregator(uint256 id, ISharpFactsAggregator aggregator) external {
         aggregatorsById[id] = aggregator;
     }
 }
@@ -38,13 +35,12 @@ contract MockSharpFactsAggregator is ISharpFactsAggregator {
     }
 
     function aggregatorState() external view returns (AggregatorState memory) {
-        return
-            AggregatorState({
-                poseidonMmrRoot: usedMmrRoot,
-                keccakMmrRoot: bytes32(0),
-                mmrSize: usedMmrSize,
-                continuableParentHash: bytes32(0)
-            });
+        return AggregatorState({
+            poseidonMmrRoot: usedMmrRoot,
+            keccakMmrRoot: bytes32(0),
+            mmrSize: usedMmrSize,
+            continuableParentHash: bytes32(0)
+        });
     }
 }
 
@@ -77,26 +73,12 @@ contract HdpExecutionStoreTest is Test {
         // !! And construct corresponding BlockSampledDatalake and ComputationalTask here
         bytes32[] memory moduleInputs = new bytes32[](2);
         moduleInputs[0] = bytes32(uint256(5382820));
-        assertEq(
-            moduleInputs[0],
-            bytes32(
-                0x00000000000000000000000000000000000000000000000000000000005222a4
-            )
-        );
-        moduleInputs[1] = bytes32(
-            uint256(113007187165825507614120510246167695609561346261)
-        );
-        assertEq(
-            moduleInputs[1],
-            bytes32(
-                0x00000000000000000000000013cb6ae34a13a0977f4d7101ebc24b87bb23f0d5
-            )
-        );
+        assertEq(moduleInputs[0], bytes32(0x00000000000000000000000000000000000000000000000000000000005222a4));
+        moduleInputs[1] = bytes32(uint256(113007187165825507614120510246167695609561346261));
+        assertEq(moduleInputs[1], bytes32(0x00000000000000000000000013cb6ae34a13a0977f4d7101ebc24b87bb23f0d5));
 
         ModuleTask memory moduleTask = ModuleTask({
-            programHash: bytes32(
-                0x064041a339b1edd10de83cf031cfa938645450f971d2527c90d4c2ce68d7d412
-            ),
+            programHash: bytes32(0x064041a339b1edd10de83cf031cfa938645450f971d2527c90d4c2ce68d7d412),
             inputs: moduleInputs
         });
 
@@ -115,11 +97,7 @@ contract HdpExecutionStoreTest is Test {
         // Get program hash from compiled Cairo program
         programHash = _getProgramHash();
 
-        hdp = new HdpExecutionStore(
-            factsRegistry,
-            aggregatorsFactory,
-            programHash
-        );
+        hdp = new HdpExecutionStore(factsRegistry, aggregatorsFactory, programHash);
 
         // Parse from input file
         (
@@ -139,25 +117,17 @@ contract HdpExecutionStoreTest is Test {
         assertEq(fetchedTasksCommitments[0], moduleTaskCommitment);
 
         // Mock SHARP facts aggregator
-        sharpFactsAggregator = new MockSharpFactsAggregator(
-            fetchedMmrRoots[0],
-            fetchedMmrSizes[0]
-        );
+        sharpFactsAggregator = new MockSharpFactsAggregator(fetchedMmrRoots[0], fetchedMmrSizes[0]);
 
         // Create mock SHARP facts aggregator
-        aggregatorsFactory.createAggregator(
-            fetchedMmrIds[0],
-            sharpFactsAggregator
-        );
+        aggregatorsFactory.createAggregator(fetchedMmrIds[0], sharpFactsAggregator);
     }
 
     function testHdpExecutionFlow() public {
-        (uint256 taskRootLow, uint256 taskRootHigh) = Uint256Splitter.split128(
-            uint256(bytes32(fetchedTasksMerkleRoot))
-        );
+        (uint256 taskRootLow, uint256 taskRootHigh) = Uint256Splitter.split128(uint256(bytes32(fetchedTasksMerkleRoot)));
 
-        (uint256 resultRootLow, uint256 resultRootHigh) = Uint256Splitter
-            .split128(uint256(bytes32(fetchedResultsMerkleRoot)));
+        (uint256 resultRootLow, uint256 resultRootHigh) =
+            Uint256Splitter.split128(uint256(bytes32(fetchedResultsMerkleRoot)));
 
         // Cache MMR root
         for (uint256 i = 0; i < fetchedMmrIds.length; i++) {
@@ -188,18 +158,11 @@ contract HdpExecutionStoreTest is Test {
         );
 
         // Check if the task state is FINALIZED
-        HdpExecutionStore.TaskStatus taskStatusAfter = hdp.getTaskStatus(
-            fetchedTasksCommitments[0]
-        );
-        assertEq(
-            uint256(taskStatusAfter),
-            uint256(HdpExecutionStore.TaskStatus.FINALIZED)
-        );
+        HdpExecutionStore.TaskStatus taskStatusAfter = hdp.getTaskStatus(fetchedTasksCommitments[0]);
+        assertEq(uint256(taskStatusAfter), uint256(HdpExecutionStore.TaskStatus.FINALIZED));
 
         // Check if the task result is stored
-        bytes32 taskResult = hdp.getFinalizedTaskResult(
-            fetchedTasksCommitments[0]
-        );
+        bytes32 taskResult = hdp.getFinalizedTaskResult(fetchedTasksCommitments[0]);
         assertEq(taskResult, fetchedResults[0]);
     }
 
@@ -214,10 +177,7 @@ contract HdpExecutionStoreTest is Test {
         return abi.decode(abiEncoded, (bytes32));
     }
 
-    function _callPreprocessCli(
-        bytes memory encodedTask,
-        bytes memory encodedDatalake
-    ) internal {
+    function _callPreprocessCli(bytes memory encodedTask, bytes memory encodedDatalake) internal {
         string[] memory inputs = new string[](4);
         inputs[0] = "node";
         inputs[1] = "./helpers/fetch_cairo_input.js";
@@ -226,9 +186,7 @@ contract HdpExecutionStoreTest is Test {
         vm.ffi(inputs);
     }
 
-    function bytesToString(
-        bytes memory _data
-    ) public pure returns (string memory) {
+    function bytesToString(bytes memory _data) public pure returns (string memory) {
         bytes memory buffer = new bytes(_data.length);
         for (uint256 i = 0; i < _data.length; i++) {
             bytes1 b = _data[i];
@@ -283,17 +241,7 @@ contract HdpExecutionStoreTest is Test {
             taskResults
         ) = abi.decode(
             abiEncoded,
-            (
-                uint256[],
-                uint256[],
-                bytes32[],
-                bytes32,
-                bytes32,
-                bytes32[][],
-                bytes32[][],
-                bytes32[],
-                bytes32[]
-            )
+            (uint256[], uint256[], bytes32[], bytes32, bytes32, bytes32[][], bytes32[][], bytes32[], bytes32[])
         );
     }
 }
diff --git a/test/TransactionsInBlockHdpExecutionStore.t.sol b/test/TransactionsInBlockHdpExecutionStore.t.sol
index 0a2bf8b..b8ab0fb 100644
--- a/test/TransactionsInBlockHdpExecutionStore.t.sol
+++ b/test/TransactionsInBlockHdpExecutionStore.t.sol
@@ -3,7 +3,10 @@ pragma solidity ^0.8.4;
 
 import {Test} from "forge-std/Test.sol";
 import {HdpExecutionStore} from "../src/HdpExecutionStore.sol";
-import {TransactionsInBlockDatalake, TransactionsInBlockDatalakeCodecs} from "../src/datatypes/datalake/TransactionsInBlockDatalakeCodecs.sol";
+import {
+    TransactionsInBlockDatalake,
+    TransactionsInBlockDatalakeCodecs
+} from "../src/datatypes/datalake/TransactionsInBlockDatalakeCodecs.sol";
 import {ComputationalTask, ComputationalTaskCodecs} from "../src/datatypes/datalake/ComputeCodecs.sol";
 import {AggregateFn, Operator} from "../src/datatypes/datalake/ComputeCodecs.sol";
"../src/interfaces/IFactsRegistry.sol"; @@ -22,10 +25,7 @@ contract MockFactsRegistry is IFactsRegistry { contract MockAggregatorsFactory is IAggregatorsFactory { mapping(uint256 => ISharpFactsAggregator) public aggregatorsById; - function createAggregator( - uint256 id, - ISharpFactsAggregator aggregator - ) external { + function createAggregator(uint256 id, ISharpFactsAggregator aggregator) external { aggregatorsById[id] = aggregator; } } @@ -40,13 +40,12 @@ contract MockSharpFactsAggregator is ISharpFactsAggregator { } function aggregatorState() external view returns (AggregatorState memory) { - return - AggregatorState({ - poseidonMmrRoot: usedMmrRoot, - keccakMmrRoot: bytes32(0), - mmrSize: usedMmrSize, - continuableParentHash: bytes32(0) - }); + return AggregatorState({ + poseidonMmrRoot: usedMmrRoot, + keccakMmrRoot: bytes32(0), + mmrSize: usedMmrSize, + continuableParentHash: bytes32(0) + }); } } @@ -75,24 +74,18 @@ contract HdpExecutionStoreTest is Test { // !! If want to fetch different input, modify helpers/target/tx_cached_input.json && helpers/target/tx_cached_output.json // !! And construct corresponding TransactionsInBlockDatalake and ComputationalTask here - TransactionsInBlockDatalake datalake = - TransactionsInBlockDatalake({ - chainId: 11155111, - targetBlock: uint256(5605816), - startIndex: uint256(12), - endIndex: uint256(53), - increment: uint256(1), - includedTypes: uint256(0x00000101), - sampledProperty: TransactionsInBlockDatalakeCodecs - .encodeSampledPropertyFortxReceipt(uint8(0)) - }); + TransactionsInBlockDatalake datalake = TransactionsInBlockDatalake({ + chainId: 11155111, + targetBlock: uint256(5605816), + startIndex: uint256(12), + endIndex: uint256(53), + increment: uint256(1), + includedTypes: uint256(0x00000101), + sampledProperty: TransactionsInBlockDatalakeCodecs.encodeSampledPropertyFortxReceipt(uint8(0)) + }); ComputationalTask computationalTask = - ComputationalTask({ - aggregateFnId: AggregateFn.SLR, - operatorId: Operator.NONE, - valueToCompare: uint256(50) - }); + ComputationalTask({aggregateFnId: AggregateFn.SLR, operatorId: Operator.NONE, valueToCompare: uint256(50)}); function setUp() public { vm.chainId(11155111); @@ -103,11 +96,7 @@ contract HdpExecutionStoreTest is Test { // Get program hash from compiled Cairo program programHash = _getProgramHash(); - hdp = new HdpExecutionStore( - factsRegistry, - aggregatorsFactory, - programHash - ); + hdp = new HdpExecutionStore(factsRegistry, aggregatorsFactory, programHash); // Parse from input file ( @@ -123,32 +112,22 @@ contract HdpExecutionStoreTest is Test { ) = _fetchCairoInput(); bytes32 computedDatalakeCommitment = datalake.commit(); - bytes32 computedTaskCommitment = computationalTask.commit( - computedDatalakeCommitment - ); + bytes32 computedTaskCommitment = computationalTask.commit(computedDatalakeCommitment); assertEq(fetchedTasksCommitments[0], computedTaskCommitment); // Mock SHARP facts aggregator - sharpFactsAggregator = new MockSharpFactsAggregator( - fetchedMmrRoots[0], - fetchedMmrSizes[0] - ); + sharpFactsAggregator = new MockSharpFactsAggregator(fetchedMmrRoots[0], fetchedMmrSizes[0]); // Create mock SHARP facts aggregator - aggregatorsFactory.createAggregator( - fetchedMmrIds[0], - sharpFactsAggregator - ); + aggregatorsFactory.createAggregator(fetchedMmrIds[0], sharpFactsAggregator); } function testHdpExecutionFlow() public { - (uint256 taskRootLow, uint256 taskRootHigh) = Uint256Splitter.split128( - uint256(bytes32(fetchedTasksMerkleRoot)) - ); + (uint256 taskRootLow, 
+        (uint256 taskRootLow, uint256 taskRootHigh) = Uint256Splitter.split128(uint256(bytes32(fetchedTasksMerkleRoot)));
 
-        (uint256 resultRootLow, uint256 resultRootHigh) = Uint256Splitter
-            .split128(uint256(bytes32(fetchedResultsMerkleRoot)));
+        (uint256 resultRootLow, uint256 resultRootHigh) =
+            Uint256Splitter.split128(uint256(bytes32(fetchedResultsMerkleRoot)));
 
         // Cache MMR roots
         for (uint256 i = 0; i < fetchedMmrIds.length; i++) {
@@ -180,18 +159,11 @@ contract HdpExecutionStoreTest is Test {
         );
 
         // Check if the task state is FINALIZED
-        HdpExecutionStore.TaskStatus taskStatusAfter = hdp.getTaskStatus(
-            fetchedTasksCommitments[0]
-        );
-        assertEq(
-            uint256(taskStatusAfter),
-            uint256(HdpExecutionStore.TaskStatus.FINALIZED)
-        );
+        HdpExecutionStore.TaskStatus taskStatusAfter = hdp.getTaskStatus(fetchedTasksCommitments[0]);
+        assertEq(uint256(taskStatusAfter), uint256(HdpExecutionStore.TaskStatus.FINALIZED));
 
         // Check if the task result is stored
-        bytes32 taskResult = hdp.getFinalizedTaskResult(
-            fetchedTasksCommitments[0]
-        );
+        bytes32 taskResult = hdp.getFinalizedTaskResult(fetchedTasksCommitments[0]);
         assertEq(taskResult, fetchedResults[0]);
     }
 
@@ -206,9 +178,7 @@ contract HdpExecutionStoreTest is Test {
         return abi.decode(abiEncoded, (bytes32));
     }
 
-    function bytesToString(
-        bytes memory _data
-    ) public pure returns (string memory) {
+    function bytesToString(bytes memory _data) public pure returns (string memory) {
         bytes memory buffer = new bytes(_data.length);
         for (uint256 i = 0; i < _data.length; i++) {
             bytes1 b = _data[i];
@@ -263,17 +233,7 @@ contract HdpExecutionStoreTest is Test {
             taskResults
        ) = abi.decode(
             abiEncoded,
-            (
-                uint256[],
-                uint256[],
-                bytes32[],
-                bytes32,
-                bytes32,
-                bytes32[][],
-                bytes32[][],
-                bytes32[],
-                bytes32[]
-            )
+            (uint256[], uint256[], bytes32[], bytes32, bytes32, bytes32[][], bytes32[][], bytes32[], bytes32[])
         );
     }
 }
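
Note on the split128 calls in the tests above: each 256-bit Merkle root is split into two 128-bit words before being passed along, and the destructuring order `(uint256 taskRootLow, uint256 taskRootHigh)` shows that `Uint256Splitter.split128` returns the low half first. For reference, a minimal sketch of that (low, high) behavior follows; it illustrates the masking and shifting involved and is an assumption, not the repository's actual `Uint256Splitter` implementation:

    // SPDX-License-Identifier: MIT
    pragma solidity ^0.8.4;

    /// Sketch of the (low, high) 128-bit split assumed by the tests above.
    /// Not the repository's Uint256Splitter; shown for illustration only.
    library Uint256SplitterSketch {
        uint256 private constant MASK_128 = (1 << 128) - 1;

        /// @notice Splits `a` into its low and high 128-bit halves.
        function split128(uint256 a) internal pure returns (uint256 lower, uint256 upper) {
            lower = a & MASK_128; // keep the low 128 bits
            upper = a >> 128; // shift the high 128 bits down
        }
    }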