chore: forge fmt
rkdud007 committed Jul 29, 2024
1 parent ebbfea8 commit d15eb72
Showing 3 changed files with 91 additions and 221 deletions.
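forge fmt applies Foundry's canonical Solidity style across the test suite without changing behavior: argument lists and import bindings that fit within the formatter's line-length limit collapse onto one line, and multi-line calls or struct literals keep the opening token on the declaration line instead of a hanging indent. A representative before/after pair taken directly from this commit:

    // Before: one constructor argument per line.
    hdp = new HdpExecutionStore(
        factsRegistry,
        aggregatorsFactory,
        programHash
    );

    // After forge fmt: the call fits within the line-length limit, so it collapses.
    hdp = new HdpExecutionStore(factsRegistry, aggregatorsFactory, programHash);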
108 changes: 35 additions & 73 deletions test/BlockSampledHdpExecutionStore.t.sol
@@ -3,7 +3,9 @@ pragma solidity ^0.8.4;
 
 import {Test} from "forge-std/Test.sol";
 import {HdpExecutionStore} from "../src/HdpExecutionStore.sol";
-import {BlockSampledDatalake, BlockSampledDatalakeCodecs} from "../src/datatypes/datalake/BlockSampledDatalakeCodecs.sol";
+import {
+    BlockSampledDatalake, BlockSampledDatalakeCodecs
+} from "../src/datatypes/datalake/BlockSampledDatalakeCodecs.sol";
 import {ComputationalTask, ComputationalTaskCodecs} from "../src/datatypes/datalake/ComputeCodecs.sol";
 import {AggregateFn, Operator} from "../src/datatypes/datalake/ComputeCodecs.sol";
 import {IFactsRegistry} from "../src/interfaces/IFactsRegistry.sol";
@@ -22,10 +24,7 @@ contract MockFactsRegistry is IFactsRegistry {
 contract MockAggregatorsFactory is IAggregatorsFactory {
     mapping(uint256 => ISharpFactsAggregator) public aggregatorsById;
 
-    function createAggregator(
-        uint256 id,
-        ISharpFactsAggregator aggregator
-    ) external {
+    function createAggregator(uint256 id, ISharpFactsAggregator aggregator) external {
         aggregatorsById[id] = aggregator;
     }
 }
@@ -40,13 +39,12 @@ contract MockSharpFactsAggregator is ISharpFactsAggregator {
     }
 
     function aggregatorState() external view returns (AggregatorState memory) {
-        return
-            AggregatorState({
-                poseidonMmrRoot: usedMmrRoot,
-                keccakMmrRoot: bytes32(0),
-                mmrSize: usedMmrSize,
-                continuableParentHash: bytes32(0)
-            });
+        return AggregatorState({
+            poseidonMmrRoot: usedMmrRoot,
+            keccakMmrRoot: bytes32(0),
+            mmrSize: usedMmrSize,
+            continuableParentHash: bytes32(0)
+        });
     }
 }
 
@@ -75,22 +73,19 @@ contract HdpExecutionStoreTest is Test {
 
     // !! If want to fetch different input, modify helpers/target/bs_cached_input.json && helpers/target/bs_cached_output.json
    // !! And construct corresponding BlockSampledDatalake and ComputationalTask here
-    BlockSampledDatalake datalake =
-        BlockSampledDatalake({
-            chainId: 11155111,
-            blockRangeStart: 5858987,
-            blockRangeEnd: 5858997,
-            increment: 2,
-            sampledProperty: BlockSampledDatalakeCodecs
-                .encodeSampledPropertyForHeaderProp(uint8(18))
-        });
-
-    ComputationalTask computationalTask =
-        ComputationalTask({
-            aggregateFnId: AggregateFn.SLR,
-            operatorId: Operator.NONE,
-            valueToCompare: uint256(10000000)
-        });
+    BlockSampledDatalake datalake = BlockSampledDatalake({
+        chainId: 11155111,
+        blockRangeStart: 5858987,
+        blockRangeEnd: 5858997,
+        increment: 2,
+        sampledProperty: BlockSampledDatalakeCodecs.encodeSampledPropertyForHeaderProp(uint8(18))
+    });
+
+    ComputationalTask computationalTask = ComputationalTask({
+        aggregateFnId: AggregateFn.SLR,
+        operatorId: Operator.NONE,
+        valueToCompare: uint256(10000000)
+    });
 
     function setUp() public {
         vm.chainId(11155111);
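For orientation: this datalake samples header property index 18 from every second block in the inclusive range [5858987, 5858997] on Sepolia (chain id 11155111), i.e. (5858997 - 5858987) / 2 + 1 = 6 headers, and the task aggregates the samples with AggregateFn.SLR (simple linear regression, going by the enum name) with no comparison operator applied.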
@@ -101,11 +96,7 @@
 
         // Get program hash from compiled Cairo program
         programHash = _getProgramHash();
-        hdp = new HdpExecutionStore(
-            factsRegistry,
-            aggregatorsFactory,
-            programHash
-        );
+        hdp = new HdpExecutionStore(factsRegistry, aggregatorsFactory, programHash);
 
         // Parse from input file
         (
@@ -121,32 +112,22 @@
         ) = _fetchCairoInput();
 
         bytes32 computedDatalakeCommitment = datalake.commit();
-        bytes32 computedTaskCommitment = computationalTask.commit(
-            computedDatalakeCommitment
-        );
+        bytes32 computedTaskCommitment = computationalTask.commit(computedDatalakeCommitment);
 
         assertEq(fetchedTasksCommitments[0], computedTaskCommitment);
 
         // Mock SHARP facts aggregator
-        sharpFactsAggregator = new MockSharpFactsAggregator(
-            fetchedMmrRoots[0],
-            fetchedMmrSizes[0]
-        );
+        sharpFactsAggregator = new MockSharpFactsAggregator(fetchedMmrRoots[0], fetchedMmrSizes[0]);
 
         // Create mock SHARP facts aggregator
-        aggregatorsFactory.createAggregator(
-            fetchedMmrIds[0],
-            sharpFactsAggregator
-        );
+        aggregatorsFactory.createAggregator(fetchedMmrIds[0], sharpFactsAggregator);
     }
 
     function testHdpExecutionFlow() public {
-        (uint256 taskRootLow, uint256 taskRootHigh) = Uint256Splitter.split128(
-            uint256(bytes32(fetchedTasksMerkleRoot))
-        );
+        (uint256 taskRootLow, uint256 taskRootHigh) = Uint256Splitter.split128(uint256(bytes32(fetchedTasksMerkleRoot)));
 
-        (uint256 resultRootLow, uint256 resultRootHigh) = Uint256Splitter
-            .split128(uint256(bytes32(fetchedResultsMerkleRoot)));
+        (uint256 resultRootLow, uint256 resultRootHigh) =
+            Uint256Splitter.split128(uint256(bytes32(fetchedResultsMerkleRoot)));
 
         // Cache MMR root
         for (uint256 i = 0; i < fetchedMmrIds.length; i++) {
@@ -177,18 +158,11 @@ contract HdpExecutionStoreTest is Test {
         );
 
         // Check if the task state is FINALIZED
-        HdpExecutionStore.TaskStatus taskStatusAfter = hdp.getTaskStatus(
-            fetchedTasksCommitments[0]
-        );
-        assertEq(
-            uint256(taskStatusAfter),
-            uint256(HdpExecutionStore.TaskStatus.FINALIZED)
-        );
+        HdpExecutionStore.TaskStatus taskStatusAfter = hdp.getTaskStatus(fetchedTasksCommitments[0]);
+        assertEq(uint256(taskStatusAfter), uint256(HdpExecutionStore.TaskStatus.FINALIZED));
 
         // Check if the task result is stored
-        bytes32 taskResult = hdp.getFinalizedTaskResult(
-            fetchedTasksCommitments[0]
-        );
+        bytes32 taskResult = hdp.getFinalizedTaskResult(fetchedTasksCommitments[0]);
         assertEq(taskResult, fetchedResults[0]);
     }
 
@@ -203,9 +177,7 @@ contract HdpExecutionStoreTest is Test {
         return abi.decode(abiEncoded, (bytes32));
     }
 
-    function bytesToString(
-        bytes memory _data
-    ) public pure returns (string memory) {
+    function bytesToString(bytes memory _data) public pure returns (string memory) {
         bytes memory buffer = new bytes(_data.length);
         for (uint256 i = 0; i < _data.length; i++) {
             bytes1 b = _data[i];
@@ -260,17 +232,7 @@ contract HdpExecutionStoreTest is Test {
             taskResults
         ) = abi.decode(
             abiEncoded,
-            (
-                uint256[],
-                uint256[],
-                bytes32[],
-                bytes32,
-                bytes32,
-                bytes32[][],
-                bytes32[][],
-                bytes32[],
-                bytes32[]
-            )
+            (uint256[], uint256[], bytes32[], bytes32, bytes32, bytes32[][], bytes32[][], bytes32[], bytes32[])
         );
     }
 }
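Both test files split each bytes32 Merkle root into two 128-bit limbs before authenticating them against the Cairo program's output. The Uint256Splitter library itself is untouched by this commit; as a rough sketch only, and assuming from the call sites above that the low half is returned first, it must behave like:

    pragma solidity ^0.8.4;

    // Hypothetical sketch; the real library ships with the repository and is not shown in this diff.
    library Uint256SplitterSketch {
        /// @dev Splits `value` into its low and high 128-bit halves.
        function split128(uint256 value) internal pure returns (uint256 low, uint256 high) {
            low = value & ((1 << 128) - 1); // mask off the lower 128 bits
            high = value >> 128; // shift the upper 128 bits down
        }
    }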
98 changes: 23 additions & 75 deletions test/ModuleHdpExecutionStore.t.sol
@@ -20,10 +20,7 @@ contract MockFactsRegistry is IFactsRegistry {
 contract MockAggregatorsFactory is IAggregatorsFactory {
     mapping(uint256 => ISharpFactsAggregator) public aggregatorsById;
 
-    function createAggregator(
-        uint256 id,
-        ISharpFactsAggregator aggregator
-    ) external {
+    function createAggregator(uint256 id, ISharpFactsAggregator aggregator) external {
         aggregatorsById[id] = aggregator;
     }
 }
@@ -38,13 +35,12 @@ contract MockSharpFactsAggregator is ISharpFactsAggregator {
     }
 
     function aggregatorState() external view returns (AggregatorState memory) {
-        return
-            AggregatorState({
-                poseidonMmrRoot: usedMmrRoot,
-                keccakMmrRoot: bytes32(0),
-                mmrSize: usedMmrSize,
-                continuableParentHash: bytes32(0)
-            });
+        return AggregatorState({
+            poseidonMmrRoot: usedMmrRoot,
+            keccakMmrRoot: bytes32(0),
+            mmrSize: usedMmrSize,
+            continuableParentHash: bytes32(0)
+        });
     }
 }
 
@@ -77,26 +73,12 @@ contract HdpExecutionStoreTest is Test {
         // !! And construct corresponding BlockSampledDatalake and ComputationalTask here
         bytes32[] memory moduleInputs = new bytes32[](2);
         moduleInputs[0] = bytes32(uint256(5382820));
-        assertEq(
-            moduleInputs[0],
-            bytes32(
-                0x00000000000000000000000000000000000000000000000000000000005222a4
-            )
-        );
-        moduleInputs[1] = bytes32(
-            uint256(113007187165825507614120510246167695609561346261)
-        );
-        assertEq(
-            moduleInputs[1],
-            bytes32(
-                0x00000000000000000000000013cb6ae34a13a0977f4d7101ebc24b87bb23f0d5
-            )
-        );
+        assertEq(moduleInputs[0], bytes32(0x00000000000000000000000000000000000000000000000000000000005222a4));
+        moduleInputs[1] = bytes32(uint256(113007187165825507614120510246167695609561346261));
+        assertEq(moduleInputs[1], bytes32(0x00000000000000000000000013cb6ae34a13a0977f4d7101ebc24b87bb23f0d5));
 
         ModuleTask memory moduleTask = ModuleTask({
-            programHash: bytes32(
-                0x064041a339b1edd10de83cf031cfa938645450f971d2527c90d4c2ce68d7d412
-            ),
+            programHash: bytes32(0x064041a339b1edd10de83cf031cfa938645450f971d2527c90d4c2ce68d7d412),
             inputs: moduleInputs
         });
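The assertions above pin down the module-input encoding: 5382820 equals 0x5222a4, and the second input is the 160-bit value 0x13cb6ae34a13a0977f4d7101ebc24b87bb23f0d5 (an address-sized quantity) left-padded to 32 bytes, the standard widening an address receives via bytes32(uint256(uint160(addr))).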

@@ -115,11 +97,7 @@
 
         // Get program hash from compiled Cairo program
         programHash = _getProgramHash();
-        hdp = new HdpExecutionStore(
-            factsRegistry,
-            aggregatorsFactory,
-            programHash
-        );
+        hdp = new HdpExecutionStore(factsRegistry, aggregatorsFactory, programHash);
 
         // Parse from input file
         (
@@ -139,25 +117,17 @@
         assertEq(fetchedTasksCommitments[0], moduleTaskCommitment);
 
         // Mock SHARP facts aggregator
-        sharpFactsAggregator = new MockSharpFactsAggregator(
-            fetchedMmrRoots[0],
-            fetchedMmrSizes[0]
-        );
+        sharpFactsAggregator = new MockSharpFactsAggregator(fetchedMmrRoots[0], fetchedMmrSizes[0]);
 
         // Create mock SHARP facts aggregator
-        aggregatorsFactory.createAggregator(
-            fetchedMmrIds[0],
-            sharpFactsAggregator
-        );
+        aggregatorsFactory.createAggregator(fetchedMmrIds[0], sharpFactsAggregator);
     }
 
     function testHdpExecutionFlow() public {
-        (uint256 taskRootLow, uint256 taskRootHigh) = Uint256Splitter.split128(
-            uint256(bytes32(fetchedTasksMerkleRoot))
-        );
+        (uint256 taskRootLow, uint256 taskRootHigh) = Uint256Splitter.split128(uint256(bytes32(fetchedTasksMerkleRoot)));
 
-        (uint256 resultRootLow, uint256 resultRootHigh) = Uint256Splitter
-            .split128(uint256(bytes32(fetchedResultsMerkleRoot)));
+        (uint256 resultRootLow, uint256 resultRootHigh) =
+            Uint256Splitter.split128(uint256(bytes32(fetchedResultsMerkleRoot)));
 
         // Cache MMR root
         for (uint256 i = 0; i < fetchedMmrIds.length; i++) {
@@ -188,18 +158,11 @@ contract HdpExecutionStoreTest is Test {
         );
 
         // Check if the task state is FINALIZED
-        HdpExecutionStore.TaskStatus taskStatusAfter = hdp.getTaskStatus(
-            fetchedTasksCommitments[0]
-        );
-        assertEq(
-            uint256(taskStatusAfter),
-            uint256(HdpExecutionStore.TaskStatus.FINALIZED)
-        );
+        HdpExecutionStore.TaskStatus taskStatusAfter = hdp.getTaskStatus(fetchedTasksCommitments[0]);
+        assertEq(uint256(taskStatusAfter), uint256(HdpExecutionStore.TaskStatus.FINALIZED));
 
         // Check if the task result is stored
-        bytes32 taskResult = hdp.getFinalizedTaskResult(
-            fetchedTasksCommitments[0]
-        );
+        bytes32 taskResult = hdp.getFinalizedTaskResult(fetchedTasksCommitments[0]);
         assertEq(taskResult, fetchedResults[0]);
     }
 
@@ -214,10 +177,7 @@ contract HdpExecutionStoreTest is Test {
         return abi.decode(abiEncoded, (bytes32));
     }
 
-    function _callPreprocessCli(
-        bytes memory encodedTask,
-        bytes memory encodedDatalake
-    ) internal {
+    function _callPreprocessCli(bytes memory encodedTask, bytes memory encodedDatalake) internal {
         string[] memory inputs = new string[](4);
         inputs[0] = "node";
         inputs[1] = "./helpers/fetch_cairo_input.js";
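Note that _callPreprocessCli shells out to a Node script through Foundry's FFI cheatcode (the vm.ffi(inputs) call in the next hunk), so these tests require FFI to be enabled: run forge test --ffi, or set ffi = true in foundry.toml.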
@@ -226,9 +186,7 @@
         vm.ffi(inputs);
     }
 
-    function bytesToString(
-        bytes memory _data
-    ) public pure returns (string memory) {
+    function bytesToString(bytes memory _data) public pure returns (string memory) {
         bytes memory buffer = new bytes(_data.length);
         for (uint256 i = 0; i < _data.length; i++) {
             bytes1 b = _data[i];
@@ -283,17 +241,7 @@ contract HdpExecutionStoreTest is Test {
             taskResults
         ) = abi.decode(
             abiEncoded,
-            (
-                uint256[],
-                uint256[],
-                bytes32[],
-                bytes32,
-                bytes32,
-                bytes32[][],
-                bytes32[][],
-                bytes32[],
-                bytes32[]
-            )
+            (uint256[], uint256[], bytes32[], bytes32, bytes32, bytes32[][], bytes32[][], bytes32[], bytes32[])
         );
     }
 }
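Both files' mocks implement the same two interfaces. Reconstructed from the diff alone (a sketch only -- the real declarations live under src/interfaces/ and may carry additional members), the aggregator surface the tests depend on looks like:

    pragma solidity ^0.8.4;

    // Sketch of the aggregator interface as the mocks above exercise it.
    interface ISharpFactsAggregator {
        struct AggregatorState {
            bytes32 poseidonMmrRoot; // root the tests pin to the fetched MMR root
            bytes32 keccakMmrRoot; // zeroed in the mocks
            uint256 mmrSize;
            bytes32 continuableParentHash; // zeroed in the mocks
        }

        function aggregatorState() external view returns (AggregatorState memory);
    }

    // Sketch of the factory interface, matching MockAggregatorsFactory.
    interface IAggregatorsFactory {
        function createAggregator(uint256 id, ISharpFactsAggregator aggregator) external;
    }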
[Diff for the third changed file did not load and is not shown.]
