diff --git a/packages/protocol/contracts/L1/BasedOperator.sol b/packages/protocol/contracts/L1/BasedOperator.sol index f33d5a97dd2c..78996838e517 100644 --- a/packages/protocol/contracts/L1/BasedOperator.sol +++ b/packages/protocol/contracts/L1/BasedOperator.sol @@ -40,7 +40,7 @@ contract BasedOperator is EssentialContract, TaikoErrors { /// @dev Struct representing transition to be proven. struct ProofBatch { - TaikoData.BlockMetadata _block; + TaikoData.BlockMetadata blockMetadata; TaikoData.Transition transition; ProofData[] proofs; address prover; @@ -51,12 +51,17 @@ contract BasedOperator is EssentialContract, TaikoErrors { uint256 public constant MAX_BLOCKS_TO_VERIFY = 5; uint256 public constant PROVING_WINDOW = 1 hours; - TaikoL1 public taiko; - VerifierRegistry public verifierRegistry; - address public treasury; + address public treasury; // (?) mapping(uint256 => Block) public blocks; + function init(address _addressManager) external initializer { + if (_addressManager == address(0)) { + revert L1_INVALID_ADDRESS(); + } + __Essential_init(_addressManager); + } + /// @dev Proposes a Taiko L2 block. 
function proposeBlock( bytes calldata params, @@ -71,7 +76,7 @@ contract BasedOperator is EssentialContract, TaikoErrors { { require(msg.value == PROVER_BOND, "Prover bond not expected"); - _block = taiko.proposeBlock(params, txList); + _block = TaikoL1(resolve("taiko", false)).proposeBlock(params, txList); // Check if we have whitelisted proposers if (!_isProposerPermitted(_block)) { @@ -91,25 +96,30 @@ contract BasedOperator is EssentialContract, TaikoErrors { ProofBatch memory proofBatch = abi.decode(data, (ProofBatch)); // Check who can prove the block - TaikoData.Block memory taikoBlock = taiko.getBlock(proofBatch._block.l2BlockNumber); + TaikoData.Block memory taikoBlock = + TaikoL1(resolve("taiko", false)).getBlock(proofBatch.blockMetadata.l2BlockNumber); if (block.timestamp < taikoBlock.timestamp + PROVING_WINDOW) { require( - proofBatch.prover == blocks[proofBatch._block.l2BlockNumber].assignedProver, + proofBatch.prover == blocks[proofBatch.blockMetadata.l2BlockNumber].assignedProver, "assigned prover not the prover" ); } + VerifierRegistry verifierRegistry = VerifierRegistry(resolve("verifier_registry", false)); + TaikoL1 taiko = TaikoL1(resolve("taiko", false)); // Verify the proofs uint160 prevVerifier = uint160(0); for (uint256 i = 0; i < proofBatch.proofs.length; i++) { IVerifier verifier = proofBatch.proofs[i].verifier; // Make sure each verifier is unique - require(prevVerifier >= uint160(address(verifier)), "duplicated verifier"); + if (prevVerifier >= uint160(address(verifier))) { + revert L1_INVALID_OR_DUPLICATE_VERIFIER(); + } // Make sure it's a valid verifier require(verifierRegistry.isVerifier(address(verifier)), "invalid verifier"); // Verify the proof verifier.verifyProof( - proofBatch._block, + proofBatch.blockMetadata, proofBatch.transition, proofBatch.prover, proofBatch.proofs[i].proof @@ -124,16 +134,23 @@ contract BasedOperator is EssentialContract, TaikoErrors { // Only allow an already proven block to be overwritten when the 
verifiers used are now // invalid // Get the currently stored transition - TaikoData.TransitionState memory storedTransition = - taiko.getTransition(proofBatch._block.l2BlockNumber, proofBatch.transition.parentHash); - if (storedTransition.blockHash != proofBatch.transition.blockHash) { - // TODO(Brecht): Check that one of the verifiers is now poissoned - } else { + TaikoData.TransitionState memory storedTransition = taiko.getTransition( + proofBatch.blockMetadata.l2BlockNumber, proofBatch.transition.parentBlockHash + ); + + // Somehow we need to check if this is proven already and IF YES and transition is trying to + // prove the same, then revert with "block already proven". + if ( + storedTransition.isProven == true + && storedTransition.blockHash == proofBatch.transition.blockHash + ) { revert("block already proven"); + } else { + // TODO(Brecht): Check that one of the verifiers is now poisoned } // Prove the block - taiko.proveBlock(proofBatch._block, proofBatch.transition, proofBatch.prover); + taiko.proveBlock(proofBatch.blockMetadata, proofBatch.transition, proofBatch.prover); // Verify some blocks _verifyBlocks(MAX_BLOCKS_TO_VERIFY); @@ -144,6 +161,7 @@ contract BasedOperator is EssentialContract, TaikoErrors { } function _verifyBlocks(uint256 maxBlocksToVerify) internal { + TaikoL1 taiko = TaikoL1(resolve("taiko", false)); uint256 lastVerifiedBlockIdBefore = taiko.getLastVerifiedBlockId(); // Verify the blocks taiko.verifyBlocks(maxBlocksToVerify); diff --git a/packages/protocol/contracts/L1/TaikoData.sol b/packages/protocol/contracts/L1/TaikoData.sol index c9cad2d43767..be787595fa6f 100644 --- a/packages/protocol/contracts/L1/TaikoData.sol +++ b/packages/protocol/contracts/L1/TaikoData.sol @@ -23,6 +23,7 @@ library TaikoData { /// @dev Struct containing data only required for proving a block struct BlockMetadata { bytes32 blockHash; + bytes32 parentBlockHash; bytes32 parentMetaHash; bytes32 l1Hash; uint256 difficulty; @@ -40,16 +41,17 @@ library
TaikoData { /// @dev Struct representing transition to be proven. struct Transition { - bytes32 parentHash; + bytes32 parentBlockHash; bytes32 blockHash; } /// @dev Struct representing state transition data. struct TransitionState { - bytes32 blockHash; + bytes32 blockHash; //Might be removed.. uint64 timestamp; address prover; uint64 verifiableAfter; + bool isProven; } /// @dev Struct containing data required for verifying a block. diff --git a/packages/protocol/contracts/L1/TaikoErrors.sol b/packages/protocol/contracts/L1/TaikoErrors.sol index ef2671b04d11..545a9843ab41 100644 --- a/packages/protocol/contracts/L1/TaikoErrors.sol +++ b/packages/protocol/contracts/L1/TaikoErrors.sol @@ -20,6 +20,7 @@ abstract contract TaikoErrors { error L1_BLOB_NOT_FOUND(); error L1_BLOB_NOT_REUSEABLE(); error L1_BLOCK_MISMATCH(); + error L1_INCORRECT_BLOCK(); error L1_INSUFFICIENT_TOKEN(); error L1_INVALID_ADDRESS(); error L1_INVALID_AMOUNT(); @@ -27,6 +28,7 @@ abstract contract TaikoErrors { error L1_INVALID_CONFIG(); error L1_INVALID_ETH_DEPOSIT(); error L1_INVALID_L1_STATE_BLOCK(); + error L1_INVALID_OR_DUPLICATE_VERIFIER(); error L1_INVALID_PARAM(); error L1_INVALID_PAUSE_STATUS(); error L1_INVALID_PROOF(); diff --git a/packages/protocol/contracts/L1/TaikoL1.sol b/packages/protocol/contracts/L1/TaikoL1.sol index 813787fbbf78..4d0663bba089 100644 --- a/packages/protocol/contracts/L1/TaikoL1.sol +++ b/packages/protocol/contracts/L1/TaikoL1.sol @@ -68,10 +68,11 @@ contract TaikoL1 is EssentialContract, TaikoEvents, TaikoErrors { // Verify L1 data // TODO(Brecht): needs to be more configurable for preconfirmations require(_block.l1Hash == blockhash(_block.l1StateBlockNumber), "INVALID_L1_BLOCKHASH"); + require(_block.blockHash != 0x0, "INVALID_L2_BLOCKHASH"); require(_block.difficulty == block.prevrandao, "INVALID_DIFFICULTY"); - require(_block.timestamp == uint64(block.timestamp), "INVALID_TIMESTAMP"); // Verify misc data require(_block.gasLimit == config.blockMaxGasLimit, 
"INVALID_GAS_LIMIT"); + require(_block.blobUsed == (txList.length == 0), "INVALID_BLOB_USED"); // Verify DA data if (_block.blobUsed) { @@ -93,7 +94,8 @@ contract TaikoL1 is EssentialContract, TaikoEvents, TaikoErrors { ); TaikoData.Block storage parentBlock = state.blocks[(state.numBlocks - 1)]; - require(_block.parentMetaHash == parentBlock.metaHash, "invalid parentMetaHash"); + + require(_block.parentMetaHash == parentBlock.metaHash, "invalid parentHash"); // Verify the passed in L1 state block number. // We only allow the L1 block to be 4 epochs old. @@ -130,11 +132,10 @@ contract TaikoL1 is EssentialContract, TaikoEvents, TaikoErrors { // Store the block state.blocks[state.numBlocks] = blk; - // Store the passed in block hash as in - state.transitions[blk.blockId][_block.parentMetaHash].blockHash = _block.blockHash; - // For now it does not matter - we are not going to prove anyways - state.transitions[blk.blockId][_block.parentMetaHash].verifiableAfter = - uint64(block.timestamp) + 365 days; + // Store the passed in block hash as is + state.transitions[blk.blockId][_block.parentBlockHash].blockHash = _block.blockHash; + // Big enough number so that we are sure we don't hit that deadline in the future. + state.transitions[blk.blockId][_block.parentBlockHash].verifiableAfter = type(uint64).max; // Increment the counter (cursor) by 1. 
state.numBlocks++; @@ -167,14 +168,17 @@ contract TaikoL1 is EssentialContract, TaikoEvents, TaikoErrors { TaikoData.Block storage blk = state.blocks[_block.l2BlockNumber]; // Make sure the correct block was proven - require(blk.metaHash != keccak256(abi.encode(_block)), "incorrect block"); + if (blk.metaHash != keccak256(abi.encode(_block))) { + revert L1_INCORRECT_BLOCK(); + } // Store the transition TaikoData.TransitionState storage storedTransition = - state.transitions[_block.l2BlockNumber][transition.parentHash]; + state.transitions[_block.l2BlockNumber][transition.parentBlockHash]; storedTransition.blockHash = transition.blockHash; storedTransition.prover = prover; storedTransition.verifiableAfter = uint32(block.timestamp + SECURITY_DELAY_AFTER_PROVEN); + storedTransition.isProven = true; emit TransitionProved({ blockId: _block.l2BlockNumber, tran: transition, prover: prover }); } @@ -190,21 +194,22 @@ contract TaikoL1 is EssentialContract, TaikoEvents, TaikoErrors { // Get the last verified blockhash TaikoData.Block storage blk = state.blocks[state.lastVerifiedBlockId]; bytes32 blockHash = blk.blockHash; - // Go to the first unverified block uint256 blockId = uint256(state.lastVerifiedBlockId) + 1; uint256 numBlocksVerified; + while (blockId < state.numBlocks && numBlocksVerified < maxBlocksToVerify) { blk = state.blocks[blockId]; - - // Check if the parent block hash matches the actual block hash of the parent // Check if the timestamp is older than required if ( - state.transitions[blockId][blockHash].blockHash == bytes32(0) - || block.timestamp < state.transitions[blockId][blockHash].verifiableAfter + block + // Genesis is already verified with initialization so if we do not allow to set + // blockHash = bytes32(0), then we can remove the bytes32(0) check. 
+ /*state.transitions[blockId][blockHash].blockHash == bytes32(0) + || */ + .timestamp < state.transitions[blockId][blockHash].verifiableAfter ) { break; } - // Copy the blockhash to the block blk.blockHash = state.transitions[blockId][blockHash].blockHash; // Update latest block hash @@ -256,6 +261,10 @@ contract TaikoL1 is EssentialContract, TaikoEvents, TaikoErrors { return uint256(state.lastVerifiedBlockId); } + function getNumOfBlocks() public view returns (uint256) { + return uint256(state.numBlocks); + } + /// @notice Gets the configuration of the TaikoL1 contract. /// @return Config struct containing configuration parameters. function getConfig() public view virtual returns (TaikoData.Config memory) { diff --git a/packages/protocol/contracts/L1/VerifierBattleRoyale.sol b/packages/protocol/contracts/L1/VerifierBattleRoyale.sol index 607926014b34..b0a99dca6e29 100644 --- a/packages/protocol/contracts/L1/VerifierBattleRoyale.sol +++ b/packages/protocol/contracts/L1/VerifierBattleRoyale.sol @@ -84,7 +84,10 @@ contract VerifierBattleRoyale is EssentialContract { TaikoData.Transition memory transitionA = proofBatch.proofs[0].transition; TaikoData.Transition memory transitionB = proofBatch.proofs[1].transition; - require(transitionA.parentHash == transitionB.parentHash, "parentHash not the same"); + require( + transitionA.parentBlockHash == transitionB.parentBlockHash, + "parentHash not the same" + ); require(transitionA.blockHash != transitionB.blockHash, "blockhash the same"); } else if (proofBatch.proofs.length == 3) { /* Multiple verifiers in a consensus show that another verifier is faulty */ @@ -105,7 +108,10 @@ contract VerifierBattleRoyale is EssentialContract { for (uint256 i = 0; i < proofBatch.proofs.length - 1; i++) { TaikoData.Transition memory transitionA = proofBatch.proofs[i].transition; TaikoData.Transition memory transitionB = proofBatch.proofs[i + 1].transition; - require(transitionA.parentHash == transitionB.parentHash, "parentHash not the 
same"); + require( + transitionA.parentBlockHash == transitionB.parentBlockHash, + "parentHash not the same" + ); if (i < proofBatch.proofs.length - 2) { require(transitionA.blockHash == transitionB.blockHash, "blockhash the same"); } else { diff --git a/packages/protocol/contracts/L1/actors/PBSActor.sol b/packages/protocol/contracts/L1/actors/PBSActor.sol index f94ff47bc7c4..8cf570be4c8e 100644 --- a/packages/protocol/contracts/L1/actors/PBSActor.sol +++ b/packages/protocol/contracts/L1/actors/PBSActor.sol @@ -24,7 +24,7 @@ contract PBSActor { bytes calldata params, bytes calldata txList, bytes memory proverPaymentData, - bytes32 parentMetaHash, + bytes32 parentHash, uint256 tip ) external @@ -35,7 +35,7 @@ contract PBSActor { operator.proposeBlock{ value: msg.value - tip }(params, txList, proverPaymentData); // Check if parent block has the right meta hash - require(keccak256(abi.encode(_block)) == parentMetaHash, "unexpected parent"); + require(keccak256(abi.encode(_block)) == parentHash, "unexpected parent"); // Do conditional payment address(block.coinbase).sendEther(tip); diff --git a/packages/protocol/deployments/deploy_l1.json b/packages/protocol/deployments/deploy_l1.json index f9779cac12a9..6741e491f969 100644 --- a/packages/protocol/deployments/deploy_l1.json +++ b/packages/protocol/deployments/deploy_l1.json @@ -1,6 +1,10 @@ { - "address_manager": "0x19A827174F66B3c66ad7063951D7b4F94f996e77", - "bridge": "0x1a76F7BA873f90805B49A51cBA617E699Cf142B0", - "erc721_vault": "0x9D46a79Ad6e0dcb36AbAb982e608e186E6826b7C", - "signal_service": "0x798684a55404079b77E19A86325e0c11eA5BB09D" + "address_manager": "0x5991A2dF15A8F6A256D3Ec51E99254Cd3fb576A9", + "based_operator": "0xa0Cb889707d426A7A386870A03bc70d1b0697598", + "sgx1": "0xD6BbDE9174b1CdAa358d2Cf4D57D1a9F7178FBfF", + "sgx2": "0x15cF58144EF33af1e14b5208015d11F9143E27b9", + "sgx3": "0x212224D2F2d262cd093eE13240ca4873fcCBbA3C", + "taiko": "0x2e234DAe75C793f67A35089C9d99245E1C58470b", + "taiko_token": 
"0x3D7Ebc40AF7092E3F1C81F2e996cbA5Cae2090d7", + "verifier_registry": "0xA4AD4f68d0b91CFD19687c881e50f3A00242828c" } \ No newline at end of file diff --git a/packages/protocol/test/L1/TaikoL1.t.sol b/packages/protocol/test/L1/TaikoL1.t.sol index 9942e6c8c94e..1d29b4fd0063 100644 --- a/packages/protocol/test/L1/TaikoL1.t.sol +++ b/packages/protocol/test/L1/TaikoL1.t.sol @@ -2,196 +2,88 @@ pragma solidity ^0.8.20; import "./TaikoL1TestBase.sol"; -/* -contract TaikoL1_NoCooldown is TaikoL1 { - function getConfig() public view override returns (TaikoData.Config memory config) { - config = TaikoL1.getConfig(); - // over-write the following - config.maxBlocksToVerifyPerProposal = 0; - config.blockMaxProposals = 10; - config.blockRingBufferSize = 12; - config.livenessBond = 1e18; // 1 Taiko token - } -} - -contract Verifier { - fallback(bytes calldata) external returns (bytes memory) { - return bytes.concat(keccak256("taiko")); - } -} contract TaikoL1Test is TaikoL1TestBase { function deployTaikoL1() internal override returns (TaikoL1) { - return TaikoL1( - payable( - deployProxy({ name: "taiko", impl: address(new TaikoL1_NoCooldown()), data: "" }) - ) - ); + return + TaikoL1(payable(deployProxy({ name: "taiko", impl: address(new TaikoL1()), data: "" }))); } - /// @dev Test we can propose, prove, then verify more blocks than - /// 'blockMaxProposals' - function test_L1_more_blocks_than_ring_buffer_size() external { - giveEthAndTko(Alice, 1e8 ether, 100 ether); - // This is a very weird test (code?) issue here. - // If this line (or Bob's query balance) is uncommented, - // Alice/Bob has no balance.. (Causing reverts !!!) 
- console2.log("Alice balance:", tko.balanceOf(Alice)); - giveEthAndTko(Bob, 1e8 ether, 100 ether); - console2.log("Bob balance:", tko.balanceOf(Bob)); - giveEthAndTko(Carol, 1e8 ether, 100 ether); - // Bob - vm.prank(Bob, Bob); - - bytes32 parentHash = GENESIS_BLOCK_HASH; - - for (uint256 blockId = 1; blockId < conf.blockMaxProposals * 3; blockId++) { - //printVariables("before propose"); - (TaikoData.BlockMetadata memory meta,) = proposeBlock(Alice, Bob, 1_000_000, 1024); - //printVariables("after propose"); - mine(1); - - bytes32 blockHash = bytes32(1e10 + blockId); - bytes32 signalRoot = bytes32(1e9 + blockId); - proveBlock(Bob, Bob, meta, parentHash, blockHash, signalRoot, meta.minTier, ""); - vm.roll(block.number + 15 * 12); - - uint16 minTier = meta.minTier; - vm.warp(block.timestamp + L1.getTier(minTier).cooldownWindow + 1); - - verifyBlock(Carol, 1); - parentHash = blockHash; - } - printVariables(""); - } + function test_L1_propose_prove_and_verify_blocks_sequentially() external { + giveEthAndTko(Alice, 100 ether, 100 ether); + + TaikoData.BlockMetadata memory meta; - /// @dev Test more than one block can be proposed, proven, & verified in the - /// same L1 block. 
- function test_L1_multiple_blocks_in_one_L1_block() external { - giveEthAndTko(Alice, 1000 ether, 1000 ether); - console2.log("Alice balance:", tko.balanceOf(Alice)); - giveEthAndTko(Bob, 1e8 ether, 100 ether); - console2.log("Bob balance:", tko.balanceOf(Bob)); - giveEthAndTko(Carol, 1e8 ether, 100 ether); - // Bob - vm.prank(Bob, Bob); - - bytes32 parentHash = GENESIS_BLOCK_HASH; - - for (uint256 blockId = 1; blockId <= 20; ++blockId) { - printVariables("before propose"); - (TaikoData.BlockMetadata memory meta,) = proposeBlock(Alice, Bob, 1_000_000, 1024); - printVariables("after propose"); - - bytes32 blockHash = bytes32(1e10 + blockId); - bytes32 signalRoot = bytes32(1e9 + blockId); - - proveBlock(Bob, Bob, meta, parentHash, blockHash, signalRoot, meta.minTier, ""); - vm.roll(block.number + 15 * 12); - uint16 minTier = meta.minTier; - vm.warp(block.timestamp + L1.getTier(minTier).cooldownWindow + 1); - - verifyBlock(Alice, 2); - parentHash = blockHash; + vm.roll(block.number + 1); + vm.warp(block.timestamp + 12); + + bytes32 parentMetaHash; + for (uint64 blockId = 1; blockId <= 20; blockId++) { + printVariables("before propose & prove & verify"); + // Create metadata and propose the block + meta = createBlockMetaData(Alice, blockId, 1, true); + proposeBlock(Alice, Alice, meta, ""); + + // Create proofs and prove a block + BasedOperator.ProofBatch memory blockProofs = createProofs(meta, Alice, true); + proveBlock(Alice, abi.encode(blockProofs)); + + //Wait enough time and verify block + vm.warp(uint32(block.timestamp + L1.SECURITY_DELAY_AFTER_PROVEN() + 1)); + vm.roll(block.number + 10); + verifyBlock(1); + parentMetaHash = keccak256(abi.encode(meta)); + printVariables("after verify"); } - printVariables(""); } - /// @dev Test verifying multiple blocks in one transaction - function test_L1_verifying_multiple_blocks_once() external { - giveEthAndTko(Alice, 1000 ether, 1000 ether); - console2.log("Alice balance:", tko.balanceOf(Alice)); - giveEthAndTko(Bob, 1e8
 ether, 100 ether); - console2.log("Bob balance:", tko.balanceOf(Bob)); - giveEthAndTko(Carol, 1e8 ether, 100 ether); - // Bob - vm.prank(Bob, Bob); + function test_L1_propose_some_blocks_in_a_row_then_prove_and_verify() external { + giveEthAndTko(Alice, 100 ether, 100 ether); - bytes32 parentHash = GENESIS_BLOCK_HASH; + TaikoData.BlockMetadata[] memory blockMetaDatas = new TaikoData.BlockMetadata[](20); - for (uint256 blockId = 1; blockId <= conf.blockMaxProposals; blockId++) { - printVariables("before propose"); - (TaikoData.BlockMetadata memory meta,) = proposeBlock(Alice, Bob, 1_000_000, 1024); - printVariables("after propose"); + vm.roll(block.number + 1); + vm.warp(block.timestamp + 12); - bytes32 blockHash = bytes32(1e10 + blockId); - bytes32 signalRoot = bytes32(1e9 + blockId); - - proveBlock(Bob, Bob, meta, parentHash, blockHash, signalRoot, meta.minTier, ""); - parentHash = blockHash; + bytes32 parentMetaHash; + for (uint64 blockId = 1; blockId <= 20; blockId++) { + printVariables("before propose & prove & verify"); + // Create metadata and propose the block + blockMetaDatas[blockId - 1] = createBlockMetaData(Alice, blockId, 1, true); + proposeBlock(Alice, Alice, blockMetaDatas[blockId - 1], ""); + vm.roll(block.number + 1); + vm.warp(block.timestamp + 12); } - vm.roll(block.number + 15 * 12); - verifyBlock(Alice, conf.blockMaxProposals - 1); - printVariables("after verify"); - verifyBlock(Alice, conf.blockMaxProposals); - printVariables("after verify"); + for (uint64 blockId = 1; blockId <= 20; blockId++) { + // Create proofs and prove a block + BasedOperator.ProofBatch memory blockProofs = + createProofs(blockMetaDatas[blockId - 1], Alice, true); + proveBlock(Alice, abi.encode(blockProofs)); + + //Wait enough time and verify block (currently we simply just "wait enough" from latest + // block and not time it perfectly) + vm.warp(uint32(block.timestamp + L1.SECURITY_DELAY_AFTER_PROVEN() + 1)); + vm.roll(block.number + 10); + verifyBlock(1); + 
parentMetaHash = keccak256(abi.encode(blockMetaDatas[blockId - 1])); + printVariables("after verify 1"); + } } - /// @dev getCrossChainBlockHash tests - function test_L1_getCrossChainBlockHash0() external { - bytes32 genHash = L1.getSyncedSnippet(0).blockHash; - assertEq(GENESIS_BLOCK_HASH, genHash); + function test_L1_propose_block_outside_the_4_epoch_window() external { + giveEthAndTko(Alice, 100 ether, 100 ether); - // Reverts if block is not yet verified! - vm.expectRevert(TaikoErrors.L1_BLOCK_MISMATCH.selector); - L1.getSyncedSnippet(1); - } - - /// @dev getSyncedSnippet tests - function test_L1_getSyncedSnippet() external { - uint64 count = 10; - // Declare here so that block prop/prove/verif. can be used in 1 place TaikoData.BlockMetadata memory meta; - bytes32 blockHash; - bytes32 signalRoot; - bytes32[] memory parentHashes = new bytes32[](count); - parentHashes[0] = GENESIS_BLOCK_HASH; - - giveEthAndTko(Alice, 1e6 ether, 100_000 ether); - console2.log("Alice balance:", tko.balanceOf(Alice)); - giveEthAndTko(Bob, 1e7 ether, 100_000 ether); - console2.log("Bob balance:", tko.balanceOf(Bob)); - - // Propose blocks - for (uint64 blockId = 1; blockId < count; ++blockId) { - printVariables("before propose"); - (meta,) = proposeBlock(Alice, Bob, 1_000_000, 1024); - mine(5); - - blockHash = bytes32(1e10 + uint256(blockId)); - signalRoot = bytes32(1e9 + uint256(blockId)); - - proveBlock( - Bob, Bob, meta, parentHashes[blockId - 1], blockHash, signalRoot, meta.minTier, "" - ); - - vm.roll(block.number + 15 * 12); - uint16 minTier = meta.minTier; - vm.warp(block.timestamp + L1.getTier(minTier).cooldownWindow + 1); - - verifyBlock(Carol, 1); - - // Querying written blockhash - assertEq(L1.getSyncedSnippet(blockId).blockHash, blockHash); - - mine(5); - parentHashes[blockId] = blockHash; - } - - uint64 queriedBlockId = 1; - bytes32 expectedSR = bytes32(1e9 + uint256(queriedBlockId)); - assertEq(expectedSR, L1.getSyncedSnippet(queriedBlockId).signalRoot); + 
vm.roll(block.number + 1); + vm.warp(block.timestamp + 12); - // 2nd - queriedBlockId = 2; - expectedSR = bytes32(1e9 + uint256(queriedBlockId)); - assertEq(expectedSR, L1.getSyncedSnippet(queriedBlockId).signalRoot); + // Create metadata and propose the block 129 blocks later only + meta = createBlockMetaData(Alice, 1, 1, true); + vm.roll(block.number + 129); + vm.warp(block.timestamp + 129 * 12); - // Not found -> reverts - vm.expectRevert(TaikoErrors.L1_BLOCK_MISMATCH.selector); - L1.getSyncedSnippet((count + 1)); + proposeBlock(Alice, Alice, meta, TaikoErrors.L1_INVALID_L1_STATE_BLOCK.selector); } } -*/ diff --git a/packages/protocol/test/L1/TaikoL1TestBase.sol b/packages/protocol/test/L1/TaikoL1TestBase.sol index 97392a52d24a..02278334205a 100644 --- a/packages/protocol/test/L1/TaikoL1TestBase.sol +++ b/packages/protocol/test/L1/TaikoL1TestBase.sol @@ -8,24 +8,30 @@ contract MockVerifier { return bytes.concat(keccak256("taiko")); } } - +*/ // TODO (dani): remove some code to sub-contracts, this one shall only contain // shared logics and data. 
+ abstract contract TaikoL1TestBase is TaikoTest { AddressManager public addressManager; - AssignmentHook public assignmentHook; + // AssignmentHook public assignmentHook; + BasedOperator public basedOperator; TaikoToken public tko; - SignalService public ss; + // SignalService public ss; TaikoL1 public L1; TaikoData.Config conf; uint256 internal logCount; - PseZkVerifier public pv; - SgxVerifier public sv; - SgxAndZkVerifier public sgxZkVerifier; - GuardianVerifier public gv; - GuardianProver public gp; - TaikoA6TierProvider public cp; - Bridge public bridge; + // PseZkVerifier public pv; + /* 3 proof verifiers - to fulfill the requirement in BasedOperator.sol */ + SgxVerifier public sv1; + SgxVerifier public sv2; + SgxVerifier public sv3; + VerifierRegistry public vr; + // SgxAndZkVerifier public sgxZkVerifier; + // GuardianVerifier public gv; + // GuardianProver public gp; + // TaikoA6TierProvider public cp; + // Bridge public bridge; bytes32 public GENESIS_BLOCK_HASH = keccak256("GENESIS_BLOCK_HASH"); @@ -35,6 +41,9 @@ abstract contract TaikoL1TestBase is TaikoTest { function deployTaikoL1() internal virtual returns (TaikoL1 taikoL1); function setUp() public virtual { + vm.roll(20_232_182); //A real Ethereum block number from Jul-04-2024 09:13:47 + vm.warp(1_720_077_269); + L1 = deployTaikoL1(); conf = L1.getConfig(); @@ -46,102 +55,162 @@ abstract contract TaikoL1TestBase is TaikoTest { }) ); - ss = SignalService( + basedOperator = BasedOperator( deployProxy({ - name: "signal_service", - impl: address(new SignalService()), - data: bytes.concat(SignalService.init.selector) + name: "operator", + impl: address(new BasedOperator()), + data: abi.encodeCall(BasedOperator.init, (address(addressManager))) }) ); - pv = PseZkVerifier( + vr = VerifierRegistry( deployProxy({ - name: "tier_pse_zkevm", - impl: address(new PseZkVerifier()), - data: bytes.concat(PseZkVerifier.init.selector, abi.encode(address(addressManager))) + name: "verifier_registry", + impl: address(new 
VerifierRegistry()), + data: abi.encodeCall(VerifierRegistry.init, (address(addressManager))) }) ); - sv = SgxVerifier( + registerAddress("taiko", address(L1)); + registerAddress("operator", address(basedOperator)); + registerAddress("verifier_registry", address(vr)); + + // ss = SignalService( + // deployProxy({ + // name: "signal_service", + // impl: address(new SignalService()), + // data: bytes.concat(SignalService.init.selector) + // }) + // ); + + // pv = PseZkVerifier( + // deployProxy({ + // name: "tier_pse_zkevm", + // impl: address(new PseZkVerifier()), + // data: bytes.concat(PseZkVerifier.init.selector, + // abi.encode(address(addressManager))) + // }) + // ); + + address sgxImpl = address(new SgxVerifier()); + //Naming is like: 3, 1, 2, is because we need to have incremental order of addresses in + // BasedOperator, so figured out this is actually the way + sv3 = SgxVerifier( deployProxy({ - name: "tier_sgx", - impl: address(new SgxVerifier()), + name: "sgx1", //Name does not matter now, since we check validity via + // verifierRegistry + impl: sgxImpl, data: bytes.concat(SgxVerifier.init.selector, abi.encode(address(addressManager))) }) ); + console2.log(address(sv1)); - address[] memory initSgxInstances = new address[](1); - initSgxInstances[0] = SGX_X_0; - sv.addInstances(initSgxInstances); - - sgxZkVerifier = SgxAndZkVerifier( - deployProxy({ - name: "tier_sgx_and_pse_zkevm", - impl: address(new SgxAndZkVerifier()), -data: bytes.concat(SgxAndZkVerifier.init.selector, abi.encode(address(addressManager))) - }) - ); - - gv = GuardianVerifier( - deployProxy({ - name: "guardian_verifier", - impl: address(new GuardianVerifier()), -data: bytes.concat(GuardianVerifier.init.selector, abi.encode(address(addressManager))) - }) - ); - - gp = GuardianProver( + sv1 = SgxVerifier( deployProxy({ - name: "guardian_prover", - impl: address(new GuardianProver()), -data: bytes.concat(GuardianProver.init.selector, abi.encode(address(addressManager))) + name: "sgx2", 
//Name does not matter now, since we check validity via + // verifierRegistry + impl: sgxImpl, + data: bytes.concat(SgxVerifier.init.selector, abi.encode(address(addressManager))) }) ); + console2.log(address(sv2)); - setupGuardianProverMultisig(); - - cp = TaikoA6TierProvider( + sv2 = SgxVerifier( deployProxy({ - name: "tier_provider", - impl: address(new TaikoA6TierProvider()), - data: bytes.concat(TaikoA6TierProvider.init.selector) + name: "sgx3", //Name does not matter now, since we check validity via + // verifierRegistry + impl: sgxImpl, + data: bytes.concat(SgxVerifier.init.selector, abi.encode(address(addressManager))) }) ); - bridge = Bridge( - payable( - deployProxy({ - name: "bridge", - impl: address(new Bridge()), - data: bytes.concat(Bridge.init.selector, abi.encode(addressManager)), - registerTo: address(addressManager), - owner: address(0) - }) - ) - ); - - assignmentHook = AssignmentHook( - deployProxy({ - name: "assignment_hook", - impl: address(new AssignmentHook()), -data: bytes.concat(AssignmentHook.init.selector, abi.encode(address(addressManager))) - }) - ); + console2.log(address(sv3)); - registerAddress("taiko", address(L1)); - registerAddress("tier_pse_zkevm", address(pv)); - registerAddress("tier_sgx", address(sv)); - registerAddress("tier_guardian", address(gv)); - registerAddress("tier_sgx_and_pse_zkevm", address(sgxZkVerifier)); - registerAddress("tier_provider", address(cp)); - registerAddress("signal_service", address(ss)); - registerAddress("guardian_prover", address(gp)); - registerAddress("bridge", address(bridge)); - registerL2Address("taiko", address(L2)); - registerL2Address("signal_service", address(L2SS)); - registerL2Address("taiko_l2", address(L2)); - - registerAddress(pv.getVerifierName(300), address(new MockVerifier())); + // Bootstrap / add first trusted instance -> SGX code needs some change tho - because + // changed since taiko-simplified was created first. 
+ address[] memory initSgxInstances = new address[](1); + initSgxInstances[0] = SGX_X_0; + sv1.addInstances(initSgxInstances); + sv2.addInstances(initSgxInstances); + sv3.addInstances(initSgxInstances); + + // Add those 3 to verifier registry + vr.addVerifier(address(sv1), "sgx1"); + vr.addVerifier(address(sv2), "sgx2"); + vr.addVerifier(address(sv3), "sgx3"); + + // address[] memory initSgxInstances = new address[](1); + // initSgxInstances[0] = SGX_X_0; + // sv.addInstances(initSgxInstances); + + // sgxZkVerifier = SgxAndZkVerifier( + // deployProxy({ + // name: "tier_sgx_and_pse_zkevm", + // impl: address(new SgxAndZkVerifier()), + // data: bytes.concat(SgxAndZkVerifier.init.selector, abi.encode(address(addressManager))) + // }) + // ); + + // gv = GuardianVerifier( + // deployProxy({ + // name: "guardian_verifier", + // impl: address(new GuardianVerifier()), + // data: bytes.concat(GuardianVerifier.init.selector, abi.encode(address(addressManager))) + // }) + // ); + + // gp = GuardianProver( + // deployProxy({ + // name: "guardian_prover", + // impl: address(new GuardianProver()), + // data: bytes.concat(GuardianProver.init.selector, abi.encode(address(addressManager))) + // }) + // ); + + // setupGuardianProverMultisig(); + + // cp = TaikoA6TierProvider( + // deployProxy({ + // name: "tier_provider", + // impl: address(new TaikoA6TierProvider()), + // data: bytes.concat(TaikoA6TierProvider.init.selector) + // }) + // ); + + // bridge = Bridge( + // payable( + // deployProxy({ + // name: "bridge", + // impl: address(new Bridge()), + // data: bytes.concat(Bridge.init.selector, abi.encode(addressManager)), + // registerTo: address(addressManager), + // owner: address(0) + // }) + // ) + // ); + + // assignmentHook = AssignmentHook( + // deployProxy({ + // name: "assignment_hook", + // impl: address(new AssignmentHook()), + // data: bytes.concat(AssignmentHook.init.selector, abi.encode(address(addressManager))) + // }) + // ); + + // registerAddress("taiko", 
address(L1)); + // registerAddress("tier_pse_zkevm", address(pv)); + // registerAddress("tier_sgx", address(sv)); + // registerAddress("tier_guardian", address(gv)); + // registerAddress("tier_sgx_and_pse_zkevm", address(sgxZkVerifier)); + // registerAddress("tier_provider", address(cp)); + // registerAddress("signal_service", address(ss)); + // registerAddress("guardian_prover", address(gp)); + // registerAddress("bridge", address(bridge)); + // registerL2Address("taiko", address(L2)); + // registerL2Address("signal_service", address(L2SS)); + // registerL2Address("taiko_l2", address(L2)); + + // registerAddress(pv.getVerifierName(300), address(new MockVerifier())); tko = TaikoToken( deployProxy({ @@ -154,7 +223,7 @@ data: bytes.concat(AssignmentHook.init.selector, abi.encode(address(addressManag "TTKOk", address(this) ) - ), + ), registerTo: address(addressManager), owner: address(0) }) @@ -167,178 +236,102 @@ data: bytes.concat(AssignmentHook.init.selector, abi.encode(address(addressManag function proposeBlock( address proposer, address prover, - uint32 gasLimit, - uint24 txListSize - ) - internal - returns ( - TaikoData.BlockMetadata memory meta - ) - { - TaikoData.TierFee[] memory tierFees = new TaikoData.TierFee[](5); - // Register the tier fees - // Based on OPL2ConfigTier we need 3: - // - LibTiers.TIER_PSE_ZKEVM; - // - LibTiers.TIER_SGX; - // - LibTiers.TIER_OPTIMISTIC; - // - LibTiers.TIER_GUARDIAN; - // - LibTiers.TIER_SGX_AND_PSE_ZKEVM - tierFees[0] = TaikoData.TierFee(LibTiers.TIER_OPTIMISTIC, 1 ether); - tierFees[1] = TaikoData.TierFee(LibTiers.TIER_SGX, 1 ether); - tierFees[2] = TaikoData.TierFee(LibTiers.TIER_PSE_ZKEVM, 2 ether); - tierFees[3] = TaikoData.TierFee(LibTiers.TIER_SGX_AND_PSE_ZKEVM, 2 ether); - tierFees[4] = TaikoData.TierFee(LibTiers.TIER_GUARDIAN, 0 ether); - // For the test not to fail, set the message.value to the highest, the - // rest will be returned - // anyways - uint256 msgValue = 2 ether; - - AssignmentHook.ProverAssignment 
memory assignment = AssignmentHook.ProverAssignment({ - feeToken: address(0), - tierFees: tierFees, - expiry: uint64(block.timestamp + 60 minutes), - maxBlockId: 0, - maxProposedIn: 0, - metaHash: 0, - signature: new bytes(0) - }); - - assignment.signature = - _signAssignment(prover, assignment, address(L1), keccak256(new bytes(txListSize))); - - (, TaikoData.SlotB memory b) = L1.getStateVariables(); - - uint256 _difficulty; - unchecked { - _difficulty = block.prevrandao * b.numBlocks; - } - - meta.timestamp = uint64(block.timestamp); - meta.l1Height = uint64(block.number - 1); - meta.l1Hash = blockhash(block.number - 1); - meta.difficulty = bytes32(_difficulty); - meta.gasLimit = gasLimit; - - TaikoData.HookCall[] memory hookcalls = new TaikoData.HookCall[](1); - - hookcalls[0] = TaikoData.HookCall(address(assignmentHook), abi.encode(assignment)); - - vm.prank(proposer, proposer); - meta = L1.proposeBlock{ value: msgValue }( - abi.encode(TaikoData.BlockParams(prover, 0, 0, 0, 0, false, 0, hookcalls)), - new bytes(txListSize) - ); - } - - function proveBlock( - address msgSender, - address prover, TaikoData.BlockMetadata memory meta, - bytes32 parentHash, - bytes32 blockHash, - bytes32 signalRoot, - uint16 tier, bytes4 revertReason ) internal + returns (TaikoData.BlockMetadata memory) { - TaikoData.Transition memory tran = TaikoData.Transition({ - parentHash: parentHash, - blockHash: blockHash, - signalRoot: signalRoot, - graffiti: 0x0 - }); - - bytes32 instance = - pv.calcInstance(tran, prover, keccak256(abi.encode(meta)), meta.blobHash, 0); - - TaikoData.TierProof memory proof; - proof.tier = tier; - { - PseZkVerifier.ZkEvmProof memory zkProof; - zkProof.verifierId = 300; - zkProof.zkp = bytes.concat( - bytes16(0), - bytes16(instance), - bytes16(0), - bytes16(uint128(uint256(instance))), - new bytes(100) + // TaikoData.TierFee[] memory tierFees = new TaikoData.TierFee[](5); + // // Register the tier fees + // // Based on OPL2ConfigTier we need 3: + // // - 
LibTiers.TIER_PSE_ZKEVM; + // // - LibTiers.TIER_SGX; + // // - LibTiers.TIER_OPTIMISTIC; + // // - LibTiers.TIER_GUARDIAN; + // // - LibTiers.TIER_SGX_AND_PSE_ZKEVM + // tierFees[0] = TaikoData.TierFee(LibTiers.TIER_OPTIMISTIC, 1 ether); + // tierFees[1] = TaikoData.TierFee(LibTiers.TIER_SGX, 1 ether); + // tierFees[2] = TaikoData.TierFee(LibTiers.TIER_PSE_ZKEVM, 2 ether); + // tierFees[3] = TaikoData.TierFee(LibTiers.TIER_SGX_AND_PSE_ZKEVM, 2 ether); + // tierFees[4] = TaikoData.TierFee(LibTiers.TIER_GUARDIAN, 0 ether); + // // For the test not to fail, set the message.value to the highest, the + // // rest will be returned + // // anyways + // uint256 msgValue = 2 ether; + + // AssignmentHook.ProverAssignment memory assignment = AssignmentHook.ProverAssignment({ + // feeToken: address(0), + // tierFees: tierFees, + // expiry: uint64(block.timestamp + 60 minutes), + // maxBlockId: 0, + // maxProposedIn: 0, + // metaHash: 0, + // signature: new bytes(0) + // }); + + // assignment.signature = + // _signAssignment(prover, assignment, address(L1), keccak256(new bytes(txListSize))); + + // (, TaikoData.SlotB memory b) = L1.getStateVariables(); + + // uint256 _difficulty; + // unchecked { + // _difficulty = block.prevrandao; + // } + + // meta.blockHash = blockHash; + // meta.parentHash = parentHash; + + // meta.timestamp = uint64(block.timestamp); + // meta.l1Height = uint64(block.number - 1); + // meta.l1Hash = blockhash(block.number - 1); + // meta.difficulty = bytes32(_difficulty); + // meta.gasLimit = gasLimit; + + // TaikoData.HookCall[] memory hookcalls = new TaikoData.HookCall[](1); + + // hookcalls[0] = TaikoData.HookCall(address(assignmentHook), abi.encode(assignment)); + + bytes memory dummyTxList = + hex"0000000000000000000000000000000000000000000000000000000000000001"; + bytes memory emptyTxList; + + if (revertReason == "") { + vm.prank(proposer, proposer); + meta = basedOperator.proposeBlock{ value: 1 ether / 10 }( + abi.encode(meta), meta.blobUsed == 
true ? emptyTxList : dummyTxList, prover ); - - proof.data = abi.encode(zkProof); - } - - address newInstance; - // Keep changing the pub key associated with an instance to avoid - // attacks, - // obviously just a mock due to 2 addresses changing all the time. - (newInstance,) = sv.instances(0); - if (newInstance == SGX_X_0) { - newInstance = SGX_X_1; } else { - newInstance = SGX_X_0; - } - - if (tier == LibTiers.TIER_SGX) { - bytes memory signature = - createSgxSignatureProof(tran, newInstance, prover, keccak256(abi.encode(meta))); - - proof.data = bytes.concat(bytes4(0), bytes20(newInstance), signature); + vm.prank(proposer, proposer); + vm.expectRevert(revertReason); + meta = basedOperator.proposeBlock{ value: 1 ether / 10 }( + abi.encode(meta), meta.blobUsed == true ? emptyTxList : dummyTxList, prover + ); } - if (tier == LibTiers.TIER_SGX_AND_PSE_ZKEVM) { - bytes memory signature = - createSgxSignatureProof(tran, newInstance, prover, keccak256(abi.encode(meta))); - - bytes memory sgxProof = bytes.concat(bytes4(0), bytes20(newInstance), signature); - // Concatenate SGX and ZK (in this order) - proof.data = bytes.concat(sgxProof, proof.data); - } + return meta; + } - if (tier == LibTiers.TIER_GUARDIAN) { - proof.data = ""; - - // Grant 2 signatures, 3rd might be a revert - vm.prank(David, David); - gp.approve(meta, tran, proof); - vm.prank(Emma, Emma); - gp.approve(meta, tran, proof); - - if (revertReason != "") { - vm.prank(Frank, Frank); - vm.expectRevert(); // Revert reason is 'wrapped' so will not be - // identical to the expectedRevert - gp.approve(meta, tran, proof); - } else { - vm.prank(Frank, Frank); - gp.approve(meta, tran, proof); - } - } else { - if (revertReason != "") { - vm.prank(msgSender, msgSender); - vm.expectRevert(revertReason); - L1.proveBlock(meta.id, abi.encode(meta, tran, proof)); - } else { - vm.prank(msgSender, msgSender); - L1.proveBlock(meta.id, abi.encode(meta, tran, proof)); - } - } + function proveBlock(address prover, bytes 
memory blockProof) internal { + vm.prank(prover, prover); + basedOperator.proveBlock(blockProof); } - function verifyBlock(address, uint64 count) internal { - L1.verifyBlocks(count); + function verifyBlock(uint64 count) internal { + basedOperator.verifyBlocks(count); } - function setupGuardianProverMultisig() internal { - address[] memory initMultiSig = new address[](5); - initMultiSig[0] = David; - initMultiSig[1] = Emma; - initMultiSig[2] = Frank; - initMultiSig[3] = Grace; - initMultiSig[4] = Henry; + // function setupGuardianProverMultisig() internal { + // address[] memory initMultiSig = new address[](5); + // initMultiSig[0] = David; + // initMultiSig[1] = Emma; + // initMultiSig[2] = Frank; + // initMultiSig[3] = Grace; + // initMultiSig[4] = Henry; - gp.setGuardians(initMultiSig, 3); - } + // gp.setGuardians(initMultiSig, 3); + // } function registerAddress(bytes32 nameHash, address addr) internal { addressManager.setAddress(uint64(block.chainid), nameHash, addr); @@ -350,32 +343,32 @@ data: bytes.concat(AssignmentHook.init.selector, abi.encode(address(addressManag console2.log(conf.chainId, string(abi.encodePacked(nameHash)), unicode"→", addr); } - function _signAssignment( - address signer, - AssignmentHook.ProverAssignment memory assignment, - address taikoAddr, - bytes32 blobHash - ) - internal - view - returns (bytes memory signature) - { - uint256 signerPrivateKey; - - // In the test suite these are the 3 which acts as provers - if (signer == Alice) { - signerPrivateKey = 0x1; - } else if (signer == Bob) { - signerPrivateKey = 0x2; - } else if (signer == Carol) { - signerPrivateKey = 0x3; - } - - (uint8 v, bytes32 r, bytes32 s) = vm.sign( - signerPrivateKey, assignmentHook.hashAssignment(assignment, taikoAddr, blobHash) - ); - signature = abi.encodePacked(r, s, v); - } + // function _signAssignment( + // address signer, + // AssignmentHook.ProverAssignment memory assignment, + // address taikoAddr, + // bytes32 blobHash + // ) + // internal + // view 
+ // returns (bytes memory signature) + // { + // uint256 signerPrivateKey; + + // // In the test suite these are the 3 which acts as provers + // if (signer == Alice) { + // signerPrivateKey = 0x1; + // } else if (signer == Bob) { + // signerPrivateKey = 0x2; + // } else if (signer == Carol) { + // signerPrivateKey = 0x3; + // } + + // (uint8 v, bytes32 r, bytes32 s) = vm.sign( + // signerPrivateKey, assignmentHook.hashAssignment(assignment, taikoAddr, blobHash) + // ); + // signature = abi.encodePacked(r, s, v); + // } function createSgxSignatureProof( TaikoData.Transition memory tran, @@ -387,7 +380,7 @@ data: bytes.concat(AssignmentHook.init.selector, abi.encode(address(addressManag view returns (bytes memory signature) { - bytes32 digest = sv.getSignedHash(tran, newInstance, prover, metaHash); + bytes32 digest = sv1.getSignedHash(tran, newInstance, prover, metaHash); uint256 signerPrivateKey; @@ -408,40 +401,118 @@ data: bytes.concat(AssignmentHook.init.selector, abi.encode(address(addressManag vm.prank(to, to); tko.approve(address(L1), amountTko); - vm.prank(to, to); - tko.approve(address(assignmentHook), amountTko); + // vm.prank(to, to); + // tko.approve(address(assignmentHook), amountTko); console2.log("TKO balance:", to, tko.balanceOf(to)); console2.log("ETH balance:", to, to.balance); } function printVariables(string memory comment) internal { - (TaikoData.SlotA memory a, TaikoData.SlotB memory b) = L1.getStateVariables(); - string memory str = string.concat( Strings.toString(logCount++), ":[", - Strings.toString(b.lastVerifiedBlockId), + Strings.toString(L1.getLastVerifiedBlockId()), unicode"→", - Strings.toString(b.numBlocks), - "]" - ); - - str = string.concat( - str, - " nextEthDepositToProcess:", - Strings.toString(a.nextEthDepositToProcess), - " numEthDeposits:", - Strings.toString(a.numEthDeposits), - " // ", + Strings.toString(L1.getNumOfBlocks()), + "] // ", comment ); console2.log(str); } function mine(uint256 counts) internal { - 
vm.warp(block.timestamp + 20 * counts); + vm.warp(block.timestamp + 12 * counts); vm.roll(block.number + counts); } + + function createBlockMetaData( + address coinbase, + uint64 l2BlockNumber, + uint32 belowBlockTipHeight, // How many blocks below from current tip (block.id) + bool blobUsed + ) + internal + returns (TaikoData.BlockMetadata memory meta) + { + meta.blockHash = randBytes32(); + + TaikoData.Block memory parentBlock = L1.getBlock(l2BlockNumber - 1); + meta.parentMetaHash = parentBlock.metaHash; + meta.parentBlockHash = parentBlock.blockHash; + meta.l1Hash = blockhash(block.number - belowBlockTipHeight); + meta.difficulty = block.prevrandao; + meta.blobHash = randBytes32(); + meta.coinbase = coinbase; + meta.l2BlockNumber = l2BlockNumber; + meta.gasLimit = L1.getConfig().blockMaxGasLimit; + meta.l1StateBlockNumber = uint32(block.number - belowBlockTipHeight); + meta.timestamp = uint64(block.timestamp - (belowBlockTipHeight * 12)); // x blocks behind + + if (blobUsed) { + meta.txListByteOffset = 0; + meta.txListByteSize = 0; + meta.blobUsed = true; + } else { + meta.txListByteOffset = 0; + meta.txListByteSize = 32; // Corresponding dummyTxList is set during proposeBlock() + meta.blobUsed = false; + } + } + + function createProofs( + TaikoData.BlockMetadata memory meta, + address prover, + bool threeMockSGXProofs // Used to indicate to "trick" the BasedProver with 3 different (but + // same code) deployments of SGX verifier - later we can fine tune to have 3 correct, + // valid proofs. 
+ ) + internal + view + returns (BasedOperator.ProofBatch memory proofBatch) + { + // Set metadata + proofBatch.blockMetadata = meta; + + // Set transition + TaikoData.Transition memory transition; + transition.parentBlockHash = L1.getBlock(meta.l2BlockNumber - 1).blockHash; + transition.blockHash = meta.blockHash; + proofBatch.transition = transition; + + // Set prover + proofBatch.prover = prover; + + address newInstance; + // Keep changing the pub key associated with an instance to avoid + // attacks, + // obviously just a mock due to 2 addresses changing all the time. + (newInstance,) = sv1.instances(0); + if (newInstance == SGX_X_0) { + newInstance = SGX_X_1; + } else { + newInstance = SGX_X_0; + } + + BasedOperator.ProofData[] memory proofs = new BasedOperator.ProofData[](3); + + bytes memory signature = + createSgxSignatureProof(transition, newInstance, prover, keccak256(abi.encode(meta))); + + proofs[0].verifier = sv1; + proofs[0].proof = bytes.concat(bytes4(0), bytes20(newInstance), signature); + + if (threeMockSGXProofs) { + proofs[1].verifier = sv2; + proofs[1].proof = bytes.concat(bytes4(0), bytes20(newInstance), signature); + + proofs[2].verifier = sv3; + proofs[2].proof = bytes.concat(bytes4(0), bytes20(newInstance), signature); + } else { + //Todo(dani): Implement more proof and verifiers when needed/available but for now, not + // to change the code in BasedOperator, maybe best to mock those 3 + } + + proofBatch.proofs = proofs; + } } -*/ diff --git a/packages/protocol/test/TaikoTest.sol b/packages/protocol/test/TaikoTest.sol index 5a025d4eadc2..3f003d406dbe 100644 --- a/packages/protocol/test/TaikoTest.sol +++ b/packages/protocol/test/TaikoTest.sol @@ -18,8 +18,10 @@ import "../contracts/tokenvault/ERC721Vault.sol"; import "../contracts/tokenvault/ERC1155Vault.sol"; import "../contracts/L1/TaikoToken.sol"; -/*import "../contracts/L1/TaikoL1.sol"; +import "../contracts/L1/BasedOperator.sol"; +import "../contracts/L1/VerifierRegistry.sol"; import 
"../contracts/L1/verifiers/SgxVerifier.sol"; +/*import "../contracts/L1/TaikoL1.sol"; import "../contracts/L1/verifiers/GuardianVerifier.sol"; import "../contracts/L1/verifiers/PseZkVerifier.sol"; import "../contracts/L1/verifiers/SgxAndZkVerifier.sol";