From ef5e5f7a7ddbf887bfb93c32b633a15c6f604196 Mon Sep 17 00:00:00 2001 From: Stanislav Bezkorovainyi Date: Wed, 4 Oct 2023 16:10:26 +0200 Subject: [PATCH 1/2] Boojum integration (#35) Co-authored-by: Marcin M <128217157+mm-zk@users.noreply.github.com> Co-authored-by: Dennis <10233439+idea404@users.noreply.github.com> Co-authored-by: Shahar Kaminsky Co-authored-by: Vlad Bochok <41153528+vladbochok@users.noreply.github.com> Co-authored-by: koloz193 Co-authored-by: AntonD3 <74021421+AntonD3@users.noreply.github.com> --- .github/ISSUE_TEMPLATE/bug_report.md | 6 +- SystemConfig.json | 2 +- bootloader/bootloader.yul | 811 +++++--- contracts/AccountCodeStorage.sol | 3 +- contracts/BootloaderUtilities.sol | 1 + contracts/BytecodeCompressor.sol | 92 - contracts/ComplexUpgrader.sol | 10 +- contracts/Compressor.sol | 254 +++ contracts/Constants.sol | 80 +- contracts/ContractDeployer.sol | 11 +- contracts/DefaultAccount.sol | 1 + contracts/EmptyContract.sol | 1 + contracts/EventWriter.yul | 5 +- contracts/ImmutableSimulator.sol | 1 + contracts/KnownCodesStorage.sol | 87 +- contracts/L1Messenger.sol | 325 ++- contracts/L2EthToken.sol | 14 +- contracts/MsgValueSimulator.sol | 1 + contracts/NonceHolder.sol | 1 + contracts/SystemContext.sol | 199 +- contracts/interfaces/IBytecodeCompressor.sol | 10 - contracts/interfaces/IComplexUpgrader.sol | 5 +- contracts/interfaces/ICompressor.sol | 24 + contracts/interfaces/IKnownCodesStorage.sol | 6 +- contracts/interfaces/IL1Messenger.sol | 39 + contracts/interfaces/IMailbox.sol | 2 +- contracts/interfaces/ISystemContext.sol | 5 +- .../interfaces/ISystemContextDeprecated.sol | 2 +- contracts/interfaces/ISystemContract.sol | 29 +- contracts/libraries/EfficientCall.sol | 28 +- contracts/libraries/RLPEncoder.sol | 5 + contracts/libraries/SystemContractHelper.sol | 48 +- contracts/libraries/SystemContractsCaller.sol | 1 + contracts/libraries/TransactionHelper.sol | 1 + contracts/libraries/UnsafeBytesCalldata.sol | 20 + contracts/libraries/Utils.sol | 1 + contracts/precompiles/EcAdd.yul | 441 ++++ contracts/precompiles/EcMul.yul | 495 +++++ contracts/precompiles/Ecrecover.yul | 5 +- contracts/precompiles/Keccak256.yul | 5 +- contracts/precompiles/SHA256.yul | 5 +- contracts/test-contracts/Callable.sol | 19 + contracts/test-contracts/Deployable.sol | 19 + contracts/test-contracts/DummyUpgrade.sol | 11 + contracts/test-contracts/EventWriterTest.sol | 31 + contracts/test-contracts/MockERC20Approve.sol | 16 + .../test-contracts/MockKnownCodesStorage.sol | 19 + contracts/test-contracts/MockL1Messenger.sol | 16 + contracts/test-contracts/NotSystemCaller.sol | 30 + contracts/test-contracts/SystemCaller.sol | 25 + .../test-contracts/TestSystemContract.sol | 2 +- contracts/tests/Counter.sol | 11 - contracts/tests/TransactionHelperTest.sol | 13 - hardhat.config.ts | 19 +- package.json | 8 +- scripts/compile-yul.ts | 25 +- scripts/constants.ts | 4 +- scripts/deploy-preimages.ts | 2 +- scripts/process.ts | 17 +- scripts/quick-setup.sh | 15 + test/AccountCodeStorage.spec.ts | 225 ++ test/BootloaderUtilities.spec.ts | 182 ++ test/ComplexUpgrader.spec.ts | 49 + test/Compressor.spec.ts | 533 +++++ test/ContractDeployer.spec.ts | 548 +++++ test/DefaultAccount.spec.ts | 377 ++++ test/EcAdd.spec.ts | 188 ++ test/EcMul.spec.ts | 399 ++++ test/EmptyContract.spec.ts | 44 + test/EventWriter.spec.ts | 82 + test/ImmutableSimulator.spec.ts | 64 + test/KnownCodesStorage.spec.ts | 157 ++ test/shared/constants.ts | 14 + test/shared/transactions.ts | 146 ++ test/shared/utils.ts | 133 ++ 
test/system-contract-test.test.ts | 51 - test/utils/DiamonCutFacet.json | 295 --- test/utils/DiamondUpgradeInit.json | 446 ---- test/utils/IZkSync.json | 1841 ----------------- test/utils/deployOnAnyAddress.ts | 141 -- yarn.lock | 290 ++- 81 files changed, 6057 insertions(+), 3532 deletions(-) delete mode 100644 contracts/BytecodeCompressor.sol create mode 100644 contracts/Compressor.sol delete mode 100644 contracts/interfaces/IBytecodeCompressor.sol create mode 100644 contracts/interfaces/ICompressor.sol create mode 100644 contracts/precompiles/EcAdd.yul create mode 100644 contracts/precompiles/EcMul.yul create mode 100644 contracts/test-contracts/Callable.sol create mode 100644 contracts/test-contracts/Deployable.sol create mode 100644 contracts/test-contracts/DummyUpgrade.sol create mode 100644 contracts/test-contracts/EventWriterTest.sol create mode 100644 contracts/test-contracts/MockERC20Approve.sol create mode 100644 contracts/test-contracts/MockKnownCodesStorage.sol create mode 100644 contracts/test-contracts/MockL1Messenger.sol create mode 100644 contracts/test-contracts/NotSystemCaller.sol create mode 100644 contracts/test-contracts/SystemCaller.sol delete mode 100644 contracts/tests/Counter.sol delete mode 100644 contracts/tests/TransactionHelperTest.sol create mode 100755 scripts/quick-setup.sh create mode 100644 test/AccountCodeStorage.spec.ts create mode 100644 test/BootloaderUtilities.spec.ts create mode 100644 test/ComplexUpgrader.spec.ts create mode 100644 test/Compressor.spec.ts create mode 100644 test/ContractDeployer.spec.ts create mode 100644 test/DefaultAccount.spec.ts create mode 100644 test/EcAdd.spec.ts create mode 100644 test/EcMul.spec.ts create mode 100644 test/EmptyContract.spec.ts create mode 100644 test/EventWriter.spec.ts create mode 100644 test/ImmutableSimulator.spec.ts create mode 100644 test/KnownCodesStorage.spec.ts create mode 100644 test/shared/constants.ts create mode 100644 test/shared/transactions.ts create mode 100644 test/shared/utils.ts delete mode 100644 test/system-contract-test.test.ts delete mode 100644 test/utils/DiamonCutFacet.json delete mode 100644 test/utils/DiamondUpgradeInit.json delete mode 100644 test/utils/IZkSync.json delete mode 100644 test/utils/deployOnAnyAddress.ts diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 2d3e38a6..3c160c5e 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -1,12 +1,12 @@ --- -name: Bug report -about: Use this template for reporting issues +name: Scripts-Related Bug Report +about: Use this template for reporting script related bugs. For contract bugs, see our security policy. 
title: '' labels: bug assignees: '' --- -### 🐛 Bug Report +### 🐛 Script Bug Report #### 📝 Description diff --git a/SystemConfig.json b/SystemConfig.json index ec966e1e..973e0dc0 100644 --- a/SystemConfig.json +++ b/SystemConfig.json @@ -9,7 +9,7 @@ "L1_TX_INTRINSIC_L2_GAS": 167157, "L1_TX_INTRINSIC_PUBDATA": 88, "MAX_GAS_PER_TRANSACTION": 80000000, - "BOOTLOADER_MEMORY_FOR_TXS": 485225, + "BOOTLOADER_MEMORY_FOR_TXS": 273132, "REFUND_GAS": 7343, "KECCAK_ROUND_COST_GAS": 40, "SHA256_ROUND_COST_GAS": 7, diff --git a/bootloader/bootloader.yul b/bootloader/bootloader.yul index 46f77f9c..be77edcc 100644 --- a/bootloader/bootloader.yul +++ b/bootloader/bootloader.yul @@ -3,81 +3,18 @@ object "Bootloader" { } object "Bootloader_deployed" { code { - /// @notice the address that will be the beneficiary of all the fees - let OPERATOR_ADDRESS := mload(0) - - let GAS_PRICE_PER_PUBDATA := 0 - - // Initializing batch params - { - /// @notice The hash of the previous batch - let PREV_BATCH_HASH := mload(32) - /// @notice The timestamp of the batch being processed - let NEW_BATCH_TIMESTAMP := mload(64) - /// @notice The number of the new batch being processed. - /// While this number is deterministic for each batch, we - /// still provide it here to ensure consistency between the state - /// of the VM and the state of the operator. - let NEW_BATCH_NUMBER := mload(96) - - /// @notice The gas price on L1 for ETH. In the future, a trustless value will be enforced. - /// For now, this value is trusted to be fairly provided by the operator. - let L1_GAS_PRICE := mload(128) - - /// @notice The minimal gas price that the operator agrees upon. - /// In the future, it will have an EIP1559-like lower bound. - let FAIR_L2_GAS_PRICE := mload(160) - - /// @notice The expected base fee by the operator. - /// Just like the batch number, while calculated on the bootloader side, - /// the operator still provides it to make sure that its data is in sync. - let EXPECTED_BASE_FEE := mload(192) - - validateOperatorProvidedPrices(L1_GAS_PRICE, FAIR_L2_GAS_PRICE) - - // This implementation of the bootloader relies on the correct version of the SystemContext - // and it can not be upgraded via a standard upgrade transaction, but needs to ensure - // correctness itself before any transaction is executed. - upgradeSystemContextIfNeeded() - - let baseFee := 0 - - - - // Only for the proved batch we enforce that the baseFee proposed - // by the operator is equal to the expected one. For the playground batch, we allow - // the operator to provide any baseFee the operator wants. 
- baseFee, GAS_PRICE_PER_PUBDATA := getBaseFee(L1_GAS_PRICE, FAIR_L2_GAS_PRICE) - if iszero(eq(baseFee, EXPECTED_BASE_FEE)) { - debugLog("baseFee", baseFee) - debugLog("EXPECTED_BASE_FEE", EXPECTED_BASE_FEE) - assertionError("baseFee inconsistent") - } - - setNewBatch(PREV_BATCH_HASH, NEW_BATCH_TIMESTAMP, NEW_BATCH_NUMBER, EXPECTED_BASE_FEE) - - - - - - baseFee, GAS_PRICE_PER_PUBDATA := getBaseFee(L1_GAS_PRICE, FAIR_L2_GAS_PRICE) - - let SHOULD_SET_NEW_BATCH := mload(224) - - switch SHOULD_SET_NEW_BATCH - case 0 { - unsafeOverrideBatch(NEW_BATCH_TIMESTAMP, NEW_BATCH_NUMBER, EXPECTED_BASE_FEE) - } - default { - setNewBatch(PREV_BATCH_HASH, NEW_BATCH_TIMESTAMP, NEW_BATCH_NUMBER, EXPECTED_BASE_FEE) - } - - - } + //////////////////////////////////////////////////////////////////////////// + // Function Declarations + //////////////////////////////////////////////////////////////////////////// // While we definitely cannot control the gas price on L1, // we need to check the operator does not provide any absurd numbers there - function MAX_ALLOWED_GAS_PRICE() -> ret { + function MAX_ALLOWED_L1_GAS_PRICE() -> ret { + // 100k gwei + ret := 100000000000000 + } + + function MAX_ALLOWED_FAIR_L2_GAS_PRICE() -> ret { // 10k gwei ret := 10000000000000 } @@ -85,11 +22,11 @@ object "Bootloader" { /// @dev This method ensures that the prices provided by the operator /// are not absurdly high function validateOperatorProvidedPrices(l1GasPrice, fairL2GasPrice) { - if gt(l1GasPrice, MAX_ALLOWED_GAS_PRICE()) { + if gt(l1GasPrice, MAX_ALLOWED_L1_GAS_PRICE()) { assertionError("L1 gas price too high") } - if gt(fairL2GasPrice, MAX_ALLOWED_GAS_PRICE()) { + if gt(fairL2GasPrice, MAX_ALLOWED_FAIR_L2_GAS_PRICE()) { assertionError("L2 fair gas price too high") } } @@ -143,12 +80,6 @@ object "Bootloader" { ret := {{MAX_GAS_PER_TRANSACTION}} } - /// @dev The maximum number of pubdata bytes that can be published with one - /// L1 batch - function MAX_PUBDATA_PER_BATCH() -> ret { - ret := {{MAX_PUBDATA_PER_BATCH}} - } - /// @dev The number of L1 gas needed to be spent for /// L1 byte. While a single pubdata byte costs `16` gas, /// we demand at least 17 to cover up for the costs of additional @@ -383,9 +314,47 @@ object "Bootloader" { ret := mul(COMPRESSED_BYTECODES_END_SLOT(), 32) } + /// @dev Slots needed to store priority txs L1 data (`chainedPriorityTxsHash` and `numberOfLayer1Txs`). + function PRIORITY_TXS_L1_DATA_RESERVED_SLOTS() -> ret { + ret := 2 + } + + /// @dev Slot from which storing of the priority txs L1 data begins. + function PRIORITY_TXS_L1_DATA_BEGIN_SLOT() -> ret { + ret := add(COMPRESSED_BYTECODES_BEGIN_SLOT(), COMPRESSED_BYTECODES_SLOTS()) + } + + /// @dev The byte from which storing of the priority txs L1 data begins. + function PRIORITY_TXS_L1_DATA_BEGIN_BYTE() -> ret { + ret := mul(PRIORITY_TXS_L1_DATA_BEGIN_SLOT(), 32) + } + + /// @dev Slot from which storing of the L1 Messenger pubdata begins. + function OPERATOR_PROVIDED_L1_MESSENGER_PUBDATA_BEGIN_SLOT() -> ret { + ret := add(PRIORITY_TXS_L1_DATA_BEGIN_SLOT(), PRIORITY_TXS_L1_DATA_RESERVED_SLOTS()) + } + + /// @dev The byte storing of the L1 Messenger pubdata begins. + function OPERATOR_PROVIDED_L1_MESSENGER_PUBDATA_BEGIN_BYTE() -> ret { + ret := mul(OPERATOR_PROVIDED_L1_MESSENGER_PUBDATA_BEGIN_SLOT(), 32) + } + + /// @dev Slots needed to store L1 Messenger pubdata. 
+ /// @dev Note that are many more these than the maximal pubdata in batch, since + /// it needs to also accomodate uncompressed state diffs that are required for the state diff + /// compression verification. + function OPERATOR_PROVIDED_L1_MESSENGER_PUBDATA_SLOTS() -> ret { + ret := {{OPERATOR_PROVIDED_L1_MESSENGER_PUBDATA_SLOTS}} + } + + /// @dev The slot right after the last slot of the L1 Messenger pubdata memory area. + function OPERATOR_PROVIDED_L1_MESSENGER_PUBDATA_END_SLOT() -> ret { + ret := add(OPERATOR_PROVIDED_L1_MESSENGER_PUBDATA_BEGIN_SLOT(), OPERATOR_PROVIDED_L1_MESSENGER_PUBDATA_SLOTS()) + } + /// @dev The slot from which the bootloader transactions' descriptions begin function TX_DESCRIPTION_BEGIN_SLOT() -> ret { - ret := COMPRESSED_BYTECODES_END_SLOT() + ret := OPERATOR_PROVIDED_L1_MESSENGER_PUBDATA_END_SLOT() } /// @dev The byte from which the bootloader transactions' descriptions begin @@ -517,6 +486,10 @@ object "Bootloader" { ret := 0x000000000000000000000000000000000000800e } + function L1_MESSENGER_ADDR() -> ret { + ret := 0x0000000000000000000000000000000000008008 + } + /// @dev The minimal allowed distance in bytes between the pointer to the compressed data /// and the end of the area dedicated for the compressed bytecodes. /// In fact, only distance of 192 should be sufficient: there it would be possible to insert @@ -550,123 +523,6 @@ object "Bootloader" { ret := 1000000 } - // Now, we iterate over all transactions, processing each of them - // one by one. - // Here, the `resultPtr` is the pointer to the memory slot, where we will write - // `true` or `false` based on whether the tx execution was successful, - - // The position at which the tx offset of the transaction should be placed - let currentExpectedTxOffset := add(TXS_IN_BATCH_LAST_PTR(), mul(MAX_POSTOP_SLOTS(), 32)) - - let txPtr := TX_DESCRIPTION_BEGIN_BYTE() - - // At the COMPRESSED_BYTECODES_BEGIN_BYTE() the pointer to the newest bytecode to be published - // is stored. - mstore(COMPRESSED_BYTECODES_BEGIN_BYTE(), add(COMPRESSED_BYTECODES_BEGIN_BYTE(), 0x20)) - - // Iterating through transaction descriptions - let transactionIndex := 0 - for { - let resultPtr := RESULT_START_PTR() - } lt(txPtr, TXS_IN_BATCH_LAST_PTR()) { - txPtr := add(txPtr, TX_DESCRIPTION_SIZE()) - resultPtr := add(resultPtr, 32) - transactionIndex := add(transactionIndex, 1) - } { - let execute := mload(txPtr) - - debugLog("txPtr", txPtr) - debugLog("execute", execute) - - if iszero(execute) { - // We expect that all transactions that are executed - // are continuous in the array. - break - } - - let txDataOffset := mload(add(txPtr, 0x20)) - - // We strongly enforce the positions of transactions - if iszero(eq(currentExpectedTxOffset, txDataOffset)) { - debugLog("currentExpectedTxOffset", currentExpectedTxOffset) - debugLog("txDataOffset", txDataOffset) - - assertionError("Tx data offset is incorrect") - } - - currentExpectedTxOffset := validateAbiEncoding(txDataOffset) - - // Checking whether the last slot of the transaction's description - // does not go out of bounds. 
- if gt(sub(currentExpectedTxOffset, 32), LAST_FREE_SLOT()) { - debugLog("currentExpectedTxOffset", currentExpectedTxOffset) - debugLog("LAST_FREE_SLOT", LAST_FREE_SLOT()) - - assertionError("currentExpectedTxOffset too high") - } - - validateTypedTxStructure(add(txDataOffset, 0x20)) - - - { - debugLog("ethCall", 0) - processTx(txDataOffset, resultPtr, transactionIndex, 0, GAS_PRICE_PER_PUBDATA) - } - - - { - let txMeta := mload(txPtr) - let processFlags := getWordByte(txMeta, 31) - debugLog("flags", processFlags) - - - // `processFlags` argument denotes which parts of execution should be done: - // Possible values: - // 0x00: validate & execute (normal mode) - // 0x02: perform ethCall (i.e. use mimicCall to simulate the call) - - let isETHCall := eq(processFlags, 0x02) - debugLog("ethCall", isETHCall) - processTx(txDataOffset, resultPtr, transactionIndex, isETHCall, GAS_PRICE_PER_PUBDATA) - } - - // Signal to the vm that the transaction execution is complete - setHook(VM_HOOK_TX_HAS_ENDED()) - // Increment tx index within the system. - considerNewTx() - } - - // The bootloader doesn't have to pay anything - setPricePerPubdataByte(0) - - // Resetting tx.origin and gasPrice to 0, so we don't pay for - // publishing them on-chain. - setTxOrigin(0) - setGasPrice(0) - - // Transferring all the ETH received in the batch to the operator - directETHTransfer( - selfbalance(), - OPERATOR_ADDRESS - ) - - // Hook that notifies that the operator should provide final information for the batch - setHook(VM_HOOK_FINAL_L2_STATE_INFO()) - - // Each batch typically ends with a special block which contains no transactions. - // So we need to have this method to reflect it in the system contracts too. - // - // The reason is that as of now our node requires that each storage write (event, etc) belongs to a particular - // L2 block. In case a batch is sealed by timeout (i.e. the resources of the batch have not been exhaused, but we need - // to seal it to assure timely finality), we need to process sending funds to the operator *after* the last - // non-empty L2 block has been already sealed. We can not override old L2 blocks, so we need to create a new empty "fictive" block for it. - // - // The other reason why we need to set this block is so that in case of empty batch (i.e. the one which has no transactions), - // the virtual block number as well as miniblock number are incremented. - setL2Block(transactionIndex) - - publishBatchDataToL1() - /// @dev Ceil division of integers function ceilDiv(x, y) -> ret { switch or(eq(x, 0), eq(y, 0)) @@ -702,7 +558,7 @@ object "Bootloader" { // We set the L2 block info for this particular transaction setL2Block(transactionIndex) - let innerTxDataOffset := add(txDataOffset, 0x20) + let innerTxDataOffset := add(txDataOffset, 32) // By default we assume that the transaction has failed. mstore(resultPtr, 0) @@ -724,11 +580,17 @@ object "Bootloader" { assertionError("Protocol upgrade tx not first") } - processL1Tx(txDataOffset, resultPtr, transactionIndex, userProvidedPubdataPrice) + // This is to be called in the event that the L1 Transaction is a protocol upgrade txn. + // Since this is upgrade transactions, we are okay that the gasUsed by the transaction will + // not cover this additional hash computation + let canonicalL1TxHash := getCanonicalL1TxHash(txDataOffset) + sendToL1Native(true, protocolUpgradeTxHashKey(), canonicalL1TxHash) + + processL1Tx(txDataOffset, resultPtr, transactionIndex, userProvidedPubdataPrice, false) } case 255 { // This is an L1->L2 transaction. 
- processL1Tx(txDataOffset, resultPtr, transactionIndex, userProvidedPubdataPrice) + processL1Tx(txDataOffset, resultPtr, transactionIndex, userProvidedPubdataPrice, true) } default { // The user has not agreed to this pubdata price @@ -770,45 +632,15 @@ object "Bootloader" { } } - /// @dev Checks whether the code hash of the system context contract is correct and updates it if needed. - /// @dev The bootloader implementation strictly relies of the ability of the system context contract to work with the - /// L2 blocks. However, the old system context did not support the correspodning interface at all. Usually we upgrade system context - /// via an upgrade transaction, but in this case the transaction won't be even processed, because of failure to create an L2 block. - function upgradeSystemContextIfNeeded() { - let expectedCodeHash := {{SYSTEM_CONTEXT_EXPECTED_CODE_HASH}} - - let actualCodeHash := extcodehash(SYSTEM_CONTEXT_ADDR()) - if iszero(eq(expectedCodeHash, actualCodeHash)) { - // Preparing the calldata to upgrade the SystemContext contract - {{UPGRADE_SYSTEM_CONTEXT_CALLDATA}} - - // We'll use a mimicCall to simulate the correct sender. - let success := mimicCallOnlyResult( - CONTRACT_DEPLOYER_ADDR(), - FORCE_DEPLOYER(), - 0, - 0, - 0, - 0, - 0, - 0 - ) - - if iszero(success) { - assertionError("system context upgrade fail") - } - } - } - /// @dev Calculates the canonical hash of the L1->L2 transaction that will be /// sent to L1 as a message to the L1 contract that a certain operation has been processed. function getCanonicalL1TxHash(txDataOffset) -> ret { // Putting the correct value at the `txDataOffset` just in case, since // the correctness of this value is not part of the system invariants. // Note, that the correct ABI encoding of the Transaction structure starts with 0x20 - mstore(txDataOffset, 0x20) + mstore(txDataOffset, 32) - let innerTxDataOffset := add(txDataOffset, 0x20) + let innerTxDataOffset := add(txDataOffset, 32) let dataLength := safeAdd(32, getDataLength(innerTxDataOffset), "qev") debugLog("HASH_OFFSET", innerTxDataOffset) @@ -823,7 +655,7 @@ object "Bootloader" { /// The operator will be paid at the end of the batch. function ensurePayment(txDataOffset, gasPrice) { // Skipping the first 0x20 byte in the encoding of the transaction. - let innerTxDataOffset := add(txDataOffset, 0x20) + let innerTxDataOffset := add(txDataOffset, 32) let from := getFrom(innerTxDataOffset) let requiredETH := safeMul(getGasLimit(innerTxDataOffset), gasPrice, "lal") @@ -942,7 +774,7 @@ object "Bootloader" { // 0x20 || context_len || context_bytes... let returnlen := returndatasize() // The minimal allowed returndatasize is 64: magicValue || offset - if lt(returnlen, 0x40) { + if lt(returnlen, 64) { revertWithReason( PAYMASTER_RETURNED_INVALID_CONTEXT(), 0 @@ -954,7 +786,7 @@ object "Bootloader" { // but it is so in fee estimation and we want to preserve as many operations as // in the original operation. 
{ - returndatacopy(0, 0, 0x20) + returndatacopy(0, 0, 32) let magic := mload(0) let isMagicCorrect := eq(magic, {{SUCCESSFUL_PAYMASTER_VALIDATION_MAGIC_VALUE}}) @@ -1011,14 +843,14 @@ object "Bootloader" { ) } - if gt(add(returnedContextOffset, add(0x20, returnedContextLen)), returnlen) { + if gt(add(returnedContextOffset, add(32, returnedContextLen)), returnlen) { revertWithReason( PAYMASTER_RETURNED_INVALID_CONTEXT(), 0 ) } - returndatacopy(PAYMASTER_CONTEXT_BEGIN_BYTE(), returnedContextOffset, add(0x20, returnedContextLen)) + returndatacopy(PAYMASTER_CONTEXT_BEGIN_BYTE(), returnedContextOffset, add(32, returnedContextLen)) } /// @dev The function responsible for processing L1->L2 transactions. @@ -1026,12 +858,14 @@ object "Bootloader" { /// @param resultPtr The pointer at which the result of the execution of this transaction /// @param transactionIndex The index of the transaction /// @param gasPerPubdata The price per pubdata to be used + /// @param isPriorityOp Whether the transaction is a priority one /// should be stored. function processL1Tx( txDataOffset, resultPtr, transactionIndex, gasPerPubdata, + isPriorityOp ) { // For L1->L2 transactions we always use the pubdata price provided by the transaction. // This is needed to ensure DDoS protection. All the excess expenditure @@ -1039,7 +873,7 @@ object "Bootloader" { setPricePerPubdataByte(gasPerPubdata) // Skipping the first formal 0x20 byte - let innerTxDataOffset := add(txDataOffset, 0x20) + let innerTxDataOffset := add(txDataOffset, 32) let gasLimitForTx, reservedGas := getGasLimitForTx( innerTxDataOffset, @@ -1127,10 +961,17 @@ object "Bootloader" { mstore(resultPtr, success) debugLog("Send message to L1", success) - - // Sending the L2->L1 to notify the L1 contracts that the priority - // operation has been processed. - sendToL1(true, canonicalL1TxHash, success) + + // Sending the L2->L1 log so users will be able to prove transaction execution result on L1. 
+ sendL2LogUsingL1Messenger(true, canonicalL1TxHash, success) + + if isPriorityOp { + // Update priority txs L1 data + mstore(0, mload(PRIORITY_TXS_L1_DATA_BEGIN_BYTE())) + mstore(32, canonicalL1TxHash) + mstore(PRIORITY_TXS_L1_DATA_BEGIN_BYTE(), keccak256(0, 64)) + mstore(add(PRIORITY_TXS_L1_DATA_BEGIN_BYTE(), 32), add(mload(add(PRIORITY_TXS_L1_DATA_BEGIN_BYTE(), 32)), 1)) + } } function getExecuteL1TxAndGetRefund(txDataOffset, gasForExecution) -> potentialRefund, success { @@ -1160,7 +1001,7 @@ object "Bootloader" { /// @return canonicalL1TxHash The hash of processed L1->L2 transaction /// @return gasUsedOnPreparation The number of L2 gas used in the preparation stage function l1TxPreparation(txDataOffset) -> canonicalL1TxHash, gasUsedOnPreparation { - let innerTxDataOffset := add(txDataOffset, 0x20) + let innerTxDataOffset := add(txDataOffset, 32) let gasBeforePreparation := gas() debugLog("gasBeforePreparation", gasBeforePreparation) @@ -1305,7 +1146,7 @@ object "Bootloader" { totalGasLimit := operatorTrustedGasLimit } - let txEncodingLen := safeAdd(0x20, getDataLength(innerTxDataOffset), "lsh") + let txEncodingLen := safeAdd(32, getDataLength(innerTxDataOffset), "lsh") let operatorOverheadForTransaction := getVerifiedOperatorOverheadForTx( transactionIndex, @@ -1452,7 +1293,7 @@ object "Bootloader" { setGasPrice(gasPrice) // Skipping the first 0x20 word of the ABI-encoding - let innerTxDataOffset := add(txDataOffset, 0x20) + let innerTxDataOffset := add(txDataOffset, 32) debugLog("Starting validation", 0) accountValidateTx(txDataOffset) @@ -1473,7 +1314,7 @@ object "Bootloader" { txDataOffset ) -> success { // Skipping the first word of the ABI-encoding encoding - let innerTxDataOffset := add(txDataOffset, 0x20) + let innerTxDataOffset := add(txDataOffset, 32) let from := getFrom(innerTxDataOffset) debugLog("Executing L2 tx", 0) @@ -1569,7 +1410,7 @@ object "Bootloader" { finalRefund := 0 - let innerTxDataOffset := add(txDataOffset, 0x20) + let innerTxDataOffset := add(txDataOffset, 32) let paymaster := getPaymaster(innerTxDataOffset) let refundRecipient := 0 @@ -1590,7 +1431,9 @@ object "Bootloader" { paymaster, txDataOffset, success, - gasLeft + // Since the paymaster will be refunded with reservedGas, + // it should know about it + safeAdd(gasLeft, reservedGas, "jkl"), )) let gasSpentByPostOp := sub(gasBeforePostOp, gas()) @@ -1791,7 +1634,7 @@ object "Bootloader" { txDataOffset ) -> success { // Skipping the first word of the ABI encoding of the struct - let innerTxDataOffset := add(txDataOffset, 0x20) + let innerTxDataOffset := add(txDataOffset, 32) let from := getFrom(innerTxDataOffset) let gasPrice := getMaxFeePerGas(innerTxDataOffset) @@ -1863,7 +1706,7 @@ object "Bootloader" { txDataOffset, resultPtr ) { - let innerTxDataOffset := add(txDataOffset, 0x20) + let innerTxDataOffset := add(txDataOffset, 32) let to := getTo(innerTxDataOffset) let from := getFrom(innerTxDataOffset) @@ -1981,7 +1824,7 @@ object "Bootloader" { /// 2. Overhead for taking up the bootloader memory. The bootloader memory has a cap on its length, mainly enforced to keep the RAM requirements /// for the node smaller. That is, the user needs to pay a share proportional to the length of the ABI encoding of the transaction. /// 3. Overhead for taking up a slot for the transaction. Since each batch has the limited number of transactions in it, the user must pay - /// at least 1/MAX_TRANSACTIONS_IN_BLOCK part of the overhead. + /// at least 1/MAX_TRANSACTIONS_IN_BATCH part of the overhead. 
function getTransactionUpfrontOverhead( txGasLimit, gasPerPubdataByte, @@ -2062,6 +1905,11 @@ object "Bootloader" { } } + /// @dev Returns constant that is equal to `keccak256("")` + function EMPTY_STRING_KECCAK() -> ret { + ret := 0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470 + } + /// @dev Returns whether x <= y function lte(x, y) -> ret { ret := or(lt(x,y), eq(x,y)) @@ -2092,6 +1940,7 @@ object "Bootloader" { ) } + // This method returns AccountAbstractVersion enum. // Currently only two versions are supported: 1 or 0, which basically // mean whether the contract is an account or not. if iszero(supportedVersion) { @@ -2171,7 +2020,7 @@ object "Bootloader" { mstore(add(txDataWithHashesOffset, 64), 96) let calldataPtr := prependSelector(txDataWithHashesOffset, selector) - let innerTxDataOffst := add(txDataOffset, 0x20) + let innerTxDataOffst := add(txDataOffset, 32) let len := getDataLength(innerTxDataOffst) @@ -2195,7 +2044,7 @@ object "Bootloader" { /// @dev Calculates and saves the explorer hash and the suggested signed hash for the transaction. function saveTxHashes(txDataOffset) { let calldataPtr := prependSelector(txDataOffset, {{GET_TX_HASHES_SELECTOR}}) - let innerTxDataOffst := add(txDataOffset, 0x20) + let innerTxDataOffst := add(txDataOffset, 32) let len := getDataLength(innerTxDataOffst) @@ -2260,7 +2109,7 @@ object "Bootloader" { // The length of selector + the first 7 fields (with context len) + context itself. let preTxLen := add(228, paddedContextLen) - let innerTxDataOffset := add(txDataOffset, 0x20) + let innerTxDataOffset := add(txDataOffset, 32) let calldataPtr := sub(innerTxDataOffset, preTxLen) { @@ -2344,7 +2193,7 @@ object "Bootloader" { /// this method also enforces that the nonce has been marked as used. function accountValidateTx(txDataOffset) { // Skipping the first 0x20 word of the ABI-encoding of the struct - let innerTxDataOffst := add(txDataOffset, 0x20) + let innerTxDataOffst := add(txDataOffset, 32) let from := getFrom(innerTxDataOffst) ensureAccount(from) @@ -2390,7 +2239,7 @@ object "Bootloader" { // `SHOULD_ENSURE_CORRECT_RETURNED_MAGIC` is false. It is never false in production // but it is so in fee estimation and we want to preserve as many operations as // in the original operation. - returndatacopy(0, 0, 0x20) + returndatacopy(0, 0, 32) let returnedValue := mload(0) let isMagicCorrect := eq(returnedValue, {{SUCCESSFUL_ACCOUNT_VALIDATION_MAGIC_VALUE}}) @@ -2521,7 +2370,7 @@ object "Bootloader" { isConstructorCall, isSystemCall ) -> ret { - let dataStart := add(dataPtr, 0x20) + let dataStart := add(dataPtr, 32) let dataLength := mload(dataPtr) // Skip dataOffset and memoryPage, because they are always zeros @@ -2580,38 +2429,143 @@ object "Bootloader" { - /// @dev Sends an L2->L1 log. + /// @dev Sends a L2->L1 log using L1Messengers' `sendL2ToL1Log`. /// @param isService The isService flag of the call. /// @param key The `key` parameter of the log. /// @param value The `value` parameter of the log. 
- function sendToL1(isService, key, value) { - verbatim_3i_0o("to_l1", isService, key, value) - } - - /// @dev Increment the number of txs in the batch - function considerNewTx() { - verbatim_0i_0o("increment_tx_counter") - } - - /// @dev Set the new price per pubdata byte - function setPricePerPubdataByte(newPrice) { - verbatim_1i_0o("set_pubdata_price", newPrice) - } + function sendL2LogUsingL1Messenger(isService, key, value) { + mstore(0, {{RIGHT_PADDED_SEND_L2_TO_L1_LOG_SELECTOR}}) + mstore(4, isService) + mstore(36, key) + mstore(68, value) - /// @dev Set the new value for the tx origin context value - function setTxOrigin(newTxOrigin) { - let success := setContextVal({{RIGHT_PADDED_SET_TX_ORIGIN}}, newTxOrigin) + let success := call( + gas(), + L1_MESSENGER_ADDR(), + 0, + 0, + 100, + 0, + 0 + ) if iszero(success) { - debugLog("Failed to set txOrigin", newTxOrigin) - nearCallPanic() + debugLog("Failed to send L1Messenger L2Log", key) + debugLog("Failed to send L1Messenger L2Log", value) + + revertWithReason(L1_MESSENGER_LOG_SENDING_FAILED_ERR_CODE(), 1) } } - /// @dev Set the new value for the gas price value - function setGasPrice(newGasPrice) { - let success := setContextVal({{RIGHT_PADDED_SET_GAS_PRICE}}, newGasPrice) - + /// @dev Sends a native (VM) L2->L1 log. + /// @param isService The isService flag of the call. + /// @param key The `key` parameter of the log. + /// @param value The `value` parameter of the log. + function sendToL1Native(isService, key, value) { + verbatim_3i_0o("to_l1", isService, key, value) + } + + /// @notice Performs L1 Messenger pubdata "publishing" call. + /// @dev Expected to be used at the end of the batch. + function l1MessengerPublishingCall() { + let ptr := OPERATOR_PROVIDED_L1_MESSENGER_PUBDATA_BEGIN_BYTE() + debugLog("Publishing batch data to L1", 0) + // First slot (only last 4 bytes) -- selector + mstore(ptr, {{PUBLISH_PUBDATA_SELECTOR}}) + // Second slot -- offset + mstore(add(ptr, 32), 32) + setHook(VM_HOOK_PUBDATA_REQUESTED()) + // Third slot -- length of pubdata + let len := mload(add(ptr, 64)) + // 4 bytes for selector, 32 bytes for array offset and 32 bytes for array length + let fullLen := add(len, 68) + + // ptr + 28 because the function selector only takes up the last 4 bytes in the first slot. 
+ let success := call( + gas(), + L1_MESSENGER_ADDR(), + 0, + add(ptr, 28), + fullLen, + 0, + 0 + ) + + if iszero(success) { + debugLog("Failed to publish L2Logs data", 0) + + revertWithReason(L1_MESSENGER_PUBLISHING_FAILED_ERR_CODE(), 1) + } + } + + function publishTimestampDataToL1() { + debugLog("Publishing timestamp data to L1", 0) + + mstore(0, {{RIGHT_PADDED_PUBLISH_TIMESTAMP_DATA_TO_L1_SELECTOR}}) + let success := call( + gas(), + SYSTEM_CONTEXT_ADDR(), + 0, + 0, + 4, + 0, + 0 + ) + + if iszero(success) { + debugLog("Failed publish timestamp data to L1", 0) + revertWithReason(FAILED_TO_PUBLISH_TIMESTAMP_DATA_TO_L1(), 1) + } + } + + /// @notice Performs a call of a System Context + /// method that have no input parameters + function callSystemContext(paddedSelector) { + mstore(0, paddedSelector) + + let success := call( + gas(), + SYSTEM_CONTEXT_ADDR(), + 0, + 0, + 4, + 0, + 0 + ) + + if iszero(success) { + debugLog("Failed to call System Context", 0) + + revertWithReason(FAILED_TO_CALL_SYSTEM_CONTEXT_ERR_CODE(), 1) + } + } + + /// @dev Increment the number of txs in the batch + function considerNewTx() { + verbatim_0i_0o("increment_tx_counter") + + callSystemContext({{RIGHT_PADDED_INCREMENT_TX_NUMBER_IN_BLOCK_SELECTOR}}) + } + + /// @dev Set the new price per pubdata byte + function setPricePerPubdataByte(newPrice) { + verbatim_1i_0o("set_pubdata_price", newPrice) + } + + /// @dev Set the new value for the tx origin context value + function setTxOrigin(newTxOrigin) { + let success := setContextVal({{RIGHT_PADDED_SET_TX_ORIGIN}}, newTxOrigin) + + if iszero(success) { + debugLog("Failed to set txOrigin", newTxOrigin) + nearCallPanic() + } + } + + /// @dev Set the new value for the gas price value + function setGasPrice(newGasPrice) { + let success := setContextVal({{RIGHT_PADDED_SET_GAS_PRICE}}, newGasPrice) + if iszero(success) { debugLog("Failed to set gas price", newGasPrice) nearCallPanic() @@ -2729,26 +2683,6 @@ object "Bootloader" { nearCallPanic() } } - } - - function publishBatchDataToL1() { - debugLog("Publishing batch data to L1", 0) - - mstore(0, {{RIGHT_PADDED_PUBLISH_BATCH_DATA_TO_L1_SELECTOR}}) - let success := call( - gas(), - SYSTEM_CONTEXT_ADDR(), - 0, - 0, - 4, - 0, - 0 - ) - - if iszero(success) { - debugLog("Failed publish batch data to L1", 0) - revertWithReason(FAILED_TO_PUBLISH_BATCH_DATA_TO_L1(), 1) - } } @@ -2897,13 +2831,20 @@ object "Bootloader" { if gt(reservedDynamicLength, 0) { assertionError("non-empty reservedDynamic") } - let txType := getTxType(innerTxDataOffset) switch txType case 0 { let maxFeePerGas := getMaxFeePerGas(innerTxDataOffset) let maxPriorityFeePerGas := getMaxPriorityFeePerGas(innerTxDataOffset) assertEq(maxFeePerGas, maxPriorityFeePerGas, "EIP1559 params wrong") + + + + let from := getFrom(innerTxDataOffset) + let iseoa := isEOA(from) + assertEq(iseoa, true, "Only EIP-712 can use non-EOA") + + // Here, for type 0 transactions the reserved0 field is used as a marker // whether the transaction should include chainId in its encoding. 
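[Editor's note — illustrative sketch, not part of the patch.] The hand-rolled calldata that `l1MessengerPublishingCall` (above) assembles is just the standard ABI layout for a call taking a single dynamic `bytes` argument: a 4-byte selector, a 32-byte offset word equal to 32, a 32-byte length word, then the payload. That is where `fullLen = add(len, 68)` comes from (68 = 4 + 32 + 32), and the `call` starts at `ptr + 28` because the selector occupies only the last 4 bytes of its 32-byte memory slot. Note the bootloader does not zero-pad the tail to a 32-byte boundary. The TypeScript sketch below mirrors that layout; the selector value is a placeholder for the templated `{{PUBLISH_PUBDATA_SELECTOR}}`.

```ts
// Sketch of the unpadded "selector || offset || length || payload" layout used by
// l1MessengerPublishingCall. Placeholder selector; real one is injected at build time.
import { strict as assert } from "assert";

function encodeSingleBytesCall(selectorHex: string, pubdata: Buffer): Buffer {
  const selector = Buffer.from(selectorHex.replace(/^0x/, ""), "hex"); // 4 bytes
  const offset = Buffer.alloc(32);
  offset.writeUInt32BE(32, 28); // offset of the bytes payload, relative to start of the args
  const length = Buffer.alloc(32);
  length.writeUInt32BE(pubdata.length, 28);
  return Buffer.concat([selector, offset, length, pubdata]);
}

const pubdata = Buffer.from("deadbeef", "hex");
const calldata = encodeSingleBytesCall("0x12345678", pubdata); // hypothetical selector
// Matches the bootloader's arithmetic: fullLen = len + 68
assert.equal(calldata.length, 68 + pubdata.length);
```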
@@ -2924,7 +2865,15 @@ object "Bootloader" { let maxFeePerGas := getMaxFeePerGas(innerTxDataOffset) let maxPriorityFeePerGas := getMaxPriorityFeePerGas(innerTxDataOffset) assertEq(maxFeePerGas, maxPriorityFeePerGas, "EIP1559 params wrong") + + + let from := getFrom(innerTxDataOffset) + let iseoa := isEOA(from) + assertEq(iseoa, true, "Only EIP-712 can use non-EOA") + + + assertEq(lte(getGasPerPubdataByteLimit(innerTxDataOffset), MAX_L2_GAS_PER_PUBDATA()), 1, "Gas per pubdata is wrong") assertEq(getPaymaster(innerTxDataOffset), 0, "paymaster non zero") @@ -2943,7 +2892,15 @@ object "Bootloader" { assertEq(lte(getGasPerPubdataByteLimit(innerTxDataOffset), MAX_L2_GAS_PER_PUBDATA()), 1, "Gas per pubdata is wrong") assertEq(getPaymaster(innerTxDataOffset), 0, "paymaster non zero") - + + + let from := getFrom(innerTxDataOffset) + let iseoa := isEOA(from) + assertEq(iseoa, true, "Only EIP-712 can use non-EOA") + + + + assertEq(gt(getFrom(innerTxDataOffset), MAX_SYSTEM_CONTRACT_ADDR()), 1, "from in kernel space") @@ -2957,6 +2914,12 @@ object "Bootloader" { case 113 { let paymaster := getPaymaster(innerTxDataOffset) assertEq(or(gt(paymaster, MAX_SYSTEM_CONTRACT_ADDR()), iszero(paymaster)), 1, "paymaster in kernel space") + + if iszero(paymaster) { + // Double checking that the paymasterInput is 0 if the paymaster is 0 + assertEq(getPaymasterInputBytesLength(innerTxDataOffset), 0, "paymasterInput non zero") + } + assertEq(gt(getFrom(innerTxDataOffset), MAX_SYSTEM_CONTRACT_ADDR()), 1, "from in kernel space") @@ -2969,6 +2932,8 @@ object "Bootloader" { // Upgrade transaction, no need to validate as it is validated on L1. } case 255 { + // Double-check that the operator doesn't try to do an upgrade transaction via L1 -> L2 transaction. + assertEq(gt(getFrom(innerTxDataOffset), MAX_SYSTEM_CONTRACT_ADDR()), 1, "from in kernel space") // L1 transaction, no need to validate as it is validated on L1. } default { @@ -3091,11 +3056,11 @@ object "Bootloader" { /// This method checks that the transaction's structure is correct /// and tightly packed function validateAbiEncoding(txDataOffset) -> ret { - if iszero(eq(mload(txDataOffset), 0x20)) { + if iszero(eq(mload(txDataOffset), 32)) { assertionError("Encoding offset") } - let innerTxDataOffset := add(txDataOffset, 0x20) + let innerTxDataOffset := add(txDataOffset, 32) let fromValue := getFrom(innerTxDataOffset) if iszero(validateAddress(fromValue)) { @@ -3352,7 +3317,7 @@ object "Bootloader" { /// Since the slot after the transaction is not touched, /// this slot can be used in the in-circuit VM out of box. function askOperatorForRefund(gasLeft) { - storeVmHookParam(0, nonOptimized(gasLeft)) + storeVmHookParam(0, nonOptimized(gasLeft)) setHook(VM_HOOK_ASK_OPERATOR_FOR_REFUND()) } @@ -3464,10 +3429,22 @@ object "Bootloader" { ret := 25 } - function FAILED_TO_PUBLISH_BATCH_DATA_TO_L1() -> ret { + function L1_MESSENGER_PUBLISHING_FAILED_ERR_CODE() -> ret { ret := 26 } + function L1_MESSENGER_LOG_SENDING_FAILED_ERR_CODE() -> ret { + ret := 27 + } + + function FAILED_TO_CALL_SYSTEM_CONTEXT_ERR_CODE() -> ret { + ret := 28 + } + + function FAILED_TO_PUBLISH_TIMESTAMP_DATA_TO_L1() -> ret { + ret := 29 + } + /// @dev Accepts a 1-word literal and returns its length in bytes /// @param str A string literal function getStrLen(str) -> len { @@ -3601,6 +3578,11 @@ object "Bootloader" { ret := 11 } + /// @norice The id of the VM hook that use used to notify the operator that it needs to insert the pubdata. 
+ function VM_HOOK_PUBDATA_REQUESTED() -> ret { + ret := 12 + } + // Need to prevent the compiler from optimizing out similar operations, // which may have different meaning for the offline debugging function unoptimized(val) -> ret { @@ -3623,6 +3605,221 @@ object "Bootloader" { let offset := add(VM_HOOK_PARAMS_OFFSET(), mul(32, paramId)) mstore(offset, unoptimized(value)) } + + /// @dev Log key used by Executor.sol for processing. See Constants.sol::SystemLogKey enum + function chainedPriorityTxnHashLogKey() -> ret { + ret := 5 + } + + /// @dev Log key used by Executor.sol for processing. See Constants.sol::SystemLogKey enum + function numberOfLayer1TxsLogKey() -> ret { + ret := 6 + } + + /// @dev Log key used by Executor.sol for processing. See Constants.sol::SystemLogKey enum + function protocolUpgradeTxHashKey() -> ret { + ret := 7 + } + + //////////////////////////////////////////////////////////////////////////// + // Main Transaction Processing + //////////////////////////////////////////////////////////////////////////// + + /// @notice the address that will be the beneficiary of all the fees + let OPERATOR_ADDRESS := mload(0) + + let GAS_PRICE_PER_PUBDATA := 0 + + // Initializing block params + { + /// @notice The hash of the previous batch + let PREV_BATCH_HASH := mload(32) + /// @notice The timestamp of the batch being processed + let NEW_BATCH_TIMESTAMP := mload(64) + /// @notice The number of the new batch being processed. + /// While this number is deterministic for each batch, we + /// still provide it here to ensure consistency between the state + /// of the VM and the state of the operator. + let NEW_BATCH_NUMBER := mload(96) + + /// @notice The gas price on L1 for ETH. In the future, a trustless value will be enforced. + /// For now, this value is trusted to be fairly provided by the operator. + let L1_GAS_PRICE := mload(128) + + /// @notice The minimal gas price that the operator agrees upon. + /// In the future, it will have an EIP1559-like lower bound. + let FAIR_L2_GAS_PRICE := mload(160) + + /// @notice The expected base fee by the operator. + /// Just like the batch number, while calculated on the bootloader side, + /// the operator still provides it to make sure that its data is in sync. + let EXPECTED_BASE_FEE := mload(192) + + validateOperatorProvidedPrices(L1_GAS_PRICE, FAIR_L2_GAS_PRICE) + + let baseFee := 0 + + + + // Only for the proved batch we enforce that the baseFee proposed + // by the operator is equal to the expected one. For the playground batch, we allow + // the operator to provide any baseFee the operator wants. + baseFee, GAS_PRICE_PER_PUBDATA := getBaseFee(L1_GAS_PRICE, FAIR_L2_GAS_PRICE) + if iszero(eq(baseFee, EXPECTED_BASE_FEE)) { + debugLog("baseFee", baseFee) + debugLog("EXPECTED_BASE_FEE", EXPECTED_BASE_FEE) + assertionError("baseFee inconsistent") + } + + setNewBatch(PREV_BATCH_HASH, NEW_BATCH_TIMESTAMP, NEW_BATCH_NUMBER, EXPECTED_BASE_FEE) + + + + + + baseFee, GAS_PRICE_PER_PUBDATA := getBaseFee(L1_GAS_PRICE, FAIR_L2_GAS_PRICE) + + let SHOULD_SET_NEW_BATCH := mload(224) + + switch SHOULD_SET_NEW_BATCH + case 0 { + unsafeOverrideBatch(NEW_BATCH_TIMESTAMP, NEW_BATCH_NUMBER, EXPECTED_BASE_FEE) + } + default { + setNewBatch(PREV_BATCH_HASH, NEW_BATCH_TIMESTAMP, NEW_BATCH_NUMBER, EXPECTED_BASE_FEE) + } + + + } + + // Now, we iterate over all transactions, processing each of them + // one by one. 
+ // Here, the `resultPtr` is the pointer to the memory slot, where we will write + // `true` or `false` based on whether the tx execution was successful, + + // The position at which the tx offset of the transaction should be placed + let currentExpectedTxOffset := add(TXS_IN_BATCH_LAST_PTR(), mul(MAX_POSTOP_SLOTS(), 32)) + + let txPtr := TX_DESCRIPTION_BEGIN_BYTE() + + // At the COMPRESSED_BYTECODES_BEGIN_BYTE() the pointer to the newest bytecode to be published + // is stored. + mstore(COMPRESSED_BYTECODES_BEGIN_BYTE(), add(COMPRESSED_BYTECODES_BEGIN_BYTE(), 32)) + + // At start storing keccak256("") as `chainedPriorityTxsHash` and 0 as `numberOfLayer1Txs` + mstore(PRIORITY_TXS_L1_DATA_BEGIN_BYTE(), EMPTY_STRING_KECCAK()) + mstore(add(PRIORITY_TXS_L1_DATA_BEGIN_BYTE(), 32), 0) + + // Iterating through transaction descriptions + let transactionIndex := 0 + for { + let resultPtr := RESULT_START_PTR() + } lt(txPtr, TXS_IN_BATCH_LAST_PTR()) { + txPtr := add(txPtr, TX_DESCRIPTION_SIZE()) + resultPtr := add(resultPtr, 32) + transactionIndex := add(transactionIndex, 1) + } { + let execute := mload(txPtr) + + debugLog("txPtr", txPtr) + debugLog("execute", execute) + + if iszero(execute) { + // We expect that all transactions that are executed + // are continuous in the array. + break + } + + let txDataOffset := mload(add(txPtr, 32)) + + // We strongly enforce the positions of transactions + if iszero(eq(currentExpectedTxOffset, txDataOffset)) { + debugLog("currentExpectedTxOffset", currentExpectedTxOffset) + debugLog("txDataOffset", txDataOffset) + + assertionError("Tx data offset is incorrect") + } + + currentExpectedTxOffset := validateAbiEncoding(txDataOffset) + + // Checking whether the last slot of the transaction's description + // does not go out of bounds. + if gt(sub(currentExpectedTxOffset, 32), LAST_FREE_SLOT()) { + debugLog("currentExpectedTxOffset", currentExpectedTxOffset) + debugLog("LAST_FREE_SLOT", LAST_FREE_SLOT()) + + assertionError("currentExpectedTxOffset too high") + } + + validateTypedTxStructure(add(txDataOffset, 32)) + + + { + debugLog("ethCall", 0) + processTx(txDataOffset, resultPtr, transactionIndex, 0, GAS_PRICE_PER_PUBDATA) + } + + + { + let txMeta := mload(txPtr) + let processFlags := getWordByte(txMeta, 31) + debugLog("flags", processFlags) + + + // `processFlags` argument denotes which parts of execution should be done: + // Possible values: + // 0x00: validate & execute (normal mode) + // 0x02: perform ethCall (i.e. use mimicCall to simulate the call) + + let isETHCall := eq(processFlags, 0x02) + debugLog("ethCall", isETHCall) + processTx(txDataOffset, resultPtr, transactionIndex, isETHCall, GAS_PRICE_PER_PUBDATA) + } + + // Signal to the vm that the transaction execution is complete + setHook(VM_HOOK_TX_HAS_ENDED()) + // Increment tx index within the system. + considerNewTx() + } + + // The bootloader doesn't have to pay anything + setPricePerPubdataByte(0) + + // Resetting tx.origin and gasPrice to 0, so we don't pay for + // publishing them on-chain. + setTxOrigin(0) + setGasPrice(0) + + // Transfering all the ETH received in the block to the operator + directETHTransfer( + selfbalance(), + OPERATOR_ADDRESS + ) + + // Hook that notifies that the operator should provide final information for the batch + setHook(VM_HOOK_FINAL_L2_STATE_INFO()) + + // Each batch typically ends with a special block which contains no transactions. + // So we need to have this method to reflect it in the system contracts too. 
+ // + // The reason is that as of now our node requires that each storage write (event, etc) belongs to a particular + // L2 block. In case a batch is sealed by timeout (i.e. the resources of the batch have not been exhaused, but we need + // to seal it to assure timely finality), we need to process sending funds to the operator *after* the last + // non-empty L2 block has been already sealed. We can not override old L2 blocks, so we need to create a new empty "fictive" block for it. + // + // The other reason why we need to set this block is so that in case of empty batch (i.e. the one which has no transactions), + // the virtual block number as well as miniblock number are incremented. + setL2Block(transactionIndex) + + callSystemContext({{RIGHT_PADDED_RESET_TX_NUMBER_IN_BLOCK_SELECTOR}}) + + publishTimestampDataToL1() + + // Sending system logs (to be processed on L1) + sendToL1Native(true, chainedPriorityTxnHashLogKey(), mload(PRIORITY_TXS_L1_DATA_BEGIN_BYTE())) + sendToL1Native(true, numberOfLayer1TxsLogKey(), mload(add(PRIORITY_TXS_L1_DATA_BEGIN_BYTE(), 32))) + + l1MessengerPublishingCall() } } } diff --git a/contracts/AccountCodeStorage.sol b/contracts/AccountCodeStorage.sol index 1dbdcabe..21a2311b 100644 --- a/contracts/AccountCodeStorage.sol +++ b/contracts/AccountCodeStorage.sol @@ -8,6 +8,7 @@ import {DEPLOYER_SYSTEM_CONTRACT, NONCE_HOLDER_SYSTEM_CONTRACT, CURRENT_MAX_PREC /** * @author Matter Labs + * @custom:security-contact security@matterlabs.dev * @notice The storage of this contract serves as a mapping for the code hashes of the 32-byte account addresses. * @dev Code hash is not strictly a hash, it's a structure where the first byte denotes the version of the hash, * the second byte denotes whether the contract is constructed, and the next two bytes denote the length in 32-byte words. @@ -44,7 +45,7 @@ contract AccountCodeStorage is IAccountCodeStorage { /// but checks whether the bytecode hash corresponds to the constructed smart contract. function storeAccountConstructedCodeHash(address _address, bytes32 _hash) external override onlyDeployer { // Check that code hash corresponds to the deploying smart contract - require(Utils.isContractConstructed(_hash), "Code hash is not for a contract on constructor"); + require(Utils.isContractConstructed(_hash), "Code hash is not for a constructed contract"); _storeCodeHash(_address, _hash); } diff --git a/contracts/BootloaderUtilities.sol b/contracts/BootloaderUtilities.sol index ad5f13da..5a73eb2f 100644 --- a/contracts/BootloaderUtilities.sol +++ b/contracts/BootloaderUtilities.sol @@ -9,6 +9,7 @@ import "./libraries/EfficientCall.sol"; /** * @author Matter Labs + * @custom:security-contact security@matterlabs.dev * @notice A contract that provides some utility methods for the bootloader * that is very hard to write in Yul. */ diff --git a/contracts/BytecodeCompressor.sol b/contracts/BytecodeCompressor.sol deleted file mode 100644 index b406ecb1..00000000 --- a/contracts/BytecodeCompressor.sol +++ /dev/null @@ -1,92 +0,0 @@ -// SPDX-License-Identifier: MIT - -pragma solidity ^0.8.0; - -import "./interfaces/IBytecodeCompressor.sol"; -import "./Constants.sol"; -import "./libraries/Utils.sol"; -import "./libraries/UnsafeBytesCalldata.sol"; - -/** - * @author Matter Labs - * @notice Simple implementation of the compression algorithm specialized for zkEVM bytecode. - * @dev Every deployed bytecode in zkEVM should be publicly restorable from the L1 data availability. 
- * For this reason, the user may request the sequencer to publish the original bytecode and mark it as known. - * Or the user may compress the bytecode and publish it instead (fewer data onchain!). - */ -contract BytecodeCompressor is IBytecodeCompressor { - using UnsafeBytesCalldata for bytes; - - modifier onlyBootloader() { - require(msg.sender == BOOTLOADER_FORMAL_ADDRESS, "Callable only by the bootloader"); - _; - } - - /// @notice Verify the compressed bytecode and publish it on the L1. - /// @param _bytecode The original bytecode to be verified against. - /// @param _rawCompressedData The compressed bytecode in a format of: - /// - 2 bytes: the length of the dictionary - /// - N bytes: the dictionary - /// - M bytes: the encoded data - /// @dev The dictionary is a sequence of 8-byte chunks, each of them has the associated index. - /// @dev The encoded data is a sequence of 2-byte chunks, each of them is an index of the dictionary. - /// @dev The compression algorithm works as follows: - /// 1. The original bytecode is split into 8-byte chunks. - /// Since the bytecode size is always a multiple of 32, this is always possible. - /// 2. For each 8-byte chunk in the original bytecode: - /// * If the chunk is not already in the dictionary, it is added to the dictionary array. - /// * If the dictionary becomes overcrowded (2^16 + 1 elements), the compression process will fail. - /// * The 2-byte index of the chunk in the dictionary is added to the encoded data. - /// @dev Currently, the method may be called only from the bootloader because the server is not ready to publish bytecodes - /// in internal transactions. However, in the future, we will allow everyone to publish compressed bytecodes. - function publishCompressedBytecode( - bytes calldata _bytecode, - bytes calldata _rawCompressedData - ) external payable onlyBootloader returns (bytes32 bytecodeHash) { - unchecked { - (bytes calldata dictionary, bytes calldata encodedData) = _decodeRawBytecode(_rawCompressedData); - - require(dictionary.length % 8 == 0, "Dictionary length should be a multiple of 8"); - require(dictionary.length <= 2 ** 16 * 8, "Dictionary is too big"); - require( - encodedData.length * 4 == _bytecode.length, - "Encoded data length should be 4 times shorter than the original bytecode" - ); - - for (uint256 encodedDataPointer = 0; encodedDataPointer < encodedData.length; encodedDataPointer += 2) { - uint256 indexOfEncodedChunk = uint256(encodedData.readUint16(encodedDataPointer)) * 8; - require(indexOfEncodedChunk < dictionary.length, "Encoded chunk index is out of bounds"); - - uint64 encodedChunk = dictionary.readUint64(indexOfEncodedChunk); - uint64 realChunk = _bytecode.readUint64(encodedDataPointer * 4); - - require(encodedChunk == realChunk, "Encoded chunk does not match the original bytecode"); - } - } - - bytecodeHash = Utils.hashL2Bytecode(_bytecode); - - bytes32 rawCompressedDataHash = L1_MESSENGER_CONTRACT.sendToL1(_rawCompressedData); - KNOWN_CODE_STORAGE_CONTRACT.markBytecodeAsPublished( - bytecodeHash, - rawCompressedDataHash, - _rawCompressedData.length - ); - } - - /// @notice Decode the raw compressed data into the dictionary and the encoded data. 
- /// @param _rawCompressedData The compressed bytecode in a format of: - /// - 2 bytes: the bytes length of the dictionary - /// - N bytes: the dictionary - /// - M bytes: the encoded data - function _decodeRawBytecode( - bytes calldata _rawCompressedData - ) internal pure returns (bytes calldata dictionary, bytes calldata encodedData) { - unchecked { - // The dictionary length can't be more than 2^16, so it fits into 2 bytes. - uint256 dictionaryLen = uint256(_rawCompressedData.readUint16(0)); - dictionary = _rawCompressedData[2:2 + dictionaryLen * 8]; - encodedData = _rawCompressedData[2 + dictionaryLen * 8:]; - } - } -} diff --git a/contracts/ComplexUpgrader.sol b/contracts/ComplexUpgrader.sol index 0968b5fd..d45ecd57 100644 --- a/contracts/ComplexUpgrader.sol +++ b/contracts/ComplexUpgrader.sol @@ -2,25 +2,23 @@ pragma solidity ^0.8.0; -import "./interfaces/IComplexUpgrader.sol"; +import {IComplexUpgrader} from "./interfaces/IComplexUpgrader.sol"; import {FORCE_DEPLOYER} from "./Constants.sol"; /** * @author Matter Labs + * @custom:security-contact security@matterlabs.dev * @notice Upgrader which should be used to perform complex multistep upgrades on L2. In case some custom logic for an upgrade is needed * this logic should be deployed into the user space and then this contract will delegatecall to the deployed contract. */ contract ComplexUpgrader is IComplexUpgrader { /// @notice Executes an upgrade process by delegating calls to another contract. - /// @dev This function allows only the `FORCE_DEPLOYER` to initiate the upgrade. + /// @dev This function allows only the `FORCE_DEPLOYER` to initiate the upgrade. /// If the delegate call fails, the function will revert the transaction, returning the error message /// provided by the delegated contract. /// @param _delegateTo the address of the contract to which the calls will be delegated /// @param _calldata the calldata to be delegate called in the `_delegateTo` contract - function upgrade( - address _delegateTo, - bytes calldata _calldata - ) external payable { + function upgrade(address _delegateTo, bytes calldata _calldata) external payable { require(msg.sender == FORCE_DEPLOYER, "Can only be called by FORCE_DEPLOYER"); require(_delegateTo.code.length > 0, "Delegatee is an EOA"); diff --git a/contracts/Compressor.sol b/contracts/Compressor.sol new file mode 100644 index 00000000..4b11fd39 --- /dev/null +++ b/contracts/Compressor.sol @@ -0,0 +1,254 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.0; + +import {ICompressor, OPERATION_BITMASK, LENGTH_BITS_OFFSET, MAX_ENUMERATION_INDEX_SIZE} from "./interfaces/ICompressor.sol"; +import {ISystemContract} from "./interfaces/ISystemContract.sol"; +import {Utils} from "./libraries/Utils.sol"; +import {UnsafeBytesCalldata} from "./libraries/UnsafeBytesCalldata.sol"; +import {EfficientCall} from "./libraries/EfficientCall.sol"; +import { + L1_MESSENGER_CONTRACT, + INITIAL_WRITE_STARTING_POSITION, + COMPRESSED_INITIAL_WRITE_SIZE, + STATE_DIFF_ENTRY_SIZE, + STATE_DIFF_ENUM_INDEX_OFFSET, + STATE_DIFF_FINAL_VALUE_OFFSET, + STATE_DIFF_DERIVED_KEY_OFFSET, + DERIVED_KEY_LENGTH, + VALUE_LENGTH, + ENUM_INDEX_LENGTH, + KNOWN_CODE_STORAGE_CONTRACT +} from "./Constants.sol"; + +/** + * @author Matter Labs + * @custom:security-contact security@matterlabs.dev + * @notice Contract with code pertaining to compression for zkEVM; at the moment this is used for bytecode compression + * and state diff compression validation. 
+ * @dev Every deployed bytecode/published state diffs in zkEVM should be publicly restorable from the L1 data availability. + * For this reason, the user may request the sequencer to publish the original bytecode and mark it as known. + * Or the user may compress the bytecode and publish it instead (fewer data onchain!). At the end of every L1 Batch + * we publish pubdata, part of which contains the state diffs that occurred within the batch. + */ +contract Compressor is ICompressor, ISystemContract { + using UnsafeBytesCalldata for bytes; + + /// @notice Verify the compressed bytecode and publish it on the L1. + /// @param _bytecode The original bytecode to be verified against. + /// @param _rawCompressedData The compressed bytecode in a format of: + /// - 2 bytes: the length of the dictionary + /// - N bytes: the dictionary + /// - M bytes: the encoded data + /// @dev The dictionary is a sequence of 8-byte chunks, each of them has the associated index. + /// @dev The encoded data is a sequence of 2-byte chunks, each of them is an index of the dictionary. + /// @dev The compression algorithm works as follows: + /// 1. The original bytecode is split into 8-byte chunks. + /// Since the bytecode size is always a multiple of 32, this is always possible. + /// 2. For each 8-byte chunk in the original bytecode: + /// * If the chunk is not already in the dictionary, it is added to the dictionary array. + /// * If the dictionary becomes overcrowded (2^16 + 1 elements), the compression process will fail. + /// * The 2-byte index of the chunk in the dictionary is added to the encoded data. + /// @dev Currently, the method may be called only from the bootloader because the server is not ready to publish bytecodes + /// in internal transactions. However, in the future, we will allow everyone to publish compressed bytecodes. + function publishCompressedBytecode( + bytes calldata _bytecode, + bytes calldata _rawCompressedData + ) external payable onlyCallFromBootloader returns (bytes32 bytecodeHash) { + unchecked { + (bytes calldata dictionary, bytes calldata encodedData) = _decodeRawBytecode(_rawCompressedData); + + require(dictionary.length % 8 == 0, "Dictionary length should be a multiple of 8"); + require(dictionary.length <= 2 ** 16 * 8, "Dictionary is too big"); + require( + encodedData.length * 4 == _bytecode.length, + "Encoded data length should be 4 times shorter than the original bytecode" + ); + + for (uint256 encodedDataPointer = 0; encodedDataPointer < encodedData.length; encodedDataPointer += 2) { + uint256 indexOfEncodedChunk = uint256(encodedData.readUint16(encodedDataPointer)) * 8; + require(indexOfEncodedChunk < dictionary.length, "Encoded chunk index is out of bounds"); + + uint64 encodedChunk = dictionary.readUint64(indexOfEncodedChunk); + uint64 realChunk = _bytecode.readUint64(encodedDataPointer * 4); + + require(encodedChunk == realChunk, "Encoded chunk does not match the original bytecode"); + } + } + + bytecodeHash = Utils.hashL2Bytecode(_bytecode); + L1_MESSENGER_CONTRACT.sendToL1(_rawCompressedData); + KNOWN_CODE_STORAGE_CONTRACT.markBytecodeAsPublished(bytecodeHash); + } + + /// @notice Verifies that the compression of state diffs has been done correctly for the {_stateDiffs} param. + /// @param _numberOfStateDiffs The number of state diffs being checked. + /// @param _enumerationIndexSize Number of bytes used to represent an enumeration index for repeated writes. + /// @param _stateDiffs Encoded full state diff structs. See the first dev comment below for encoding. 
+ /// @param _compressedStateDiffs The compressed state diffs + /// @dev We don't verify that the size of {_stateDiffs} is equivalent to {_numberOfStateDiffs} * STATE_DIFF_ENTRY_SIZE since that check is + /// done within the L1Messenger calling contract. + /// @return stateDiffHash Hash of the encoded (uncompressed) state diffs to be committed to via system log. + /// @dev This check assumes that the ordering of state diffs are sorted by (address, key) for the encoded state diffs and + /// then the compressed are sorted the same but with all the initial writes coming before the repeated writes. + /// @dev state diff: [20bytes address][32bytes key][32bytes derived key][8bytes enum index][32bytes initial value][32bytes final value] + /// @dev The compression format: + /// - 2 bytes: number of initial writes + /// - N bytes initial writes + /// - 32 bytes derived key + /// - 1 byte metadata: + /// - first 5 bits: length in bytes of compressed value + /// - last 3 bits: operation + /// - 0 -> Nothing (32 bytes) + /// - 1 -> Add + /// - 2 -> Subtract + /// - 3 -> Transform (< 32 bytes) + /// - Len Bytes: Compressed Value + /// - M bytes repeated writes + /// - {_enumerationIndexSize} bytes for enumeration index + /// - 1 byte metadata: + /// - first 5 bits: length in bytes of compressed value + /// - last 3 bits: operation + /// - 0 -> Nothing (32 bytes) + /// - 1 -> Add + /// - 2 -> Subtract + /// - 3 -> Transform (< 32 bytes) + /// - Len Bytes: Compressed Value + function verifyCompressedStateDiffs( + uint256 _numberOfStateDiffs, + uint256 _enumerationIndexSize, + bytes calldata _stateDiffs, + bytes calldata _compressedStateDiffs + ) external payable onlyCallFrom(address(L1_MESSENGER_CONTRACT)) returns (bytes32 stateDiffHash) { + // We do not enforce the operator to use the optimal, i.e. the minimally possible _enumerationIndexSize. + // We do enforce however, that the _enumerationIndexSize is not larger than 8 bytes long, which is the + // maximal ever possible size for enumeration index. + require(_enumerationIndexSize <= MAX_ENUMERATION_INDEX_SIZE, "enumeration index size is too large"); + + uint256 numberOfInitialWrites = uint256(_compressedStateDiffs.readUint16(0)); + + uint256 stateDiffPtr = 2; + uint256 numInitialWritesProcessed = 0; + + // Process initial writes + for (uint256 i = 0; i < _numberOfStateDiffs * STATE_DIFF_ENTRY_SIZE; i += STATE_DIFF_ENTRY_SIZE) { + bytes calldata stateDiff = _stateDiffs[i:i + STATE_DIFF_ENTRY_SIZE]; + uint64 enumIndex = stateDiff.readUint64(84); + if (enumIndex != 0) { + // It is a repeated write, so we skip it. + continue; + } + + numInitialWritesProcessed++; + + bytes32 derivedKey = stateDiff.readBytes32(52); + uint256 initValue = stateDiff.readUint256(92); + uint256 finalValue = stateDiff.readUint256(124); + require(derivedKey == _compressedStateDiffs.readBytes32(stateDiffPtr), "iw: initial key mismatch"); + stateDiffPtr += 32; + + uint8 metadata = uint8(bytes1(_compressedStateDiffs[stateDiffPtr])); + stateDiffPtr++; + uint8 operation = metadata & OPERATION_BITMASK; + uint8 len = operation == 0 ? 
32 : metadata >> LENGTH_BITS_OFFSET; + _verifyValueCompression( + initValue, + finalValue, + operation, + _compressedStateDiffs[stateDiffPtr:stateDiffPtr + len] + ); + stateDiffPtr += len; + } + + require(numInitialWritesProcessed == numberOfInitialWrites, "Incorrect number of initial storage diffs"); + + // Process repeated writes + for (uint256 i = 0; i < _numberOfStateDiffs * STATE_DIFF_ENTRY_SIZE; i += STATE_DIFF_ENTRY_SIZE) { + bytes calldata stateDiff = _stateDiffs[i:i + STATE_DIFF_ENTRY_SIZE]; + uint64 enumIndex = stateDiff.readUint64(84); + if (enumIndex == 0) { + continue; + } + + uint256 initValue = stateDiff.readUint256(92); + uint256 finalValue = stateDiff.readUint256(124); + uint256 compressedEnumIndex = _sliceToUint256(_compressedStateDiffs[stateDiffPtr:stateDiffPtr + _enumerationIndexSize]); + require(enumIndex == compressedEnumIndex, "rw: enum key mismatch"); + stateDiffPtr += _enumerationIndexSize; + + uint8 metadata = uint8(bytes1(_compressedStateDiffs[stateDiffPtr])); + stateDiffPtr += 1; + uint8 operation = metadata & OPERATION_BITMASK; + uint8 len = operation == 0 ? 32 : metadata >> LENGTH_BITS_OFFSET; + _verifyValueCompression( + initValue, + finalValue, + operation, + _compressedStateDiffs[stateDiffPtr:stateDiffPtr + len] + ); + stateDiffPtr += len; + } + + require(stateDiffPtr == _compressedStateDiffs.length, "Extra data in _compressedStateDiffs"); + + stateDiffHash = EfficientCall.keccak(_stateDiffs); + } + + /// @notice Decode the raw compressed data into the dictionary and the encoded data. + /// @param _rawCompressedData The compressed bytecode in a format of: + /// - 2 bytes: the bytes length of the dictionary + /// - N bytes: the dictionary + /// - M bytes: the encoded data + function _decodeRawBytecode( + bytes calldata _rawCompressedData + ) internal pure returns (bytes calldata dictionary, bytes calldata encodedData) { + unchecked { + // The dictionary length can't be more than 2^16, so it fits into 2 bytes. + uint256 dictionaryLen = uint256(_rawCompressedData.readUint16(0)); + dictionary = _rawCompressedData[2:2 + dictionaryLen * 8]; + encodedData = _rawCompressedData[2 + dictionaryLen * 8:]; + } + } + + /// @notice Verify value compression was done correct given initial value, final value, operation, and compressed value + /// @param _initialValue Previous value of key/enumeration index. + /// @param _finalValue Updated value of key/enumeration index. + /// @param _operation The operation that was performed on value. + /// @param _compressedValue The slice of calldata with compressed value either representing the final + /// value or difference between initial and final value. It should be of arbitrary length less than or equal to 32 bytes. + /// @dev It is the responsibility of the caller of this function to ensure that the `_compressedValue` has length no longer than 32 bytes. 
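To complement the metadata layout in the format comment above (first 5 bits: payload length, last 3 bits: operation), the sketch below packs one storage-value transition the way `_verifyValueCompression` unpacks it. It assumes OPERATION_BITMASK is 7 and LENGTH_BITS_OFFSET is 3, which is what the 5/3-bit split implies; the helper names and the shortest-payload heuristic are illustrative, not part of this patch.

function byteLength(value: bigint): number {
    let len = 0;
    for (let v = value; v > 0n; v >>= 8n) len++;
    return Math.max(len, 1);
}

function toBeBytes(value: bigint, length: number): number[] {
    const out = new Array<number>(length).fill(0);
    for (let i = length - 1; i >= 0; i--) {
        out[i] = Number(value & 0xffn);
        value >>= 8n;
    }
    return out;
}

// Returns [1 metadata byte][len bytes of big-endian payload] for a single state diff value.
function compressValue(initial: bigint, final: bigint): Uint8Array {
    const MOD = 1n << 256n;
    // Candidate operations: 3 = Transform (store the final value), 1 = Add (store final - initial),
    // 2 = Subtract (store initial - final). Differences wrap mod 2^256, matching the unchecked Solidity checks.
    const candidates: Array<[number, bigint]> = [
        [3, final],
        [1, (final - initial + MOD) % MOD],
        [2, (initial - final + MOD) % MOD],
    ];
    let [op, payload] = candidates[0];
    for (const [candidateOp, candidatePayload] of candidates) {
        if (byteLength(candidatePayload) < byteLength(payload)) {
            op = candidateOp;
            payload = candidatePayload;
        }
    }
    const len = byteLength(payload);
    if (len >= 32) {
        // Nothing saved: fall back to operation 0 ("Nothing"), which always carries the full 32-byte final value.
        return new Uint8Array([0, ...toBeBytes(final, 32)]);
    }
    // First 5 bits: payload length in bytes, last 3 bits: operation id.
    return new Uint8Array([(len << 3) | op, ...toBeBytes(payload, len)]);
}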
+ /// @dev Operation id mapping: + /// 0 -> Nothing (32 bytes) + /// 1 -> Add + /// 2 -> Subtract + /// 3 -> Transform (< 32 bytes) + function _verifyValueCompression( + uint256 _initialValue, + uint256 _finalValue, + uint256 _operation, + bytes calldata _compressedValue + ) internal pure { + uint256 convertedValue = _sliceToUint256(_compressedValue); + + unchecked { + if (_operation == 0 || _operation == 3) { + require(convertedValue == _finalValue, "transform or no compression: compressed and final mismatch"); + } else if (_operation == 1) { + require(_initialValue + convertedValue == _finalValue, "add: initial plus converted not equal to final"); + } else if (_operation == 2) { + require(_initialValue - convertedValue == _finalValue, "sub: initial minus converted not equal to final"); + } else { + revert("unsupported operation"); + } + } + } + + /// @notice Converts a calldata slice into uint256. It is the responsibility of the caller to ensure that + /// the _calldataSlice has length no longer than 32 bytes + /// @param _calldataSlice The calldata slice to convert to uint256 + /// @return number The uint256 representation of the calldata slice + function _sliceToUint256(bytes calldata _calldataSlice) internal pure returns (uint256 number) { + number = uint256(bytes32(_calldataSlice)); + number >>= (256 - (_calldataSlice.length * 8)); + } +} diff --git a/contracts/Constants.sol b/contracts/Constants.sol index 048f9b54..507d3437 100644 --- a/contracts/Constants.sol +++ b/contracts/Constants.sol @@ -2,17 +2,17 @@ pragma solidity ^0.8.0; -import "./interfaces/IAccountCodeStorage.sol"; -import "./interfaces/INonceHolder.sol"; -import "./interfaces/IContractDeployer.sol"; -import "./interfaces/IKnownCodesStorage.sol"; -import "./interfaces/IImmutableSimulator.sol"; -import "./interfaces/IEthToken.sol"; -import "./interfaces/IL1Messenger.sol"; -import "./interfaces/ISystemContext.sol"; -import "./interfaces/IBytecodeCompressor.sol"; -import "./interfaces/IComplexUpgrader.sol"; -import "./BootloaderUtilities.sol"; +import {IAccountCodeStorage} from "./interfaces/IAccountCodeStorage.sol"; +import {INonceHolder} from "./interfaces/INonceHolder.sol"; +import {IContractDeployer} from "./interfaces/IContractDeployer.sol"; +import {IKnownCodesStorage} from "./interfaces/IKnownCodesStorage.sol"; +import {IImmutableSimulator} from "./interfaces/IImmutableSimulator.sol"; +import {IEthToken} from "./interfaces/IEthToken.sol"; +import {IL1Messenger} from "./interfaces/IL1Messenger.sol"; +import {ISystemContext} from "./interfaces/ISystemContext.sol"; +import {ICompressor} from "./interfaces/ICompressor.sol"; +import {IComplexUpgrader} from "./interfaces/IComplexUpgrader.sol"; +import {IBootloaderUtilities} from "./interfaces/IBootloaderUtilities.sol"; /// @dev All the system contracts introduced by zkSync have their addresses /// started from 2^15 in order to avoid collision with Ethereum precompiles. @@ -24,6 +24,8 @@ uint160 constant MAX_SYSTEM_CONTRACT_ADDRESS = 0xffff; // 2^16 - 1 address constant ECRECOVER_SYSTEM_CONTRACT = address(0x01); address constant SHA256_SYSTEM_CONTRACT = address(0x02); +address constant ECADD_SYSTEM_CONTRACT = address(0x06); +address constant ECMUL_SYSTEM_CONTRACT = address(0x07); /// @dev The current maximum deployed precompile address. 
/// Note: currently only two precompiles are deployed: @@ -55,17 +57,13 @@ address constant KECCAK256_SYSTEM_CONTRACT = address(SYSTEM_CONTRACTS_OFFSET + 0 ISystemContext constant SYSTEM_CONTEXT_CONTRACT = ISystemContext(payable(address(SYSTEM_CONTRACTS_OFFSET + 0x0b))); -BootloaderUtilities constant BOOTLOADER_UTILITIES = BootloaderUtilities(address(SYSTEM_CONTRACTS_OFFSET + 0x0c)); +IBootloaderUtilities constant BOOTLOADER_UTILITIES = IBootloaderUtilities(address(SYSTEM_CONTRACTS_OFFSET + 0x0c)); address constant EVENT_WRITER_CONTRACT = address(SYSTEM_CONTRACTS_OFFSET + 0x0d); -IBytecodeCompressor constant BYTECODE_COMPRESSOR_CONTRACT = IBytecodeCompressor( - address(SYSTEM_CONTRACTS_OFFSET + 0x0e) -); +ICompressor constant COMPRESSOR_CONTRACT = ICompressor(address(SYSTEM_CONTRACTS_OFFSET + 0x0e)); -IComplexUpgrader constant COMPLEX_UPGRADER_CONTRACT = IComplexUpgrader( - address(SYSTEM_CONTRACTS_OFFSET + 0x0f) -); +IComplexUpgrader constant COMPLEX_UPGRADER_CONTRACT = IComplexUpgrader(address(SYSTEM_CONTRACTS_OFFSET + 0x0f)); /// @dev If the bitwise AND of the extraAbi[2] param when calling the MSG_VALUE_SIMULATOR /// is non-zero, the call will be assumed to be a system one. @@ -80,3 +78,49 @@ bytes32 constant CREATE2_PREFIX = 0x2020dba91b30cc0006188af794c2fb30dd8520db7e2c /// @dev Prefix used during derivation of account addresses using CREATE /// @dev keccak256("zksyncCreate") bytes32 constant CREATE_PREFIX = 0x63bae3a9951d38e8a3fbb7b70909afc1200610fc5bc55ade242f815974674f23; + +/// @dev Each state diff consists of 156 bytes of actual data and 116 bytes of unused padding, needed for circuit efficiency. +uint256 constant STATE_DIFF_ENTRY_SIZE = 272; + +/// @dev While the "real" amount of pubdata that can be sent rarely exceeds the 110k - 120k, it is better to +/// allow the operator to provide any reasonably large value in order to avoid unneeded constraints on the operator. +uint256 constant MAX_ALLOWED_PUBDATA_PER_BATCH = 520000; + +enum SystemLogKey { + L2_TO_L1_LOGS_TREE_ROOT_KEY, + TOTAL_L2_TO_L1_PUBDATA_KEY, + STATE_DIFF_HASH_KEY, + PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY, + PREV_BATCH_HASH_KEY, + CHAINED_PRIORITY_TXN_HASH_KEY, + NUMBER_OF_LAYER_1_TXS_KEY, + EXPECTED_SYSTEM_CONTRACT_UPGRADE_TX_HASH_KEY +} + +/// @dev The number of leaves in the L2->L1 log Merkle tree. +/// While formally a tree of any length is acceptable, the node supports only a constant length of 2048 leaves. +uint256 constant L2_TO_L1_LOGS_MERKLE_TREE_LEAVES = 2048; + +/// @dev The length of the derived key in bytes inside compressed state diffs. +uint256 constant DERIVED_KEY_LENGTH = 32; +/// @dev The length of the enum index in bytes inside compressed state diffs. +uint256 constant ENUM_INDEX_LENGTH = 8; +/// @dev The length of value in bytes inside compressed state diffs. +uint256 constant VALUE_LENGTH = 32; + +/// @dev The length of the compressed initial storage write in bytes. +uint256 constant COMPRESSED_INITIAL_WRITE_SIZE = DERIVED_KEY_LENGTH + VALUE_LENGTH; +/// @dev The length of the compressed repeated storage write in bytes. +uint256 constant COMPRESSED_REPEATED_WRITE_SIZE = ENUM_INDEX_LENGTH + VALUE_LENGTH; + +/// @dev The position from which the initial writes start in the compressed state diffs. +uint256 constant INITIAL_WRITE_STARTING_POSITION = 4; + +/// @dev Each storage diffs consists of the following elements: +/// [20bytes address][32bytes key][32bytes derived key][8bytes enum index][32bytes initial value][32bytes final value] +/// @dev The offset of the deriived key in a storage diff. 
+uint256 constant STATE_DIFF_DERIVED_KEY_OFFSET = 52; +/// @dev The offset of the enum index in a storage diff. +uint256 constant STATE_DIFF_ENUM_INDEX_OFFSET = 84; +/// @dev The offset of the final value in a storage diff. +uint256 constant STATE_DIFF_FINAL_VALUE_OFFSET = 124; diff --git a/contracts/ContractDeployer.sol b/contracts/ContractDeployer.sol index 029e48d9..ed6d3fc2 100644 --- a/contracts/ContractDeployer.sol +++ b/contracts/ContractDeployer.sol @@ -3,16 +3,17 @@ pragma solidity ^0.8.0; import {ImmutableData} from "./interfaces/IImmutableSimulator.sol"; -import "./interfaces/IContractDeployer.sol"; +import {IContractDeployer} from "./interfaces/IContractDeployer.sol"; import {CREATE2_PREFIX, CREATE_PREFIX, NONCE_HOLDER_SYSTEM_CONTRACT, ACCOUNT_CODE_STORAGE_SYSTEM_CONTRACT, FORCE_DEPLOYER, MAX_SYSTEM_CONTRACT_ADDRESS, KNOWN_CODE_STORAGE_CONTRACT, ETH_TOKEN_SYSTEM_CONTRACT, IMMUTABLE_SIMULATOR_SYSTEM_CONTRACT, COMPLEX_UPGRADER_CONTRACT} from "./Constants.sol"; -import "./libraries/Utils.sol"; -import "./libraries/EfficientCall.sol"; +import {Utils} from "./libraries/Utils.sol"; +import {EfficientCall} from "./libraries/EfficientCall.sol"; import {SystemContractHelper} from "./libraries/SystemContractHelper.sol"; -import "./interfaces/ISystemContract.sol"; +import {ISystemContract} from "./interfaces/ISystemContract.sol"; /** * @author Matter Labs + * @custom:security-contact security@matterlabs.dev * @notice System smart contract that is responsible for deploying other smart contracts on zkSync. * @dev The contract is responsible for generating the address of the deployed smart contract, * incrementing the deployment nonce and making sure that the constructor is never called twice in a contract. @@ -236,7 +237,7 @@ contract ContractDeployer is IContractDeployer, ISystemContract { /// by `FORCE_DEPLOYER`. function forceDeployOnAddresses(ForceDeployment[] calldata _deployments) external payable { require( - msg.sender == FORCE_DEPLOYER || msg.sender == address(COMPLEX_UPGRADER_CONTRACT), + msg.sender == FORCE_DEPLOYER || msg.sender == address(COMPLEX_UPGRADER_CONTRACT), "Can only be called by FORCE_DEPLOYER or COMPLEX_UPGRADER_CONTRACT" ); diff --git a/contracts/DefaultAccount.sol b/contracts/DefaultAccount.sol index 2658947c..0021839e 100644 --- a/contracts/DefaultAccount.sol +++ b/contracts/DefaultAccount.sol @@ -10,6 +10,7 @@ import {BOOTLOADER_FORMAL_ADDRESS, NONCE_HOLDER_SYSTEM_CONTRACT, DEPLOYER_SYSTEM /** * @author Matter Labs + * @custom:security-contact security@matterlabs.dev * @notice The default implementation of account. * @dev The bytecode of the contract is set by default for all addresses for which no other bytecodes are deployed. * @notice If the caller is not a bootloader always returns empty data on call, just like EOA does. diff --git a/contracts/EmptyContract.sol b/contracts/EmptyContract.sol index 75b788dc..711f8ba1 100644 --- a/contracts/EmptyContract.sol +++ b/contracts/EmptyContract.sol @@ -4,6 +4,7 @@ pragma solidity ^0.8.0; /** * @author Matter Labs + * @custom:security-contact security@matterlabs.dev * @notice The "empty" contract that is put into some system contracts by default. * @dev The bytecode of the contract is set by default for all addresses for which no other bytecodes are deployed. 
*/ diff --git a/contracts/EventWriter.yul b/contracts/EventWriter.yul index 0a64510d..4cd4a381 100644 --- a/contracts/EventWriter.yul +++ b/contracts/EventWriter.yul @@ -1,11 +1,14 @@ /** * @author Matter Labs + * @custom:security-contact security@matterlabs.dev * @notice The contract responsible for decoding and writing events using low-level instructions. * @dev The metadata and topics are passed via registers, and the first accessible register contains their number. * The rest of the data is passed via calldata without copying. */ object "EventWriter" { - code { } + code { + return(0, 0) + } object "EventWriter_deployed" { code { //////////////////////////////////////////////////////////////// diff --git a/contracts/ImmutableSimulator.sol b/contracts/ImmutableSimulator.sol index e56f1ce7..54fb4c9d 100644 --- a/contracts/ImmutableSimulator.sol +++ b/contracts/ImmutableSimulator.sol @@ -7,6 +7,7 @@ import {DEPLOYER_SYSTEM_CONTRACT} from "./Constants.sol"; /** * @author Matter Labs + * @custom:security-contact security@matterlabs.dev * @notice System smart contract that simulates the behavior of immutable variables in Solidity. * @dev The contract stores the immutable variables created during deployment by other contracts on his storage. * @dev This simulator is needed so that smart contracts with the same Solidity code but different diff --git a/contracts/KnownCodesStorage.sol b/contracts/KnownCodesStorage.sol index b3cb637f..29006389 100644 --- a/contracts/KnownCodesStorage.sol +++ b/contracts/KnownCodesStorage.sol @@ -2,38 +2,34 @@ pragma solidity ^0.8.0; -import "./interfaces/IKnownCodesStorage.sol"; -import "./libraries/Utils.sol"; -import "./libraries/SystemContractHelper.sol"; -import {BOOTLOADER_FORMAL_ADDRESS, BYTECODE_COMPRESSOR_CONTRACT} from "./Constants.sol"; +import {IKnownCodesStorage} from "./interfaces/IKnownCodesStorage.sol"; +import {ISystemContract} from "./interfaces/ISystemContract.sol"; +import {Utils} from "./libraries/Utils.sol"; +import {SystemContractHelper} from "./libraries/SystemContractHelper.sol"; +import {COMPRESSOR_CONTRACT, L1_MESSENGER_CONTRACT} from "./Constants.sol"; /** * @author Matter Labs + * @custom:security-contact security@matterlabs.dev * @notice The storage of this contract will basically serve as a mapping for the known code hashes. * @dev Code hash is not strictly a hash, it's a structure where the first byte denotes the version of the hash, * the second byte denotes whether the contract is constructed, and the next two bytes denote the length in 32-byte words. * words. And then the next 28 bytes is the truncated hash. */ -contract KnownCodesStorage is IKnownCodesStorage { - modifier onlyBootloader() { - require(msg.sender == BOOTLOADER_FORMAL_ADDRESS, "Callable only by the bootloader"); - _; - } - - modifier onlyBytecodeCompressor() { - require(msg.sender == address(BYTECODE_COMPRESSOR_CONTRACT), "Callable only by the bytecode compressor"); +contract KnownCodesStorage is IKnownCodesStorage, ISystemContract { + modifier onlyCompressor() { + require(msg.sender == address(COMPRESSOR_CONTRACT), "Callable only by the compressor"); _; } /// @notice The method that is used by the bootloader to mark several bytecode hashes as known. /// @param _shouldSendToL1 Whether the bytecode should be sent on L1. /// @param _hashes Hashes of the bytecodes to be marked as known. 
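The versioned code hash structure described in the comment above can be recomputed off-chain. A sketch using ethers v5 (assumed to be available, as in this repo's hardhat tooling) follows; the 0x01 version byte and 0x00 constructed flag mirror what `Utils.hashL2Bytecode` produces, but treat the concrete byte values here as an assumption rather than a specification.

import { ethers } from "ethers";

// [1 byte version][1 byte "is constructed" flag][2 bytes length in 32-byte words][28 bytes of truncated sha256]
function hashL2Bytecode(bytecode: Uint8Array): string {
    if (bytecode.length % 32 !== 0) throw new Error("bytecode length must be a multiple of 32");
    const lengthInWords = bytecode.length / 32;
    const truncatedSha256 = ethers.utils.hexDataSlice(ethers.utils.sha256(bytecode), 4); // drop the first 4 bytes
    return ethers.utils.hexConcat([
        "0x0100", // version 0x01, not-constructed flag 0x00 (assumed values)
        ethers.utils.hexZeroPad(ethers.utils.hexlify(lengthInWords), 2),
        truncatedSha256,
    ]);
}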
- function markFactoryDeps(bool _shouldSendToL1, bytes32[] calldata _hashes) external onlyBootloader { + function markFactoryDeps(bool _shouldSendToL1, bytes32[] calldata _hashes) external onlyCallFromBootloader { unchecked { uint256 hashesLen = _hashes.length; for (uint256 i = 0; i < hashesLen; ++i) { - uint256 codeLengthInBytes = Utils.bytecodeLenInBytes(_hashes[i]); - _markBytecodeAsPublished(_hashes[i], 0, codeLengthInBytes, _shouldSendToL1); + _markBytecodeAsPublished(_hashes[i], _shouldSendToL1); } } } @@ -41,32 +37,19 @@ contract KnownCodesStorage is IKnownCodesStorage { /// @notice The method used to mark a single bytecode hash as known. /// @dev Only trusted contacts can call this method, currently only the bytecode compressor. /// @param _bytecodeHash The hash of the bytecode that is marked as known. - /// @param _l1PreimageHash The hash of the preimage is be shown on L1 if zero - the full bytecode will be shown. - /// @param _l1PreimageBytesLen The length of the preimage in bytes. - function markBytecodeAsPublished( - bytes32 _bytecodeHash, - bytes32 _l1PreimageHash, - uint256 _l1PreimageBytesLen - ) external onlyBytecodeCompressor { - _markBytecodeAsPublished(_bytecodeHash, _l1PreimageHash, _l1PreimageBytesLen, false); + function markBytecodeAsPublished(bytes32 _bytecodeHash) external onlyCompressor { + _markBytecodeAsPublished(_bytecodeHash, false); } /// @notice The method used to mark a single bytecode hash as known /// @param _bytecodeHash The hash of the bytecode that is marked as known - /// @param _l1PreimageHash The hash of the preimage to be shown on L1 if zero - the full bytecode will be shown - /// @param _l1PreimageBytesLen The length of the preimage in bytes /// @param _shouldSendToL1 Whether the bytecode should be sent on L1 - function _markBytecodeAsPublished( - bytes32 _bytecodeHash, - bytes32 _l1PreimageHash, - uint256 _l1PreimageBytesLen, - bool _shouldSendToL1 - ) internal { + function _markBytecodeAsPublished(bytes32 _bytecodeHash, bool _shouldSendToL1) internal { if (getMarker(_bytecodeHash) == 0) { _validateBytecode(_bytecodeHash); if (_shouldSendToL1) { - _sendBytecodeToL1(_bytecodeHash, _l1PreimageHash, _l1PreimageBytesLen); + L1_MESSENGER_CONTRACT.requestBytecodeL1Publication(_bytecodeHash); } // Save as known, to not resend the log to L1 @@ -78,46 +61,6 @@ contract KnownCodesStorage is IKnownCodesStorage { } } - /// @notice Method used for sending the bytecode (preimage for the bytecode hash) on L1. - /// @dev While bytecode must be visible to L1 observers, it's not necessary to disclose the whole raw bytecode. - /// To achieve this, it's possible to utilize compressed data using a known compression algorithm. Thus, the - /// L1 preimage data may differ from the raw bytecode. - /// @param _bytecodeHash The hash of the bytecode that is marked as known. - /// @param _l1PreimageHash The hash of the preimage to be shown on L1 if zero - the full bytecode will be shown. - /// @param _l1PreimageBytesLen The length of the preimage in bytes. - /// @dev This method sends a single L2->L1 log with the bytecodeHash and l1PreimageHash. It is the responsibility of the L1 - /// smart contracts to make sure that the preimage for this bytecode hash has been shown. 
- function _sendBytecodeToL1(bytes32 _bytecodeHash, bytes32 _l1PreimageHash, uint256 _l1PreimageBytesLen) internal { - // Burn gas to cover the cost of publishing pubdata on L1 - - // Get the cost of 1 pubdata byte in gas - uint256 meta = SystemContractHelper.getZkSyncMetaBytes(); - uint256 pricePerPubdataByteInGas = SystemContractHelper.getGasPerPubdataByteFromMeta(meta); - - // Calculate how many bytes of calldata will need to be transferred to L1. - // We published the data as ABI-encoded `bytes`, so we pay for: - // - bytecode length in bytes, rounded up to a multiple of 32 (it always is, because of the bytecode format) - // - 32 bytes of encoded offset - // - 32 bytes of encoded length - - uint256 gasToPay = (_l1PreimageBytesLen + 64) * pricePerPubdataByteInGas; - _burnGas(Utils.safeCastToU32(gasToPay)); - - // Send a log to L1 that bytecode should be known. - // L1 smart contract will check the availability of bytecodeHash preimage. - SystemContractHelper.toL1(true, _bytecodeHash, _l1PreimageHash); - } - - /// @notice Method used for burning a certain amount of gas - /// @param _gasToPay The number of gas to burn. - function _burnGas(uint32 _gasToPay) internal view { - bool precompileCallSuccess = SystemContractHelper.precompileCall( - 0, // The precompile parameters are formal ones. We only need the precompile call to burn gas. - _gasToPay - ); - require(precompileCallSuccess, "Failed to charge gas"); - } - /// @notice Returns the marker stored for a bytecode hash. 1 means that the bytecode hash is known /// and can be used for deploying contracts. 0 otherwise. function getMarker(bytes32 _hash) public view override returns (uint256 marker) { diff --git a/contracts/L1Messenger.sol b/contracts/L1Messenger.sol index 00363f14..c5a03c8d 100644 --- a/contracts/L1Messenger.sol +++ b/contracts/L1Messenger.sol @@ -2,12 +2,24 @@ pragma solidity ^0.8.0; -import "./interfaces/IL1Messenger.sol"; -import "./libraries/SystemContractHelper.sol"; -import "./libraries/EfficientCall.sol"; +import {IL1Messenger, L2ToL1Log, L2_L1_LOGS_TREE_DEFAULT_LEAF_HASH, L2_TO_L1_LOG_SERIALIZE_SIZE, STATE_DIFF_COMPRESSION_VERSION_NUMBER} from "./interfaces/IL1Messenger.sol"; +import {ISystemContract} from "./interfaces/ISystemContract.sol"; +import {SystemContractHelper} from "./libraries/SystemContractHelper.sol"; +import {EfficientCall} from "./libraries/EfficientCall.sol"; +import {Utils} from "./libraries/Utils.sol"; +import { + SystemLogKey, + SYSTEM_CONTEXT_CONTRACT, + KNOWN_CODE_STORAGE_CONTRACT, + COMPRESSOR_CONTRACT, + STATE_DIFF_ENTRY_SIZE, + MAX_ALLOWED_PUBDATA_PER_BATCH, + L2_TO_L1_LOGS_MERKLE_TREE_LEAVES +} from "./Constants.sol"; /** * @author Matter Labs + * @custom:security-contact security@matterlabs.dev * @notice Smart contract for sending arbitrary length messages to L1 * @dev by default ZkSync can send fixed length messages on L1. * A fixed length message has 4 parameters `senderAddress` `isService`, `key`, `value`, @@ -18,39 +30,308 @@ import "./libraries/EfficientCall.sol"; * - The contract on L1 accepts all sent messages and if the message came from this system contract * it requires that the preimage of `value` be provided. */ -contract L1Messenger is IL1Messenger { - /// @notice Sends an arbitrary length message to L1. - /// @param _message The variable length message to be sent to L1. - /// @return hash The keccak256 hashed value of the message. +contract L1Messenger is IL1Messenger, ISystemContract { + /// @notice Sequential hash of logs sent in the current block. 
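All three accumulators declared below are chained the same way: the new value is keccak256 of the ABI-encoded pair (previous accumulator, new item), starting from zero. An off-chain replay, with ethers v5 assumed and the helper name chosen for illustration:

import { ethers } from "ethers";

// next = keccak256(abi.encode(previous, item)); used alike for logs, messages and bytecode hashes.
function chainHash(previous: string, item: string): string {
    return ethers.utils.keccak256(ethers.utils.defaultAbiCoder.encode(["bytes32", "bytes32"], [previous, item]));
}

// Replaying the log chain that publishPubdataAndClearState later checks:
// let acc = ethers.constants.HashZero;
// for (const hashedLog of hashedLogs) acc = chainHash(acc, hashedLog);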
+ /// @dev Will be reset at the end of the block to zero value. + bytes32 internal chainedLogsHash; + + /// @notice Number of logs sent in the current block. + /// @dev Will be reset at the end of the block to zero value. + uint256 internal numberOfLogsToProcess; + + /// @notice Sequential hash of hashes of the messages sent in the current block. + /// @dev Will be reset at the end of the block to zero value. + bytes32 internal chainedMessagesHash; + + /// @notice Sequential hash of bytecode hashes that needs to published + /// according to the current block execution invariant. + /// @dev Will be reset at the end of the block to zero value. + bytes32 internal chainedL1BytecodesRevealDataHash; + + /// The gas cost of processing one keccak256 round. + uint256 internal constant KECCAK_ROUND_GAS_COST = 40; + + /// The number of bytes processed in one keccak256 round. + uint256 internal constant KECCAK_ROUND_NUMBER_OF_BYTES = 136; + + /// The gas cost of calculation of keccak256 of bytes array of such length. + function keccakGasCost(uint256 _length) internal pure returns (uint256) { + return KECCAK_ROUND_GAS_COST * (_length / KECCAK_ROUND_NUMBER_OF_BYTES + 1); + } + + /// The gas cost of processing one sha256 round. + uint256 internal constant SHA256_ROUND_GAS_COST = 7; + + /// The number of bytes processed in one sha256 round. + uint256 internal constant SHA256_ROUND_NUMBER_OF_BYTES = 64; + + /// The gas cost of calculation of sha256 of bytes array of such length. + function sha256GasCost(uint256 _length) internal pure returns (uint256) { + return SHA256_ROUND_GAS_COST * ((_length + 8) / SHA256_ROUND_NUMBER_OF_BYTES + 1); + } + + /// @notice Sends L2ToL1Log. + /// @dev Can be called only by a system contract. + function sendL2ToL1Log( + bool _isService, + bytes32 _key, + bytes32 _value + ) external onlyCallFromSystemContract returns (uint256 logIdInMerkleTree) { + L2ToL1Log memory l2ToL1Log = L2ToL1Log({ + l2ShardId: 0, + isService: _isService, + txNumberInBlock: SYSTEM_CONTEXT_CONTRACT.txNumberInBlock(), + sender: msg.sender, + key: _key, + value: _value + }); + logIdInMerkleTree = _processL2ToL1Log(l2ToL1Log); + + // We need to charge cost of hashing, as it will be used in `publishPubdataAndClearState`: + // - keccakGasCost(L2_TO_L1_LOG_SERIALIZE_SIZE) and keccakGasCost(64) when reconstructing L2ToL1Log + // - at most 2 times keccakGasCost(64) (as merkle tree can contain ~2*N leaves) + uint256 gasToPay = keccakGasCost(L2_TO_L1_LOG_SERIALIZE_SIZE) + 3 * keccakGasCost(64); + SystemContractHelper.burnGas(Utils.safeCastToU32(gasToPay)); + } + + /// @notice Internal function to send L2ToL1Log. + function _processL2ToL1Log(L2ToL1Log memory _l2ToL1Log) internal returns (uint256 logIdInMerkleTree) { + bytes32 hashedLog = keccak256( + abi.encodePacked( + _l2ToL1Log.l2ShardId, + _l2ToL1Log.isService, + _l2ToL1Log.txNumberInBlock, + _l2ToL1Log.sender, + _l2ToL1Log.key, + _l2ToL1Log.value + ) + ); + + chainedLogsHash = keccak256(abi.encode(chainedLogsHash, hashedLog)); + + logIdInMerkleTree = numberOfLogsToProcess; + numberOfLogsToProcess++; + + emit L2ToL1LogSent(_l2ToL1Log); + } + + /// @notice Public functionality to send messages to L1. 
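The two gas-cost helpers above translate directly into off-chain arithmetic; the snippet below is only a mirror of those formulas for sanity checking, with a couple of worked numbers, not an addition to the contract.

const KECCAK_ROUND_GAS_COST = 40;
const KECCAK_ROUND_NUMBER_OF_BYTES = 136;
const SHA256_ROUND_GAS_COST = 7;
const SHA256_ROUND_NUMBER_OF_BYTES = 64;

function keccakGasCost(length: number): number {
    return KECCAK_ROUND_GAS_COST * (Math.floor(length / KECCAK_ROUND_NUMBER_OF_BYTES) + 1);
}

function sha256GasCost(length: number): number {
    return SHA256_ROUND_GAS_COST * (Math.floor((length + 8) / SHA256_ROUND_NUMBER_OF_BYTES) + 1);
}

// keccakGasCost(88) === 40  (one serialized L2->L1 log fits in a single round)
// keccakGasCost(136) === 80, sha256GasCost(64) === 14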
function sendToL1(bytes calldata _message) external override returns (bytes32 hash) { + uint256 gasBeforeMessageHashing = gasleft(); hash = EfficientCall.keccak(_message); + uint256 gasSpentOnMessageHashing = gasBeforeMessageHashing - gasleft(); + + /// Store message record + chainedMessagesHash = keccak256(abi.encode(chainedMessagesHash, hash)); + + /// Store log record + L2ToL1Log memory l2ToL1Log = L2ToL1Log({ + l2ShardId: 0, + isService: true, + txNumberInBlock: SYSTEM_CONTEXT_CONTRACT.txNumberInBlock(), + sender: address(this), + key: bytes32(uint256(uint160(msg.sender))), + value: hash + }); + _processL2ToL1Log(l2ToL1Log); // Get cost of one byte pubdata in gas from context. uint256 meta = SystemContractHelper.getZkSyncMetaBytes(); uint32 gasPerPubdataBytes = SystemContractHelper.getGasPerPubdataByteFromMeta(meta); - // Calculate how many bytes of calldata will need to be transferred to L1. - // We published the data as ABI-encoded `bytes`, so we pay for: - // - message length in bytes, rounded up to a multiple of 32 - // - 32 bytes of encoded offset - // - 32 bytes of encoded length + uint256 pubdataLen; + unchecked { + // 4 bytes used to encode the length of the message (see `publishPubdataAndClearState`) + // L2_TO_L1_LOG_SERIALIZE_SIZE bytes used to encode L2ToL1Log + pubdataLen = 4 + _message.length + L2_TO_L1_LOG_SERIALIZE_SIZE; + } + + // We need to charge cost of hashing, as it will be used in `publishPubdataAndClearState`: + // - keccakGasCost(L2_TO_L1_LOG_SERIALIZE_SIZE) and keccakGasCost(64) when reconstructing L2ToL1Log + // - keccakGasCost(64) and gasSpentOnMessageHashing when reconstructing Messages + // - at most 2 times keccakGasCost(64) (as merkle tree can contain ~2*N leaves) + uint256 gasToPay = pubdataLen * + gasPerPubdataBytes + + keccakGasCost(L2_TO_L1_LOG_SERIALIZE_SIZE) + + 4 * + keccakGasCost(64) + + gasSpentOnMessageHashing; + SystemContractHelper.burnGas(Utils.safeCastToU32(gasToPay)); + + emit L1MessageSent(msg.sender, hash, _message); + } + + /// @dev Can be called only by KnownCodesStorage system contract. + function requestBytecodeL1Publication( + bytes32 _bytecodeHash + ) external override onlyCallFrom(address(KNOWN_CODE_STORAGE_CONTRACT)) { + chainedL1BytecodesRevealDataHash = keccak256(abi.encode(chainedL1BytecodesRevealDataHash, _bytecodeHash)); + + uint256 bytecodeLen = Utils.bytecodeLenInBytes(_bytecodeHash); + + // Get cost of one byte pubdata in gas from context. + uint256 meta = SystemContractHelper.getZkSyncMetaBytes(); + uint32 gasPerPubdataBytes = SystemContractHelper.getGasPerPubdataByteFromMeta(meta); uint256 pubdataLen; unchecked { - pubdataLen = ((_message.length + 31) / 32) * 32 + 64; + // 4 bytes used to encode the length of the bytecode (see `publishPubdataAndClearState`) + pubdataLen = 4 + bytecodeLen; } - uint256 gasToPay = pubdataLen * gasPerPubdataBytes; - // Call precompile to burn gas to cover the cost of publishing pubdata to L1. 
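Putting the pieces above together, the total gas burned by one sendToL1 call can be estimated off-chain. The sketch below assumes the 88-byte serialized log size and takes the runtime-only quantities (gas per pubdata byte, gas spent hashing the message) as inputs; names are illustrative.

const L2_TO_L1_LOG_SERIALIZE_SIZE = 88; // assumed serialized size of one L2->L1 log
const keccakGasCost = (length: number): number => 40 * (Math.floor(length / 136) + 1);

function sendToL1GasBurn(messageLength: number, gasPerPubdataByte: number, gasSpentOnMessageHashing: number): number {
    // 4-byte length prefix + the message itself + one serialized L2->L1 log
    const pubdataLen = 4 + messageLength + L2_TO_L1_LOG_SERIALIZE_SIZE;
    return (
        pubdataLen * gasPerPubdataByte +
        keccakGasCost(L2_TO_L1_LOG_SERIALIZE_SIZE) +
        4 * keccakGasCost(64) +
        gasSpentOnMessageHashing
    );
}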
- uint256 precompileParams = SystemContractHelper.packPrecompileParams(0, 0, 0, 0, 0);
- bool precompileCallSuccess = SystemContractHelper.precompileCall(
- precompileParams,
- Utils.safeCastToU32(gasToPay)
+ // We need to charge cost of hashing, as it will be used in `publishPubdataAndClearState`
+ uint256 gasToPay = pubdataLen * gasPerPubdataBytes + sha256GasCost(bytecodeLen) + keccakGasCost(64);
+ SystemContractHelper.burnGas(Utils.safeCastToU32(gasToPay));
+
+ emit BytecodeL1PublicationRequested(_bytecodeHash);
+ }
+
+ /// @notice Verifies that the {_totalL2ToL1PubdataAndStateDiffs} reflects what occurred within the L1Batch and that
+ /// the compressed state diffs are equivalent to the full state diffs.
+ /// @param _totalL2ToL1PubdataAndStateDiffs The total pubdata and uncompressed state diffs of transactions that were
+ /// processed in the current L1 Batch. Pubdata consists of L2 to L1 Logs, messages, deployed bytecode, and state diffs.
+ /// @dev Function that should be called exactly once per L1 Batch by the bootloader.
+ /// @dev Checks that totalL2ToL1Pubdata is strictly packed data that should be published to L1.
+ /// @dev The data passed in also contains the encoded state diffs to be checked again, however this is aux data that is not
+ /// part of the committed pubdata.
+ /// @dev Performs calculation of L2ToL1Logs merkle tree root, "sends" such root and keccak256(totalL2ToL1Pubdata)
+ /// to L1 using low-level (VM) L2Log.
+ function publishPubdataAndClearState(
+ bytes calldata _totalL2ToL1PubdataAndStateDiffs
+ ) external onlyCallFromBootloader {
+ uint256 calldataPtr = 0;
+
+ /// Check logs
+ uint32 numberOfL2ToL1Logs = uint32(bytes4(_totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + 4]));
+ require(numberOfL2ToL1Logs <= L2_TO_L1_LOGS_MERKLE_TREE_LEAVES, "Too many L2->L1 logs");
+ calldataPtr += 4;
+
+ bytes32[] memory l2ToL1LogsTreeArray = new bytes32[](L2_TO_L1_LOGS_MERKLE_TREE_LEAVES);
+ bytes32 reconstructedChainedLogsHash;
+ for (uint256 i = 0; i < numberOfL2ToL1Logs; ++i) {
+ bytes32 hashedLog = EfficientCall.keccak(
+ _totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + L2_TO_L1_LOG_SERIALIZE_SIZE]
+ );
+ calldataPtr += L2_TO_L1_LOG_SERIALIZE_SIZE;
+ l2ToL1LogsTreeArray[i] = hashedLog;
+ reconstructedChainedLogsHash = keccak256(abi.encode(reconstructedChainedLogsHash, hashedLog));
+ }
+ require(
+ reconstructedChainedLogsHash == chainedLogsHash,
+ "reconstructedChainedLogsHash is not equal to chainedLogsHash"
 );
- require(precompileCallSuccess, "Failed to burn gas");
+ for (uint256 i = numberOfL2ToL1Logs; i < L2_TO_L1_LOGS_MERKLE_TREE_LEAVES; ++i) {
+ l2ToL1LogsTreeArray[i] = L2_L1_LOGS_TREE_DEFAULT_LEAF_HASH;
+ }
+ uint256 nodesOnCurrentLevel = L2_TO_L1_LOGS_MERKLE_TREE_LEAVES;
+ while (nodesOnCurrentLevel > 1) {
+ nodesOnCurrentLevel /= 2;
+ for (uint256 i = 0; i < nodesOnCurrentLevel; ++i) {
+ l2ToL1LogsTreeArray[i] = keccak256(
+ abi.encode(l2ToL1LogsTreeArray[2 * i], l2ToL1LogsTreeArray[2 * i + 1])
+ );
+ }
+ }
+ bytes32 l2ToL1LogsTreeRoot = l2ToL1LogsTreeArray[0];
- SystemContractHelper.toL1(true, bytes32(uint256(uint160(msg.sender))), hash);
+ /// Check messages
+ uint32 numberOfMessages = uint32(bytes4(_totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + 4]));
+ calldataPtr += 4;
+ bytes32 reconstructedChainedMessagesHash;
+ for (uint256 i = 0; i < numberOfMessages; ++i) {
+ uint32 currentMessageLength = uint32(bytes4(_totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + 4]));
+ calldataPtr += 4;
+ bytes32 hashedMessage = EfficientCall.keccak(
+ _totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + currentMessageLength] + ); + calldataPtr += currentMessageLength; + reconstructedChainedMessagesHash = keccak256(abi.encode(reconstructedChainedMessagesHash, hashedMessage)); + } + require( + reconstructedChainedMessagesHash == chainedMessagesHash, + "reconstructedChainedMessagesHash is not equal to chainedMessagesHash" + ); - emit L1MessageSent(msg.sender, hash, _message); + /// Check bytecodes + uint32 numberOfBytecodes = uint32(bytes4(_totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + 4])); + calldataPtr += 4; + bytes32 reconstructedChainedL1BytecodesRevealDataHash; + for (uint256 i = 0; i < numberOfBytecodes; ++i) { + uint32 currentBytecodeLength = uint32( + bytes4(_totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + 4]) + ); + calldataPtr += 4; + reconstructedChainedL1BytecodesRevealDataHash = keccak256( + abi.encode( + reconstructedChainedL1BytecodesRevealDataHash, + Utils.hashL2Bytecode( + _totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + currentBytecodeLength] + ) + ) + ); + calldataPtr += currentBytecodeLength; + } + require( + reconstructedChainedL1BytecodesRevealDataHash == chainedL1BytecodesRevealDataHash, + "reconstructedChainedL1BytecodesRevealDataHash is not equal to chainedL1BytecodesRevealDataHash" + ); + + /// Check State Diffs + /// encoding is as follows: + /// header (1 byte version, 2 bytes total len of compressed, 1 byte enumeration index size, 2 bytes number of initial writes) + /// body (N bytes of initial writes [32 byte derived key || compressed value], M bytes repeated writes [enumeration index || compressed value]) + /// encoded state diffs: [20bytes address][32bytes key][32bytes derived key][8bytes enum index][32bytes initial value][32bytes final value] + require( + uint256(uint8(bytes1(_totalL2ToL1PubdataAndStateDiffs[calldataPtr]))) == + STATE_DIFF_COMPRESSION_VERSION_NUMBER, + "state diff compression version mismatch" + ); + calldataPtr++; + + uint24 compressedStateDiffSize = uint24(bytes3(_totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + 3])); + calldataPtr += 3; + + uint8 enumerationIndexSize = uint8(bytes1(_totalL2ToL1PubdataAndStateDiffs[calldataPtr])); + calldataPtr++; + + bytes calldata compressedStateDiffs = _totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + + compressedStateDiffSize]; + calldataPtr += compressedStateDiffSize; + + bytes calldata totalL2ToL1Pubdata = _totalL2ToL1PubdataAndStateDiffs[:calldataPtr]; + + require(calldataPtr <= MAX_ALLOWED_PUBDATA_PER_BATCH, "L1 Messenger pubdata is too long"); + + uint32 numberOfStateDiffs = uint32(bytes4(_totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + 4])); + calldataPtr += 4; + + bytes calldata stateDiffs = _totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + + (numberOfStateDiffs * STATE_DIFF_ENTRY_SIZE)]; + calldataPtr += numberOfStateDiffs * STATE_DIFF_ENTRY_SIZE; + + bytes32 stateDiffHash = COMPRESSOR_CONTRACT.verifyCompressedStateDiffs( + numberOfStateDiffs, + enumerationIndexSize, + stateDiffs, + compressedStateDiffs + ); + + /// Check for calldata strict format + require(calldataPtr == _totalL2ToL1PubdataAndStateDiffs.length, "Extra data in the totalL2ToL1Pubdata array"); + + /// Native (VM) L2 to L1 log + SystemContractHelper.toL1(true, bytes32(uint256(SystemLogKey.L2_TO_L1_LOGS_TREE_ROOT_KEY)), l2ToL1LogsTreeRoot); + SystemContractHelper.toL1( + true, + bytes32(uint256(SystemLogKey.TOTAL_L2_TO_L1_PUBDATA_KEY)), + EfficientCall.keccak(totalL2ToL1Pubdata) + ); + 
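For operators or tests that need to feed publishPubdataAndClearState, the byte stream parsed above can be assembled as shown below. The encoder is a hypothetical helper built on ethers v5; the field order follows the parsing code, while the 0x01 version byte stands for STATE_DIFF_COMPRESSION_VERSION_NUMBER and is an assumption here.

import { ethers } from "ethers";

interface PubdataInput {
    l2ToL1Logs: Uint8Array[];          // each one serialized L2->L1 log (88 bytes assumed)
    messages: Uint8Array[];
    bytecodes: Uint8Array[];
    compressedStateDiffs: Uint8Array;
    enumerationIndexSize: number;
    stateDiffs: Uint8Array;            // numberOfStateDiffs * 272-byte entries
    numberOfStateDiffs: number;
}

function encodePubdata(input: PubdataInput): string {
    const u32 = (n: number) => ethers.utils.hexZeroPad(ethers.utils.hexlify(n), 4);
    const parts: string[] = [];

    parts.push(u32(input.l2ToL1Logs.length));
    input.l2ToL1Logs.forEach((log) => parts.push(ethers.utils.hexlify(log)));

    parts.push(u32(input.messages.length));
    input.messages.forEach((message) => parts.push(u32(message.length), ethers.utils.hexlify(message)));

    parts.push(u32(input.bytecodes.length));
    input.bytecodes.forEach((bytecode) => parts.push(u32(bytecode.length), ethers.utils.hexlify(bytecode)));

    // State diff section: version, 3-byte compressed length, enum index size, compressed diffs,
    // then the uncompressed diffs that are only used for verification (not committed as pubdata).
    parts.push("0x01");
    parts.push(ethers.utils.hexZeroPad(ethers.utils.hexlify(input.compressedStateDiffs.length), 3));
    parts.push(ethers.utils.hexlify([input.enumerationIndexSize]));
    parts.push(ethers.utils.hexlify(input.compressedStateDiffs));
    parts.push(u32(input.numberOfStateDiffs));
    parts.push(ethers.utils.hexlify(input.stateDiffs));

    return ethers.utils.hexConcat(parts);
}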
SystemContractHelper.toL1(true, bytes32(uint256(SystemLogKey.STATE_DIFF_HASH_KEY)), stateDiffHash); + + /// Clear logs state + chainedLogsHash = bytes32(0); + numberOfLogsToProcess = 0; + chainedMessagesHash = bytes32(0); + chainedL1BytecodesRevealDataHash = bytes32(0); } } diff --git a/contracts/L2EthToken.sol b/contracts/L2EthToken.sol index d59a8fb6..6a2ca48e 100644 --- a/contracts/L2EthToken.sol +++ b/contracts/L2EthToken.sol @@ -3,29 +3,25 @@ pragma solidity ^0.8.0; import {IEthToken} from "./interfaces/IEthToken.sol"; +import {ISystemContract} from "./interfaces/ISystemContract.sol"; import {MSG_VALUE_SYSTEM_CONTRACT, DEPLOYER_SYSTEM_CONTRACT, BOOTLOADER_FORMAL_ADDRESS, L1_MESSENGER_CONTRACT} from "./Constants.sol"; -import {SystemContractHelper} from "./libraries/SystemContractHelper.sol"; import {IMailbox} from "./interfaces/IMailbox.sol"; /** * @author Matter Labs + * @custom:security-contact security@matterlabs.dev * @notice Native ETH contract. * @dev It does NOT provide interfaces for personal interaction with tokens like `transfer`, `approve`, and `transferFrom`. * Instead, this contract is used by the bootloader and `MsgValueSimulator`/`ContractDeployer` system contracts * to perform the balance changes while simulating the `msg.value` Ethereum behavior. */ -contract L2EthToken is IEthToken { +contract L2EthToken is IEthToken, ISystemContract { /// @notice The balances of the users. - mapping(address => uint256) balance; + mapping(address => uint256) internal balance; /// @notice The total amount of tokens that have been minted. uint256 public override totalSupply; - modifier onlyBootloader() { - require(msg.sender == BOOTLOADER_FORMAL_ADDRESS, "Callable only by the bootloader"); - _; - } - /// @notice Transfer tokens from one address to another. /// @param _from The address to transfer the ETH from. /// @param _to The address to transfer the ETH to. @@ -65,7 +61,7 @@ contract L2EthToken is IEthToken { /// @dev This method is only callable by the bootloader. /// @param _account The address which to mint the funds to. /// @param _amount The amount of ETH in wei to be minted. - function mint(address _account, uint256 _amount) external override onlyBootloader { + function mint(address _account, uint256 _amount) external override onlyCallFromBootloader { totalSupply += _amount; balance[_account] += _amount; emit Mint(_account, _amount); diff --git a/contracts/MsgValueSimulator.sol b/contracts/MsgValueSimulator.sol index 5d66c304..6a6a9d9f 100644 --- a/contracts/MsgValueSimulator.sol +++ b/contracts/MsgValueSimulator.sol @@ -10,6 +10,7 @@ import {MSG_VALUE_SIMULATOR_IS_SYSTEM_BIT, ETH_TOKEN_SYSTEM_CONTRACT} from "./Co /** * @author Matter Labs + * @custom:security-contact security@matterlabs.dev * @notice The contract responsible for simulating transactions with `msg.value` inside zkEVM. * @dev It accepts value and whether the call should be system in the first extraAbi param and * the address to call in the second extraAbi param, transfers the funds and uses `mimicCall` to continue the diff --git a/contracts/NonceHolder.sol b/contracts/NonceHolder.sol index e74a5664..f5a08a6b 100644 --- a/contracts/NonceHolder.sol +++ b/contracts/NonceHolder.sol @@ -9,6 +9,7 @@ import {DEPLOYER_SYSTEM_CONTRACT} from "./Constants.sol"; /** * @author Matter Labs + * @custom:security-contact security@matterlabs.dev * @notice A contract used for managing nonces for accounts. 
Together with bootloader, * this contract ensures that the pair (sender, nonce) is always unique, ensuring * unique transaction hashes. diff --git a/contracts/SystemContext.sol b/contracts/SystemContext.sol index 5d801f34..ad20d4bb 100644 --- a/contracts/SystemContext.sol +++ b/contracts/SystemContext.sol @@ -3,21 +3,18 @@ pragma solidity ^0.8.0; import {ISystemContext} from "./interfaces/ISystemContext.sol"; +import {ISystemContract} from "./interfaces/ISystemContract.sol"; import {ISystemContextDeprecated} from "./interfaces/ISystemContextDeprecated.sol"; import {SystemContractHelper} from "./libraries/SystemContractHelper.sol"; -import {BOOTLOADER_FORMAL_ADDRESS} from "./Constants.sol"; +import {BOOTLOADER_FORMAL_ADDRESS, SystemLogKey} from "./Constants.sol"; /** * @author Matter Labs + * @custom:security-contact security@matterlabs.dev * @notice Contract that stores some of the context variables, that may be either * block-scoped, tx-scoped or system-wide. */ -contract SystemContext is ISystemContext, ISystemContextDeprecated { - modifier onlyBootloader() { - require(msg.sender == BOOTLOADER_FORMAL_ADDRESS, "Callable only by the bootloader"); - _; - } - +contract SystemContext is ISystemContext, ISystemContextDeprecated, ISystemContract { /// @notice The number of latest L2 blocks to store. /// @dev EVM requires us to be able to query the hashes of previous 256 blocks. /// We could either: @@ -64,11 +61,11 @@ contract SystemContext is ISystemContext, ISystemContextDeprecated { bytes32 internal currentL2BlockTxsRollingHash; /// @notice The hashes of L2 blocks. - /// @dev It stores block hashes for previous L2 blocks. Note, in order to make publishing the hashes - /// of the miniblocks cheaper, we only store the previous MINIBLOCK_HASHES_TO_STORE ones. Since whenever we need to publish a state + /// @dev It stores block hashes for previous L2 blocks. Note, in order to make publishing the hashes + /// of the miniblocks cheaper, we only store the previous MINIBLOCK_HASHES_TO_STORE ones. Since whenever we need to publish a state /// diff, a pair of is published and for cached keys only 8-byte id is used instead of 32 bytes. /// By having this data in a cyclic array of MINIBLOCK_HASHES_TO_STORE blocks, we bring the costs down by 40% (i.e. 40 bytes per miniblock instead of 64 bytes). - /// @dev The hash of a miniblock with number N would be stored under slot N%MINIBLOCK_HASHES_TO_STORE. + /// @dev The hash of a miniblock with number N would be stored under slot N%MINIBLOCK_HASHES_TO_STORE. /// @dev Hashes of the blocks older than the ones which are stored here can be calculated as _calculateLegacyL2BlockHash(blockNumber). bytes32[MINIBLOCK_HASHES_TO_STORE] internal l2BlockHash; @@ -82,22 +79,25 @@ contract SystemContext is ISystemContext, ISystemContextDeprecated { /// @notice The information about the virtual blocks upgrade, which tracks when the migration to the L2 blocks has started and finished. VirtualBlockUpgradeInfo internal virtualBlockUpgradeInfo; + /// @notice Number of current transaction in block. + uint16 public txNumberInBlock; + /// @notice Set the current tx origin. /// @param _newOrigin The new tx origin. - function setTxOrigin(address _newOrigin) external onlyBootloader { + function setTxOrigin(address _newOrigin) external onlyCallFromBootloader { origin = _newOrigin; } /// @notice Set the the current gas price. /// @param _gasPrice The new tx gasPrice. 
- function setGasPrice(uint256 _gasPrice) external onlyBootloader { + function setGasPrice(uint256 _gasPrice) external onlyCallFromBootloader { gasPrice = _gasPrice; } /// @notice The method that emulates `blockhash` opcode in EVM. /// @dev Just like the blockhash in the EVM, it returns bytes32(0), /// when queried about hashes that are older than 256 blocks ago. - /// @dev Since zksolc compiler calls this method to emulate `blockhash`, + /// @dev Since zksolc compiler calls this method to emulate `blockhash`, /// its signature can not be changed to `getL2BlockHashEVM`. /// @return hash The blockhash of the block with the given number. function getBlockHashEVM(uint256 _block) external view returns (bytes32 hash) { @@ -107,7 +107,7 @@ contract SystemContext is ISystemContext, ISystemContextDeprecated { // Due to virtual blocks upgrade, we'll have to use the following logic for retreiving the blockhash: // 1. If the block number is out of the 256-block supported range, return 0. - // 2. If the block was created before the upgrade for the virtual blocks (i.e. there we used to use hashes of the batches), + // 2. If the block was created before the upgrade for the virtual blocks (i.e. there we used to use hashes of the batches), // we return the hash of the batch. // 3. If the block was created after the day when the virtual blocks have caught up with the L2 blocks, i.e. // all the information which is returned for users should be for L2 blocks, we return the hash of the corresponding L2 block. @@ -118,11 +118,14 @@ contract SystemContext is ISystemContext, ISystemContextDeprecated { // Note, that we will get into this branch only for a brief moment of time, right after the upgrade // for virtual blocks before 256 virtual blocks are produced. hash = batchHash[_block]; - } else if (_block >= currentVirtualBlockUpgradeInfo.virtualBlockFinishL2Block && currentVirtualBlockUpgradeInfo.virtualBlockFinishL2Block > 0) { + } else if ( + _block >= currentVirtualBlockUpgradeInfo.virtualBlockFinishL2Block && + currentVirtualBlockUpgradeInfo.virtualBlockFinishL2Block > 0 + ) { hash = _getLatest257L2blockHash(_block); } else { - // Important: we do not want this number to ever collide with the L2 block hash (either new or old one) and so - // that's why the legacy L2 blocks' hashes are keccak256(abi.encodePacked(uint32(_block))), while these are equivalent to + // Important: we do not want this number to ever collide with the L2 block hash (either new or old one) and so + // that's why the legacy L2 blocks' hashes are keccak256(abi.encodePacked(uint32(_block))), while these are equivalent to // keccak256(abi.encodePacked(_block)) hash = keccak256(abi.encode(_block)); } @@ -152,7 +155,7 @@ contract SystemContext is ISystemContext, ISystemContextDeprecated { } /// @notice Returns the current L2 block's number. - /// @dev Since zksolc compiler calls this method to emulate `block.number`, + /// @dev Since zksolc compiler calls this method to emulate `block.number`, /// its signature can not be changed to `getL2BlockNumber`. /// @return blockNumber The current L2 block's number. function getBlockNumber() public view returns (uint128) { @@ -160,7 +163,7 @@ contract SystemContext is ISystemContext, ISystemContextDeprecated { } /// @notice Returns the current L2 block's timestamp. - /// @dev Since zksolc compiler calls this method to emulate `block.timestamp`, + /// @dev Since zksolc compiler calls this method to emulate `block.timestamp`, /// its signature can not be changed to `getL2BlockTimestamp`. 
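The two hash domains discussed above are easy to compare off-chain: legacy miniblock hashes commit to a 4-byte encoding of the block number, while the fallback for out-of-range queries commits to the full 32-byte encoding, so the preimages can never coincide. A quick check with ethers v5 (assumed):

import { ethers } from "ethers";

const block = 1000;
const legacyHash = ethers.utils.keccak256(ethers.utils.solidityPack(["uint32"], [block]));               // keccak256(abi.encodePacked(uint32(block)))
const fallbackHash = ethers.utils.keccak256(ethers.utils.defaultAbiCoder.encode(["uint256"], [block])); // keccak256(abi.encode(block))
console.log(legacyHash !== fallbackHash); // true; different preimage lengths, so no collision for the same number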
/// @return timestamp The current L2 block's timestamp. function getBlockTimestamp() public view returns (uint128) { @@ -195,12 +198,10 @@ contract SystemContext is ISystemContext, ISystemContextDeprecated { return keccak256(abi.encode(_blockNumber, _blockTimestamp, _prevL2BlockHash, _blockTxsRollingHash)); } - /// @notice Calculates the legacy block hash of L2 block, which were used before the upgrade where + /// @notice Calculates the legacy block hash of L2 block, which were used before the upgrade where /// the advanced block hashes were introduced. /// @param _blockNumber The number of the L2 block. - function _calculateLegacyL2BlockHash( - uint128 _blockNumber - ) internal pure returns (bytes32) { + function _calculateLegacyL2BlockHash(uint128 _blockNumber) internal pure returns (bytes32) { return keccak256(abi.encodePacked(uint32(_blockNumber))); } @@ -208,21 +209,17 @@ contract SystemContext is ISystemContext, ISystemContextDeprecated { /// @param _l2BlockNumber The number of the new L2 block. /// @param _expectedPrevL2BlockHash The expected hash of the previous L2 block. /// @param _isFirstInBatch Whether this method is called for the first time in the batch. - function _upgradeL2Blocks( - uint128 _l2BlockNumber, - bytes32 _expectedPrevL2BlockHash, - bool _isFirstInBatch - ) internal { + function _upgradeL2Blocks(uint128 _l2BlockNumber, bytes32 _expectedPrevL2BlockHash, bool _isFirstInBatch) internal { require(_isFirstInBatch, "Upgrade transaction must be first"); - + // This is how it will be commonly done in practice, but it will simplify some logic later require(_l2BlockNumber > 0, "L2 block number is never expected to be zero"); - + unchecked { bytes32 correctPrevBlockHash = _calculateLegacyL2BlockHash(uint128(_l2BlockNumber - 1)); require(correctPrevBlockHash == _expectedPrevL2BlockHash, "The previous L2 block hash is incorrect"); - // Whenever we'll be queried about the hashes of the blocks before the upgrade, + // Whenever we'll be queried about the hashes of the blocks before the upgrade, // we'll use batches' hashes, so we don't need to store 256 previous hashes. // However, we do need to store the last previous hash in order to be able to correctly calculate the // hash of the new L2 block. @@ -239,11 +236,11 @@ contract SystemContext is ISystemContext, ISystemContextDeprecated { uint128 _maxVirtualBlocksToCreate, uint128 _newTimestamp ) internal { - if(virtualBlockUpgradeInfo.virtualBlockFinishL2Block != 0) { + if (virtualBlockUpgradeInfo.virtualBlockFinishL2Block != 0) { // No need to to do anything about virtual blocks anymore // All the info is the same as for L2 blocks. currentVirtualL2BlockInfo = currentL2BlockInfo; - return; + return; } BlockInfo memory virtualBlockInfo = currentVirtualL2BlockInfo; @@ -252,7 +249,7 @@ contract SystemContext is ISystemContext, ISystemContextDeprecated { uint128 currentBatchNumber = currentBatchInfo.number; // The virtual block is set for the first time. We can count it as 1 creation of a virtual block. - // Note, that when setting the virtual block number we use the batch number to make a smoother upgrade from batch number to + // Note, that when setting the virtual block number we use the batch number to make a smoother upgrade from batch number to // the L2 block number. virtualBlockInfo.number = currentBatchNumber; // Remembering the batch number on which the upgrade to the virtual blocks has been done. 
@@ -261,7 +258,7 @@ contract SystemContext is ISystemContext, ISystemContextDeprecated { require(_maxVirtualBlocksToCreate > 0, "Can't initialize the first virtual block"); _maxVirtualBlocksToCreate -= 1; } else if (_maxVirtualBlocksToCreate == 0) { - // The virtual blocks have been already initialized, but the operator didn't ask to create + // The virtual blocks have been already initialized, but the operator didn't ask to create // any new virtual blocks. So we can just return. return; } @@ -271,13 +268,13 @@ contract SystemContext is ISystemContext, ISystemContextDeprecated { // The virtual block number must never exceed the L2 block number. // We do not use a `require` here, since the virtual blocks are a temporary solution to let the Solidity's `block.number` - // catch up with the L2 block number and so the situation where virtualBlockInfo.number starts getting larger + // catch up with the L2 block number and so the situation where virtualBlockInfo.number starts getting larger // than _l2BlockNumber is expected once virtual blocks have caught up the L2 blocks. if (virtualBlockInfo.number >= _l2BlockNumber) { virtualBlockUpgradeInfo.virtualBlockFinishL2Block = _l2BlockNumber; virtualBlockInfo.number = _l2BlockNumber; } - + currentVirtualL2BlockInfo = virtualBlockInfo; } @@ -285,16 +282,9 @@ contract SystemContext is ISystemContext, ISystemContextDeprecated { /// @param _l2BlockNumber The number of the new L2 block. /// @param _l2BlockTimestamp The timestamp of the new L2 block. /// @param _prevL2BlockHash The hash of the previous L2 block. - function _setNewL2BlockData( - uint128 _l2BlockNumber, - uint128 _l2BlockTimestamp, - bytes32 _prevL2BlockHash - ) internal { + function _setNewL2BlockData(uint128 _l2BlockNumber, uint128 _l2BlockTimestamp, bytes32 _prevL2BlockHash) internal { // In the unsafe version we do not check that the block data is correct - currentL2BlockInfo = BlockInfo({ - number: _l2BlockNumber, - timestamp: _l2BlockTimestamp - }); + currentL2BlockInfo = BlockInfo({number: _l2BlockNumber, timestamp: _l2BlockTimestamp}); // It is always assumed in production that _l2BlockNumber > 0 _setL2BlockHash(_l2BlockNumber - 1, _prevL2BlockHash); @@ -305,64 +295,69 @@ contract SystemContext is ISystemContext, ISystemContextDeprecated { /// @notice Sets the current block number and timestamp of the L2 block. /// @dev Called by the bootloader before each transaction. This is needed to ensure - /// that the data about the block is consistent with the sequencer. - /// @dev If the new block number is the same as the current one, we ensure that the block's data is + /// that the data about the block is consistent with the sequencer. + /// @dev If the new block number is the same as the current one, we ensure that the block's data is /// consistent with the one in the current block. - /// @dev If the new block number is greater than the current one by 1, + /// @dev If the new block number is greater than the current one by 1, /// then we ensure that timestamp has increased. - /// @dev If the currently stored number is 0, we assume that it is the first upgrade transaction + /// @dev If the currently stored number is 0, we assume that it is the first upgrade transaction /// and so we will fill up the old data. /// @param _l2BlockNumber The number of the new L2 block. /// @param _l2BlockTimestamp The timestamp of the new L2 block. /// @param _expectedPrevL2BlockHash The expected hash of the previous L2 block. 
/// @param _isFirstInBatch Whether this method is called for the first time in the batch. /// @param _maxVirtualBlocksToCreate The maximum number of virtual block to create with this L2 block. - /// @dev It is a strict requirement that a new virtual block is created at the start of the batch. + /// @dev It is a strict requirement that a new virtual block is created at the start of the batch. /// @dev It is also enforced that the number of the current virtual L2 block can not exceed the number of the L2 block. function setL2Block( - uint128 _l2BlockNumber, + uint128 _l2BlockNumber, uint128 _l2BlockTimestamp, bytes32 _expectedPrevL2BlockHash, bool _isFirstInBatch, uint128 _maxVirtualBlocksToCreate - ) external onlyBootloader { + ) external onlyCallFromBootloader { // We check that the timestamp of the L2 block is consistent with the timestamp of the batch. - if(_isFirstInBatch) { + if (_isFirstInBatch) { uint128 currentBatchTimestamp = currentBatchInfo.timestamp; - require(_l2BlockTimestamp >= currentBatchTimestamp, "The timestamp of the L2 block must be greater than or equal to the timestamp of the current batch"); + require( + _l2BlockTimestamp >= currentBatchTimestamp, + "The timestamp of the L2 block must be greater than or equal to the timestamp of the current batch" + ); require(_maxVirtualBlocksToCreate > 0, "There must be a virtual block created at the start of the batch"); } (uint128 currentL2BlockNumber, uint128 currentL2BlockTimestamp) = getL2BlockNumberAndTimestamp(); if (currentL2BlockNumber == 0 && currentL2BlockTimestamp == 0) { - // Since currentL2BlockNumber and currentL2BlockTimestamp are zero it means that it is + // Since currentL2BlockNumber and currentL2BlockTimestamp are zero it means that it is // the first ever batch with L2 blocks, so we need to initialize those. 
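The branch below that rejects an inconsistent previous block recomputes the pending block hash from four fields; the same computation, and the rolling transaction hash it chains over, can be reproduced off-chain. Ethers v5 is assumed and the function names are illustrative.

import { ethers } from "ethers";

// Mirrors _calculateL2BlockHash: keccak256(abi.encode(number, timestamp, prevHash, txsRollingHash)).
function l2BlockHash(blockNumber: number, timestamp: number, prevBlockHash: string, txsRollingHash: string): string {
    return ethers.utils.keccak256(
        ethers.utils.defaultAbiCoder.encode(
            ["uint128", "uint128", "bytes32", "bytes32"],
            [blockNumber, timestamp, prevBlockHash, txsRollingHash]
        )
    );
}

// Mirrors appendTransactionToCurrentL2Block: the rolling hash chains every tx hash of the block.
function appendToRollingHash(rollingHash: string, txHash: string): string {
    return ethers.utils.keccak256(ethers.utils.defaultAbiCoder.encode(["bytes32", "bytes32"], [rollingHash, txHash]));
}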
- _upgradeL2Blocks( - _l2BlockNumber, - _expectedPrevL2BlockHash, - _isFirstInBatch - ); + _upgradeL2Blocks(_l2BlockNumber, _expectedPrevL2BlockHash, _isFirstInBatch); _setNewL2BlockData(_l2BlockNumber, _l2BlockTimestamp, _expectedPrevL2BlockHash); } else if (currentL2BlockNumber == _l2BlockNumber) { require(!_isFirstInBatch, "Can not reuse L2 block number from the previous batch"); require(currentL2BlockTimestamp == _l2BlockTimestamp, "The timestamp of the same L2 block must be same"); - require(_expectedPrevL2BlockHash == _getLatest257L2blockHash(_l2BlockNumber - 1), "The previous hash of the same L2 block must be same"); + require( + _expectedPrevL2BlockHash == _getLatest257L2blockHash(_l2BlockNumber - 1), + "The previous hash of the same L2 block must be same" + ); require(_maxVirtualBlocksToCreate == 0, "Can not create virtual blocks in the middle of the miniblock"); } else if (currentL2BlockNumber + 1 == _l2BlockNumber) { // From the checks in _upgradeL2Blocks it is known that currentL2BlockNumber can not be 0 bytes32 prevL2BlockHash = _getLatest257L2blockHash(currentL2BlockNumber - 1); bytes32 pendingL2BlockHash = _calculateL2BlockHash( - currentL2BlockNumber, - currentL2BlockTimestamp, - prevL2BlockHash, + currentL2BlockNumber, + currentL2BlockTimestamp, + prevL2BlockHash, currentL2BlockTxsRollingHash ); require(_expectedPrevL2BlockHash == pendingL2BlockHash, "The current L2 block hash is incorrect"); - require(_l2BlockTimestamp > currentL2BlockTimestamp, "The timestamp of the new L2 block must be greater than the timestamp of the previous L2 block"); + require( + _l2BlockTimestamp > currentL2BlockTimestamp, + "The timestamp of the new L2 block must be greater than the timestamp of the previous L2 block" + ); // Since the new block is created, we'll clear out the rolling hash _setNewL2BlockData(_l2BlockNumber, _l2BlockTimestamp, _expectedPrevL2BlockHash); @@ -373,9 +368,15 @@ contract SystemContext is ISystemContext, ISystemContextDeprecated { _setVirtualBlock(_l2BlockNumber, _maxVirtualBlocksToCreate, _l2BlockTimestamp); } + /// @notice Appends the transaction hash to the rolling hash of the current L2 block. + /// @param _txHash The hash of the transaction. + function appendTransactionToCurrentL2Block(bytes32 _txHash) external onlyCallFromBootloader { + currentL2BlockTxsRollingHash = keccak256(abi.encode(currentL2BlockTxsRollingHash, _txHash)); + } + /// @notice Publishes L2->L1 logs needed to verify the validity of this batch on L1. /// @dev Should be called at the end of the current batch. - function publishBatchDataToL1() external onlyBootloader { + function publishTimestampDataToL1() external onlyCallFromBootloader { (uint128 currentBatchNumber, uint128 currentBatchTimestamp) = getBatchNumberAndTimestamp(); (, uint128 currentL2BlockTimestamp) = getL2BlockNumberAndTimestamp(); @@ -386,24 +387,21 @@ contract SystemContext is ISystemContext, ISystemContextDeprecated { // In order to spend less pubdata, the packed version is published uint256 packedTimestamps = (uint256(currentBatchTimestamp) << 128) | currentL2BlockTimestamp; - SystemContractHelper.toL1(false, bytes32(packedTimestamps), prevBatchHash); - } - - /// @notice Appends the transaction hash to the rolling hash of the current L2 block. - /// @param _txHash The hash of the transaction. 
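For reference, the rolling transaction hash maintained by the new `appendTransactionToCurrentL2Block` above is a plain keccak256 chain over 32-byte words: `abi.encode` of two `bytes32` values is just their concatenation, and the accumulator is cleared whenever a new L2 block starts. A minimal Python sketch of the same accumulation, assuming the third-party pycryptodome package for keccak256 (not part of this repository):

from Crypto.Hash import keccak  # assumed dependency: pycryptodome

def keccak256(data: bytes) -> bytes:
    return keccak.new(digest_bits=256, data=data).digest()

def append_tx(rolling_hash: bytes, tx_hash: bytes) -> bytes:
    # abi.encode(bytes32, bytes32) is the two 32-byte words back to back
    assert len(rolling_hash) == 32 and len(tx_hash) == 32
    return keccak256(rolling_hash + tx_hash)

# The accumulator starts from zero (the contract clears it when a new block is created)
# and absorbs every transaction hash of the block in order.
rolling = bytes(32)
for tx in [bytes([i]) * 32 for i in range(1, 4)]:  # three dummy tx hashes
    rolling = append_tx(rolling, tx)
print(rolling.hex())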
- function appendTransactionToCurrentL2Block( - bytes32 _txHash - ) external onlyBootloader { - currentL2BlockTxsRollingHash = keccak256(abi.encode(currentL2BlockTxsRollingHash, _txHash)); + SystemContractHelper.toL1( + false, + bytes32(uint256(SystemLogKey.PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY)), + bytes32(packedTimestamps) + ); } /// @notice Ensures that the timestamp of the batch is greater than the timestamp of the last L2 block. /// @param _newTimestamp The timestamp of the new batch. - function _ensureBatchConsistentWithL2Block( - uint128 _newTimestamp - ) internal view { + function _ensureBatchConsistentWithL2Block(uint128 _newTimestamp) internal view { uint128 currentBlockTimestamp = currentL2BlockInfo.timestamp; - require(_newTimestamp > currentBlockTimestamp, "The timestamp of the batch must be greater than the timestamp of the previous block"); + require( + _newTimestamp > currentBlockTimestamp, + "The timestamp of the batch must be greater than the timestamp of the previous block" + ); } /// @notice Increments the current batch number and sets the new timestamp @@ -420,57 +418,66 @@ contract SystemContext is ISystemContext, ISystemContextDeprecated { uint128 _newTimestamp, uint128 _expectedNewNumber, uint256 _baseFee - ) external onlyBootloader { + ) external onlyCallFromBootloader { (uint128 previousBatchNumber, uint128 previousBatchTimestamp) = getBatchNumberAndTimestamp(); require(_newTimestamp > previousBatchTimestamp, "Timestamps should be incremental"); require(previousBatchNumber + 1 == _expectedNewNumber, "The provided block number is not correct"); - + _ensureBatchConsistentWithL2Block(_newTimestamp); batchHash[previousBatchNumber] = _prevBatchHash; // Setting new block number and timestamp - BlockInfo memory newBlockInfo = BlockInfo({ - number: previousBatchNumber + 1, - timestamp: _newTimestamp - }); + BlockInfo memory newBlockInfo = BlockInfo({number: previousBatchNumber + 1, timestamp: _newTimestamp}); currentBatchInfo = newBlockInfo; baseFee = _baseFee; + + // The correctness of this block hash: + SystemContractHelper.toL1(false, bytes32(uint256(SystemLogKey.PREV_BATCH_HASH_KEY)), _prevBatchHash); } /// @notice A testing method that manually sets the current blocks' number and timestamp. /// @dev Should be used only for testing / ethCalls and should never be used in production. - function unsafeOverrideBatch(uint256 _newTimestamp, uint256 _number, uint256 _baseFee) external onlyBootloader { - BlockInfo memory newBlockInfo = BlockInfo({ - number: uint128(_number), - timestamp: uint128(_newTimestamp) - }); + function unsafeOverrideBatch( + uint256 _newTimestamp, + uint256 _number, + uint256 _baseFee + ) external onlyCallFromBootloader { + BlockInfo memory newBlockInfo = BlockInfo({number: uint128(_number), timestamp: uint128(_newTimestamp)}); currentBatchInfo = newBlockInfo; baseFee = _baseFee; } + function incrementTxNumberInBatch() external onlyCallFromBootloader { + txNumberInBlock += 1; + } + + function resetTxNumberInBatch() external onlyCallFromBootloader { + txNumberInBlock = 0; + } + /*////////////////////////////////////////////////////////////// DEPRECATED METHODS //////////////////////////////////////////////////////////////*/ /// @notice Returns the current batch's number and timestamp. - /// @dev Deprecated in favor of getBatchNumberAndTimestamp. + /// @dev Deprecated in favor of getBatchNumberAndTimestamp. 
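Both `publishTimestampDataToL1` and the deprecated `currentBlockInfo` rely on the same 256-bit packing: the high 128 bits carry one value (batch timestamp or batch number) and the low 128 bits the other. A small Python sketch of the packing and its inverse, for illustration only:

MASK128 = (1 << 128) - 1

def pack(hi: int, lo: int) -> int:
    # e.g. hi = batch timestamp, lo = L2 block timestamp (packedTimestamps above)
    assert 0 <= hi <= MASK128 and 0 <= lo <= MASK128
    return (hi << 128) | lo

def unpack(packed: int) -> tuple[int, int]:
    return packed >> 128, packed & MASK128

packed = pack(1_696_000_000, 1_696_000_012)
assert unpack(packed) == (1_696_000_000, 1_696_000_012)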
function currentBlockInfo() external view returns (uint256 blockInfo) { (uint128 blockNumber, uint128 blockTimestamp) = getBatchNumberAndTimestamp(); - blockInfo = uint256(blockNumber) << 128 | uint256(blockTimestamp); + blockInfo = (uint256(blockNumber) << 128) | uint256(blockTimestamp); } - + /// @notice Returns the current batch's number and timestamp. - /// @dev Deprecated in favor of getBatchNumberAndTimestamp. + /// @dev Deprecated in favor of getBatchNumberAndTimestamp. function getBlockNumberAndTimestamp() external view returns (uint256 blockNumber, uint256 blockTimestamp) { (blockNumber, blockTimestamp) = getBatchNumberAndTimestamp(); } /// @notice Returns the hash of the given batch. - /// @dev Deprecated in favor of getBatchHash. + /// @dev Deprecated in favor of getBatchHash. function blockHash(uint256 _blockNumber) external view returns (bytes32 hash) { hash = batchHash[_blockNumber]; } diff --git a/contracts/interfaces/IBytecodeCompressor.sol b/contracts/interfaces/IBytecodeCompressor.sol deleted file mode 100644 index 1958f888..00000000 --- a/contracts/interfaces/IBytecodeCompressor.sol +++ /dev/null @@ -1,10 +0,0 @@ -// SPDX-License-Identifier: MIT - -pragma solidity ^0.8.0; - -interface IBytecodeCompressor { - function publishCompressedBytecode( - bytes calldata _bytecode, - bytes calldata _rawCompressedData - ) external payable returns (bytes32 bytecodeHash); -} diff --git a/contracts/interfaces/IComplexUpgrader.sol b/contracts/interfaces/IComplexUpgrader.sol index f535356a..91095cfc 100644 --- a/contracts/interfaces/IComplexUpgrader.sol +++ b/contracts/interfaces/IComplexUpgrader.sol @@ -3,8 +3,5 @@ pragma solidity ^0.8.0; interface IComplexUpgrader { - function upgrade( - address _delegateTo, - bytes calldata _calldata - ) external payable; + function upgrade(address _delegateTo, bytes calldata _calldata) external payable; } diff --git a/contracts/interfaces/ICompressor.sol b/contracts/interfaces/ICompressor.sol new file mode 100644 index 00000000..602cb70b --- /dev/null +++ b/contracts/interfaces/ICompressor.sol @@ -0,0 +1,24 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.0; + +// The bitmask by applying which to the compressed state diff metadata we retrieve its operation. +uint8 constant OPERATION_BITMASK = 7; +// The number of bits shifting the compressed state diff metadata by which we retrieve its length. +uint8 constant LENGTH_BITS_OFFSET = 3; +// The maximal length in bytes that an enumeration index can have. 
+uint8 constant MAX_ENUMERATION_INDEX_SIZE = 8; + +interface ICompressor { + function publishCompressedBytecode( + bytes calldata _bytecode, + bytes calldata _rawCompressedData + ) external payable returns (bytes32 bytecodeHash); + + function verifyCompressedStateDiffs( + uint256 _numberOfStateDiffs, + uint256 _enumerationIndexSize, + bytes calldata _stateDiffs, + bytes calldata _compressedStateDiffs + ) external payable returns (bytes32 stateDiffHash); +} diff --git a/contracts/interfaces/IKnownCodesStorage.sol b/contracts/interfaces/IKnownCodesStorage.sol index c56327ad..075ad95f 100644 --- a/contracts/interfaces/IKnownCodesStorage.sol +++ b/contracts/interfaces/IKnownCodesStorage.sol @@ -7,11 +7,7 @@ interface IKnownCodesStorage { function markFactoryDeps(bool _shouldSendToL1, bytes32[] calldata _hashes) external; - function markBytecodeAsPublished( - bytes32 _bytecodeHash, - bytes32 _l1PreimageHash, - uint256 _l1PreimageBytesLen - ) external; + function markBytecodeAsPublished(bytes32 _bytecodeHash) external; function getMarker(bytes32 _hash) external view returns (uint256); } diff --git a/contracts/interfaces/IL1Messenger.sol b/contracts/interfaces/IL1Messenger.sol index fbf57e53..05919edb 100644 --- a/contracts/interfaces/IL1Messenger.sol +++ b/contracts/interfaces/IL1Messenger.sol @@ -2,10 +2,49 @@ pragma solidity ^0.8.0; +/// @dev The log passed from L2 +/// @param l2ShardId The shard identifier, 0 - rollup, 1 - porter. All other values are not used but are reserved for the future +/// @param isService A boolean flag that is part of the log along with `key`, `value`, and `sender` address. +/// This field is required formally but does not have any special meaning. +/// @param txNumberInBlock The L2 transaction number in a block, in which the log was sent +/// @param sender The L2 address which sent the log +/// @param key The 32 bytes of information that was sent in the log +/// @param value The 32 bytes of information that was sent in the log +// Both `key` and `value` are arbitrary 32-bytes selected by the log sender +struct L2ToL1Log { + uint8 l2ShardId; + bool isService; + uint16 txNumberInBlock; + address sender; + bytes32 key; + bytes32 value; +} + +/// @dev Bytes in raw L2 to L1 log +/// @dev Equal to the bytes size of the tuple - (uint8 ShardId, bool isService, uint16 txNumberInBlock, address sender, bytes32 key, bytes32 value) +uint256 constant L2_TO_L1_LOG_SERIALIZE_SIZE = 88; + +/// @dev The value of default leaf hash for L2 to L1 logs Merkle tree +/// @dev An incomplete fixed-size tree is filled with this value to be a full binary tree +/// @dev Actually equal to the `keccak256(new bytes(L2_TO_L1_LOG_SERIALIZE_SIZE))` +bytes32 constant L2_L1_LOGS_TREE_DEFAULT_LEAF_HASH = 0x72abee45b59e344af8a6e520241c4744aff26ed411f4c4b00f8af09adada43ba; + +/// @dev The current version of state diff compression being used. +uint256 constant STATE_DIFF_COMPRESSION_VERSION_NUMBER = 1; + interface IL1Messenger { // Possibly in the future we will be able to track the messages sent to L1 with // some hooks in the VM. For now, it is much easier to track them with L2 events. 
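The two constants added to IL1Messenger above can be re-derived from the struct layout: the packed encoding of (uint8, bool, uint16, address, bytes32, bytes32) is 1 + 1 + 2 + 20 + 32 + 32 = 88 bytes, and the default Merkle leaf is stated to be the keccak256 of 88 zero bytes. A quick Python cross-check, assuming pycryptodome for keccak256:

from Crypto.Hash import keccak  # assumed dependency: pycryptodome

FIELD_SIZES = [1, 1, 2, 20, 32, 32]  # l2ShardId, isService, txNumberInBlock, sender, key, value
serialize_size = sum(FIELD_SIZES)
assert serialize_size == 88  # L2_TO_L1_LOG_SERIALIZE_SIZE

default_leaf = keccak.new(digest_bits=256, data=bytes(serialize_size)).digest()
# Expected to match L2_L1_LOGS_TREE_DEFAULT_LEAF_HASH declared above.
print("0x" + default_leaf.hex())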
event L1MessageSent(address indexed _sender, bytes32 indexed _hash, bytes _message); + event L2ToL1LogSent(L2ToL1Log _l2log); + + event BytecodeL1PublicationRequested(bytes32 _bytecodeHash); + function sendToL1(bytes memory _message) external returns (bytes32); + + function sendL2ToL1Log(bool _isService, bytes32 _key, bytes32 _value) external returns (uint256 logIdInMerkleTree); + + // This function is expected to be called only by the KnownCodesStorage system contract + function requestBytecodeL1Publication(bytes32 _bytecodeHash) external; } diff --git a/contracts/interfaces/IMailbox.sol b/contracts/interfaces/IMailbox.sol index f362f639..b82305fc 100644 --- a/contracts/interfaces/IMailbox.sol +++ b/contracts/interfaces/IMailbox.sol @@ -4,7 +4,7 @@ pragma solidity ^0.8.0; interface IMailbox { function finalizeEthWithdrawal( - uint256 _l2BlockNumber, + uint256 _l2BatchNumber, uint256 _l2MessageIndex, uint16 _l2TxNumberInBlock, bytes calldata _message, diff --git a/contracts/interfaces/ISystemContext.sol b/contracts/interfaces/ISystemContext.sol index 6bc4476b..096243f6 100644 --- a/contracts/interfaces/ISystemContext.sol +++ b/contracts/interfaces/ISystemContext.sol @@ -16,11 +16,10 @@ interface ISystemContext { /// @notice A structure representing the timeline for the upgrade from the batch numbers to the L2 block numbers. /// @dev It will used for the L1 batch -> L2 block migration in Q3 2023 only. struct VirtualBlockUpgradeInfo { - /// @notice In order to maintain consistent results for `blockhash` requests, we'll + /// @notice In order to maintain consistent results for `blockhash` requests, we'll /// have to remember the number of the batch when the upgrade to the virtual blocks has been done. /// The hashes for virtual blocks before the upgrade are identical to the hashes of the corresponding batches. uint128 virtualBlockStartBatch; - /// @notice L2 block when the virtual blocks have caught up with the L2 blocks. Starting from this block, /// all the information returned to users for block.timestamp/number, etc should be the information about the L2 blocks and /// not virtual blocks. @@ -41,6 +40,8 @@ interface ISystemContext { function baseFee() external view returns (uint256); + function txNumberInBlock() external view returns (uint16); + function getBlockHashEVM(uint256 _block) external view returns (bytes32); function getBatchHash(uint256 _batchNumber) external view returns (bytes32 hash); diff --git a/contracts/interfaces/ISystemContextDeprecated.sol b/contracts/interfaces/ISystemContextDeprecated.sol index 40ead86d..6a647c7e 100644 --- a/contracts/interfaces/ISystemContextDeprecated.sol +++ b/contracts/interfaces/ISystemContextDeprecated.sol @@ -7,7 +7,7 @@ pragma solidity ^0.8.0; * @notice The interface with deprecated functions of the SystemContext contract. It is aimed for backward compatibility. 
*/ interface ISystemContextDeprecated { - function currentBlockInfo() external view returns(uint256); + function currentBlockInfo() external view returns (uint256); function getBlockNumberAndTimestamp() external view returns (uint256 blockNumber, uint256 blockTimestamp); diff --git a/contracts/interfaces/ISystemContract.sol b/contracts/interfaces/ISystemContract.sol index 66c8565f..7a66587a 100644 --- a/contracts/interfaces/ISystemContract.sol +++ b/contracts/interfaces/ISystemContract.sol @@ -3,17 +3,44 @@ pragma solidity ^0.8.0; import {SystemContractHelper} from "../libraries/SystemContractHelper.sol"; +import {BOOTLOADER_FORMAL_ADDRESS} from "../Constants.sol"; /// @dev Solidity does not allow exporting modifiers via libraries, so /// the only way to do reuse modifiers is to have a base contract +/// @dev Never add storage variables into this contract as some +/// system contracts rely on this abstract contract as on interface! abstract contract ISystemContract { /// @notice Modifier that makes sure that the method /// can only be called via a system call. modifier onlySystemCall() { require( SystemContractHelper.isSystemCall() || SystemContractHelper.isSystemContract(msg.sender), - "This method requires the system call flag" + "This method require system call flag" ); _; } + + /// @notice Modifier that makes sure that the method + /// can only be called from a system contract. + modifier onlyCallFromSystemContract() { + require( + SystemContractHelper.isSystemContract(msg.sender), + "This method require the caller to be system contract" + ); + _; + } + + /// @notice Modifier that makes sure that the method + /// can only be called from a special given address. + modifier onlyCallFrom(address caller) { + require(msg.sender == caller, "Inappropriate caller"); + _; + } + + /// @notice Modifier that makes sure that the method + /// can only be called from the bootloader. + modifier onlyCallFromBootloader() { + require(msg.sender == BOOTLOADER_FORMAL_ADDRESS, "Callable only by the bootloader"); + _; + } } diff --git a/contracts/libraries/EfficientCall.sol b/contracts/libraries/EfficientCall.sol index 2983a518..16a6b535 100644 --- a/contracts/libraries/EfficientCall.sol +++ b/contracts/libraries/EfficientCall.sol @@ -4,10 +4,11 @@ pragma solidity ^0.8.0; import "./SystemContractHelper.sol"; import "./Utils.sol"; -import {SHA256_SYSTEM_CONTRACT, KECCAK256_SYSTEM_CONTRACT} from "../Constants.sol"; +import {SHA256_SYSTEM_CONTRACT, KECCAK256_SYSTEM_CONTRACT, MSG_VALUE_SYSTEM_CONTRACT} from "../Constants.sol"; /** * @author Matter Labs + * @custom:security-contact security@matterlabs.dev * @notice This library is used to perform ultra-efficient calls using zkEVM-specific features. * @dev EVM calls always accept a memory slice as input and return a memory slice as output. * Therefore, even if the user has a ready-made calldata slice, they still need to copy it to memory @@ -249,24 +250,17 @@ library EfficientCall { ) private view { SystemContractHelper.loadCalldataIntoActivePtr(); - // Currently, zkEVM considers the pointer valid if(ptr.offset < ptr.length || (ptr.length == 0 && ptr.offset == 0)), otherwise panics. - // So, if the data is empty we need to make the `ptr.length = ptr.offset = 0`, otherwise follow standard logic. 
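The fat-pointer arithmetic in this EfficientCall hunk (advance the active pointer by `_data.offset`, then shrink it by everything that follows the slice) amounts to selecting the sub-slice `msg.data[offset : offset + length]`. A rough Python analogy of the same bookkeeping; zkEVM fat pointers are not byte strings, so this is only an illustration of the arithmetic, not of the opcodes:

def active_ptr_slice(msg_data: bytes, data_offset: int, data_length: int) -> bytes:
    # ptrAddIntoActive(dataOffset): drop `data_offset` bytes from the front
    advanced = msg_data[data_offset:]
    # ptrShrinkIntoActive(shrinkTo): drop the trailing bytes that are not part of the slice
    shrink_to = len(msg_data) - (data_length + data_offset)
    return advanced[: len(advanced) - shrink_to]

calldata = bytes(range(16))
assert active_ptr_slice(calldata, 4, 8) == calldata[4:12]
# The Solidity change drops the separate handling of empty slices; this sketch only
# mirrors the arithmetic that remains, assuming a well-formed offset/length pair.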
- if (_data.length == 0) { - // Safe to cast, offset is never bigger than `type(uint32).max` - SystemContractHelper.ptrShrinkIntoActive(uint32(msg.data.length)); - } else { - uint256 dataOffset; - assembly { - dataOffset := _data.offset - } - - // Safe to cast, offset is never bigger than `type(uint32).max` - SystemContractHelper.ptrAddIntoActive(uint32(dataOffset)); - // Safe to cast, `data.length` is never bigger than `type(uint32).max` - uint32 shrinkTo = uint32(msg.data.length - (_data.length + dataOffset)); - SystemContractHelper.ptrShrinkIntoActive(shrinkTo); + uint256 dataOffset; + assembly { + dataOffset := _data.offset } + // Safe to cast, offset is never bigger than `type(uint32).max` + SystemContractHelper.ptrAddIntoActive(uint32(dataOffset)); + // Safe to cast, `data.length` is never bigger than `type(uint32).max` + uint32 shrinkTo = uint32(msg.data.length - (_data.length + dataOffset)); + SystemContractHelper.ptrShrinkIntoActive(shrinkTo); + uint32 gas = Utils.safeCastToU32(_gas); uint256 farCallAbi = SystemContractsCaller.getFarCallABIWithEmptyFatPointer( gas, diff --git a/contracts/libraries/RLPEncoder.sol b/contracts/libraries/RLPEncoder.sol index ddef39ab..50da4624 100644 --- a/contracts/libraries/RLPEncoder.sol +++ b/contracts/libraries/RLPEncoder.sol @@ -2,6 +2,11 @@ pragma solidity ^0.8.0; +/** + * @author Matter Labs + * @custom:security-contact security@matterlabs.dev + * @notice This library provides RLP encoding functionality. +*/ library RLPEncoder { function encodeAddress(address _val) internal pure returns (bytes memory encoded) { // The size is equal to 20 bytes of the address itself + 1 for encoding bytes length in RLP. diff --git a/contracts/libraries/SystemContractHelper.sol b/contracts/libraries/SystemContractHelper.sol index 68a8e37c..2878e423 100644 --- a/contracts/libraries/SystemContractHelper.sol +++ b/contracts/libraries/SystemContractHelper.sol @@ -1,10 +1,37 @@ // SPDX-License-Identifier: MIT -pragma solidity ^0.8; +pragma solidity ^0.8.0; import {MAX_SYSTEM_CONTRACT_ADDRESS} from "../Constants.sol"; -import "./SystemContractsCaller.sol"; +import { + SystemContractsCaller, + CalldataForwardingMode, + CALLFLAGS_CALL_ADDRESS, + CODE_ADDRESS_CALL_ADDRESS, + EVENT_WRITE_ADDRESS, + EVENT_INITIALIZE_ADDRESS, + GET_EXTRA_ABI_DATA_ADDRESS, + LOAD_CALLDATA_INTO_ACTIVE_PTR_CALL_ADDRESS, + META_CODE_SHARD_ID_OFFSET, + META_CALLER_SHARD_ID_OFFSET, + META_SHARD_ID_OFFSET, + META_AUX_HEAP_SIZE_OFFSET, + META_HEAP_SIZE_OFFSET, + META_GAS_PER_PUBDATA_BYTE_OFFSET, + MIMIC_CALL_BY_REF_CALL_ADDRESS, + META_CALL_ADDRESS, + MSG_VALUE_SIMULATOR_IS_SYSTEM_BIT, + PTR_CALLDATA_CALL_ADDRESS, + PTR_ADD_INTO_ACTIVE_CALL_ADDRESS, + PTR_SHRINK_INTO_ACTIVE_CALL_ADDRESS, + PTR_PACK_INTO_ACTIVE_CALL_ADDRESS, + RAW_FAR_CALL_BY_REF_CALL_ADDRESS, + PRECOMPILE_CALL_ADDRESS, + SET_CONTEXT_VALUE_CALL_ADDRESS, + SYSTEM_CALL_BY_REF_CALL_ADDRESS, + TO_L1_CALL_ADDRESS +} from "./SystemContractsCaller.sol"; uint256 constant UINT32_MASK = 0xffffffff; uint256 constant UINT128_MASK = 0xffffffffffffffffffffffffffffffff; @@ -31,6 +58,7 @@ enum Global { /** * @author Matter Labs + * @custom:security-contact security@matterlabs.dev * @notice Library used for accessing zkEVM-specific opcodes, needed for the development * of system contracts. 
* @dev While this library will be eventually available to public, some of the provided @@ -143,12 +171,10 @@ library SystemContractHelper { /// NOTE: The precompile type depends on `this` which calls precompile, which means that only /// system contracts corresponding to the list of precompiles above can do `precompileCall`. /// @dev If used not in the `sha256`, `keccak256` or `ecrecover` contracts, it will just burn the gas provided. - function precompileCall(uint256 _rawParams, uint32 _gasToBurn) internal view returns (bool success) { + /// @dev This method is `unsafe` because it does not check whether there is enough gas to burn. + function unsafePrecompileCall(uint256 _rawParams, uint32 _gasToBurn) internal view returns (bool success) { address callAddr = PRECOMPILE_CALL_ADDRESS; - // After `precompileCall` gas will be burned down to 0 if there are not enough of them, - // thats why it should be checked before the call. - require(gasleft() >= _gasToBurn); uint256 cleanupMask = UINT32_MASK; assembly { // Clearing input params as they are not cleaned by Solidity by default @@ -328,4 +354,14 @@ library SystemContractHelper { function isSystemContract(address _address) internal pure returns (bool) { return uint160(_address) <= uint160(MAX_SYSTEM_CONTRACT_ADDRESS); } + + /// @notice Method used for burning a certain amount of gas. + /// @param _gasToPay The number of gas to burn. + function burnGas(uint32 _gasToPay) internal view { + bool precompileCallSuccess = unsafePrecompileCall( + 0, // The precompile parameters are formal ones. We only need the precompile call to burn gas. + _gasToPay + ); + require(precompileCallSuccess, "Failed to charge gas"); + } } diff --git a/contracts/libraries/SystemContractsCaller.sol b/contracts/libraries/SystemContractsCaller.sol index 594b5b3f..fe35341b 100644 --- a/contracts/libraries/SystemContractsCaller.sol +++ b/contracts/libraries/SystemContractsCaller.sol @@ -61,6 +61,7 @@ enum CalldataForwardingMode { /** * @author Matter Labs + * @custom:security-contact security@matterlabs.dev * @notice A library that allows calling contracts with the `isSystem` flag. * @dev It is needed to call ContractDeployer and NonceHolder. */ diff --git a/contracts/libraries/TransactionHelper.sol b/contracts/libraries/TransactionHelper.sol index 5d5cc6cf..10065f56 100644 --- a/contracts/libraries/TransactionHelper.sol +++ b/contracts/libraries/TransactionHelper.sol @@ -72,6 +72,7 @@ struct Transaction { /** * @author Matter Labs + * @custom:security-contact security@matterlabs.dev * @notice Library is used to help custom accounts to work with common methods for the Transaction type. */ library TransactionHelper { diff --git a/contracts/libraries/UnsafeBytesCalldata.sol b/contracts/libraries/UnsafeBytesCalldata.sol index ff265fbd..7beca859 100644 --- a/contracts/libraries/UnsafeBytesCalldata.sol +++ b/contracts/libraries/UnsafeBytesCalldata.sol @@ -4,6 +4,7 @@ pragma solidity ^0.8.0; /** * @author Matter Labs + * @custom:security-contact security@matterlabs.dev * @dev The library provides a set of functions that help read data from calldata bytes. * @dev Each of the functions accepts the `bytes calldata` and the offset where data should be read and returns a value of a certain type. 
* @@ -22,10 +23,29 @@ library UnsafeBytesCalldata { } } + function readUint32(bytes calldata _bytes, uint256 _start) internal pure returns (uint32 result) { + assembly { + let offset := sub(_bytes.offset, 28) + result := calldataload(add(offset, _start)) + } + } + function readUint64(bytes calldata _bytes, uint256 _start) internal pure returns (uint64 result) { assembly { let offset := sub(_bytes.offset, 24) result := calldataload(add(offset, _start)) } } + + function readBytes32(bytes calldata _bytes, uint256 _start) internal pure returns (bytes32 result) { + assembly { + result := calldataload(add(_bytes.offset, _start)) + } + } + + function readUint256(bytes calldata _bytes, uint256 _start) internal pure returns (uint256 result) { + assembly { + result := calldataload(add(_bytes.offset, _start)) + } + } } diff --git a/contracts/libraries/Utils.sol b/contracts/libraries/Utils.sol index d1f219de..8e66e35f 100644 --- a/contracts/libraries/Utils.sol +++ b/contracts/libraries/Utils.sol @@ -5,6 +5,7 @@ import "./EfficientCall.sol"; /** * @author Matter Labs + * @custom:security-contact security@matterlabs.dev * @dev Common utilities used in zkSync system contracts */ library Utils { diff --git a/contracts/precompiles/EcAdd.yul b/contracts/precompiles/EcAdd.yul new file mode 100644 index 00000000..c5581457 --- /dev/null +++ b/contracts/precompiles/EcAdd.yul @@ -0,0 +1,441 @@ +object "EcAdd" { + code { + return(0, 0) + } + object "EcAdd_deployed" { + code { + //////////////////////////////////////////////////////////////// + // CONSTANTS + //////////////////////////////////////////////////////////////// + + /// @notice Constant function for value three in Montgomery form. + /// @dev This value was precomputed using Python. + /// @return m_three The value three in Montgomery form. + function MONTGOMERY_THREE() -> m_three { + m_three := 19052624634359457937016868847204597229365286637454337178037183604060995791063 + } + + /// @notice Constant function for the alt_bn128 field order. + /// @dev See https://eips.ethereum.org/EIPS/eip-196 for further details. + /// @return ret The alt_bn128 field order. + function P() -> ret { + ret := 21888242871839275222246405745257275088696311157297823662689037894645226208583 + } + + /// @notice Constant function for the pre-computation of R^2 % N for the Montgomery REDC algorithm. + /// @dev R^2 is the Montgomery residue of the value 2^512. + /// @dev See https://en.wikipedia.org/wiki/Montgomery_modular_multiplication#The_REDC_algorithm for further detals. + /// @dev This value was precomputed using Python. + /// @return ret The value R^2 modulus the curve field order. + function R2_MOD_P() -> ret { + ret := 3096616502983703923843567936837374451735540968419076528771170197431451843209 + } + + /// @notice Constant function for the pre-computation of N' for the Montgomery REDC algorithm. + /// @dev N' is a value such that NN' = -1 mod R, with N being the curve field order. + /// @dev See https://en.wikipedia.org/wiki/Montgomery_modular_multiplication#The_REDC_algorithm for further detals. + /// @dev This value was precomputed using Python. + /// @return ret The value N'. + function N_PRIME() -> ret { + ret := 111032442853175714102588374283752698368366046808579839647964533820976443843465 + } + + ////////////////////////////////////////////////////////////////// + // HELPER FUNCTIONS + ////////////////////////////////////////////////////////////////// + + /// @dev Executes the `precompileCall` opcode. 
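The new `readUint32`/`readUint64` helpers in UnsafeBytesCalldata lean on the fact that `calldataload` always fetches 32 big-endian bytes: by shifting the load 32 − N bytes to the left (hence `sub(_bytes.offset, 28)` for uint32) and letting Solidity truncate to uintN, the N bytes at `_start` land in the low bits. In plain Python the same reads are just big-endian slices (illustration only):

def read_uint(data: bytes, start: int, size: int) -> int:
    # Equivalent of readUint16/32/64/256: interpret `size` bytes at `start` as big-endian
    return int.from_bytes(data[start:start + size], "big")

def read_bytes32(data: bytes, start: int) -> bytes:
    return data[start:start + 32]

buf = bytes.fromhex("00112233445566778899aabbccddeeff" * 4)
assert read_uint(buf, 2, 4) == 0x22334455
assert read_uint(buf, 0, 2) == 0x0011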
+ function precompileCall(precompileParams, gasToBurn) -> ret { + // Compiler simulation for calling `precompileCall` opcode + ret := verbatim_2i_1o("precompile", precompileParams, gasToBurn) + } + + /// @notice Burns remaining gas until revert. + /// @dev This function is used to burn gas in the case of a failed precompile call. + function burnGas() { + // Precompiles that do not have a circuit counterpart + // will burn the provided gas by calling this function. + precompileCall(0, gas()) + } + + /// @notice Retrieves the highest half of the multiplication result. + /// @param multiplicand The value to multiply. + /// @param multiplier The multiplier. + /// @return ret The highest half of the multiplication result. + function getHighestHalfOfMultiplication(multiplicand, multiplier) -> ret { + ret := verbatim_2i_1o("mul_high", multiplicand, multiplier) + } + + /// @notice Computes the modular subtraction of two values. + /// @param minuend The value to subtract from. + /// @param subtrahend The value to subtract. + /// @param modulus The modulus. + /// @return difference The modular subtraction of the two values. + function submod(minuend, subtrahend, modulus) -> difference { + difference := addmod(minuend, sub(modulus, subtrahend), modulus) + } + + /// @notice Computes an addition and checks for overflow. + /// @param augend The value to add to. + /// @param addend The value to add. + /// @return sum The sum of the two values. + /// @return overflowed True if the addition overflowed, false otherwise. + function overflowingAdd(augend, addend) -> sum, overflowed { + sum := add(augend, addend) + overflowed := lt(sum, augend) + } + + // @notice Checks if a point is on the curve. + // @dev The curve in question is the alt_bn128 curve. + // @dev The Short Weierstrass equation of the curve is y^2 = x^3 + 3. + // @param x The x coordinate of the point in Montgomery form. + // @param y The y coordinate of the point in Montgomery form. + // @return ret True if the point is on the curve, false otherwise. + function pointIsInCurve(x, y) -> ret { + let ySquared := montgomeryMul(y, y) + let xSquared := montgomeryMul(x, x) + let xQubed := montgomeryMul(xSquared, x) + let xQubedPlusThree := montgomeryAdd(xQubed, MONTGOMERY_THREE()) + + ret := eq(ySquared, xQubedPlusThree) + } + + /// @notice Checks if a point is the point at infinity. + /// @dev The point at infinity is defined as the point (0, 0). + /// @dev See https://eips.ethereum.org/EIPS/eip-196 for further details. + /// @param x The x coordinate of the point. + /// @param y The y coordinate of the point. + /// @return ret True if the point is the point at infinity, false otherwise. + function isInfinity(x, y) -> ret { + ret := iszero(or(x, y)) + } + + /// @notice Checks if a coordinate is on the curve field order. + /// @dev A coordinate is on the curve field order if it is on the range [0, curveFieldOrder). + /// @dev This check is required in the precompile specification. See https://eips.ethereum.org/EIPS/eip-196 for further details. + /// @param coordinate The coordinate to check. + /// @return ret True if the coordinate is in the range, false otherwise. + function isOnFieldOrder(coordinate) -> ret { + ret := lt(coordinate, P()) + } + + /// @notice Computes the inverse in Montgomery Form of a number in Montgomery Form. 
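`pointIsInCurve` above is the Short Weierstrass check y² = x³ + 3 evaluated in Montgomery form. Over the plain field the same check is a one-liner; for instance, the alt_bn128 generator (1, 2) satisfies it. A Python sketch, not part of the contracts:

P = 21888242871839275222246405745257275088696311157297823662689037894645226208583

def is_on_curve(x: int, y: int) -> bool:
    # alt_bn128 / BN254: y^2 = x^3 + 3 over F_p, with (0, 0) reserved for the point at infinity
    return (y * y - (x * x * x + 3)) % P == 0

assert is_on_curve(1, 2)       # the curve generator
assert not is_on_curve(1, 3)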
+ /// @dev Reference: https://github.com/lambdaclass/lambdaworks/blob/main/math/src/field/fields/montgomery_backed_prime_fields.rs#L169 + /// @dev Let `base` be a number in Montgomery Form, then base = a*R mod P() being `a` the base number (not in Montgomery Form) + /// @dev Let `inv` be the inverse of a number `a` in Montgomery Form, then inv = a^(-1)*R mod P() + /// @dev The original binary extended euclidean algorithms takes a number a and returns a^(-1) mod N + /// @dev In our case N is P(), and we'd like the input and output to be in Montgomery Form (a*R mod P() + /// @dev and a^(-1)*R mod P() respectively). + /// @dev If we just pass the input as a number in Montgomery Form the result would be a^(-1)*R^(-1) mod P(), + /// @dev but we want it to be a^(-1)*R mod P(). + /// @dev For that, we take advantage of the algorithm's linearity and multiply the result by R^2 mod P() + /// @dev to get R^2*a^(-1)*R^(-1) mod P() = a^(-1)*R mod P() as the desired result in Montgomery Form. + /// @dev `inv` takes the value of `b` or `c` being the result sometimes `b` and sometimes `c`. In paper + /// @dev multiplying `b` or `c` by R^2 mod P() results on starting their values as b = R2_MOD_P() and c = 0. + /// @param base A number `a` in Montgomery Form, then base = a*R mod P(). + /// @return inv The inverse of a number `a` in Montgomery Form, then inv = a^(-1)*R mod P(). + function binaryExtendedEuclideanAlgorithm(base) -> inv { + let modulus := P() + let u := base + let v := modulus + // Avoids unnecessary reduction step. + let b := R2_MOD_P() + let c := 0 + + for {} and(iszero(eq(u, 1)), iszero(eq(v, 1))) {} { + for {} iszero(and(u, 1)) {} { + u := shr(1, u) + let current := b + switch and(current, 1) + case 0 { + b := shr(1, b) + } + case 1 { + b := shr(1, add(b, modulus)) + } + } + + for {} iszero(and(v, 1)) {} { + v := shr(1, v) + let current := c + switch and(current, 1) + case 0 { + c := shr(1, c) + } + case 1 { + c := shr(1, add(c, modulus)) + } + } + + switch gt(v, u) + case 0 { + u := sub(u, v) + if lt(b, c) { + b := add(b, modulus) + } + b := sub(b, c) + } + case 1 { + v := sub(v, u) + if lt(c, b) { + c := add(c, modulus) + } + c := sub(c, b) + } + } + + switch eq(u, 1) + case 0 { + inv := c + } + case 1 { + inv := b + } + } + + /// @notice Implementation of the Montgomery reduction algorithm (a.k.a. REDC). + /// @dev See https://en.wikipedia.org/wiki/Montgomery_modular_multiplication#The_REDC_algorithm + /// @param lowestHalfOfT The lowest half of the value T. + /// @param higherHalfOfT The higher half of the value T. + /// @return S The result of the Montgomery reduction. + function REDC(lowestHalfOfT, higherHalfOfT) -> S { + let m := mul(lowestHalfOfT, N_PRIME()) + let hi := add(higherHalfOfT, getHighestHalfOfMultiplication(m, P())) + let lo, overflowed := overflowingAdd(lowestHalfOfT, mul(m, P())) + if overflowed { + hi := add(hi, 1) + } + S := hi + if iszero(lt(hi, P())) { + S := sub(hi, P()) + } + } + + /// @notice Encodes a field element into the Montgomery form using the Montgomery reduction algorithm (REDC). + /// @dev See https://en.wikipedia.org/wiki/Montgomery_modular_multiplication#The_REDC_algorithm for further details on transforming a field element into the Montgomery form. + /// @param a The field element to encode. + /// @return ret The field element in Montgomery form. 
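The Montgomery constants used here are stated to have been precomputed with Python, and they can be reproduced in a few lines: R is 2²⁵⁶, R2_MOD_P is R² mod p, and N′ satisfies p·N′ ≡ −1 (mod R). The printed values should match the literals declared earlier in this file (a reference sketch, not contract code):

P = 21888242871839275222246405745257275088696311157297823662689037894645226208583
R = 1 << 256

montgomery_three = (3 * R) % P     # MONTGOMERY_THREE: the Montgomery form of 3
r2_mod_p = (R * R) % P             # R2_MOD_P
n_prime = (-pow(P, -1, R)) % R     # N_PRIME, i.e. P * N_PRIME ≡ -1 (mod 2^256)

assert (P * n_prime) % R == R - 1
print(montgomery_three)
print(r2_mod_p)
print(n_prime)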
+ function intoMontgomeryForm(a) -> ret { + let hi := getHighestHalfOfMultiplication(a, R2_MOD_P()) + let lo := mul(a, R2_MOD_P()) + ret := REDC(lo, hi) + } + + /// @notice Decodes a field element out of the Montgomery form using the Montgomery reduction algorithm (REDC). + /// @dev See https://en.wikipedia.org/wiki/Montgomery_modular_multiplication#The_REDC_algorithm for further details on transforming a field element out of the Montgomery form. + /// @param m The field element in Montgomery form to decode. + /// @return ret The decoded field element. + function outOfMontgomeryForm(m) -> ret { + let hi := 0 + let lo := m + ret := REDC(lo, hi) + } + + /// @notice Computes the Montgomery addition. + /// @dev See https://en.wikipedia.org/wiki/Montgomery_modular_multiplication#The_REDC_algorithm for further details on the Montgomery multiplication. + /// @param augend The augend in Montgomery form. + /// @param addend The addend in Montgomery form. + /// @return ret The result of the Montgomery addition. + function montgomeryAdd(augend, addend) -> ret { + ret := add(augend, addend) + if iszero(lt(ret, P())) { + ret := sub(ret, P()) + } + } + + /// @notice Computes the Montgomery subtraction. + /// @dev See https://en.wikipedia.org/wiki/Montgomery_modular_multiplication#The_REDC_algorithm for further details on the Montgomery multiplication. + /// @param minuend The minuend in Montgomery form. + /// @param subtrahend The subtrahend in Montgomery form. + /// @return ret The result of the Montgomery addition. + function montgomerySub(minuend, subtrahend) -> ret { + ret := montgomeryAdd(minuend, sub(P(), subtrahend)) + } + + /// @notice Computes the Montgomery multiplication using the Montgomery reduction algorithm (REDC). + /// @dev See https://en.wikipedia.org/wiki/Montgomery_modular_multiplication#The_REDC_algorithm for further details on the Montgomery multiplication. + /// @param multiplicand The multiplicand in Montgomery form. + /// @param multiplier The multiplier in Montgomery form. + /// @return ret The result of the Montgomery multiplication. + function montgomeryMul(multiplicand, multiplier) -> ret { + let higherHalfOfProduct := getHighestHalfOfMultiplication(multiplicand, multiplier) + let lowestHalfOfProduct := mul(multiplicand, multiplier) + ret := REDC(lowestHalfOfProduct, higherHalfOfProduct) + } + + /// @notice Computes the Montgomery modular inverse skipping the Montgomery reduction step. + /// @dev The Montgomery reduction step is skept because a modification in the binary extended Euclidean algorithm is used to compute the modular inverse. + /// @dev See the function `binaryExtendedEuclideanAlgorithm` for further details. + /// @param a The field element in Montgomery form to compute the modular inverse of. + /// @return invmod The result of the Montgomery modular inverse (in Montgomery form). + function montgomeryModularInverse(a) -> invmod { + invmod := binaryExtendedEuclideanAlgorithm(a) + } + + /// @notice Computes the Montgomery division. + /// @dev The Montgomery division is computed by multiplying the dividend with the modular inverse of the divisor. + /// @param dividend The dividend in Montgomery form. + /// @param divisor The divisor in Montgomery form. + /// @return quotient The result of the Montgomery division. 
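For readers cross-checking the Yul, the REDC routine and the into/out-of-Montgomery conversions can be mirrored in integer Python; the round trip below holds for any input under the same R = 2²⁵⁶ and the constants derived as above (a sketch of the algorithm, not the precompile itself):

P = 21888242871839275222246405745257275088696311157297823662689037894645226208583
R = 1 << 256
R2_MOD_P = (R * R) % P
N_PRIME = (-pow(P, -1, R)) % R

def redc(t: int) -> int:
    # Montgomery reduction: returns t * R^-1 mod P for 0 <= t < P * R
    m = ((t & (R - 1)) * N_PRIME) & (R - 1)   # mul(lowestHalfOfT, N_PRIME), truncated to 256 bits
    s = (t + m * P) >> 256
    return s - P if s >= P else s

def into_montgomery(a: int) -> int:
    return redc(a * R2_MOD_P)                 # a * R mod P

def out_of_montgomery(m: int) -> int:
    return redc(m)                            # m * R^-1 mod P

x = 123456789
assert out_of_montgomery(into_montgomery(x)) == x % P
assert out_of_montgomery(redc(into_montgomery(6) * into_montgomery(7))) == 42   # montgomeryMul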
+ function montgomeryDiv(dividend, divisor) -> quotient { + quotient := montgomeryMul(dividend, montgomeryModularInverse(divisor)) + } + + //////////////////////////////////////////////////////////////// + // FALLBACK + //////////////////////////////////////////////////////////////// + + // Retrieve the coordinates from the calldata + let x1 := calldataload(0) + let y1 := calldataload(32) + let x2 := calldataload(64) + let y2 := calldataload(96) + + let p1IsInfinity := isInfinity(x1, y1) + let p2IsInfinity := isInfinity(x2, y2) + + if and(p1IsInfinity, p2IsInfinity) { + // Infinity + Infinity = Infinity + mstore(0, 0) + mstore(32, 0) + return(0, 64) + } + if and(p1IsInfinity, iszero(p2IsInfinity)) { + // Infinity + P = P + + // Ensure that the coordinates are between 0 and the field order. + if or(iszero(isOnFieldOrder(x2)), iszero(isOnFieldOrder(y2))) { + burnGas() + } + + let m_x2 := intoMontgomeryForm(x2) + let m_y2 := intoMontgomeryForm(y2) + + // Ensure that the point is in the curve (Y^2 = X^3 + 3). + if iszero(pointIsInCurve(m_x2, m_y2)) { + burnGas() + } + + // We just need to go into the Montgomery form to perform the + // computations in pointIsInCurve, but we do not need to come back. + + mstore(0, x2) + mstore(32, y2) + return(0, 64) + } + if and(iszero(p1IsInfinity), p2IsInfinity) { + // P + Infinity = P + + // Ensure that the coordinates are between 0 and the field order. + if or(iszero(isOnFieldOrder(x1)), iszero(isOnFieldOrder(y1))) { + burnGas() + } + + let m_x1 := intoMontgomeryForm(x1) + let m_y1 := intoMontgomeryForm(y1) + + // Ensure that the point is in the curve (Y^2 = X^3 + 3). + if iszero(pointIsInCurve(m_x1, m_y1)) { + burnGas() + } + + // We just need to go into the Montgomery form to perform the + // computations in pointIsInCurve, but we do not need to come back. + + mstore(0, x1) + mstore(32, y1) + return(0, 64) + } + + // Ensure that the coordinates are between 0 and the field order. + if or(iszero(isOnFieldOrder(x1)), iszero(isOnFieldOrder(y1))) { + burnGas() + } + + // Ensure that the coordinates are between 0 and the field order. + if or(iszero(isOnFieldOrder(x2)), iszero(isOnFieldOrder(y2))) { + burnGas() + } + + // There's no need for transforming into Montgomery form + // for this case. + if and(eq(x1, x2), eq(submod(0, y1, P()), y2)) { + // P + (-P) = Infinity + + let m_x1 := intoMontgomeryForm(x1) + let m_y1 := intoMontgomeryForm(y1) + let m_x2 := intoMontgomeryForm(x2) + let m_y2 := intoMontgomeryForm(y2) + + // Ensure that the points are in the curve (Y^2 = X^3 + 3). + if or(iszero(pointIsInCurve(m_x1, m_y1)), iszero(pointIsInCurve(m_x2, m_y2))) { + burnGas() + } + + // We just need to go into the Montgomery form to perform the + // computations in pointIsInCurve, but we do not need to come back. + + mstore(0, 0) + mstore(32, 0) + return(0, 64) + } + + if and(eq(x1, x2), and(iszero(eq(y1, y2)), iszero(eq(y1, submod(0, y2, P()))))) { + burnGas() + } + + if and(eq(x1, x2), eq(y1, y2)) { + // P + P = 2P + + let x := intoMontgomeryForm(x1) + let y := intoMontgomeryForm(y1) + + // Ensure that the points are in the curve (Y^2 = X^3 + 3). 
+ if iszero(pointIsInCurve(x, y)) { + burnGas() + } + + // (3 * x1^2 + a) / (2 * y1) + let x1_squared := montgomeryMul(x, x) + let slope := montgomeryDiv(addmod(x1_squared, addmod(x1_squared, x1_squared, P()), P()), addmod(y, y, P())) + // x3 = slope^2 - 2 * x1 + let x3 := submod(montgomeryMul(slope, slope), addmod(x, x, P()), P()) + // y3 = slope * (x1 - x3) - y1 + let y3 := submod(montgomeryMul(slope, submod(x, x3, P())), y, P()) + + x3 := outOfMontgomeryForm(x3) + y3 := outOfMontgomeryForm(y3) + + mstore(0, x3) + mstore(32, y3) + return(0, 64) + } + + // P1 + P2 = P3 + + x1 := intoMontgomeryForm(x1) + y1 := intoMontgomeryForm(y1) + x2 := intoMontgomeryForm(x2) + y2 := intoMontgomeryForm(y2) + + // Ensure that the points are in the curve (Y^2 = X^3 + 3). + if or(iszero(pointIsInCurve(x1, y1)), iszero(pointIsInCurve(x2, y2))) { + burnGas() + } + + // (y2 - y1) / (x2 - x1) + let slope := montgomeryDiv(submod(y2, y1, P()), submod(x2, x1, P())) + // x3 = slope^2 - x1 - x2 + let x3 := submod(montgomeryMul(slope, slope), addmod(x1, x2, P()), P()) + // y3 = slope * (x1 - x3) - y1 + let y3 := submod(montgomeryMul(slope, submod(x1, x3, P())), y1, P()) + + x3 := outOfMontgomeryForm(x3) + y3 := outOfMontgomeryForm(y3) + + mstore(0, x3) + mstore(32, y3) + return(0, 64) + } + } +} diff --git a/contracts/precompiles/EcMul.yul b/contracts/precompiles/EcMul.yul new file mode 100644 index 00000000..5de5dee0 --- /dev/null +++ b/contracts/precompiles/EcMul.yul @@ -0,0 +1,495 @@ +object "EcMul" { + code { + return(0, 0) + } + object "EcMul_deployed" { + code { + //////////////////////////////////////////////////////////////// + // CONSTANTS + //////////////////////////////////////////////////////////////// + + /// @notice Constant function for value one in Montgomery form. + /// @dev This value was precomputed using Python. + /// @return m_one The value one in Montgomery form. + function MONTGOMERY_ONE() -> m_one { + m_one := 6350874878119819312338956282401532409788428879151445726012394534686998597021 + } + + /// @notice Constant function for value three in Montgomery form. + /// @dev This value was precomputed using Python. + /// @return m_three The value three in Montgomery form. + function MONTGOMERY_THREE() -> m_three { + m_three := 19052624634359457937016868847204597229365286637454337178037183604060995791063 + } + + /// @notice Constant function for value 3*b (i.e. 9) in Montgomery form. + /// @dev This value was precomputed using Python. + /// @return m_b3 The value 9 in Montgomery form. + function MONTGOMERY_B3() -> m_b3 { + m_b3 := 13381388159399823366557795051099241510703237597767364208733475022892534956023 + } + + /// @notice Constant function for the alt_bn128 field order. + /// @dev See https://eips.ethereum.org/EIPS/eip-196 for further details. + /// @return ret The alt_bn128 field order. + function P() -> ret { + ret := 21888242871839275222246405745257275088696311157297823662689037894645226208583 + } + + /// @notice Constant function for the pre-computation of R^2 % N for the Montgomery REDC algorithm. + /// @dev R^2 is the Montgomery residue of the value 2^512. + /// @dev See https://en.wikipedia.org/wiki/Montgomery_modular_multiplication#The_REDC_algorithm for further detals. + /// @dev This value was precomputed using Python. + /// @return ret The value R^2 modulus the curve field order. 
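The chord-and-tangent formulas used by EcAdd above (tangent slope 3x₁²/2y₁ for doubling, chord slope (y₂ − y₁)/(x₂ − x₁) for addition, then x₃ = s² − x₁ − x₂ and y₃ = s(x₁ − x₃) − y₁) can be written directly over the plain field for cross-checking, without any Montgomery machinery. A reference sketch under that simplification:

P = 21888242871839275222246405745257275088696311157297823662689037894645226208583

def ec_add(p1, p2):
    # Affine chord-and-tangent addition on y^2 = x^3 + 3 over F_p; None encodes infinity.
    if p1 is None: return p2
    if p2 is None: return p1
    (x1, y1), (x2, y2) = p1, p2
    if x1 == x2 and (y1 + y2) % P == 0:
        return None                                   # P + (-P) = infinity
    if p1 == p2:
        s = (3 * x1 * x1) * pow(2 * y1, -1, P) % P    # tangent slope
    else:
        s = (y2 - y1) * pow(x2 - x1, -1, P) % P       # chord slope
    x3 = (s * s - x1 - x2) % P
    y3 = (s * (x1 - x3) - y1) % P
    return (x3, y3)

g = (1, 2)
g2 = ec_add(g, g)
assert (g2[1] ** 2 - g2[0] ** 3 - 3) % P == 0         # 2G is still on the curve
assert ec_add(g, (1, P - 2)) is None                  # G + (-G) = infinity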
+ function R2_MOD_P() -> ret { + ret := 3096616502983703923843567936837374451735540968419076528771170197431451843209 + } + + /// @notice Constant function for the pre-computation of N' for the Montgomery REDC algorithm. + /// @dev N' is a value such that NN' = -1 mod R, with N being the curve field order. + /// @dev See https://en.wikipedia.org/wiki/Montgomery_modular_multiplication#The_REDC_algorithm for further detals. + /// @dev This value was precomputed using Python. + /// @return ret The value N'. + function N_PRIME() -> ret { + ret := 111032442853175714102588374283752698368366046808579839647964533820976443843465 + } + + // //////////////////////////////////////////////////////////////// + // HELPER FUNCTIONS + // //////////////////////////////////////////////////////////////// + + /// @dev Executes the `precompileCall` opcode. + function precompileCall(precompileParams, gasToBurn) -> ret { + // Compiler simulation for calling `precompileCall` opcode + ret := verbatim_2i_1o("precompile", precompileParams, gasToBurn) + } + + /// @notice Burns remaining gas until revert. + /// @dev This function is used to burn gas in the case of a failed precompile call. + function burnGas() { + // Precompiles that do not have a circuit counterpart + // will burn the provided gas by calling this function. + precompileCall(0, gas()) + } + + /// @notice Retrieves the highest half of the multiplication result. + /// @param multiplicand The value to multiply. + /// @param multiplier The multiplier. + /// @return ret The highest half of the multiplication result. + function getHighestHalfOfMultiplication(multiplicand, multiplier) -> ret { + ret := verbatim_2i_1o("mul_high", multiplicand, multiplier) + } + + /// @notice Computes an addition and checks for overflow. + /// @param augend The value to add to. + /// @param addend The value to add. + /// @return sum The sum of the two values. + /// @return overflowed True if the addition overflowed, false otherwise. + function overflowingAdd(augend, addend) -> sum, overflowed { + sum := add(augend, addend) + overflowed := lt(sum, augend) + } + + /// @notice Checks if the LSB of a number is 1. + /// @param x The number to check. + /// @return ret True if the LSB is 1, false otherwise. + function lsbIsOne(x) -> ret { + ret := and(x, 1) + } + + /// @notice Computes the inverse in Montgomery Form of a number in Montgomery Form. + /// @dev Reference: https://github.com/lambdaclass/lambdaworks/blob/main/math/src/field/fields/montgomery_backed_prime_fields.rs#L169 + /// @dev Let `base` be a number in Montgomery Form, then base = a*R mod P() being `a` the base number (not in Montgomery Form) + /// @dev Let `inv` be the inverse of a number `a` in Montgomery Form, then inv = a^(-1)*R mod P() + /// @dev The original binary extended euclidean algorithms takes a number a and returns a^(-1) mod N + /// @dev In our case N is P(), and we'd like the input and output to be in Montgomery Form (a*R mod P() + /// @dev and a^(-1)*R mod P() respectively). + /// @dev If we just pass the input as a number in Montgomery Form the result would be a^(-1)*R^(-1) mod P(), + /// @dev but we want it to be a^(-1)*R mod P(). + /// @dev For that, we take advantage of the algorithm's linearity and multiply the result by R^2 mod P() + /// @dev to get R^2*a^(-1)*R^(-1) mod P() = a^(-1)*R mod P() as the desired result in Montgomery Form. + /// @dev `inv` takes the value of `b` or `c` being the result sometimes `b` and sometimes `c`. 
In paper + /// @dev multiplying `b` or `c` by R^2 mod P() results on starting their values as b = R2_MOD_P() and c = 0. + /// @param base A number `a` in Montgomery Form, then base = a*R mod P(). + /// @return inv The inverse of a number `a` in Montgomery Form, then inv = a^(-1)*R mod P(). + function binaryExtendedEuclideanAlgorithm(base) -> inv { + let modulus := P() + let u := base + let v := modulus + // Avoids unnecessary reduction step. + let b := R2_MOD_P() + let c := 0 + + for {} and(iszero(eq(u, 1)), iszero(eq(v, 1))) {} { + for {} iszero(and(u, 1)) {} { + u := shr(1, u) + let current := b + switch and(current, 1) + case 0 { + b := shr(1, b) + } + case 1 { + b := shr(1, add(b, modulus)) + } + } + + for {} iszero(and(v, 1)) {} { + v := shr(1, v) + let current := c + switch and(current, 1) + case 0 { + c := shr(1, c) + } + case 1 { + c := shr(1, add(c, modulus)) + } + } + + switch gt(v, u) + case 0 { + u := sub(u, v) + if lt(b, c) { + b := add(b, modulus) + } + b := sub(b, c) + } + case 1 { + v := sub(v, u) + if lt(c, b) { + c := add(c, modulus) + } + c := sub(c, b) + } + } + + switch eq(u, 1) + case 0 { + inv := c + } + case 1 { + inv := b + } + } + + /// @notice Implementation of the Montgomery reduction algorithm (a.k.a. REDC). + /// @dev See https://en.wikipedia.org/wiki/Montgomery_modular_multiplication#The_The_REDC_algorithm + /// @param lowestHalfOfT The lowest half of the value T. + /// @param higherHalfOfT The higher half of the value T. + /// @return S The result of the Montgomery reduction. + function REDC(lowestHalfOfT, higherHalfOfT) -> S { + let m := mul(lowestHalfOfT, N_PRIME()) + let hi := add(higherHalfOfT, getHighestHalfOfMultiplication(m, P())) + let lo, overflowed := overflowingAdd(lowestHalfOfT, mul(m, P())) + if overflowed { + hi := add(hi, 1) + } + S := hi + if iszero(lt(hi, P())) { + S := sub(hi, P()) + } + } + + /// @notice Encodes a field element into the Montgomery form using the Montgomery reduction algorithm (REDC). + /// @dev See https://en.wikipedia.org/wiki/Montgomery_modular_multiplication#The_The_REDC_algorithm for further details on transforming a field element into the Montgomery form. + /// @param a The field element to encode. + /// @return ret The field element in Montgomery form. + function intoMontgomeryForm(a) -> ret { + let hi := getHighestHalfOfMultiplication(a, R2_MOD_P()) + let lo := mul(a, R2_MOD_P()) + ret := REDC(lo, hi) + } + + /// @notice Decodes a field element out of the Montgomery form using the Montgomery reduction algorithm (REDC). + /// @dev See https://en.wikipedia.org/wiki/Montgomery_modular_multiplication#The_The_REDC_algorithm for further details on transforming a field element out of the Montgomery form. + /// @param m The field element in Montgomery form to decode. + /// @return ret The decoded field element. + function outOfMontgomeryForm(m) -> ret { + let hi := 0 + let lo := m + ret := REDC(lo, hi) + } + + /// @notice Computes the Montgomery addition. + /// @dev See https://en.wikipedia.org/wiki/Montgomery_modular_multiplication#The_The_REDC_algorithm for further details on the Montgomery multiplication. + /// @param augend The augend in Montgomery form. + /// @param addend The addend in Montgomery form. + /// @return ret The result of the Montgomery addition. + function montgomeryAdd(augend, addend) -> ret { + ret := add(augend, addend) + if iszero(lt(ret, P())) { + ret := sub(ret, P()) + } + } + + /// @notice Computes the Montgomery subtraction. 
+ /// @dev See https://en.wikipedia.org/wiki/Montgomery_modular_multiplication#The_The_REDC_algorithm for further details on the Montgomery multiplication. + /// @param minuend The minuend in Montgomery form. + /// @param subtrahend The subtrahend in Montgomery form. + /// @return ret The result of the Montgomery addition. + function montgomerySub(minuend, subtrahend) -> ret { + ret := montgomeryAdd(minuend, sub(P(), subtrahend)) + } + + /// @notice Computes the Montgomery multiplication using the Montgomery reduction algorithm (REDC). + /// @dev See https://en.wikipedia.org/wiki/Montgomery_modular_multiplication#The_The_REDC_algorithm for further details on the Montgomery multiplication. + /// @param multiplicand The multiplicand in Montgomery form. + /// @param multiplier The multiplier in Montgomery form. + /// @return ret The result of the Montgomery multiplication. + function montgomeryMul(multiplicand, multiplier) -> ret { + let hi := getHighestHalfOfMultiplication(multiplicand, multiplier) + let lo := mul(multiplicand, multiplier) + ret := REDC(lo, hi) + } + + /// @notice Computes the Montgomery modular inverse skipping the Montgomery reduction step. + /// @dev The Montgomery reduction step is skept because a modification in the binary extended Euclidean algorithm is used to compute the modular inverse. + /// @dev See the function `binaryExtendedEuclideanAlgorithm` for further details. + /// @param a The field element in Montgomery form to compute the modular inverse of. + /// @return invmod The result of the Montgomery modular inverse (in Montgomery form). + function montgomeryModularInverse(a) -> invmod { + invmod := binaryExtendedEuclideanAlgorithm(a) + } + + /// @notice Checks if a coordinate is on the curve field order. + /// @dev A coordinate is on the curve field order if it is on the range [0, curveFieldOrder). + /// @param coordinate The coordinate to check. + /// @return ret True if the coordinate is in the range, false otherwise. + function coordinateIsOnFieldOrder(coordinate) -> ret { + ret := lt(coordinate, P()) + } + + /// @notice Checks if affine coordinates are on the curve field order. + /// @dev Affine coordinates are on the curve field order if both coordinates are on the range [0, curveFieldOrder). + /// @param x The x coordinate to check. + /// @param y The y coordinate to check. + /// @return ret True if the coordinates are in the range, false otherwise. + function affinePointCoordinatesAreOnFieldOrder(x, y) -> ret { + ret := and(coordinateIsOnFieldOrder(x), coordinateIsOnFieldOrder(y)) + } + + /// @notice Checks if projective coordinates are on the curve field order. + /// @dev Projective coordinates are on the curve field order if the coordinates are on the range [0, curveFieldOrder) and the z coordinate is not zero. + /// @param x The x coordinate to check. + /// @param y The y coordinate to check. + /// @param z The z coordinate to check. + /// @return ret True if the coordinates are in the range, false otherwise. + function projectivePointCoordinatesAreOnFieldOrder(x, y, z) -> ret { + let _x, _y := projectiveIntoAffine(x, y, z) + ret := and(z, affinePointCoordinatesAreOnFieldOrder(_x, _y)) + } + + // @notice Checks if a point in affine coordinates in Montgomery form is on the curve. + // @dev The curve in question is the alt_bn128 curve. + // @dev The Short Weierstrass equation of the curve is y^2 = x^3 + 3. + // @param x The x coordinate of the point in Montgomery form. + // @param y The y coordinate of the point in Montgomery form. 
+ // @return ret True if the point is on the curve, false otherwise. + function affinePointIsOnCurve(x, y) -> ret { + let ySquared := montgomeryMul(y, y) + let xSquared := montgomeryMul(x, x) + let xQubed := montgomeryMul(xSquared, x) + let xQubedPlusThree := montgomeryAdd(xQubed, MONTGOMERY_THREE()) + + ret := eq(ySquared, xQubedPlusThree) + } + + /// @notice Checks if a point in affine coordinates is the point at infinity. + /// @dev The point at infinity is defined as the point (0, 0). + /// @dev See https://eips.ethereum.org/EIPS/eip-196 for further details. + /// @param x The x coordinate of the point in Montgomery form. + /// @param y The y coordinate of the point in Montgomery form. + /// @return ret True if the point is the point at infinity, false otherwise. + function affinePointIsInfinity(x, y) -> ret { + ret := and(iszero(x), iszero(y)) + } + + /// @notice Checks if a point in projective coordinates in Montgomery form is the point at infinity. + /// @dev The point at infinity is defined as the point (0, 0, 0). + /// @param x The x coordinate of the point in Montgomery form. + /// @param y The y coordinate of the point in Montgomery form. + /// @param z The z coordinate of the point in Montgomery form. + /// @return ret True if the point is the point at infinity, false otherwise. + function projectivePointIsInfinity(x, y, z) -> ret { + ret := iszero(z) + } + + /// @notice Converts a point in affine coordinates to projective coordinates in Montgomery form. + /// @dev The point at infinity is defined as the point (0, 0, 0). + /// @dev For performance reasons, the point is assumed to be previously checked to be on the + /// @dev curve and not the point at infinity. + /// @param xp The x coordinate of the point P in affine coordinates in Montgomery form. + /// @param yp The y coordinate of the point P in affine coordinates in Montgomery form. + /// @return xr The x coordinate of the point P in projective coordinates in Montgomery form. + /// @return yr The y coordinate of the point P in projective coordinates in Montgomery form. + /// @return zr The z coordinate of the point P in projective coordinates in Montgomery form. + function projectiveFromAffine(xp, yp) -> xr, yr, zr { + xr := xp + yr := yp + zr := MONTGOMERY_ONE() + } + + /// @notice Converts a point in projective coordinates to affine coordinates in Montgomery form. + /// @dev See https://www.nayuki.io/page/elliptic-curve-point-addition-in-projective-coordinates for further details. + /// @dev Reverts if the point is not on the curve. + /// @param xp The x coordinate of the point P in projective coordinates in Montgomery form. + /// @param yp The y coordinate of the point P in projective coordinates in Montgomery form. + /// @param zp The z coordinate of the point P in projective coordinates in Montgomery form. + /// @return xr The x coordinate of the point P in affine coordinates in Montgomery form. + /// @return yr The y coordinate of the point P in affine coordinates in Montgomery form. + function projectiveIntoAffine(xp, yp, zp) -> xr, yr { + if zp { + let zp_inv := montgomeryModularInverse(zp) + xr := montgomeryMul(xp, zp_inv) + yr := montgomeryMul(yp, zp_inv) + } + } + + /// @notice Doubles a point in projective coordinates in Montgomery form. + /// @dev See Algorithm 9 in https://eprint.iacr.org/2015/1060.pdf for further details. + /// @dev The point is assumed to be on the curve. + /// @dev It works correctly for the point at infinity. 
+ /// @param xp The x coordinate of the point P in projective coordinates in Montgomery form. + /// @param yp The y coordinate of the point P in projective coordinates in Montgomery form. + /// @param zp The z coordinate of the point P in projective coordinates in Montgomery form. + /// @return xr The x coordinate of the point 2P in projective coordinates in Montgomery form. + /// @return yr The y coordinate of the point 2P in projective coordinates in Montgomery form. + /// @return zr The z coordinate of the point 2P in projective coordinates in Montgomery form. + function projectiveDouble(xp, yp, zp) -> xr, yr, zr { + let t0 := montgomeryMul(yp, yp) + zr := montgomeryAdd(t0, t0) + zr := montgomeryAdd(zr, zr) + zr := montgomeryAdd(zr, zr) + let t1 := montgomeryMul(yp, zp) + let t2 := montgomeryMul(zp, zp) + t2 := montgomeryMul(MONTGOMERY_B3(), t2) + xr := montgomeryMul(t2, zr) + yr := montgomeryAdd(t0, t2) + zr := montgomeryMul(t1, zr) + t1 := montgomeryAdd(t2, t2) + t2 := montgomeryAdd(t1, t2) + t0 := montgomerySub(t0, t2) + yr := montgomeryMul(t0, yr) + yr := montgomeryAdd(xr, yr) + t1 := montgomeryMul(xp, yp) + xr := montgomeryMul(t0, t1) + xr := montgomeryAdd(xr, xr) + } + + //////////////////////////////////////////////////////////////// + // FALLBACK + //////////////////////////////////////////////////////////////// + + // Retrieve the coordinates from the calldata + let x := calldataload(0) + let y := calldataload(32) + if iszero(affinePointCoordinatesAreOnFieldOrder(x, y)) { + burnGas() + } + let scalar := calldataload(64) + + if affinePointIsInfinity(x, y) { + // Infinity * scalar = Infinity + return(0x00, 0x40) + } + + let m_x := intoMontgomeryForm(x) + let m_y := intoMontgomeryForm(y) + + // Ensure that the point is in the curve (Y^2 = X^3 + 3). 
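+ // Note that m_x and m_y are already in Montgomery form here, which is why affinePointIsOnCurve compares y^2 against x^3 + MONTGOMERY_THREE() rather than the plain constant 3.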
+ if iszero(affinePointIsOnCurve(m_x, m_y)) { + burnGas() + } + + if eq(scalar, 0) { + // P * 0 = Infinity + return(0x00, 0x40) + } + if eq(scalar, 1) { + // P * 1 = P + mstore(0x00, x) + mstore(0x20, y) + return(0x00, 0x40) + } + + let xp, yp, zp := projectiveFromAffine(m_x, m_y) + + if eq(scalar, 2) { + let xr, yr, zr := projectiveDouble(xp, yp, zp) + + xr, yr := projectiveIntoAffine(xr, yr, zr) + xr := outOfMontgomeryForm(xr) + yr := outOfMontgomeryForm(yr) + + mstore(0x00, xr) + mstore(0x20, yr) + return(0x00, 0x40) + } + + let xq := xp + let yq := yp + let zq := zp + let xr := 0 + let yr := MONTGOMERY_ONE() + let zr := 0 + for {} scalar {} { + if lsbIsOne(scalar) { + let rIsInfinity := projectivePointIsInfinity(xr, yr, zr) + + if rIsInfinity { + // Infinity + P = P + xr := xq + yr := yq + zr := zq + + xq, yq, zq := projectiveDouble(xq, yq, zq) + // Check next bit + scalar := shr(1, scalar) + continue + } + + let t0 := montgomeryMul(yq, zr) + let t1 := montgomeryMul(yr, zq) + let t := montgomerySub(t0, t1) + let u0 := montgomeryMul(xq, zr) + let u1 := montgomeryMul(xr, zq) + let u := montgomerySub(u0, u1) + + // t = (yq*zr - yr*zq); u = (xq*zr - xr*zq) + if iszero(or(t, u)) { + // P + P = 2P + xr, yr, zr := projectiveDouble(xr, yr, zr) + + xq := xr + yq := yr + zq := zr + // Check next bit + scalar := shr(1, scalar) + continue + } + + // P1 + P2 = P3 + let u2 := montgomeryMul(u, u) + let u3 := montgomeryMul(u2, u) + let v := montgomeryMul(zq, zr) + let w := montgomerySub(montgomeryMul(montgomeryMul(t, t), v), montgomeryMul(u2, montgomeryAdd(u0, u1))) + + xr := montgomeryMul(u, w) + yr := montgomerySub(montgomeryMul(t, montgomerySub(montgomeryMul(u0, u2), w)), montgomeryMul(t0, u3)) + zr := montgomeryMul(u3, v) + } + + xq, yq, zq := projectiveDouble(xq, yq, zq) + // Check next bit + scalar := shr(1, scalar) + } + + xr, yr := projectiveIntoAffine(xr, yr, zr) + xr := outOfMontgomeryForm(xr) + yr := outOfMontgomeryForm(yr) + + mstore(0, xr) + mstore(32, yr) + return(0, 64) + } + } +} diff --git a/contracts/precompiles/Ecrecover.yul b/contracts/precompiles/Ecrecover.yul index 8f8889d5..d0e5924b 100644 --- a/contracts/precompiles/Ecrecover.yul +++ b/contracts/precompiles/Ecrecover.yul @@ -1,10 +1,13 @@ /** * @author Matter Labs + * @custom:security-contact security@matterlabs.dev * @notice The contract used to emulate EVM's ecrecover precompile. * @dev It uses `precompileCall` to call the zkEVM built-in precompiles. */ object "Ecrecover" { - code { } + code { + return(0, 0) + } object "Ecrecover_deployed" { code { //////////////////////////////////////////////////////////////// diff --git a/contracts/precompiles/Keccak256.yul b/contracts/precompiles/Keccak256.yul index 15c39029..b078d580 100644 --- a/contracts/precompiles/Keccak256.yul +++ b/contracts/precompiles/Keccak256.yul @@ -1,12 +1,15 @@ /** * @author Matter Labs + * @custom:security-contact security@matterlabs.dev * @notice The contract used to emulate EVM's keccak256 opcode. * @dev It accepts the data to be hashed, pad it by the specification * and uses `precompileCall` to call the zkEVM built-in precompiles. * @dev Thus keccak256 precompile circuit operates over padded data to perform efficient sponge round computation. 
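 * @dev (For reference, the padding mandated by the Keccak-256 specification is the pad10*1 rule: append the byte 0x01, zero-fill, and set the top bit of the final byte so that the padded length is a multiple of the 136-byte rate.)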
*/ object "Keccak256" { - code { } + code { + return(0, 0) + } object "Keccak256_deployed" { code { //////////////////////////////////////////////////////////////// diff --git a/contracts/precompiles/SHA256.yul b/contracts/precompiles/SHA256.yul index d594f55d..fba02d5e 100644 --- a/contracts/precompiles/SHA256.yul +++ b/contracts/precompiles/SHA256.yul @@ -1,12 +1,15 @@ /** * @author Matter Labs + * @custom:security-contact security@matterlabs.dev * @notice The contract used to emulate EVM's sha256 precompile. * @dev It accepts the data to be hashed, pad it by the specification * and uses `precompileCall` to call the zkEVM built-in precompiles. * @dev Thus sha256 precompile circuit operates over padded data to perform efficient sponge round computation. */ object "SHA256" { - code { } + code { + return(0, 0) + } object "SHA256_deployed" { code { //////////////////////////////////////////////////////////////// diff --git a/contracts/test-contracts/Callable.sol b/contracts/test-contracts/Callable.sol new file mode 100644 index 00000000..d2d56dc4 --- /dev/null +++ b/contracts/test-contracts/Callable.sol @@ -0,0 +1,19 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.0; + +contract Callable { + event Called(uint256 value, bytes data); + + fallback() external payable { + uint256 len; + assembly { + len := calldatasize() + } + bytes memory data = new bytes(len); + assembly { + calldatacopy(add(data, 0x20), 0, len) + } + emit Called(msg.value, data); + } +} diff --git a/contracts/test-contracts/Deployable.sol b/contracts/test-contracts/Deployable.sol new file mode 100644 index 00000000..88b3c797 --- /dev/null +++ b/contracts/test-contracts/Deployable.sol @@ -0,0 +1,19 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.0; + +contract Deployable { + event Deployed(uint256 value, bytes data); + + constructor() payable { + uint256 len; + assembly { + len := codesize() + } + bytes memory data = new bytes(len); + assembly { + codecopy(add(data, 0x20), 0, len) + } + emit Deployed(msg.value, data); + } +} diff --git a/contracts/test-contracts/DummyUpgrade.sol b/contracts/test-contracts/DummyUpgrade.sol new file mode 100644 index 00000000..680df42a --- /dev/null +++ b/contracts/test-contracts/DummyUpgrade.sol @@ -0,0 +1,11 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.0; + +contract DummyUpgrade { + event Upgraded(); + + function performUpgrade() public { + emit Upgraded(); + } +} diff --git a/contracts/test-contracts/EventWriterTest.sol b/contracts/test-contracts/EventWriterTest.sol new file mode 100644 index 00000000..3ad494f4 --- /dev/null +++ b/contracts/test-contracts/EventWriterTest.sol @@ -0,0 +1,31 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.0; + +contract EventWriterTest { + event ZeroTopics(bytes data) anonymous; + event OneTopic(bytes data); + event TwoTopics(uint256 indexed topic1, bytes data); + event ThreeTopics(uint256 indexed topic1, uint256 indexed topic2, bytes data); + event FourTopics(uint256 indexed topic1, uint256 indexed topic2, uint256 indexed topic3, bytes data); + + function zeroTopics(bytes calldata data) external { + emit ZeroTopics(data); + } + + function oneTopic(bytes calldata data) external { + emit OneTopic(data); + } + + function twoTopics(uint256 topic1, bytes calldata data) external { + emit TwoTopics(topic1, data); + } + + function threeTopics(uint256 topic1, uint256 topic2, bytes calldata data) external { + emit ThreeTopics(topic1, topic2, data); + } + + function fourTopics(uint256 topic1, uint256 topic2, uint256 
topic3, bytes calldata data) external { + emit FourTopics(topic1, topic2, topic3, data); + } +} diff --git a/contracts/test-contracts/MockERC20Approve.sol b/contracts/test-contracts/MockERC20Approve.sol new file mode 100644 index 00000000..826ed41b --- /dev/null +++ b/contracts/test-contracts/MockERC20Approve.sol @@ -0,0 +1,16 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.0; + +contract MockERC20Approve { + event Approved(address to, uint256 value); + + function approve(address spender, uint256 value) external returns (bool) { + emit Approved(spender, value); + return true; + } + + function allowance(address owner, address spender) external view returns (uint256) { + return 0; + } +} diff --git a/contracts/test-contracts/MockKnownCodesStorage.sol b/contracts/test-contracts/MockKnownCodesStorage.sol new file mode 100644 index 00000000..c8ae0b9d --- /dev/null +++ b/contracts/test-contracts/MockKnownCodesStorage.sol @@ -0,0 +1,19 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.0; + +contract MockKnownCodesStorage { + event MockBytecodePublished(bytes32 indexed bytecodeHash); + + function markBytecodeAsPublished(bytes32 _bytecodeHash) external { + emit MockBytecodePublished(_bytecodeHash); + } + + // To be able to deploy original know codes storage again + function getMarker(bytes32) public pure returns (uint256 marker) { + return 1; + } + + // To prevent failing during calls from the bootloader + fallback() external {} +} diff --git a/contracts/test-contracts/MockL1Messenger.sol b/contracts/test-contracts/MockL1Messenger.sol new file mode 100644 index 00000000..9b74f929 --- /dev/null +++ b/contracts/test-contracts/MockL1Messenger.sol @@ -0,0 +1,16 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.0; + +contract MockL1Messenger { + event MockBytecodeL1Published(bytes32 indexed bytecodeHash); + + function requestBytecodeL1Publication(bytes32 _bytecodeHash) external { + emit MockBytecodeL1Published(_bytecodeHash); + } + + // To prevent failing during calls from the bootloader + function sendToL1(bytes calldata) external returns (bytes32) { + return bytes32(0); + } +} diff --git a/contracts/test-contracts/NotSystemCaller.sol b/contracts/test-contracts/NotSystemCaller.sol new file mode 100644 index 00000000..c570a469 --- /dev/null +++ b/contracts/test-contracts/NotSystemCaller.sol @@ -0,0 +1,30 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8; + +contract NotSystemCaller { + address immutable to; + + constructor(address _to) { + to = _to; + } + + fallback() external payable { + address _to = to; + assembly { + calldatacopy(0, 0, calldatasize()) + + let result := call(gas(), _to, callvalue(), 0, calldatasize(), 0, 0) + + returndatacopy(0, 0, returndatasize()) + + switch result + case 0 { + revert(0, returndatasize()) + } + default { + return(0, returndatasize()) + } + } + } +} diff --git a/contracts/test-contracts/SystemCaller.sol b/contracts/test-contracts/SystemCaller.sol new file mode 100644 index 00000000..096f2a63 --- /dev/null +++ b/contracts/test-contracts/SystemCaller.sol @@ -0,0 +1,25 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8; + +import {SystemContractsCaller} from "../libraries/SystemContractsCaller.sol"; + +contract SystemCaller { + address immutable to; + + constructor(address _to) { + to = _to; + } + + fallback() external payable { + bytes memory result = SystemContractsCaller.systemCallWithPropagatedRevert( + uint32(gasleft()), + to, + uint128(msg.value), + msg.data + ); + assembly { + return(add(result, 0x20), 
mload(result)) + } + } +} diff --git a/contracts/test-contracts/TestSystemContract.sol b/contracts/test-contracts/TestSystemContract.sol index f4e08c62..135e2cd7 100644 --- a/contracts/test-contracts/TestSystemContract.sol +++ b/contracts/test-contracts/TestSystemContract.sol @@ -29,7 +29,7 @@ contract TestSystemContract is ISystemContract { { uint256 gasBefore = gasleft(); - SystemContractHelper.precompileCall(0, 10000); + SystemContractHelper.unsafePrecompileCall(0, 10000); uint256 gasAfter = gasleft(); require(gasBefore - gasAfter > 10000, "Did not spend enough gas"); require(gasBefore - gasAfter < 10100, "Spent too much gas"); diff --git a/contracts/tests/Counter.sol b/contracts/tests/Counter.sol deleted file mode 100644 index 736a8b78..00000000 --- a/contracts/tests/Counter.sol +++ /dev/null @@ -1,11 +0,0 @@ -// SPDX-License-Identifier: MIT - -pragma solidity ^0.8.0; - -contract Counter { - uint256 public counter; - - function increment() public { - counter += 1; - } -} diff --git a/contracts/tests/TransactionHelperTest.sol b/contracts/tests/TransactionHelperTest.sol deleted file mode 100644 index df8e7e67..00000000 --- a/contracts/tests/TransactionHelperTest.sol +++ /dev/null @@ -1,13 +0,0 @@ -// SPDX-License-Identifier: MIT - -pragma solidity ^0.8.0; - -import "../libraries/TransactionHelper.sol"; - -contract TransactionHelperTest { - using TransactionHelper for Transaction; - - function encodeHash(Transaction calldata _transaction) public view returns (bytes32 resultHash) { - resultHash = _transaction.encodeHash(); - } -} diff --git a/hardhat.config.ts b/hardhat.config.ts index 4fa0df56..5877945a 100644 --- a/hardhat.config.ts +++ b/hardhat.config.ts @@ -1,13 +1,14 @@ import '@nomiclabs/hardhat-solpp'; -import 'hardhat-typechain'; +import '@typechain/hardhat'; import '@nomiclabs/hardhat-ethers'; import '@matterlabs/hardhat-zksync-solc'; +import '@matterlabs/hardhat-zksync-chai-matchers'; const systemConfig = require('./SystemConfig.json'); export default { zksolc: { - version: '1.3.11', + version: '1.3.14', compilerSource: 'binary', settings: { isSystem: true @@ -18,7 +19,14 @@ export default { ethNetwork: 'http://localhost:8545' }, solidity: { - version: '0.8.17' + version: '0.8.17', + settings: { + optimizer: { + enabled: true, + runs: 200 + }, + viaIR: true + } }, solpp: { defs: (() => { @@ -32,6 +40,11 @@ export default { networks: { hardhat: { zksync: true + }, + zkSyncTestNode: { + url: 'http://127.0.0.1:8011', + ethNetwork: '', + zksync: true } } }; diff --git a/package.json b/package.json index b4df9b82..4d508911 100644 --- a/package.json +++ b/package.json @@ -10,17 +10,19 @@ "ethers": "^5.7.0", "hardhat": "^2.11.0", "preprocess": "^3.2.0", - "zksync-web3": "^0.13.0" + "zksync-web3": "^0.14.3" }, "devDependencies": { + "@matterlabs/hardhat-zksync-chai-matchers": "^0.1.4", "@matterlabs/hardhat-zksync-solc": "^0.4.2", + "@nomicfoundation/hardhat-chai-matchers": "^1.0.3", "@nomiclabs/hardhat-ethers": "^2.0.6", "@typechain/ethers-v5": "^10.0.0", + "@typechain/hardhat": "^7.0.0", "@types/chai": "^4.3.1", "@types/mocha": "^9.1.1", "@types/node": "^17.0.34", "chai": "^4.3.6", - "hardhat-typechain": "^0.3.5", "mocha": "^10.0.0", "prettier": "^2.3.0", "prettier-plugin-solidity": "^1.0.0-alpha.27", @@ -40,7 +42,7 @@ ] }, "scripts": { - "test": "zk f mocha test/system-contract-test.test.ts", + "test": "hardhat test --network zkSyncTestNode", "build": "hardhat compile", "clean": "hardhat clean", "fmt": "prettier --config prettier.js --write contracts/*.sol contracts/**/*.sol", diff 
--git a/scripts/compile-yul.ts b/scripts/compile-yul.ts
index b3919b75..9db5ac9b 100644
--- a/scripts/compile-yul.ts
+++ b/scripts/compile-yul.ts
@@ -2,19 +2,24 @@
 import * as hre from 'hardhat';
 import * as fs from 'fs';
 import { exec as _exec, spawn as _spawn } from 'child_process';
-import { getZksolcPath, getZksolcUrl, saltFromUrl } from '@matterlabs/hardhat-zksync-solc';
+import { getZksolcUrl, saltFromUrl } from '@matterlabs/hardhat-zksync-solc';
+import { getCompilersDir } from 'hardhat/internal/util/global-dir';
+import path from 'path';

-const COMPILER_VERSION = '1.3.11';
+const COMPILER_VERSION = '1.3.14';
 const IS_COMPILER_PRE_RELEASE = false;

 async function compilerLocation(): Promise<string> {
-    if(IS_COMPILER_PRE_RELEASE) {
+    const compilersCache = await getCompilersDir();
+
+    let salt = '';
+
+    if (IS_COMPILER_PRE_RELEASE) {
         const url = getZksolcUrl('https://github.com/matter-labs/zksolc-prerelease', hre.config.zksolc.version);
-        const salt = saltFromUrl(url);
-        return await getZksolcPath(COMPILER_VERSION, salt);
-    } else {
-        return await getZksolcPath(COMPILER_VERSION, '');
+        salt = saltFromUrl(url);
     }
+
+    return path.join(compilersCache, 'zksolc', `zksolc-v${COMPILER_VERSION}${salt ? '-' : ''}${salt}`);
 }

 // executes a command in a new shell
@@ -58,9 +63,9 @@ function preparePaths(path: string, files: string[], outputDirName: string | nul
 })
 .join(' ');
 const outputDir = outputDirName || files[0];
- let absolutePathSources = `${process.env.ZKSYNC_HOME}/etc/system-contracts/${path}`;
-
- let absolutePathArtifacts = `${process.env.ZKSYNC_HOME}/etc/system-contracts/${path}/artifacts`;
+ // This script is located in `system-contracts/scripts`, so we get one directory back.
+ const absolutePathSources = `${__dirname}/../${path}`;
+ const absolutePathArtifacts = `${__dirname}/../${path}/artifacts`;

 return new CompilerPaths(filePaths, outputDir, absolutePathSources, absolutePathArtifacts);
 }
diff --git a/scripts/constants.ts b/scripts/constants.ts
index 27b1044c..c21f9d4f 100644
--- a/scripts/constants.ts
+++ b/scripts/constants.ts
@@ -107,9 +107,9 @@ export const SYSTEM_CONTRACTS: ISystemContracts = {
 lang: Language.Yul,
 path: ''
 },
- bytecodeCompressor: {
+ compressor: {
 address: '0x000000000000000000000000000000000000800e',
- codeName: 'BytecodeCompressor',
+ codeName: 'Compressor',
 lang: Language.Solidity,
 },
 complexUpgrader: {
diff --git a/scripts/deploy-preimages.ts b/scripts/deploy-preimages.ts
index ac1c1093..98cb5fb7 100644
--- a/scripts/deploy-preimages.ts
+++ b/scripts/deploy-preimages.ts
@@ -131,7 +131,7 @@ class ZkSyncDeployer {
 }

 async processBootloader() {
- const bootloaderCode = ethers.utils.hexlify(fs.readFileSync('./bootloader/build/artifacts/proved_block.yul/proved_block.yul.zbin'));
+ const bootloaderCode = ethers.utils.hexlify(fs.readFileSync('./bootloader/build/artifacts/proved_batch.yul/proved_batch.yul.zbin'));

 await this.publishBootloader(bootloaderCode);
 await this.checkShouldUpgradeBootloader(bootloaderCode);
diff --git a/scripts/process.ts b/scripts/process.ts
index 2358e6d1..5088a3c1 100644
--- a/scripts/process.ts
+++ b/scripts/process.ts
@@ -97,6 +97,10 @@ let params = {
 RIGHT_PADDED_POST_TRANSACTION_SELECTOR: getPaddedSelector('IPaymaster', 'postTransaction'),
 RIGHT_PADDED_SET_TX_ORIGIN: getPaddedSelector('SystemContext', 'setTxOrigin'),
 RIGHT_PADDED_SET_GAS_PRICE: getPaddedSelector('SystemContext', 'setGasPrice'),
+ RIGHT_PADDED_INCREMENT_TX_NUMBER_IN_BLOCK_SELECTOR: getPaddedSelector('SystemContext', 'incrementTxNumberInBatch'),
+ RIGHT_PADDED_RESET_TX_NUMBER_IN_BLOCK_SELECTOR: getPaddedSelector('SystemContext', 'resetTxNumberInBatch'),
+ RIGHT_PADDED_SEND_L2_TO_L1_LOG_SELECTOR: getPaddedSelector('L1Messenger', 'sendL2ToL1Log'),
+ PUBLISH_PUBDATA_SELECTOR: getSelector('L1Messenger', 'publishPubdataAndClearState'),
 RIGHT_PADDED_SET_NEW_BATCH_SELECTOR: getPaddedSelector('SystemContext', 'setNewBatch'),
 RIGHT_PADDED_OVERRIDE_BATCH_SELECTOR: getPaddedSelector('SystemContext', 'unsafeOverrideBatch'),
 // Error
@@ -111,16 +115,25 @@ let params = {
 PADDED_TRANSFER_FROM_TO_SELECTOR: getPaddedSelector('L2EthToken', 'transferFromTo'),
 SUCCESSFUL_ACCOUNT_VALIDATION_MAGIC_VALUE: getPaddedSelector('IAccount', 'validateTransaction'),
 SUCCESSFUL_PAYMASTER_VALIDATION_MAGIC_VALUE: getPaddedSelector('IPaymaster', 'validateAndPayForPaymasterTransaction'),
- PUBLISH_COMPRESSED_BYTECODE_SELECTOR: getSelector('BytecodeCompressor', 'publishCompressedBytecode'),
+ PUBLISH_COMPRESSED_BYTECODE_SELECTOR: getSelector('Compressor', 'publishCompressedBytecode'),
 GET_MARKER_PADDED_SELECTOR: getPaddedSelector('KnownCodesStorage', 'getMarker'),
 RIGHT_PADDED_SET_L2_BLOCK_SELECTOR: getPaddedSelector('SystemContext', 'setL2Block'),
 RIGHT_PADDED_APPEND_TRANSACTION_TO_L2_BLOCK_SELECTOR: getPaddedSelector('SystemContext', 'appendTransactionToCurrentL2Block'),
- RIGHT_PADDED_PUBLISH_BATCH_DATA_TO_L1_SELECTOR: getPaddedSelector('SystemContext', 'publishBatchDataToL1'),
+ RIGHT_PADDED_PUBLISH_TIMESTAMP_DATA_TO_L1_SELECTOR: getPaddedSelector('SystemContext', 'publishTimestampDataToL1'),
 COMPRESSED_BYTECODES_SLOTS: 32768,
 ENSURE_RETURNED_MAGIC: 1,
 FORBID_ZERO_GAS_PER_PUBDATA: 1,
 SYSTEM_CONTEXT_EXPECTED_CODE_HASH: getSystemContextExpectedHash(),
 UPGRADE_SYSTEM_CONTEXT_CALLDATA: upgradeSystemContextCalldata(),
+ // One of the "worst case" scenarios for the number of state diffs in a batch is when 120kb of pubdata is spent
+ // on repeated writes that are all zeroed out. In this case, the number of diffs is 120k / 5 = 24k. This means that we will have to
+ // accommodate 6528000 bytes of calldata for the uncompressed state diffs. Adding 120k on top leaves us with
+ // roughly 6650000 bytes needed for calldata. 207813 slots are needed to accommodate this amount of data.
+ // We round up to 208000 slots just in case.
+ //
+ // In theory, though, much more calldata could be used (if, for instance, 1 byte is used for the enum index). It is the responsibility of the
+ // operator to ensure that it can form the correct calldata for the L1Messenger.
+ OPERATOR_PROVIDED_L1_MESSENGER_PUBDATA_SLOTS: 208000,
 ...SYSTEM_PARAMS
 };
diff --git a/scripts/quick-setup.sh b/scripts/quick-setup.sh
new file mode 100755
index 00000000..341d77d2
--- /dev/null
+++ b/scripts/quick-setup.sh
@@ -0,0 +1,15 @@
+#!/bin/bash
+
+# install rust
+curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
+
+rustup toolchain install nightly
+
+# install era-test-node
+cargo +nightly install --git https://github.com/matter-labs/era-test-node.git --locked --branch boojum-integration
+
+yarn
+yarn build
+era_test_node run > /dev/null 2>&1 & export TEST_NODE_PID=$!
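+
+# Optionally wait until the local node's RPC endpoint (assumed default: 8011, as in hardhat.config.ts)
+# accepts connections before running the tests, so the test run does not race the node startup.
+for _ in $(seq 1 30); do
+  curl -s -o /dev/null http://127.0.0.1:8011 && break
+  sleep 1
+done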
+yarn test +kill $TEST_NODE_PID diff --git a/test/AccountCodeStorage.spec.ts b/test/AccountCodeStorage.spec.ts new file mode 100644 index 00000000..d6384a77 --- /dev/null +++ b/test/AccountCodeStorage.spec.ts @@ -0,0 +1,225 @@ +import { expect } from 'chai'; +import { AccountCodeStorage } from '../typechain-types'; +import { DEPLOYER_SYSTEM_CONTRACT_ADDRESS, EMPTY_STRING_KECCAK } from './shared/constants'; +import { Wallet } from 'zksync-web3'; +import { getWallets, deployContract } from './shared/utils'; +import { network, ethers } from 'hardhat'; + +describe('AccountCodeStorage tests', function () { + let wallet: Wallet; + let accountCodeStorage: AccountCodeStorage; + let deployerAccount: ethers.Signer; + + const CONSTRUCTING_BYTECODE_HASH = '0x0101FFFFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEF'; + const CONSTRUCTED_BYTECODE_HASH = '0x0100FFFFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEF'; + const RANDOM_ADDRESS = '0xdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef'; + + before(async () => { + wallet = getWallets()[0]; + accountCodeStorage = (await deployContract('AccountCodeStorage')) as AccountCodeStorage; + + await network.provider.request({ + method: 'hardhat_impersonateAccount', + params: [DEPLOYER_SYSTEM_CONTRACT_ADDRESS] + }); + deployerAccount = await ethers.getSigner(DEPLOYER_SYSTEM_CONTRACT_ADDRESS); + }); + + after(async () => { + await network.provider.request({ + method: 'hardhat_stopImpersonatingAccount', + params: [DEPLOYER_SYSTEM_CONTRACT_ADDRESS] + }); + }); + + describe('storeAccountConstructingCodeHash', function () { + it('non-deployer failed to call', async () => { + await expect( + accountCodeStorage.storeAccountConstructingCodeHash(RANDOM_ADDRESS, CONSTRUCTING_BYTECODE_HASH) + ).to.be.revertedWith('Callable only by the deployer system contract'); + }); + + it('failed to set with constructed bytecode', async () => { + await expect( + accountCodeStorage + .connect(deployerAccount) + .storeAccountConstructingCodeHash(RANDOM_ADDRESS, CONSTRUCTED_BYTECODE_HASH) + ).to.be.revertedWith('Code hash is not for a contract on constructor'); + }); + + it('successfully stored', async () => { + await accountCodeStorage + .connect(deployerAccount) + .storeAccountConstructingCodeHash(RANDOM_ADDRESS, CONSTRUCTING_BYTECODE_HASH); + + expect(await accountCodeStorage.getRawCodeHash(RANDOM_ADDRESS)).to.be.eq( + CONSTRUCTING_BYTECODE_HASH.toLowerCase() + ); + + await unsetCodeHash(accountCodeStorage, RANDOM_ADDRESS); + }); + }); + + describe('storeAccountConstructedCodeHash', function () { + it('non-deployer failed to call', async () => { + await expect( + accountCodeStorage.storeAccountConstructedCodeHash(RANDOM_ADDRESS, CONSTRUCTING_BYTECODE_HASH) + ).to.be.revertedWith('Callable only by the deployer system contract'); + }); + + it('failed to set with constructing bytecode', async () => { + await expect( + accountCodeStorage + .connect(deployerAccount) + .storeAccountConstructedCodeHash(RANDOM_ADDRESS, CONSTRUCTING_BYTECODE_HASH) + ).to.be.revertedWith('Code hash is not for a constructed contract'); + }); + + it('successfully stored', async () => { + await accountCodeStorage + .connect(deployerAccount) + .storeAccountConstructedCodeHash(RANDOM_ADDRESS, CONSTRUCTED_BYTECODE_HASH); + + expect(await accountCodeStorage.getRawCodeHash(RANDOM_ADDRESS)).to.be.eq( + CONSTRUCTED_BYTECODE_HASH.toLowerCase() + ); + + await unsetCodeHash(accountCodeStorage, RANDOM_ADDRESS); + }); + }); + + describe('markAccountCodeHashAsConstructed', function () { + it('non-deployer failed to 
call', async () => { + await expect(accountCodeStorage.markAccountCodeHashAsConstructed(RANDOM_ADDRESS)).to.be.revertedWith( + 'Callable only by the deployer system contract' + ); + }); + + it('failed to mark already constructed bytecode', async () => { + await accountCodeStorage + .connect(deployerAccount) + .storeAccountConstructedCodeHash(RANDOM_ADDRESS, CONSTRUCTED_BYTECODE_HASH); + + await expect( + accountCodeStorage.connect(deployerAccount).markAccountCodeHashAsConstructed(RANDOM_ADDRESS) + ).to.be.revertedWith('Code hash is not for a contract on constructor'); + + await unsetCodeHash(accountCodeStorage, RANDOM_ADDRESS); + }); + + it('successfully marked', async () => { + await accountCodeStorage + .connect(deployerAccount) + .storeAccountConstructingCodeHash(RANDOM_ADDRESS, CONSTRUCTING_BYTECODE_HASH); + + await accountCodeStorage.connect(deployerAccount).markAccountCodeHashAsConstructed(RANDOM_ADDRESS); + + expect(await accountCodeStorage.getRawCodeHash(RANDOM_ADDRESS)).to.be.eq( + CONSTRUCTED_BYTECODE_HASH.toLowerCase() + ); + + await unsetCodeHash(accountCodeStorage, RANDOM_ADDRESS); + }); + }); + + describe('getRawCodeHash', function () { + it('zero', async () => { + expect(await accountCodeStorage.getRawCodeHash(RANDOM_ADDRESS)).to.be.eq(ethers.constants.HashZero); + }); + + it('non-zero', async () => { + await accountCodeStorage + .connect(deployerAccount) + .storeAccountConstructedCodeHash(RANDOM_ADDRESS, CONSTRUCTED_BYTECODE_HASH); + + expect(await accountCodeStorage.getRawCodeHash(RANDOM_ADDRESS)).to.be.eq( + CONSTRUCTED_BYTECODE_HASH.toLowerCase() + ); + + await unsetCodeHash(accountCodeStorage, RANDOM_ADDRESS); + }); + }); + + describe('getCodeHash', function () { + it('precompile', async () => { + expect(await accountCodeStorage.getCodeHash('0x0000000000000000000000000000000000000001')).to.be.eq( + EMPTY_STRING_KECCAK + ); + }); + + it('EOA with non-zero nonce', async () => { + // This address at least deployed this contract + expect(await accountCodeStorage.getCodeHash(wallet.address)).to.be.eq(EMPTY_STRING_KECCAK); + }); + + it('address in the constructor', async () => { + await accountCodeStorage + .connect(deployerAccount) + .storeAccountConstructingCodeHash(RANDOM_ADDRESS, CONSTRUCTING_BYTECODE_HASH); + + expect(await accountCodeStorage.getCodeHash(RANDOM_ADDRESS)).to.be.eq(EMPTY_STRING_KECCAK); + + await unsetCodeHash(accountCodeStorage, RANDOM_ADDRESS); + }); + + it('constructed code hash', async () => { + await accountCodeStorage + .connect(deployerAccount) + .storeAccountConstructedCodeHash(RANDOM_ADDRESS, CONSTRUCTED_BYTECODE_HASH); + + expect(await accountCodeStorage.getCodeHash(RANDOM_ADDRESS)).to.be.eq( + CONSTRUCTED_BYTECODE_HASH.toLowerCase() + ); + + await unsetCodeHash(accountCodeStorage, RANDOM_ADDRESS); + }); + + it('zero', async () => { + expect(await accountCodeStorage.getCodeHash(RANDOM_ADDRESS)).to.be.eq(ethers.constants.HashZero); + }); + }); + + describe('getCodeSize', function () { + it('zero address', async () => { + expect(await accountCodeStorage.getCodeSize(ethers.constants.AddressZero)).to.be.eq(0); + }); + + it('precompile', async () => { + expect(await accountCodeStorage.getCodeSize('0x0000000000000000000000000000000000000001')).to.be.eq(0); + }); + + it('address in the constructor', async () => { + await accountCodeStorage + .connect(deployerAccount) + .storeAccountConstructingCodeHash(RANDOM_ADDRESS, CONSTRUCTING_BYTECODE_HASH); + + expect(await accountCodeStorage.getCodeSize(RANDOM_ADDRESS)).to.be.eq(0); + + await 
unsetCodeHash(accountCodeStorage, RANDOM_ADDRESS); + }); + + it('non-zero size', async () => { + await accountCodeStorage + .connect(deployerAccount) + .storeAccountConstructedCodeHash(RANDOM_ADDRESS, CONSTRUCTED_BYTECODE_HASH); + + expect(await accountCodeStorage.getCodeSize(RANDOM_ADDRESS)).to.be.eq(65535 * 32); + + await unsetCodeHash(accountCodeStorage, RANDOM_ADDRESS); + }); + + it('zero', async () => { + expect(await accountCodeStorage.getCodeSize(RANDOM_ADDRESS)).to.be.eq(0); + }); + }); +}); + +// Utility function to unset code hash for the specified address. +// Deployer system contract should be impersonated +async function unsetCodeHash(accountCodeStorage: AccountCodeStorage, address: string) { + const deployerAccount = await ethers.getImpersonatedSigner(DEPLOYER_SYSTEM_CONTRACT_ADDRESS); + + await accountCodeStorage + .connect(deployerAccount) + .storeAccountConstructedCodeHash(address, ethers.constants.HashZero); +} diff --git a/test/BootloaderUtilities.spec.ts b/test/BootloaderUtilities.spec.ts new file mode 100644 index 00000000..03874bdd --- /dev/null +++ b/test/BootloaderUtilities.spec.ts @@ -0,0 +1,182 @@ +import { expect } from 'chai'; +import { BootloaderUtilities } from '../typechain-types'; +import { Wallet } from 'zksync-web3'; +import { getWallets, deployContract } from './shared/utils'; +import { ethers } from 'hardhat'; +import * as zksync from 'zksync-web3'; +import { hashBytecode, serialize } from 'zksync-web3/build/src/utils'; +import { TransactionData, signedTxToTransactionData } from './shared/transactions'; + +describe('BootloaderUtilities tests', function () { + let wallet: Wallet; + let bootloaderUtilities: BootloaderUtilities; + + before(async () => { + wallet = getWallets()[0]; + bootloaderUtilities = (await deployContract('BootloaderUtilities')) as BootloaderUtilities; + }); + + describe('EIP-712 transaction', function () { + it('check hashes', async () => { + const eip712Tx = await wallet.populateTransaction({ + type: 113, + to: wallet.address, + from: wallet.address, + data: '0x', + value: 0, + maxFeePerGas: 12000, + maxPriorityFeePerGas: 100, + customData: { + gasPerPubdata: zksync.utils.DEFAULT_GAS_PER_PUBDATA_LIMIT + } + }); + const signedEip712Tx = await wallet.signTransaction(eip712Tx); + const parsedEIP712tx = zksync.utils.parseTransaction(signedEip712Tx); + + const eip712TxData = signedTxToTransactionData(parsedEIP712tx)!; + const expectedEIP712TxHash = parsedEIP712tx.hash; + const expectedEIP712SignedHash = zksync.EIP712Signer.getSignedDigest(eip712Tx); + + const proposedEIP712Hashes = await bootloaderUtilities.getTransactionHashes(eip712TxData); + + expect(proposedEIP712Hashes.txHash).to.be.eq(expectedEIP712TxHash); + expect(proposedEIP712Hashes.signedTxHash).to.be.eq(expectedEIP712SignedHash); + }); + }); + + describe('legacy transaction', function () { + it('check hashes', async () => { + const legacyTx = await wallet.populateTransaction({ + type: 0, + to: wallet.address, + from: wallet.address, + data: '0x', + value: 0, + gasLimit: 50000 + }); + const txBytes = await wallet.signTransaction(legacyTx); + const parsedTx = zksync.utils.parseTransaction(txBytes); + const txData = signedTxToTransactionData(parsedTx)!; + + const expectedTxHash = parsedTx.hash; + delete legacyTx.from; + const expectedSignedHash = ethers.utils.keccak256(serialize(legacyTx)); + + const proposedHashes = await bootloaderUtilities.getTransactionHashes(txData); + expect(proposedHashes.txHash).to.be.eq(expectedTxHash); + 
expect(proposedHashes.signedTxHash).to.be.eq(expectedSignedHash);
+ });
+
+ it('invalid v signature value', async () => {
+ const legacyTx = await wallet.populateTransaction({
+ type: 0,
+ to: wallet.address,
+ from: wallet.address,
+ data: '0x',
+ value: 0,
+ gasLimit: 50000
+ });
+ const txBytes = await wallet.signTransaction(legacyTx);
+ const parsedTx = zksync.utils.parseTransaction(txBytes);
+ const txData = signedTxToTransactionData(parsedTx)!;
+
+ let signature = ethers.utils.arrayify(txData.signature);
+ signature[64] = 29;
+ txData.signature = signature;
+
+ await expect(bootloaderUtilities.getTransactionHashes(txData)).to.be.revertedWith('Invalid v value');
+ });
+ });
+
+ describe('EIP-1559 transaction', function () {
+ it('check hashes', async () => {
+ const eip1559Tx = await wallet.populateTransaction({
+ type: 2,
+ to: wallet.address,
+ from: wallet.address,
+ data: '0x',
+ value: 0,
+ maxFeePerGas: 12000,
+ maxPriorityFeePerGas: 100
+ });
+ const signedEip1559Tx = await wallet.signTransaction(eip1559Tx);
+ const parsedEIP1559tx = zksync.utils.parseTransaction(signedEip1559Tx);
+
+ const EIP1559TxData = signedTxToTransactionData(parsedEIP1559tx)!;
+ delete eip1559Tx.from;
+ const expectedEIP1559TxHash = parsedEIP1559tx.hash;
+ const expectedEIP1559SignedHash = ethers.utils.keccak256(serialize(eip1559Tx));
+
+ const proposedEIP1559Hashes = await bootloaderUtilities.getTransactionHashes(EIP1559TxData);
+ expect(proposedEIP1559Hashes.txHash).to.be.eq(expectedEIP1559TxHash);
+ expect(proposedEIP1559Hashes.signedTxHash).to.be.eq(expectedEIP1559SignedHash);
+ });
+
+ it('invalid v signature value', async () => {
+ const eip1559Tx = await wallet.populateTransaction({
+ type: 2,
+ to: wallet.address,
+ from: wallet.address,
+ data: '0x',
+ value: 0,
+ maxFeePerGas: 12000,
+ maxPriorityFeePerGas: 100
+ });
+ const signedEip1559Tx = await wallet.signTransaction(eip1559Tx);
+ const parsedEIP1559tx = zksync.utils.parseTransaction(signedEip1559Tx);
+
+ const EIP1559TxData = signedTxToTransactionData(parsedEIP1559tx)!;
+ let signature = ethers.utils.arrayify(EIP1559TxData.signature);
+ signature[64] = 0;
+ EIP1559TxData.signature = signature;
+
+ await expect(bootloaderUtilities.getTransactionHashes(EIP1559TxData)).to.be.revertedWith('Invalid v value');
+ });
+ });
+
+ describe('EIP-2930 transaction', function () {
+ it('check hashes', async () => {
+ const eip2930Tx = await wallet.populateTransaction({
+ type: 1,
+ to: wallet.address,
+ from: wallet.address,
+ data: '0x',
+ value: 0,
+ gasLimit: 50000,
+ gasPrice: 55000
+ });
+ const signedEip2930Tx = await wallet.signTransaction(eip2930Tx);
+ const parsedEIP2930tx = zksync.utils.parseTransaction(signedEip2930Tx);
+
+ const EIP2930TxData = signedTxToTransactionData(parsedEIP2930tx)!;
+ delete eip2930Tx.from;
+ const expectedEIP2930TxHash = parsedEIP2930tx.hash;
+ const expectedEIP2930SignedHash = ethers.utils.keccak256(serialize(eip2930Tx));
+
+ const proposedEIP2930Hashes = await bootloaderUtilities.getTransactionHashes(EIP2930TxData);
+ expect(proposedEIP2930Hashes.txHash).to.be.eq(expectedEIP2930TxHash);
+ expect(proposedEIP2930Hashes.signedTxHash).to.be.eq(expectedEIP2930SignedHash);
+ });
+
+ it('invalid v signature value', async () => {
+ const eip2930Tx = await wallet.populateTransaction({
+ type: 1,
+ to: wallet.address,
+ from: wallet.address,
+ data: '0x',
+ value: 0,
+ gasLimit: 50000,
+ gasPrice: 55000
+ });
+ const signedEip2930Tx = await wallet.signTransaction(eip2930Tx);
+ const parsedEIP2930tx =
zksync.utils.parseTransaction(signedEip2930Tx); + + const EIP2930TxData = signedTxToTransactionData(parsedEIP2930tx)!; + let signature = ethers.utils.arrayify(EIP2930TxData.signature); + signature[64] = 100; + EIP2930TxData.signature = signature; + + await expect(bootloaderUtilities.getTransactionHashes(EIP2930TxData)).to.be.revertedWith('Invalid v value'); + }); + }); +}); diff --git a/test/ComplexUpgrader.spec.ts b/test/ComplexUpgrader.spec.ts new file mode 100644 index 00000000..282bef45 --- /dev/null +++ b/test/ComplexUpgrader.spec.ts @@ -0,0 +1,49 @@ +import { expect } from 'chai'; +import { ComplexUpgrader, DummyUpgrade } from '../typechain-types'; +import { FORCE_DEPLOYER_ADDRESS } from './shared/constants'; +import { Wallet } from 'zksync-web3'; +import { getWallets, deployContract } from './shared/utils'; +import { network, ethers } from 'hardhat'; + +describe('ComplexUpgrader tests', function () { + let wallet: Wallet; + let complexUpgrader: ComplexUpgrader; + let dummyUpgrade: DummyUpgrade; + + before(async () => { + wallet = getWallets()[0]; + complexUpgrader = (await deployContract('ComplexUpgrader')) as ComplexUpgrader; + dummyUpgrade = (await deployContract('DummyUpgrade')) as DummyUpgrade; + }); + + describe('upgrade', function () { + it('non force deployer failed to call', async () => { + await expect( + complexUpgrader.upgrade( + dummyUpgrade.address, + dummyUpgrade.interface.encodeFunctionData('performUpgrade') + ) + ).to.be.revertedWith('Can only be called by FORCE_DEPLOYER'); + }); + + it('successfully upgraded', async () => { + await network.provider.request({ + method: 'hardhat_impersonateAccount', + params: [FORCE_DEPLOYER_ADDRESS] + }); + + const force_deployer = await ethers.getSigner(FORCE_DEPLOYER_ADDRESS); + + await expect( + complexUpgrader + .connect(force_deployer) + .upgrade(dummyUpgrade.address, dummyUpgrade.interface.encodeFunctionData('performUpgrade')) + ).to.emit(dummyUpgrade.attach(complexUpgrader.address), 'Upgraded'); + + await network.provider.request({ + method: 'hardhat_stopImpersonatingAccount', + params: [FORCE_DEPLOYER_ADDRESS] + }); + }); + }); +}); diff --git a/test/Compressor.spec.ts b/test/Compressor.spec.ts new file mode 100644 index 00000000..d55fdd1a --- /dev/null +++ b/test/Compressor.spec.ts @@ -0,0 +1,533 @@ +import { expect } from 'chai'; +import { Compressor, MockKnownCodesStorage__factory } from '../typechain-types'; +import { + BOOTLOADER_FORMAL_ADDRESS, + KNOWN_CODE_STORAGE_CONTRACT_ADDRESS, + L1_MESSENGER_SYSTEM_CONTRACT_ADDRESS, + TWO_IN_256 +} from './shared/constants'; +import { Wallet } from 'zksync-web3'; +import { getWallets, deployContract, getCode, loadArtifact, setCode } from './shared/utils'; +import { network, ethers } from 'hardhat'; +import * as zksync from 'zksync-web3'; +import { BigNumber, BytesLike } from 'ethers'; + +describe('Compressor tests', function () { + let wallet: Wallet; + let compressor: Compressor; + let bootloader: ethers.Signer; + let l1Messenger: ethers.Signer; + + let _knownCodesStorageCode: string; + + before(async () => { + wallet = getWallets()[0]; + compressor = (await deployContract('Compressor')) as Compressor; + _knownCodesStorageCode = await getCode(KNOWN_CODE_STORAGE_CONTRACT_ADDRESS); + let mockKnownCodesStorageArtifact = await loadArtifact('MockKnownCodesStorage'); + await setCode(KNOWN_CODE_STORAGE_CONTRACT_ADDRESS, mockKnownCodesStorageArtifact.bytecode); + + await network.provider.request({ + method: 'hardhat_impersonateAccount', + params: [BOOTLOADER_FORMAL_ADDRESS] + }); + 
bootloader = await ethers.getSigner(BOOTLOADER_FORMAL_ADDRESS); + + await network.provider.request({ + method: 'hardhat_impersonateAccount', + params: [L1_MESSENGER_SYSTEM_CONTRACT_ADDRESS] + }); + l1Messenger = await ethers.getSigner(L1_MESSENGER_SYSTEM_CONTRACT_ADDRESS); + }); + + after(async function () { + await network.provider.request({ + method: 'hardhat_stopImpersonatingAccount', + params: [BOOTLOADER_FORMAL_ADDRESS] + }); + + await network.provider.request({ + method: 'hardhat_stopImpersonatingAccount', + params: [L1_MESSENGER_SYSTEM_CONTRACT_ADDRESS] + }); + + await setCode(KNOWN_CODE_STORAGE_CONTRACT_ADDRESS, _knownCodesStorageCode); + }); + + describe('publishCompressedBytecode', function () { + it('non-bootloader failed to call', async () => { + await expect(compressor.publishCompressedBytecode('0x', '0x0000')).to.be.revertedWith( + 'Callable only by the bootloader' + ); + }); + + it('invalid encoded length', async () => { + const BYTECODE = '0xdeadbeefdeadbeef'; + const COMPRESSED_BYTECODE = '0x0001deadbeefdeadbeef00000000'; + await expect( + compressor.connect(bootloader).publishCompressedBytecode(BYTECODE, COMPRESSED_BYTECODE) + ).to.be.revertedWith('Encoded data length should be 4 times shorter than the original bytecode'); + }); + + it('chunk index is out of bounds', async () => { + await network.provider.request({ + method: 'hardhat_impersonateAccount', + params: [BOOTLOADER_FORMAL_ADDRESS] + }); + + const bootloader = await ethers.getSigner(BOOTLOADER_FORMAL_ADDRESS); + + const BYTECODE = '0xdeadbeefdeadbeef'; + const COMPRESSED_BYTECODE = '0x0001deadbeefdeadbeef0001'; + await expect( + compressor.connect(bootloader).publishCompressedBytecode(BYTECODE, COMPRESSED_BYTECODE) + ).to.be.revertedWith('Encoded chunk index is out of bounds'); + + await network.provider.request({ + method: 'hardhat_stopImpersonatingAccount', + params: [BOOTLOADER_FORMAL_ADDRESS] + }); + }); + + it('chunk does not match the original bytecode', async () => { + await network.provider.request({ + method: 'hardhat_impersonateAccount', + params: [BOOTLOADER_FORMAL_ADDRESS] + }); + + const bootloader = await ethers.getSigner(BOOTLOADER_FORMAL_ADDRESS); + + const BYTECODE = '0xdeadbeefdeadbeef1111111111111111'; + const COMPRESSED_BYTECODE = '0x0002deadbeefdeadbeef111111111111111100000000'; + await expect( + compressor.connect(bootloader).publishCompressedBytecode(BYTECODE, COMPRESSED_BYTECODE) + ).to.be.revertedWith('Encoded chunk does not match the original bytecode'); + + await network.provider.request({ + method: 'hardhat_stopImpersonatingAccount', + params: [BOOTLOADER_FORMAL_ADDRESS] + }); + }); + + it('invalid bytecode length in bytes', async () => { + await network.provider.request({ + method: 'hardhat_impersonateAccount', + params: [BOOTLOADER_FORMAL_ADDRESS] + }); + + const bootloader = await ethers.getSigner(BOOTLOADER_FORMAL_ADDRESS); + + const BYTECODE = '0xdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef'; + const COMPRESSED_BYTECODE = '0x0001deadbeefdeadbeef000000000000'; + await expect( + compressor.connect(bootloader).publishCompressedBytecode(BYTECODE, COMPRESSED_BYTECODE) + ).to.be.revertedWith('po'); + + await network.provider.request({ + method: 'hardhat_stopImpersonatingAccount', + params: [BOOTLOADER_FORMAL_ADDRESS] + }); + }); + + // Test case with too big bytecode is unrealistic because API cannot accept so much data. 
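+
+        // For orientation, the hand-crafted payloads in these tests follow this assumed layout:
+        // a 2-byte dictionary length, the distinct 8-byte chunks of the dictionary, and then a
+        // 2-byte dictionary index for every 8-byte chunk of the original bytecode. A hypothetical
+        // helper (not used by the tests; assumes the bytecode length is a multiple of 8 bytes):
+        function naiveCompress(bytecode: string): string {
+            const chunks: string[] = [];
+            for (let i = 2; i < bytecode.length; i += 16) {
+                chunks.push(bytecode.slice(i, i + 16));
+            }
+            const dictionary = Array.from(new Set(chunks));
+            const indices = chunks.map((chunk) => dictionary.indexOf(chunk));
+            return ethers.utils.hexConcat([
+                ethers.utils.hexZeroPad(ethers.utils.hexlify(dictionary.length), 2),
+                ...dictionary.map((chunk) => '0x' + chunk),
+                ...indices.map((index) => ethers.utils.hexZeroPad(ethers.utils.hexlify(index), 2))
+            ]);
+        }
+        // e.g. naiveCompress('0xdeadbeefdeadbeef') yields '0x0001deadbeefdeadbeef0000',
+        // mirroring the 8-byte example used in the tests above and below.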
+ it('invalid bytecode length in words', async () => { + await network.provider.request({ + method: 'hardhat_impersonateAccount', + params: [BOOTLOADER_FORMAL_ADDRESS] + }); + + const bootloader = await ethers.getSigner(BOOTLOADER_FORMAL_ADDRESS); + + const BYTECODE = '0x' + 'deadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef'.repeat(2); + const COMPRESSED_BYTECODE = '0x0001deadbeefdeadbeef' + '0000'.repeat(4 * 2); + await expect( + compressor.connect(bootloader).publishCompressedBytecode(BYTECODE, COMPRESSED_BYTECODE) + ).to.be.revertedWith('pr'); + + await network.provider.request({ + method: 'hardhat_stopImpersonatingAccount', + params: [BOOTLOADER_FORMAL_ADDRESS] + }); + }); + + it('successfully published', async () => { + await network.provider.request({ + method: 'hardhat_impersonateAccount', + params: [BOOTLOADER_FORMAL_ADDRESS] + }); + + const bootloader = await ethers.getSigner(BOOTLOADER_FORMAL_ADDRESS); + + const BYTECODE = + '0x000200000000000200010000000103550000006001100270000000150010019d0000000101200190000000080000c13d0000000001000019004e00160000040f0000000101000039004e00160000040f0000001504000041000000150510009c000000000104801900000040011002100000000001310019000000150320009c0000000002048019000000600220021000000000012100190000004f0001042e000000000100001900000050000104300000008002000039000000400020043f0000000002000416000000000110004c000000240000613d000000000120004c0000004d0000c13d000000200100003900000100001004430000012000000443000001000100003900000040020000390000001d03000041004e000a0000040f000000000120004c0000004d0000c13d0000000001000031000000030110008c0000004d0000a13d0000000101000367000000000101043b0000001601100197000000170110009c0000004d0000c13d0000000101000039000000000101041a0000000202000039000000000202041a000000400300043d00000040043000390000001805200197000000000600041a0000000000540435000000180110019700000020043000390000000000140435000000a0012002700000001901100197000000600430003900000000001404350000001a012001980000001b010000410000000001006019000000b8022002700000001c02200197000000000121019f0000008002300039000000000012043500000018016001970000000000130435000000400100043d0000000002130049000000a0022000390000000003000019004e000a0000040f004e00140000040f0000004e000004320000004f0001042e000000500001043000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000ffffffffffffffff000000000000000000000000000000000000000000000000000000008903573000000000000000000000000000000000000000000000000000000000000000000000000000000000ffffffffffffffffffffffffffffffffffffffff0000000000000000000000000000000000000000000000000000000000ffffff0000000000008000000000000000000000000000000000000000000000000000ffffffffffffffffffffffffffffffffffffffffffffffffffffffffff80000000000000000000000000000000000000000000000000000000000000007fffff00000002000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'; + const COMPRESSED_BYTECODE = + 
'0x00510000000000000000ffffffffffffffff0000004d0000c13d00000000ffffffff0000000000140435004e000a0000040f000000000120004c00000050000104300000004f0001042e0000000101000039004e00160000040f0000000001000019000000020000000000000000007fffffffffffffff80000000000000000080000000000000ffffff8903573000000000ffffffff000000000000004e00000432004e00140000040f0000000003000019000000a0022000390000000002130049000000400100043d0000000000130435000000180160019700000000001204350000008002300039000000000121019f0000001c02200197000000b80220027000000000010060190000001b010000410000001a0120019800000060043000390000001901100197000000a001200270000000200430003900000018011001970000000000540435000000000600041a00000018052001970000004004300039000000400300043d000000000202041a0000000202000039000000000101041a000000170110009c0000001601100197000000000101043b00000001010003670000004d0000a13d000000030110008c00000000010000310000001d0300004100000040020000390000010001000039000001200000044300000100001004430000002001000039000000240000613d000000000110004c0000000002000416000000400020043f0000008002000039000000000121001900000060022002100000000002048019000000150320009c000000000131001900000040011002100000000001048019000000150510009c0000001504000041000000080000c13d0000000101200190000000150010019d0000006001100270000100000001035500020000000000020050004f004e004d004c004b000b000a0009000a004a004900480047004600450044004300420008000b000700410040003f003e003d00060002003c003b003a003900380037000500060002003600350034003300320031003000020009002f002e002d002c002b002a002900280027002600040025002400230004002200210020001f001e001d001c001b001a001900180017001600150005001400130008000700000000000000000000000000030012000000000000001100000000000000000003000100010000000000000010000f000000000000000100010001000e000000000000000d000c0000000000000000000000000000'; + await expect(compressor.connect(bootloader).publishCompressedBytecode(BYTECODE, COMPRESSED_BYTECODE)) + .to.emit( + MockKnownCodesStorage__factory.connect(KNOWN_CODE_STORAGE_CONTRACT_ADDRESS, wallet), + 'MockBytecodePublished' + ) + .withArgs(zksync.utils.hashBytecode(BYTECODE)); + + await network.provider.request({ + method: 'hardhat_stopImpersonatingAccount', + params: [BOOTLOADER_FORMAL_ADDRESS] + }); + }); + }); + + describe('verifyCompressedStateDiffs', function () { + it('non l1 messenger failed to call', async () => { + await expect(compressor.verifyCompressedStateDiffs(0, 8, '0x', '0x0000')).to.be.revertedWith( + 'Inappropriate caller' + ); + }); + + it('enumeration index size is too large', async () => { + let stateDiffs = [ + { + key: '0x1234567890123456789012345678901234567890123456789012345678901234', + index: 0, + initValue: BigNumber.from(0), + finalValue: BigNumber.from('0x1234567890123456789012345678901234567890123456789012345678901234') + } + ]; + let encodedStateDiffs = encodeStateDiffs(stateDiffs); + stateDiffs[0].key = '0x1234567890123456789012345678901234567890123456789012345678901233'; + let compressedStateDiffs = compressStateDiffs(9, stateDiffs); + await expect( + compressor + .connect(l1Messenger) + .verifyCompressedStateDiffs(1, 9, encodedStateDiffs, compressedStateDiffs) + ).to.be.revertedWith('enumeration index size is too large'); + }); + + it('initial write key mismatch', async () => { + let stateDiffs = [ + { + key: '0x1234567890123456789012345678901234567890123456789012345678901234', + index: 0, + initValue: BigNumber.from(1), + finalValue: BigNumber.from(0) + } + ]; + let encodedStateDiffs = encodeStateDiffs(stateDiffs); + stateDiffs[0].key = 
'0x1234567890123456789012345678901234567890123456789012345678901233'; + let compressedStateDiffs = compressStateDiffs(4, stateDiffs); + await expect( + compressor + .connect(l1Messenger) + .verifyCompressedStateDiffs(1, 4, encodedStateDiffs, compressedStateDiffs) + ).to.be.revertedWith('iw: initial key mismatch'); + }); + + it('repeated write key mismatch', async () => { + let stateDiffs = [ + { + key: '0x1234567890123456789012345678901234567890123456789012345678901234', + index: 1, + initValue: BigNumber.from(1), + finalValue: BigNumber.from(0) + } + ]; + let encodedStateDiffs = encodeStateDiffs(stateDiffs); + stateDiffs[0].index = 2; + let compressedStateDiffs = compressStateDiffs(8, stateDiffs); + await expect( + compressor + .connect(l1Messenger) + .verifyCompressedStateDiffs(1, 8, encodedStateDiffs, compressedStateDiffs) + ).to.be.revertedWith('rw: enum key mismatch'); + }); + + it('no compression value mismatch', async () => { + let stateDiffs = [ + { + key: '0x1234567890123456789012345678901234567890123456789012345678901234', + index: 1, + initValue: BigNumber.from(1), + finalValue: BigNumber.from(0) + }, + { + key: '0xdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef', + index: 0, + initValue: TWO_IN_256.div(2), + finalValue: TWO_IN_256.sub(2) + } + ]; + let encodedStateDiffs = encodeStateDiffs(stateDiffs); + stateDiffs[1].finalValue = TWO_IN_256.sub(1); + let compressedStateDiffs = compressStateDiffs(3, stateDiffs); + await expect( + compressor + .connect(l1Messenger) + .verifyCompressedStateDiffs(2, 3, encodedStateDiffs, compressedStateDiffs) + ).to.be.revertedWith('transform or no compression: compressed and final mismatch'); + }); + + it('transform value mismatch', async () => { + let stateDiffs = [ + { + key: '0x1234567890123456789012345678901234567890123456789012345678901234', + index: 255, + initValue: BigNumber.from(1), + finalValue: BigNumber.from(0) + }, + { + key: '0xdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef', + index: 0, + initValue: TWO_IN_256.div(2), + finalValue: BigNumber.from(1) + } + ]; + let encodedStateDiffs = encodeStateDiffs(stateDiffs); + stateDiffs[1].finalValue = BigNumber.from(0); + let compressedStateDiffs = compressStateDiffs(1, stateDiffs); + await expect( + compressor + .connect(l1Messenger) + .verifyCompressedStateDiffs(2, 1, encodedStateDiffs, compressedStateDiffs) + ).to.be.revertedWith('transform or no compression: compressed and final mismatch'); + }); + + it('add value mismatch', async () => { + let stateDiffs = [ + { + key: '0x1234567890123456789012345678901234567890123456789012345678901235', + index: 255, + initValue: TWO_IN_256.div(2).sub(2), + finalValue: TWO_IN_256.div(2).sub(1) + } + ]; + let encodedStateDiffs = encodeStateDiffs(stateDiffs); + stateDiffs[0].finalValue = TWO_IN_256.div(2); + let compressedStateDiffs = compressStateDiffs(1, stateDiffs); + await expect( + compressor + .connect(l1Messenger) + .verifyCompressedStateDiffs(1, 1, encodedStateDiffs, compressedStateDiffs) + ).to.be.revertedWith('add: initial plus converted not equal to final'); + }); + + it('sub value mismatch', async () => { + let stateDiffs = [ + { + key: '0x1234567890123456789012345678901234567890123456789012345678901236', + index: 0, + initValue: TWO_IN_256.div(4), + finalValue: TWO_IN_256.div(4).sub(5) + } + ]; + let encodedStateDiffs = encodeStateDiffs(stateDiffs); + stateDiffs[0].finalValue = TWO_IN_256.div(4).sub(1); + let compressedStateDiffs = compressStateDiffs(1, stateDiffs); + await expect( + compressor + 
.connect(l1Messenger) + .verifyCompressedStateDiffs(1, 1, encodedStateDiffs, compressedStateDiffs) + ).to.be.revertedWith('sub: initial minus converted not equal to final'); + }); + + it('invalid operation', async () => { + let stateDiffs = [ + { + key: '0x1234567890123456789012345678901234567890123456789012345678901236', + index: 0, + initValue: TWO_IN_256.div(4), + finalValue: TWO_IN_256.div(4).sub(5) + } + ]; + let encodedStateDiffs = encodeStateDiffs(stateDiffs); + let compressedStateDiffs = compressStateDiffs(1, stateDiffs); + let compressedStateDiffsCharArray = compressedStateDiffs.split(''); + compressedStateDiffsCharArray[2 + 4 + 64 + 1] = 'f'; + compressedStateDiffs = compressedStateDiffsCharArray.join(''); + await expect( + compressor + .connect(l1Messenger) + .verifyCompressedStateDiffs(1, 1, encodedStateDiffs, compressedStateDiffs) + ).to.be.revertedWith('unsupported operation'); + }); + + it('Incorrect number of initial storage diffs', async () => { + let stateDiffs = [ + { + key: '0x1234567890123456789012345678901234567890123456789012345678901236', + index: 0, + initValue: TWO_IN_256.div(4), + finalValue: TWO_IN_256.div(4).sub(5) + }, + { + key: '0x1234567890123456789012345678901234567890123456789012345678901239', + index: 121, + initValue: TWO_IN_256.sub(1), + finalValue: BigNumber.from(0) + } + ]; + let encodedStateDiffs = encodeStateDiffs(stateDiffs); + stateDiffs.push({ + key: '0x0234567890123456789012345678901234567890123456789012345678901231', + index: 0, + initValue: BigNumber.from(0), + finalValue: BigNumber.from(1) + }); + let compressedStateDiffs = compressStateDiffs(1, stateDiffs); + await expect( + compressor + .connect(l1Messenger) + .verifyCompressedStateDiffs(2, 1, encodedStateDiffs, compressedStateDiffs) + ).to.be.revertedWith('Incorrect number of initial storage diffs'); + }); + + it('Extra data in compressed state diffs', async () => { + let stateDiffs = [ + { + key: '0x1234567890123456789012345678901234567890123456789012345678901236', + index: 0, + initValue: TWO_IN_256.div(4), + finalValue: TWO_IN_256.div(4).sub(5) + }, + { + key: '0x1234567890123456789012345678901234567890123456789012345678901239', + index: 121, + initValue: TWO_IN_256.sub(1), + finalValue: BigNumber.from(0) + } + ]; + let encodedStateDiffs = encodeStateDiffs(stateDiffs); + stateDiffs.push({ + key: '0x0234567890123456789012345678901234567890123456789012345678901231', + index: 1, + initValue: BigNumber.from(0), + finalValue: BigNumber.from(1) + }); + let compressedStateDiffs = compressStateDiffs(1, stateDiffs); + await expect( + compressor + .connect(l1Messenger) + .verifyCompressedStateDiffs(2, 1, encodedStateDiffs, compressedStateDiffs) + ).to.be.revertedWith('Extra data in _compressedStateDiffs'); + }); + + it('successfully verified', async () => { + const l1Messenger = await ethers.getSigner(L1_MESSENGER_SYSTEM_CONTRACT_ADDRESS); + + let stateDiffs = [ + { + key: '0x1234567890123456789012345678901234567890123456789012345678901230', + index: 0, + initValue: BigNumber.from('0x1234567890123456789012345678901234567890123456789012345678901231'), + finalValue: BigNumber.from('0x1234567890123456789012345678901234567890123456789012345678901230') + }, + { + key: '0x1234567890123456789012345678901234567890123456789012345678901232', + index: 1, + initValue: TWO_IN_256.sub(1), + finalValue: BigNumber.from(1) + }, + { + key: '0x1234567890123456789012345678901234567890123456789012345678901234', + index: 0, + initValue: TWO_IN_256.div(2), + finalValue: BigNumber.from(1) + }, + { + key: 
'0x1234567890123456789012345678901234567890123456789012345678901236', + index: 2323, + initValue: BigNumber.from('0x1234567890123456789012345678901234567890123456789012345678901237'), + finalValue: BigNumber.from('0x0239329298382323782378478237842378478237847237237872373272373272') + }, + { + key: '0x1234567890123456789012345678901234567890123456789012345678901238', + index: 2, + initValue: BigNumber.from(0), + finalValue: BigNumber.from(1) + } + ]; + let encodedStateDiffs = encodeStateDiffs(stateDiffs); + let compressedStateDiffs = compressStateDiffs(4, stateDiffs); + const tx = { + from: L1_MESSENGER_SYSTEM_CONTRACT_ADDRESS, + to: compressor.address, + data: compressor.interface.encodeFunctionData('verifyCompressedStateDiffs', [ + 5, + 4, + encodedStateDiffs, + compressedStateDiffs + ]) + }; + // eth_call to get return data + expect(await ethers.provider.call(tx)).to.be.eq(ethers.utils.keccak256(encodedStateDiffs)); + }); + }); +}); + +interface StateDiff { + key: BytesLike; + index: number; + initValue: BigNumber; + finalValue: BigNumber; +} + +function encodeStateDiffs(stateDiffs: StateDiff[]): string { + let rawStateDiffs = []; + for (const stateDiff of stateDiffs) { + rawStateDiffs.push( + ethers.utils.solidityPack( + ['address', 'bytes32', 'bytes32', 'uint64', 'uint256', 'uint256', 'bytes'], + [ + ethers.constants.AddressZero, + ethers.constants.HashZero, + stateDiff.key, + stateDiff.index, + stateDiff.initValue, + stateDiff.finalValue, + '0x' + '00'.repeat(116) + ] + ) + ); + } + return ethers.utils.hexlify(ethers.utils.concat(rawStateDiffs)); +} + +function compressStateDiffs(enumerationIndexSize: number, stateDiffs: StateDiff[]): string { + let num_initial = 0; + let initial = []; + let repeated = []; + for (const stateDiff of stateDiffs) { + const addition = stateDiff.finalValue.sub(stateDiff.initValue).add(TWO_IN_256).mod(TWO_IN_256); + const subtraction = stateDiff.initValue.sub(stateDiff.finalValue).add(TWO_IN_256).mod(TWO_IN_256); + let op = 3; + let min = stateDiff.finalValue; + if (addition.lt(min)) { + min = addition; + op = 1; + } + if (subtraction.lt(min)) { + min = subtraction; + op = 2; + } + if (min.gte(BigNumber.from(2).pow(248))) { + min = stateDiff.finalValue; + op = 0; + } + let len = 0; + let minHex = min.eq(0) ? 
'0x' : min.toHexString(); + if (op > 0) { + len = (minHex.length - 2) / 2; + } + let metadata = (len << 3) + op; + let enumerationIndexType = 'uint' + (enumerationIndexSize * 8).toString(); + if (stateDiff.index === 0) { + num_initial += 1; + initial.push(ethers.utils.solidityPack(['bytes32', 'uint8', 'bytes'], [stateDiff.key, metadata, minHex])); + } else { + repeated.push( + ethers.utils.solidityPack([enumerationIndexType, 'uint8', 'bytes'], [stateDiff.index, metadata, minHex]) + ); + } + } + return ethers.utils.hexlify( + ethers.utils.concat([ethers.utils.solidityPack(['uint16'], [num_initial]), ...initial, ...repeated]) + ); +} diff --git a/test/ContractDeployer.spec.ts b/test/ContractDeployer.spec.ts new file mode 100644 index 00000000..1a4e55f0 --- /dev/null +++ b/test/ContractDeployer.spec.ts @@ -0,0 +1,548 @@ +import { expect } from 'chai'; +import { + ContractDeployer, + ContractDeployer__factory, + NonceHolder, + NonceHolder__factory, + Deployable__factory +} from '../typechain-types'; +import { + DEPLOYER_SYSTEM_CONTRACT_ADDRESS, + NONCE_HOLDER_SYSTEM_CONTRACT_ADDRESS, + FORCE_DEPLOYER_ADDRESS +} from './shared/constants'; +import { Wallet, Contract, utils } from 'zksync-web3'; +import { getWallets, deployContract, loadArtifact, setCode, getCode, publishBytecode } from './shared/utils'; +import { network, ethers } from 'hardhat'; +import { ZkSyncArtifact } from '@matterlabs/hardhat-zksync-deploy/dist/types'; + +describe('ContractDeployer tests', function () { + let wallet: Wallet; + let contractDeployer: ContractDeployer; + let contractDeployerSystemCall: ContractDeployer; + let contractDeployerNotSystemCall: ContractDeployer; + let nonceHolder: NonceHolder; + let deployableArtifact: ZkSyncArtifact; + let deployerAccount: ethers.Signer; + let forceDeployer: ethers.Signer; + + const EOA = ethers.utils.getAddress('0xdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef'); + const RANDOM_ADDRESS = ethers.utils.getAddress('0xdeadbeefdeadbeefdeadbeefdeadbeefdeadbee1'); + const RANDOM_ADDRESS_2 = ethers.utils.getAddress('0xdeadbeefdeadbeefdeadbeefdeadbeefdeadbee2'); + const RANDOM_ADDRESS_3 = ethers.utils.getAddress('0xdeadbeefdeadbeefdeadbeefdeadbeefdeadbee3'); + const AA_VERSION_NONE = 0; + const AA_VERSION_1 = 1; + const NONCE_ORDERING_SEQUENTIAL = 0; + const NONCE_ORDERING_ARBITRARY = 1; + + let _contractDeployerCode: string; + + before(async () => { + wallet = getWallets()[0]; + + _contractDeployerCode = await getCode(DEPLOYER_SYSTEM_CONTRACT_ADDRESS); + let contractDeployerArtifact = await loadArtifact('ContractDeployer'); + await setCode(DEPLOYER_SYSTEM_CONTRACT_ADDRESS, contractDeployerArtifact.bytecode); + contractDeployer = ContractDeployer__factory.connect(DEPLOYER_SYSTEM_CONTRACT_ADDRESS, wallet); + + nonceHolder = NonceHolder__factory.connect(NONCE_HOLDER_SYSTEM_CONTRACT_ADDRESS, wallet); + + let contractDeployerSystemCallContract = await deployContract('SystemCaller', [contractDeployer.address]); + contractDeployerSystemCall = new Contract( + contractDeployerSystemCallContract.address, + contractDeployerArtifact.abi, + wallet + ) as ContractDeployer; + + let contractDeployerNotSystemCallContract = await deployContract('NotSystemCaller', [contractDeployer.address]); + contractDeployerNotSystemCall = new Contract( + contractDeployerNotSystemCallContract.address, + contractDeployerArtifact.abi, + wallet + ) as ContractDeployer; + + deployableArtifact = await loadArtifact('Deployable'); + await publishBytecode(deployableArtifact.bytecode); + + await network.provider.request({ + method: 
'hardhat_impersonateAccount', + params: [DEPLOYER_SYSTEM_CONTRACT_ADDRESS] + }); + await network.provider.request({ + method: 'hardhat_impersonateAccount', + params: [FORCE_DEPLOYER_ADDRESS] + }); + deployerAccount = await ethers.getSigner(DEPLOYER_SYSTEM_CONTRACT_ADDRESS); + forceDeployer = await ethers.getSigner(FORCE_DEPLOYER_ADDRESS); + }); + + after(async () => { + await network.provider.request({ + method: 'hardhat_stopImpersonatingAccount', + params: [DEPLOYER_SYSTEM_CONTRACT_ADDRESS] + }); + await network.provider.request({ + method: 'hardhat_stopImpersonatingAccount', + params: [FORCE_DEPLOYER_ADDRESS] + }); + await setCode(DEPLOYER_SYSTEM_CONTRACT_ADDRESS, _contractDeployerCode); + }); + + describe('updateAccountVersion', function () { + it('non system call failed', async () => { + await expect(contractDeployer.updateAccountVersion(AA_VERSION_NONE)).to.be.revertedWith( + 'This method require system call flag' + ); + }); + + it('from none to version1', async () => { + expect( + (await contractDeployer.getAccountInfo(contractDeployerSystemCall.address)).supportedAAVersion + ).to.be.eq(AA_VERSION_NONE); + await contractDeployerSystemCall.updateAccountVersion(AA_VERSION_1); + expect( + (await contractDeployer.getAccountInfo(contractDeployerSystemCall.address)).supportedAAVersion + ).to.be.eq(AA_VERSION_1); + }); + + it('from version1 to none', async () => { + expect( + (await contractDeployer.getAccountInfo(contractDeployerSystemCall.address)).supportedAAVersion + ).to.be.eq(AA_VERSION_1); + await contractDeployerSystemCall.updateAccountVersion(AA_VERSION_NONE); + expect( + (await contractDeployer.getAccountInfo(contractDeployerSystemCall.address)).supportedAAVersion + ).to.be.eq(AA_VERSION_NONE); + }); + }); + + describe('updateNonceOrdering', function () { + it('non system call failed', async () => { + await expect(contractDeployer.updateNonceOrdering(NONCE_ORDERING_SEQUENTIAL)).to.be.revertedWith( + 'This method require system call flag' + ); + }); + + it('success from sequential to arbitrary', async () => { + expect((await contractDeployer.getAccountInfo(contractDeployerSystemCall.address)).nonceOrdering).to.be.eq( + NONCE_ORDERING_SEQUENTIAL + ); + await contractDeployerSystemCall.updateNonceOrdering(NONCE_ORDERING_ARBITRARY); + expect((await contractDeployer.getAccountInfo(contractDeployerSystemCall.address)).nonceOrdering).to.be.eq( + NONCE_ORDERING_ARBITRARY + ); + }); + + it('failed from arbitrary to sequential', async () => { + expect((await contractDeployer.getAccountInfo(contractDeployerSystemCall.address)).nonceOrdering).to.be.eq( + NONCE_ORDERING_ARBITRARY + ); + await expect(contractDeployerSystemCall.updateNonceOrdering(NONCE_ORDERING_SEQUENTIAL)).to.be.revertedWith( + 'It is only possible to change from sequential to arbitrary ordering' + ); + }); + }); + + describe('getAccountInfo', function () { + it('success', async () => { + let accountInfo = await contractDeployer.getAccountInfo(RANDOM_ADDRESS); + expect(accountInfo.supportedAAVersion).to.be.eq(AA_VERSION_NONE); + expect(accountInfo.nonceOrdering).to.be.eq(NONCE_ORDERING_SEQUENTIAL); + }); + }); + + describe('extendedAccountVersion', function () { + it('account abstraction contract', async () => { + await contractDeployerSystemCall.updateAccountVersion(AA_VERSION_1); + expect(await contractDeployer.extendedAccountVersion(contractDeployerSystemCall.address)).to.be.eq( + AA_VERSION_1 + ); + await contractDeployerSystemCall.updateAccountVersion(AA_VERSION_NONE); + }); + + it('EOA', async () => { + expect(await 
contractDeployer.extendedAccountVersion(EOA)).to.be.eq(AA_VERSION_1); + }); + + it('not AA', async () => { + expect(await contractDeployer.extendedAccountVersion(contractDeployerSystemCall.address)).to.be.eq( + AA_VERSION_NONE + ); + }); + }); + + describe('getNewAddressCreate2', function () { + it('success', async () => { + expect( + await contractDeployer.getNewAddressCreate2( + RANDOM_ADDRESS, + '0x0100FFFFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEF', + '0x0000000022000000000123812381283812831823812838912389128938912893', + '0x' + ) + ).to.be.eq( + utils.create2Address( + RANDOM_ADDRESS, + '0x0100FFFFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEF', + '0x0000000022000000000123812381283812831823812838912389128938912893', + '0x' + ) + ); + }); + }); + + describe('getNewAddressCreate', function () { + it('success', async () => { + expect(await contractDeployer.getNewAddressCreate(RANDOM_ADDRESS, 3223233)).to.be.eq( + utils.createAddress(RANDOM_ADDRESS, 3223233) + ); + }); + }); + + // TODO: some other things can be tested: + // - check other contracts (like known codes storage) + // - cases with the kernel space address (not possible in production) + // - twice on the same address for create (not possible in production) + // - constructor behavior (failed, invalid immutables array) + // - more cases for force deployments + describe('createAccount', function () { + it('non system call failed', async () => { + await expect( + contractDeployerNotSystemCall.createAccount( + ethers.constants.HashZero, + utils.hashBytecode(deployableArtifact.bytecode), + '0x', + AA_VERSION_NONE + ) + ).to.be.revertedWith('This method require system call flag'); + }); + + it('zero bytecode hash failed', async () => { + await expect( + contractDeployerSystemCall.createAccount( + ethers.constants.HashZero, + ethers.constants.HashZero, + '0x', + AA_VERSION_NONE + ) + ).to.be.revertedWith('BytecodeHash cannot be zero'); + }); + + it('not known bytecode hash failed', async () => { + await expect( + contractDeployerSystemCall.createAccount( + ethers.constants.HashZero, + '0x0100FFFFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEF', + '0x', + AA_VERSION_NONE + ) + ).to.be.revertedWith('The code hash is not known'); + }); + + it('successfully deployed', async () => { + let nonce = await nonceHolder.getDeploymentNonce(wallet.address); + let expectedAddress = utils.createAddress(wallet.address, nonce); + await expect( + contractDeployer.createAccount( + ethers.constants.HashZero, + utils.hashBytecode(deployableArtifact.bytecode), + '0xdeadbeef', + AA_VERSION_NONE + ) + ) + .to.emit(contractDeployer, 'ContractDeployed') + .withArgs(wallet.address, utils.hashBytecode(deployableArtifact.bytecode), expectedAddress) + .to.emit(Deployable__factory.connect(expectedAddress, wallet), 'Deployed') + .withArgs(0, '0xdeadbeef'); + let accountInfo = await contractDeployer.getAccountInfo(expectedAddress); + expect(accountInfo.supportedAAVersion).to.be.eq(AA_VERSION_NONE); + expect(accountInfo.nonceOrdering).to.be.eq(NONCE_ORDERING_SEQUENTIAL); + }); + + it('non-zero value deployed', async () => { + let nonce = await nonceHolder.getDeploymentNonce(wallet.address); + let expectedAddress = utils.createAddress(wallet.address, nonce); + await expect( + contractDeployer.createAccount( + ethers.constants.HashZero, + utils.hashBytecode(deployableArtifact.bytecode), + '0x', + AA_VERSION_NONE, + { value: 11111111 } + ) + ) + .to.emit(contractDeployer, 'ContractDeployed') + .withArgs(wallet.address, 
utils.hashBytecode(deployableArtifact.bytecode), expectedAddress) + .to.emit(Deployable__factory.connect(expectedAddress, wallet), 'Deployed') + .withArgs(11111111, '0x'); + let accountInfo = await contractDeployer.getAccountInfo(expectedAddress); + expect(accountInfo.supportedAAVersion).to.be.eq(AA_VERSION_NONE); + expect(accountInfo.nonceOrdering).to.be.eq(NONCE_ORDERING_SEQUENTIAL); + }); + }); + + describe('create2Account', function () { + it('non system call failed', async () => { + await expect( + contractDeployerNotSystemCall.create2Account( + '0x1234567891234567891234512222122167891123456789123456787654323456', + utils.hashBytecode(deployableArtifact.bytecode), + '0x', + AA_VERSION_NONE + ) + ).to.be.revertedWith('This method require system call flag'); + }); + + it('zero bytecode hash failed', async () => { + await expect( + contractDeployerSystemCall.create2Account( + '0x1234567891234567891234512222122167891123456789123456787654323456', + ethers.constants.HashZero, + '0x', + AA_VERSION_NONE + ) + ).to.be.revertedWith('BytecodeHash cannot be zero'); + }); + + it('not known bytecode hash failed', async () => { + await expect( + contractDeployerSystemCall.create2Account( + '0x1234567891234567891234512222122167891123456789123456787654323456', + '0x0100FFFFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEF', + '0x', + AA_VERSION_NONE + ) + ).to.be.revertedWith('The code hash is not known'); + }); + + it('successfully deployed', async () => { + let expectedAddress = utils.create2Address( + wallet.address, + utils.hashBytecode(deployableArtifact.bytecode), + '0x1234567891234567891234512222122167891123456789123456787654323456', + '0xdeadbeef' + ); + await expect( + contractDeployer.create2Account( + '0x1234567891234567891234512222122167891123456789123456787654323456', + utils.hashBytecode(deployableArtifact.bytecode), + '0xdeadbeef', + AA_VERSION_NONE + ) + ) + .to.emit(contractDeployer, 'ContractDeployed') + .withArgs(wallet.address, utils.hashBytecode(deployableArtifact.bytecode), expectedAddress) + .to.emit(Deployable__factory.connect(expectedAddress, wallet), 'Deployed') + .withArgs(0, '0xdeadbeef'); + let accountInfo = await contractDeployer.getAccountInfo(expectedAddress); + expect(accountInfo.supportedAAVersion).to.be.eq(AA_VERSION_NONE); + expect(accountInfo.nonceOrdering).to.be.eq(NONCE_ORDERING_SEQUENTIAL); + }); + + it('already deployed failed', async () => { + await expect( + contractDeployer.create2Account( + '0x1234567891234567891234512222122167891123456789123456787654323456', + utils.hashBytecode(deployableArtifact.bytecode), + '0xdeadbeef', + AA_VERSION_NONE + ) + ).to.be.revertedWith('Code hash is non-zero'); + }); + + it('non-zero value deployed', async () => { + let expectedAddress = utils.create2Address( + wallet.address, + utils.hashBytecode(deployableArtifact.bytecode), + ethers.constants.HashZero, + '0x' + ); + await expect( + contractDeployer.create2Account( + ethers.constants.HashZero, + utils.hashBytecode(deployableArtifact.bytecode), + '0x', + AA_VERSION_NONE, + { value: 5555 } + ) + ) + .to.emit(contractDeployer, 'ContractDeployed') + .withArgs(wallet.address, utils.hashBytecode(deployableArtifact.bytecode), expectedAddress) + .to.emit(Deployable__factory.connect(expectedAddress, wallet), 'Deployed') + .withArgs(5555, '0x'); + let accountInfo = await contractDeployer.getAccountInfo(expectedAddress); + expect(accountInfo.supportedAAVersion).to.be.eq(AA_VERSION_NONE); + expect(accountInfo.nonceOrdering).to.be.eq(NONCE_ORDERING_SEQUENTIAL); + }); + }); + + 
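// NOTE: unlike createAccount/create2Account above, plain create/create2 take no AA version argument; the tests below check that contracts deployed this way end up with AA_VERSION_NONE. +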
describe('create', function () { + it('non system call failed', async () => { + await expect( + contractDeployerNotSystemCall.create( + ethers.constants.HashZero, + utils.hashBytecode(deployableArtifact.bytecode), + '0x' + ) + ).to.be.revertedWith('This method require system call flag'); + }); + + it('successfully deployed', async () => { + let nonce = await nonceHolder.getDeploymentNonce(wallet.address); + let expectedAddress = utils.createAddress(wallet.address, nonce); + await expect( + contractDeployer.create( + ethers.constants.HashZero, + utils.hashBytecode(deployableArtifact.bytecode), + '0x12' + ) + ) + .to.emit(contractDeployer, 'ContractDeployed') + .withArgs(wallet.address, utils.hashBytecode(deployableArtifact.bytecode), expectedAddress) + .to.emit(Deployable__factory.connect(expectedAddress, wallet), 'Deployed') + .withArgs(0, '0x12'); + let accountInfo = await contractDeployer.getAccountInfo(expectedAddress); + expect(accountInfo.supportedAAVersion).to.be.eq(AA_VERSION_NONE); + expect(accountInfo.nonceOrdering).to.be.eq(NONCE_ORDERING_SEQUENTIAL); + }); + }); + + describe('create2', function () { + it('non system call failed', async () => { + await expect( + contractDeployerNotSystemCall.create2( + ethers.constants.HashZero, + utils.hashBytecode(deployableArtifact.bytecode), + '0x' + ) + ).to.be.revertedWith('This method require system call flag'); + }); + + it('successfully deployed', async () => { + let expectedAddress = utils.create2Address( + wallet.address, + utils.hashBytecode(deployableArtifact.bytecode), + '0x1234567891234567891234512222122167891123456789123456787654323456', + '0xab' + ); + await expect( + contractDeployer.create2( + '0x1234567891234567891234512222122167891123456789123456787654323456', + utils.hashBytecode(deployableArtifact.bytecode), + '0xab' + ) + ) + .to.emit(contractDeployer, 'ContractDeployed') + .withArgs(wallet.address, utils.hashBytecode(deployableArtifact.bytecode), expectedAddress) + .to.emit(Deployable__factory.connect(expectedAddress, wallet), 'Deployed') + .withArgs(0, '0xab'); + let accountInfo = await contractDeployer.getAccountInfo(expectedAddress); + expect(accountInfo.supportedAAVersion).to.be.eq(AA_VERSION_NONE); + expect(accountInfo.nonceOrdering).to.be.eq(NONCE_ORDERING_SEQUENTIAL); + }); + }); + + describe('forceDeployOnAddress', function () { + it('not from self call failed', async () => { + let deploymentData = { + bytecodeHash: utils.hashBytecode(deployableArtifact.bytecode), + newAddress: RANDOM_ADDRESS, + callConstructor: false, + value: 0, + input: '0x' + }; + await expect(contractDeployer.forceDeployOnAddress(deploymentData, wallet.address)).to.be.revertedWith( + 'Callable only by self' + ); + }); + + it('not known bytecode hash failed', async () => { + let deploymentData = { + bytecodeHash: '0x0100FFFFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEF', + newAddress: RANDOM_ADDRESS, + callConstructor: false, + value: 0, + input: '0x' + }; + await expect( + contractDeployer.connect(deployerAccount).forceDeployOnAddress(deploymentData, wallet.address) + ).to.be.revertedWith('The code hash is not known'); + }); + + it('successfully deployed', async () => { + let deploymentData = { + bytecodeHash: utils.hashBytecode(deployableArtifact.bytecode), + newAddress: RANDOM_ADDRESS, + callConstructor: false, + value: 0, + input: '0x' + }; + await expect(contractDeployer.connect(deployerAccount).forceDeployOnAddress(deploymentData, wallet.address)) + .to.emit(contractDeployer, 'ContractDeployed') + .withArgs(wallet.address, 
utils.hashBytecode(deployableArtifact.bytecode), RANDOM_ADDRESS) + .to.not.emit(Deployable__factory.connect(RANDOM_ADDRESS, wallet), 'Deployed'); + let accountInfo = await contractDeployer.getAccountInfo(RANDOM_ADDRESS); + expect(accountInfo.supportedAAVersion).to.be.eq(AA_VERSION_NONE); + expect(accountInfo.nonceOrdering).to.be.eq(NONCE_ORDERING_SEQUENTIAL); + }); + }); + + describe('forceDeployOnAddresses', function () { + it('not allowed to call', async () => { + let deploymentData = [ + { + bytecodeHash: utils.hashBytecode(deployableArtifact.bytecode), + newAddress: RANDOM_ADDRESS_2, + callConstructor: true, + value: 0, + input: '0x' + }, + { + bytecodeHash: utils.hashBytecode(deployableArtifact.bytecode), + newAddress: RANDOM_ADDRESS_3, + callConstructor: false, + value: 0, + input: '0xab' + } + ]; + await expect(contractDeployer.forceDeployOnAddresses(deploymentData)).to.be.revertedWith( + 'Can only be called by FORCE_DEPLOYER or COMPLEX_UPGRADER_CONTRACT' + ); + }); + + it('successfully deployed', async () => { + let deploymentData = [ + { + bytecodeHash: utils.hashBytecode(deployableArtifact.bytecode), + newAddress: RANDOM_ADDRESS_2, + callConstructor: true, + value: 0, + input: '0x' + }, + { + bytecodeHash: utils.hashBytecode(deployableArtifact.bytecode), + newAddress: RANDOM_ADDRESS_3, + callConstructor: false, + value: 0, + input: '0xab' + } + ]; + await expect(contractDeployer.connect(forceDeployer).forceDeployOnAddresses(deploymentData)) + .to.emit(contractDeployer, 'ContractDeployed') + .withArgs(forceDeployer.address, utils.hashBytecode(deployableArtifact.bytecode), RANDOM_ADDRESS_2) + .to.emit(contractDeployer, 'ContractDeployed') + .withArgs(forceDeployer.address, utils.hashBytecode(deployableArtifact.bytecode), RANDOM_ADDRESS_3) + .to.emit(Deployable__factory.connect(RANDOM_ADDRESS_2, wallet), 'Deployed') + .withArgs(0, '0x') + .to.not.emit(Deployable__factory.connect(RANDOM_ADDRESS_3, wallet), 'Deployed'); + + let accountInfo1 = await contractDeployer.getAccountInfo(RANDOM_ADDRESS_2); + expect(accountInfo1.supportedAAVersion).to.be.eq(AA_VERSION_NONE); + expect(accountInfo1.nonceOrdering).to.be.eq(NONCE_ORDERING_SEQUENTIAL); + + let accountInfo2 = await contractDeployer.getAccountInfo(RANDOM_ADDRESS_3); + expect(accountInfo2.supportedAAVersion).to.be.eq(AA_VERSION_NONE); + expect(accountInfo2.nonceOrdering).to.be.eq(NONCE_ORDERING_SEQUENTIAL); + }); + }); +}); diff --git a/test/DefaultAccount.spec.ts b/test/DefaultAccount.spec.ts new file mode 100644 index 00000000..6231c341 --- /dev/null +++ b/test/DefaultAccount.spec.ts @@ -0,0 +1,377 @@ +import { expect } from 'chai'; +import { + DefaultAccount, + DefaultAccount__factory, + NonceHolder, + NonceHolder__factory, + Callable, + L2EthToken, + L2EthToken__factory, + MockERC20Approve +} from '../typechain-types'; +import { + BOOTLOADER_FORMAL_ADDRESS, + NONCE_HOLDER_SYSTEM_CONTRACT_ADDRESS, + ETH_TOKEN_SYSTEM_CONTRACT_ADDRESS +} from './shared/constants'; +import { Wallet } from 'zksync-web3'; +import { getWallets, deployContract, setCode, loadArtifact } from './shared/utils'; +import { network, ethers } from 'hardhat'; +import { hashBytecode, serialize } from 'zksync-web3/build/src/utils'; +import * as zksync from 'zksync-web3'; +import { TransactionData, signedTxToTransactionData } from './shared/transactions'; + +describe('DefaultAccount tests', function () { + let wallet: Wallet; + let account: Wallet; + let defaultAccount: DefaultAccount; + let bootloader: ethers.Signer; + let nonceHolder: NonceHolder; + let l2EthToken: 
L2EthToken; + let callable: Callable; + let mockERC20Approve: MockERC20Approve; + let paymasterFlowInterface: ethers.utils.Interface; + + const RANDOM_ADDRESS = ethers.utils.getAddress('0xdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef'); + + before(async () => { + wallet = getWallets()[0]; + account = getWallets()[2]; + let defaultAccountArtifact = await loadArtifact('DefaultAccount'); + await setCode(account.address, defaultAccountArtifact.bytecode); + defaultAccount = DefaultAccount__factory.connect(account.address, wallet); + nonceHolder = NonceHolder__factory.connect(NONCE_HOLDER_SYSTEM_CONTRACT_ADDRESS, wallet); + l2EthToken = L2EthToken__factory.connect(ETH_TOKEN_SYSTEM_CONTRACT_ADDRESS, wallet); + callable = (await deployContract('Callable')) as Callable; + mockERC20Approve = (await deployContract('MockERC20Approve')) as MockERC20Approve; + + let paymasterFlowInterfaceArtifact = await loadArtifact('IPaymasterFlow'); + paymasterFlowInterface = new ethers.utils.Interface(paymasterFlowInterfaceArtifact.abi); + + await network.provider.request({ + method: 'hardhat_impersonateAccount', + params: [BOOTLOADER_FORMAL_ADDRESS] + }); + bootloader = await ethers.getSigner(BOOTLOADER_FORMAL_ADDRESS); + }); + + after(async function () { + await network.provider.request({ + method: 'hardhat_stopImpersonatingAccount', + params: [BOOTLOADER_FORMAL_ADDRESS] + }); + }); + + describe('validateTransaction', function () { + it('non-deployer ignored', async () => { + let nonce = await nonceHolder.getMinNonce(account.address); + const legacyTx = await account.populateTransaction({ + type: 0, + to: RANDOM_ADDRESS, + from: account.address, + nonce: nonce, + data: '0x', + value: 0, + gasLimit: 50000 + }); + const txBytes = await account.signTransaction(legacyTx); + const parsedTx = zksync.utils.parseTransaction(txBytes); + const txData = signedTxToTransactionData(parsedTx)!; + + const txHash = parsedTx.hash; + delete legacyTx.from; + const signedHash = ethers.utils.keccak256(serialize(legacyTx)); + + const call = { + from: wallet.address, + to: defaultAccount.address, + value: 0, + data: defaultAccount.interface.encodeFunctionData('validateTransaction', [txHash, signedHash, txData]) + }; + expect(await wallet.provider.call(call)).to.be.eq('0x'); + }); + + it('invalid signature', async () => { + let nonce = await nonceHolder.getMinNonce(account.address); + const legacyTx = await account.populateTransaction({ + type: 0, + to: RANDOM_ADDRESS, + from: account.address, + nonce: nonce, + data: '0x', + value: 0, + gasLimit: 50000 + }); + const txBytes = await account.signTransaction(legacyTx); + const parsedTx = zksync.utils.parseTransaction(txBytes); + parsedTx.s = '0x0FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF5D576E7357A4501DDFE92F46681B20A0'; + const txData = signedTxToTransactionData(parsedTx)!; + + const txHash = parsedTx.hash; + delete legacyTx.from; + const signedHash = ethers.utils.keccak256(serialize(legacyTx)); + + const call = { + from: BOOTLOADER_FORMAL_ADDRESS, + to: defaultAccount.address, + value: 0, + data: defaultAccount.interface.encodeFunctionData('validateTransaction', [txHash, signedHash, txData]) + }; + expect(await bootloader.provider.call(call)).to.be.eq(ethers.constants.HashZero); + }); + + it('valid tx', async () => { + let nonce = await nonceHolder.getMinNonce(account.address); + const legacyTx = await account.populateTransaction({ + type: 0, + to: RANDOM_ADDRESS, + from: account.address, + nonce: nonce, + data: '0x', + value: 0, + gasLimit: 50000 + }); + const txBytes = await 
account.signTransaction(legacyTx); + const parsedTx = zksync.utils.parseTransaction(txBytes); + const txData = signedTxToTransactionData(parsedTx)!; + + const txHash = parsedTx.hash; + delete legacyTx.from; + const signedHash = ethers.utils.keccak256(serialize(legacyTx)); + + const call = { + from: BOOTLOADER_FORMAL_ADDRESS, + to: defaultAccount.address, + value: 0, + data: defaultAccount.interface.encodeFunctionData('validateTransaction', [txHash, signedHash, txData]) + }; + expect(await bootloader.provider.call(call)).to.be.eq( + defaultAccount.interface.getSighash('validateTransaction') + '0'.repeat(56) + ); + }); + }); + + describe('executeTransaction', function () { + it('non-deployer ignored', async () => { + let nonce = await nonceHolder.getMinNonce(account.address); + const legacyTx = await account.populateTransaction({ + type: 0, + to: callable.address, + from: account.address, + nonce: nonce, + data: '0xdeadbeef', + value: 5, + gasLimit: 50000 + }); + const txBytes = await account.signTransaction(legacyTx); + const parsedTx = zksync.utils.parseTransaction(txBytes); + const txData = signedTxToTransactionData(parsedTx)!; + + const txHash = parsedTx.hash; + delete legacyTx.from; + const signedHash = ethers.utils.keccak256(serialize(legacyTx)); + + await expect(await defaultAccount.executeTransaction(txHash, signedHash, txData)).to.not.emit( + callable, + 'Called' + ); + }); + + it('successfully executed', async () => { + let nonce = await nonceHolder.getMinNonce(account.address); + const legacyTx = await account.populateTransaction({ + type: 0, + to: callable.address, + from: account.address, + nonce: nonce, + data: '0xdeadbeef', + value: 5, + gasLimit: 50000 + }); + const txBytes = await account.signTransaction(legacyTx); + const parsedTx = zksync.utils.parseTransaction(txBytes); + const txData = signedTxToTransactionData(parsedTx)!; + + const txHash = parsedTx.hash; + delete legacyTx.from; + const signedHash = ethers.utils.keccak256(serialize(legacyTx)); + + await expect(await defaultAccount.connect(bootloader).executeTransaction(txHash, signedHash, txData)) + .to.emit(callable, 'Called') + .withArgs(5, '0xdeadbeef'); + }); + }); + + describe('executeTransactionFromOutside', function () { + it('nothing', async () => { + let nonce = await nonceHolder.getMinNonce(account.address); + const legacyTx = await account.populateTransaction({ + type: 0, + to: callable.address, + from: account.address, + nonce: nonce, + data: '0xdeadbeef', + value: 5, + gasLimit: 50000 + }); + const txBytes = await account.signTransaction(legacyTx); + const parsedTx = zksync.utils.parseTransaction(txBytes); + const txData = signedTxToTransactionData(parsedTx)!; + + const txHash = parsedTx.hash; + delete legacyTx.from; + const signedHash = ethers.utils.keccak256(serialize(legacyTx)); + + await expect(await defaultAccount.executeTransactionFromOutside(txData)).to.not.emit(callable, 'Called'); + }); + }); + + describe('payForTransaction', function () { + it('non-deployer ignored', async () => { + let nonce = await nonceHolder.getMinNonce(account.address); + const legacyTx = await account.populateTransaction({ + type: 0, + to: callable.address, + from: account.address, + nonce: nonce, + data: '0xdeadbeef', + value: 5, + gasLimit: 50000, + gasPrice: 200 + }); + const txBytes = await account.signTransaction(legacyTx); + const parsedTx = zksync.utils.parseTransaction(txBytes); + const txData = signedTxToTransactionData(parsedTx)!; + + const txHash = parsedTx.hash; + delete legacyTx.from; + const signedHash = 
ethers.utils.keccak256(serialize(legacyTx)); + + let balanceBefore = await l2EthToken.balanceOf(defaultAccount.address); + await defaultAccount.payForTransaction(txHash, signedHash, txData); + let balanceAfter = await l2EthToken.balanceOf(defaultAccount.address); + expect(balanceAfter).to.be.eq(balanceBefore); + }); + + it('successfully payed', async () => { + let nonce = await nonceHolder.getMinNonce(account.address); + const legacyTx = await account.populateTransaction({ + type: 0, + to: callable.address, + from: account.address, + nonce: nonce, + data: '0xdeadbeef', + value: 5, + gasLimit: 50000, + gasPrice: 200 + }); + const txBytes = await account.signTransaction(legacyTx); + const parsedTx = zksync.utils.parseTransaction(txBytes); + const txData = signedTxToTransactionData(parsedTx)!; + + const txHash = parsedTx.hash; + delete legacyTx.from; + const signedHash = ethers.utils.keccak256(serialize(legacyTx)); + + await expect(await defaultAccount.connect(bootloader).payForTransaction(txHash, signedHash, txData)) + .to.emit(l2EthToken, 'Transfer') + .withArgs(account.address, BOOTLOADER_FORMAL_ADDRESS, 50000 * 200); + }); + }); + + describe('prepareForPaymaster', function () { + it('non-deployer ignored', async () => { + const eip712Tx = await account.populateTransaction({ + type: 113, + to: callable.address, + from: account.address, + data: '0x', + value: 0, + maxFeePerGas: 12000, + maxPriorityFeePerGas: 100, + gasLimit: 50000, + customData: { + gasPerPubdata: zksync.utils.DEFAULT_GAS_PER_PUBDATA_LIMIT, + paymasterParams: { + paymaster: RANDOM_ADDRESS, + paymasterInput: paymasterFlowInterface.encodeFunctionData('approvalBased', [ + mockERC20Approve.address, + 2023, + '0x' + ]) + } + } + }); + const signedEip712Tx = await account.signTransaction(eip712Tx); + const parsedEIP712tx = zksync.utils.parseTransaction(signedEip712Tx); + + const eip712TxData = signedTxToTransactionData(parsedEIP712tx)!; + const eip712TxHash = parsedEIP712tx.hash; + const eip712SignedHash = zksync.EIP712Signer.getSignedDigest(eip712Tx); + + await expect( + await defaultAccount.prepareForPaymaster(eip712TxHash, eip712SignedHash, eip712TxData) + ).to.not.emit(mockERC20Approve, 'Approved'); + }); + + it('successfully prepared', async () => { + const eip712Tx = await account.populateTransaction({ + type: 113, + to: callable.address, + from: account.address, + data: '0x', + value: 0, + maxFeePerGas: 12000, + maxPriorityFeePerGas: 100, + gasLimit: 50000, + customData: { + gasPerPubdata: zksync.utils.DEFAULT_GAS_PER_PUBDATA_LIMIT, + paymasterParams: { + paymaster: RANDOM_ADDRESS, + paymasterInput: paymasterFlowInterface.encodeFunctionData('approvalBased', [ + mockERC20Approve.address, + 2023, + '0x' + ]) + } + } + }); + const signedEip712Tx = await account.signTransaction(eip712Tx); + const parsedEIP712tx = zksync.utils.parseTransaction(signedEip712Tx); + + const eip712TxData = signedTxToTransactionData(parsedEIP712tx)!; + const eip712TxHash = parsedEIP712tx.hash; + const eip712SignedHash = zksync.EIP712Signer.getSignedDigest(eip712Tx); + + await expect( + await defaultAccount + .connect(bootloader) + .prepareForPaymaster(eip712TxHash, eip712SignedHash, eip712TxData) + ) + .to.emit(mockERC20Approve, 'Approved') + .withArgs(RANDOM_ADDRESS, 2023); + }); + }); + + describe('fallback/receive', function () { + it('zero value', async () => { + const call = { + from: wallet.address, + to: defaultAccount.address, + value: 0, + data: '0x872384894899834939049043904390390493434343434344433443433434344234234234' + }; + expect(await 
wallet.provider.call(call)).to.be.eq('0x'); + }); + + it('non-zero value', async () => { + const call = { + from: wallet.address, + to: defaultAccount.address, + value: 3223, + data: '0x87238489489983493904904390431212224343434344433443433434344234234234' + }; + expect(await wallet.provider.call(call)).to.be.eq('0x'); + }); + }); +}); diff --git a/test/EcAdd.spec.ts b/test/EcAdd.spec.ts new file mode 100644 index 00000000..2e88259c --- /dev/null +++ b/test/EcAdd.spec.ts @@ -0,0 +1,188 @@ +import { expect } from 'chai'; +import { Contract } from 'zksync-web3'; +import { deployContractYul, callFallback } from './shared/utils'; + +describe('EcAdd tests', function () { + let ecAdd: Contract; + + before(async () => { + ecAdd = await deployContractYul('EcAdd', 'precompiles'); + }); + + describe('Ethereum tests', function () { + it('0 bytes: (0, 0) + (0, 0)', async () => { + const returnData = await callFallback(ecAdd, ''); + await expect(returnData).to.be.equal( + '0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000' + ); + }); + + it('128 bytes: (6, 9) + (19274124, 124124)', async () => { + const call = callFallback( + ecAdd, + '0x00000000000000000000000000000000000000000000000000000000000000060000000000000000000000000000000000000000000000000000000000000009000000000000000000000000000000000000000000000000000000000126198c000000000000000000000000000000000000000000000000000000000001e4dc' + ); + await expect(call).to.be.reverted; + }); + + it('128 bytes: (1, 2) + (0, 0)', async () => { + const returnData = await callFallback( + ecAdd, + '0x0000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000' + ); + await expect(returnData).to.be.equal( + '0x00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002' + ); + }); + + it('128 bytes: (0, 0) + (0, 0)', async () => { + const returnData = await callFallback( + ecAdd, + '0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000' + ); + await expect(returnData).to.be.equal( + '0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000' + ); + }); + + it('128 bytes: (0, 3) + (1, 2)', async () => { + const call = callFallback( + ecAdd, + '0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000300000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002' + ); + await expect(call).to.be.reverted; + }); + + it('128 bytes: (0, 0) + (1, 3)', async () => { + const call = callFallback( + ecAdd, + '0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000003' + ); + await expect(call).to.be.reverted; + }); + + it('128 bytes: (0, 0) + (1, 2)', async () => { + const returnData = await callFallback( + ecAdd, + 
'0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002' + ); + await expect(returnData).to.be.equal( + '0x00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002' + ); + }); + + it('64 bytes: (0, 0) + (0, 0)', async () => { + const returnData = await callFallback( + ecAdd, + '0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000' + ); + await expect(returnData).to.be.equal( + '0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000' + ); + }); + + it('128 bytes: (1, 2) + (1, 2)', async () => { + const returnData = await callFallback( + ecAdd, + '0x0000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002' + ); + await expect(returnData).to.be.equal( + '0x030644e72e131a029b85045b68181585d97816a916871ca8d3c208c16d87cfd315ed738c0e0a7c92e7845f96b2ae9c0a68a6a449e3538fc7ff3ebf7a5a18a2c4' + ); + }); + + it('80 bytes: (1, 3) + (0, 0)', async () => { + const call = callFallback( + ecAdd, + '0x000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000000' + ); + await expect(call).to.be.reverted; + }); + + it('192 bytes: (1, 2) + (0, 0)', async () => { + const returnData = await callFallback( + ecAdd, + '0x000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000' + ); + await expect(returnData).to.be.equal( + '0x00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002' + ); + }); + + it('192 bytes: (0, 0) + (0, 0)', async () => { + const returnData = await callFallback( + ecAdd, + '0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000' + ); + await expect(returnData).to.be.equal( + '0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000' + ); + }); + + it('80 bytes: (0, 0) + (0, 0)', async () => { + const returnData = await callFallback( + ecAdd, + '0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000' + ); + await expect(returnData).to.be.equal( + 
'0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000' + ); + }); + + // (10744596414106452074759370245733544594153395043370666422502510773307029471145, 848677436511517736191562425154572367705380862894644942948681172815252343932) + // + + // (10744596414106452074759370245733544594153395043370666422502510773307029471145, 21039565435327757486054843320102702720990930294403178719740356721829973864651) + it('192 bytes: (1074..1145, 8486..3932) + (1074..1145, 2103..4651)', async () => { + const returnData = await callFallback( + ecAdd, + '0x17c139df0efee0f766bc0204762b774362e4ded88953a39ce849a8a7fa163fa901e0559bacb160664764a357af8a9fe70baa9258e0b959273ffc5718c6d4cc7c17c139df0efee0f766bc0204762b774362e4ded88953a39ce849a8a7fa163fa92e83f8d734803fc370eba25ed1f6b8768bd6d83887b87165fc2434fe11a830cb00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000' + ); + await expect(returnData).to.be.equal( + '0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000' + ); + }); + + it('192 bytes: (0, 0) + (1, 2)', async () => { + const returnData = await callFallback( + ecAdd, + '0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000' + ); + await expect(returnData).to.be.equal( + '0x00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002' + ); + }); + + it('192 bytes: (1, 2) + (1, 2)', async () => { + const returnData = await callFallback( + ecAdd, + '0x000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000' + ); + await expect(returnData).to.be.equal( + '0x030644e72e131a029b85045b68181585d97816a916871ca8d3c208c16d87cfd315ed738c0e0a7c92e7845f96b2ae9c0a68a6a449e3538fc7ff3ebf7a5a18a2c4' + ); + }); + + it('64 bytes: (1, 2) + (0, 0)', async () => { + const returnData = await callFallback( + ecAdd, + '0x00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002' + ); + await expect(returnData).to.be.equal( + '0x00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002' + ); + }); + + // (10744596414106452074759370245733544594153395043370666422502510773307029471145, 848677436511517736191562425154572367705380862894644942948681172815252343932) + // + + // (1624070059937464756887933993293429854168590106605707304006200119738501412969, 3269329550605213075043232856820720631601935657990457502777101397807070461336) + it('128 bytes: (1074..1145, 8486..3932) + (1624..2969, 3269..1336)', async () => { + const returnData = await callFallback( + ecAdd, + 
'0x17c139df0efee0f766bc0204762b774362e4ded88953a39ce849a8a7fa163fa901e0559bacb160664764a357af8a9fe70baa9258e0b959273ffc5718c6d4cc7c039730ea8dff1254c0fee9c0ea777d29a9c710b7e616683f194f18c43b43b869073a5ffcc6fc7a28c30723d6e58ce577356982d65b833a5a5c15bf9024b43d98' + ); + await expect(returnData).to.be.equal( + '0x15bf2bb17880144b5d1cd2b1f46eff9d617bffd1ca57c37fb5a49bd84e53cf66049c797f9ce0d17083deb32b5e36f2ea2a212ee036598dd7624c168993d1355f' + ); + }); + }); +}); diff --git a/test/EcMul.spec.ts b/test/EcMul.spec.ts new file mode 100644 index 00000000..e56de1a3 --- /dev/null +++ b/test/EcMul.spec.ts @@ -0,0 +1,399 @@ +import { expect } from 'chai'; +import { Contract } from 'zksync-web3'; +import { deployContractYul, callFallback } from './shared/utils'; + +describe('EcMul tests', function () { + let ecMul: Contract; + + before(async () => { + ecMul = await deployContractYul('EcMul', 'precompiles'); + }); + + describe('Ethereum tests', function () { + it('128 bytes: (1, 3) * 0', async () => { + const call = callFallback( + ecMul, + '0x0000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000' + ); + await expect(call).to.be.reverted; + }); + + it('128 bytes: (1, 2) * 21888242871839275222246405745257275088548364400416034343698204186575808495616', async () => { + const returnData = await callFallback( + ecMul, + '0x0000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000230644e72e131a029b85045b68181585d2833e84879b9709143e1f593f00000000000000000000000000000000000000000000000000000000000000000000000' + ); + await expect(returnData).to.be.equal( + '0x000000000000000000000000000000000000000000000000000000000000000130644e72e131a029b85045b68181585d97816a916871ca8d3c208c16d87cfd45' + ); + }); + + it('64 bytes: (1, 3) * 0', async () => { + const call = callFallback( + ecMul, + '0x00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000003' + ); + await expect(call).to.be.reverted; + }); + + it('128 bytes: (1, 3) * 21888242871839275222246405745257275088548364400416034343698204186575808495616', async () => { + const call = callFallback( + ecMul, + '0x0000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000330644e72e131a029b85045b68181585d2833e84879b9709143e1f593f00000000000000000000000000000000000000000000000000000000000000000000000' + ); + await expect(call).to.be.reverted; + }); + + it('96 bytes: (1, 3) * 21888242871839275222246405745257275088548364400416034343698204186575808495617', async () => { + const call = callFallback( + ecMul, + '0x0000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000330644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000001' + ); + await expect(call).to.be.reverted; + }); + + it('96 bytes: (1, 3) * 1', async () => { + const call = callFallback( + ecMul, + '0x000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000001' + ); + await expect(call).to.be.reverted; + }); + + // (11999875504842010600789954262886096740416429265635183817701593963271973497827, 
11843594000332171325303933275547366297934113019079887694534126289021216356598) + // * + // 21888242871839275222246405745257275088548364400416034343698204186575808495616 + it('96 bytes: (1199..7827, 1184..6598) * 2188..5616', async () => { + const returnData = await callFallback( + ecMul, + '0x1a87b0584ce92f4593d161480614f2989035225609f08058ccfa3d0f940febe31a2f3c951f6dadcc7ee9007dff81504b0fcd6d7cf59996efdc33d92bf7f9f8f630644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000000' + ); + await expect(returnData).to.be.equal( + '0x1a87b0584ce92f4593d161480614f2989035225609f08058ccfa3d0f940febe3163511ddc1c3f25d396745388200081287b3fd1472d8339d5fecb2eae0830451' + ); + }); + + it('128 bytes: (1, 3) * 9', async () => { + const call = callFallback( + ecMul, + '0x0000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000300000000000000000000000000000000000000000000000000000000000000090000000000000000000000000000000000000000000000000000000000000000' + ); + await expect(call).to.be.reverted; + }); + + it('128 bytes: (1, 3) * 21888242871839275222246405745257275088548364400416034343698204186575808495617', async () => { + const call = callFallback( + ecMul, + '0x0000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000330644e72e131a029b85045b68181585d2833e84879b9709143e1f593f00000010000000000000000000000000000000000000000000000000000000000000000' + ); + await expect(call).to.be.reverted; + }); + + it('128 bytes: (1, 2) * 340282366920938463463374607431768211456', async () => { + const returnData = await callFallback( + ecMul, + '0x000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000100000000000000000000000000000000' + ); + await expect(returnData).to.be.equal( + '0x13b8fec4a1eb2c7e3ccc07061ad516277c3bbe57bd4a302012b58a517f6437a4224d978b5763831dff16ce9b2c42222684835fedfc70ffec005789bb0c10de36' + ); + }); + + it('96 bytes: (1, 3) * 2', async () => { + const call = callFallback( + ecMul, + '0x000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000002' + ); + await expect(call).to.be.reverted; + }); + + it('128 bytes: (1, 3) * 1', async () => { + const call = callFallback( + ecMul, + '0x0000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000300000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000' + ); + await expect(call).to.be.reverted; + }); + + it('96 bytes: (1, 2) * 115792089237316195423570985008687907853269984665640564039457584007913129639935', async () => { + const returnData = await callFallback( + ecMul, + '0x00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff' + ); + await expect(returnData).to.be.equal( + '0x2f588cffe99db877a4434b598ab28f81e0522910ea52b45f0adaa772b2d5d35212f42fa8fd34fb1b33d8c6a718b6590198389b26fc9d8808d971f8b009777a97' + ); + }); + + it('128 bytes: (1, 2) * 21888242871839275222246405745257275088548364400416034343698204186575808495617', async () => { + const returnData = await callFallback( + ecMul, + 
'0x0000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000230644e72e131a029b85045b68181585d2833e84879b9709143e1f593f00000010000000000000000000000000000000000000000000000000000000000000000' + ); + await expect(returnData).to.be.equal( + '0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000' + ); + }); + + it('128 bytes: (1, 2) * 2', async () => { + const returnData = await callFallback( + ecMul, + '0x0000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000' + ); + await expect(returnData).to.be.equal( + '0x030644e72e131a029b85045b68181585d97816a916871ca8d3c208c16d87cfd315ed738c0e0a7c92e7845f96b2ae9c0a68a6a449e3538fc7ff3ebf7a5a18a2c4' + ); + }); + + // (11999875504842010600789954262886096740416429265635183817701593963271973497827, 11843594000332171325303933275547366297934113019079887694534126289021216356598) + // * + // 340282366920938463463374607431768211456 + it('80 bytes: (1199..7827, 1184..6598) * 340282366920938463463374607431768211456', async () => { + const returnData = await callFallback( + ecMul, + '0x1a87b0584ce92f4593d161480614f2989035225609f08058ccfa3d0f940febe31a2f3c951f6dadcc7ee9007dff81504b0fcd6d7cf59996efdc33d92bf7f9f8f60000000000000000000000000000000100000000000000000000000000000000' + ); + await expect(returnData).to.be.equal( + '0x1051acb0700ec6d42a88215852d582efbaef31529b6fcbc3277b5c1b300f5cf0135b2394bb45ab04b8bd7611bd2dfe1de6a4e6e2ccea1ea1955f577cd66af85b' + ); + }); + + // (11999875504842010600789954262886096740416429265635183817701593963271973497827, 11843594000332171325303933275547366297934113019079887694534126289021216356598) + // * + // 0 + it('96 bytes: (1199..7827, 1184..6598) * 0', async () => { + const returnData = await callFallback( + ecMul, + '0x1a87b0584ce92f4593d161480614f2989035225609f08058ccfa3d0f940febe31a2f3c951f6dadcc7ee9007dff81504b0fcd6d7cf59996efdc33d92bf7f9f8f60000000000000000000000000000000000000000000000000000000000000000' + ); + await expect(returnData).to.be.equal( + '0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000' + ); + }); + + it('96 bytes: (1, 3) * 9', async () => { + const call = callFallback( + ecMul, + '0x000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000009' + ); + await expect(call).to.be.reverted; + }); + + // (11999875504842010600789954262886096740416429265635183817701593963271973497827, 11843594000332171325303933275547366297934113019079887694534126289021216356598) + // * + // 115792089237316195423570985008687907853269984665640564039457584007913129639935 + it('96 bytes: (1, 3) * 9', async () => { + const returnData = await callFallback( + ecMul, + '0x1a87b0584ce92f4593d161480614f2989035225609f08058ccfa3d0f940febe31a2f3c951f6dadcc7ee9007dff81504b0fcd6d7cf59996efdc33d92bf7f9f8f6ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff' + ); + await expect(returnData).to.be.equal( + '0x2cde5879ba6f13c0b5aa4ef627f159a3347df9722efce88a9afbb20b763b4c411aa7e43076f6aee272755a7f9b84832e71559ba0d2e0b17d5f9f01755e5b0d11' + ); + }); + + it('96 bytes: (1, 3) * 
115792089237316195423570985008687907853269984665640564039457584007913129639935', async () => { + const call = callFallback( + ecMul, + '0x00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000003ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0000000000000000000000000000000000000000000000000000000000000000' + ); + await expect(call).to.be.reverted; + }); + + // (11999875504842010600789954262886096740416429265635183817701593963271973497827, 11843594000332171325303933275547366297934113019079887694534126289021216356598) + // * + // 0 + it('64 bytes: (1199..7827, 1184..6598) * 0', async () => { + const returnData = await callFallback( + ecMul, + '0x1a87b0584ce92f4593d161480614f2989035225609f08058ccfa3d0f940febe31a2f3c951f6dadcc7ee9007dff81504b0fcd6d7cf59996efdc33d92bf7f9f8f6' + ); + await expect(returnData).to.be.equal( + '0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000' + ); + }); + + it('128 bytes: (1, 2) * 115792089237316195423570985008687907853269984665640564039457584007913129639935', async () => { + const returnData = await callFallback( + ecMul, + '0x00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0000000000000000000000000000000000000000000000000000000000000000' + ); + await expect(returnData).to.be.equal( + '0x2f588cffe99db877a4434b598ab28f81e0522910ea52b45f0adaa772b2d5d35212f42fa8fd34fb1b33d8c6a718b6590198389b26fc9d8808d971f8b009777a97' + ); + }); + + // (11999875504842010600789954262886096740416429265635183817701593963271973497827, 11843594000332171325303933275547366297934113019079887694534126289021216356598) + // * + // 1 + it('96 bytes: (1199..7827, 1184..6598) * 1', async () => { + const returnData = await callFallback( + ecMul, + '0x1a87b0584ce92f4593d161480614f2989035225609f08058ccfa3d0f940febe31a2f3c951f6dadcc7ee9007dff81504b0fcd6d7cf59996efdc33d92bf7f9f8f60000000000000000000000000000000000000000000000000000000000000001' + ); + await expect(returnData).to.be.equal( + '0x1a87b0584ce92f4593d161480614f2989035225609f08058ccfa3d0f940febe31a2f3c951f6dadcc7ee9007dff81504b0fcd6d7cf59996efdc33d92bf7f9f8f6' + ); + }); + + it('96 bytes: (1, 2) * 9', async () => { + const returnData = await callFallback( + ecMul, + '0x000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000009' + ); + await expect(returnData).to.be.equal( + '0x039730ea8dff1254c0fee9c0ea777d29a9c710b7e616683f194f18c43b43b869073a5ffcc6fc7a28c30723d6e58ce577356982d65b833a5a5c15bf9024b43d98' + ); + }); + + it('96 bytes: (1, 2) * 21888242871839275222246405745257275088548364400416034343698204186575808495617', async () => { + const returnData = await callFallback( + ecMul, + '0x0000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000230644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000001' + ); + await expect(returnData).to.be.equal( + '0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000' + ); + }); + + it('80 bytes: (1, 3) * 340282366920938463463374607431768211456', async () => { + const call = callFallback( + ecMul, + 
'0x000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000100000000000000000000000000000000' + ); + await expect(call).to.be.reverted; + }); + + it('80 bytes: (1, 3) * 2', async () => { + const call = callFallback( + ecMul, + '0x0000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000300000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000' + ); + await expect(call).to.be.reverted; + }); + + it('96 bytes: (1, 3) * 21888242871839275222246405745257275088548364400416034343698204186575808495616', async () => { + const call = callFallback( + ecMul, + '0x0000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000330644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000000' + ); + await expect(call).to.be.reverted; + }); + + // (11999875504842010600789954262886096740416429265635183817701593963271973497827, 11843594000332171325303933275547366297934113019079887694534126289021216356598) + // * + // 2 + it('96 bytes: (1199..7827, 1184..6598) * 2', async () => { + const returnData = await callFallback( + ecMul, + '0x1a87b0584ce92f4593d161480614f2989035225609f08058ccfa3d0f940febe31a2f3c951f6dadcc7ee9007dff81504b0fcd6d7cf59996efdc33d92bf7f9f8f60000000000000000000000000000000000000000000000000000000000000002' + ); + await expect(returnData).to.be.equal( + '0x03d64e49ebb3c56c99e0769c1833879c9b86ead23945e1e7477cbd057e961c500d6840b39f8c2fefe0eced3e7d210b830f50831e756f1cc9039af65dc292e6d0' + ); + }); + + it('128 bytes: (1, 2) * 9', async () => { + const returnData = await callFallback( + ecMul, + '0x0000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000090000000000000000000000000000000000000000000000000000000000000000' + ); + await expect(returnData).to.be.equal( + '0x039730ea8dff1254c0fee9c0ea777d29a9c710b7e616683f194f18c43b43b869073a5ffcc6fc7a28c30723d6e58ce577356982d65b833a5a5c15bf9024b43d98' + ); + }); + + it('96 bytes: (1, 3) * 0', async () => { + const call = callFallback( + ecMul, + '0x000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000000' + ); + await expect(call).to.be.reverted; + }); + + // (11999875504842010600789954262886096740416429265635183817701593963271973497827, 11843594000332171325303933275547366297934113019079887694534126289021216356598) + // * + // 21888242871839275222246405745257275088548364400416034343698204186575808495617 + it('96 bytes: (1199..7827, 1184..6598) * 2188..5617', async () => { + const returnData = await callFallback( + ecMul, + '0x1a87b0584ce92f4593d161480614f2989035225609f08058ccfa3d0f940febe31a2f3c951f6dadcc7ee9007dff81504b0fcd6d7cf59996efdc33d92bf7f9f8f630644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000001' + ); + await expect(returnData).to.be.equal( + '0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000' + ); + }); + + // (11999875504842010600789954262886096740416429265635183817701593963271973497827, 11843594000332171325303933275547366297934113019079887694534126289021216356598) + // * + // 9 + 
it('96 bytes: (1199..7827, 1184..6598) * 9', async () => { + const returnData = await callFallback( + ecMul, + '0x1a87b0584ce92f4593d161480614f2989035225609f08058ccfa3d0f940febe31a2f3c951f6dadcc7ee9007dff81504b0fcd6d7cf59996efdc33d92bf7f9f8f60000000000000000000000000000000000000000000000000000000000000009' + ); + await expect(returnData).to.be.equal( + '0x1dbad7d39dbc56379f78fac1bca147dc8e66de1b9d183c7b167351bfe0aeab742cd757d51289cd8dbd0acf9e673ad67d0f0a89f912af47ed1be53664f5692575' + ); + }); + + it('96 bytes: (1, 2) * 21888242871839275222246405745257275088548364400416034343698204186575808495616', async () => { + const returnData = await callFallback( + ecMul, + '0x0000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000230644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000000' + ); + await expect(returnData).to.be.equal( + '0x000000000000000000000000000000000000000000000000000000000000000130644e72e131a029b85045b68181585d97816a916871ca8d3c208c16d87cfd45' + ); + }); + + // (11999875504842010600789954262886096740416429265635183817701593963271973497827, 11843594000332171325303933275547366297934113019079887694534126289021216356598) + // * + // 2 + it('128 bytes: (1199..7827, 1184..6598) * 2', async () => { + const returnData = await callFallback( + ecMul, + '0x1a87b0584ce92f4593d161480614f2989035225609f08058ccfa3d0f940febe31a2f3c951f6dadcc7ee9007dff81504b0fcd6d7cf59996efdc33d92bf7f9f8f600000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000' + ); + await expect(returnData).to.be.equal( + '0x03d64e49ebb3c56c99e0769c1833879c9b86ead23945e1e7477cbd057e961c500d6840b39f8c2fefe0eced3e7d210b830f50831e756f1cc9039af65dc292e6d0' + ); + }); + + it('128 bytes: (1, 2) * 340282366920938463463374607431768211456', async () => { + const returnData = await callFallback( + ecMul, + '0x0000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000' + ); + await expect(returnData).to.be.equal( + '0x13b8fec4a1eb2c7e3ccc07061ad516277c3bbe57bd4a302012b58a517f6437a4224d978b5763831dff16ce9b2c42222684835fedfc70ffec005789bb0c10de36' + ); + }); + + // (11999875504842010600789954262886096740416429265635183817701593963271973497827, 11843594000332171325303933275547366297934113019079887694534126289021216356598) + // * + // 115792089237316195423570985008687907853269984665640564039457584007913129639935 + it('128 bytes: (1199..7827, 1184..6598) * 1157..9935', async () => { + const returnData = await callFallback( + ecMul, + '0x1a87b0584ce92f4593d161480614f2989035225609f08058ccfa3d0f940febe31a2f3c951f6dadcc7ee9007dff81504b0fcd6d7cf59996efdc33d92bf7f9f8f6ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0000000000000000000000000000000000000000000000000000000000000000' + ); + await expect(returnData).to.be.equal( + '0x2cde5879ba6f13c0b5aa4ef627f159a3347df9722efce88a9afbb20b763b4c411aa7e43076f6aee272755a7f9b84832e71559ba0d2e0b17d5f9f01755e5b0d11' + ); + }); + + // (11999875504842010600789954262886096740416429265635183817701593963271973497827, 11843594000332171325303933275547366297934113019079887694534126289021216356598) + // * + // 21888242871839275222246405745257275088548364400416034343698204186575808495617 + it('128 bytes: (1199..7827, 1184..6598) * 2188..5617', async () => { + 
const returnData = await callFallback( + ecMul, + '0x1a87b0584ce92f4593d161480614f2989035225609f08058ccfa3d0f940febe31a2f3c951f6dadcc7ee9007dff81504b0fcd6d7cf59996efdc33d92bf7f9f8f630644e72e131a029b85045b68181585d2833e84879b9709143e1f593f00000010000000000000000000000000000000000000000000000000000000000000000' + ); + await expect(returnData).to.be.equal( + '0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000' + ); + }); + }); +}); diff --git a/test/EmptyContract.spec.ts b/test/EmptyContract.spec.ts new file mode 100644 index 00000000..e5fdfb0e --- /dev/null +++ b/test/EmptyContract.spec.ts @@ -0,0 +1,44 @@ +import { expect } from 'chai'; +import { EmptyContract } from '../typechain-types'; +import { Wallet } from 'zksync-web3'; +import { getWallets, deployContract, provider } from './shared/utils'; +import { ethers } from 'hardhat'; + +describe('EmptyContract tests', function () { + let wallet: Wallet; + let emptyContract: EmptyContract; + + before(async () => { + wallet = getWallets()[0]; + emptyContract = (await deployContract('EmptyContract')) as EmptyContract; + }); + + it('zero value', async () => { + const tx = { + from: wallet.address, + to: emptyContract.address, + value: 0, + data: '0x1234567890deadbeef1234567890' + }; + expect(await provider.call(tx)).to.be.eq('0x'); + }); + + it('non-zero value', async () => { + const tx = { + from: wallet.address, + to: emptyContract.address, + value: ethers.utils.parseEther('1.0'), + data: '0x1234567890deadbeef1234567890' + }; + expect(await provider.call(tx)).to.be.eq('0x'); + }); + + it('empty calldata', async () => { + const tx = { + from: wallet.address, + to: emptyContract.address, + data: '' + }; + expect(await provider.call(tx)).to.be.eq('0x'); + }); +}); diff --git a/test/EventWriter.spec.ts b/test/EventWriter.spec.ts new file mode 100644 index 00000000..094c640e --- /dev/null +++ b/test/EventWriter.spec.ts @@ -0,0 +1,82 @@ +import { expect } from 'chai'; +import { EventWriterTest } from '../typechain-types'; +import { Contract, Wallet } from 'zksync-web3'; +import { EVENT_WRITER_CONTRACT_ADDRESS } from './shared/constants'; +import { getCode, getWallets, deployContract, setCode } from './shared/utils'; +import { readYulBytecode } from '../scripts/utils'; +import { Language } from '../scripts/constants'; + +describe('EventWriter tests', function () { + let wallet: Wallet; + let eventWriter: Contract; + let eventWriterTest: EventWriterTest; + + let _eventWriterCode: string; + + before(async () => { + _eventWriterCode = await getCode(EVENT_WRITER_CONTRACT_ADDRESS); + let eventWriterTestCode = readYulBytecode({ + codeName: 'EventWriter', + path: '', + lang: Language.Yul, + address: ethers.constants.AddressZero + }); + await setCode(EVENT_WRITER_CONTRACT_ADDRESS, eventWriterTestCode); + + wallet = (await getWallets())[0]; + eventWriter = new Contract(EVENT_WRITER_CONTRACT_ADDRESS, [], wallet); + eventWriterTest = (await deployContract('EventWriterTest')) as EventWriterTest; + }); + + after(async () => { + await setCode(EVENT_WRITER_CONTRACT_ADDRESS, _eventWriterCode); + }); + + it('non system call failed', async () => { + await expect(eventWriter.fallback({ data: '0x' })).to.be.reverted; + }); + + // TODO: anonymous events doesn't work + it.skip('zero topics', async () => { + console.log((await (await eventWriterTest.zeroTopics('0x')).wait()).events); + await expect(eventWriterTest.zeroTopics('0x')).to.emit(eventWriterTest, 'ZeroTopics').withArgs('0x'); + }); + + 
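+ // A note on the skipped case above (a sketch, not part of this suite): a log with zero topics
+ // is what an anonymous event with no indexed arguments produces, so the `.to.emit` matcher
+ // cannot look it up by name. One way to still cover it would be to inspect the raw receipt:
+ //   const receipt = await (await eventWriterTest.zeroTopics('0xabcd')).wait();
+ //   const rawLog = receipt.logs.find((log) => log.topics.length === 0);
+ //   expect(rawLog).to.not.be.undefined; // the anonymous log should still be present on the receipt
+ // Here '0xabcd' is an arbitrary illustrative payload; only `eventWriterTest` comes from this suite.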
it('one topic', async () => { + await expect(eventWriterTest.oneTopic('0xdeadbeef')) + .to.emit(eventWriterTest, 'OneTopic') + .withArgs('0xdeadbeef'); + }); + + it('two topics', async () => { + await expect( + eventWriterTest.twoTopics('0x1278378123784223232874782378478237848723784782378423747237848723', '0xabcd') + ) + .to.emit(eventWriterTest, 'TwoTopics') + .withArgs('0x1278378123784223232874782378478237848723784782378423747237848723', '0xabcd'); + }); + + it('three topics', async () => { + await expect(eventWriterTest.threeTopics(0, 1133, '0x')) + .to.emit(eventWriterTest, 'ThreeTopics') + .withArgs(0, 1133, '0x'); + }); + + it('four topics', async () => { + await expect( + eventWriterTest.fourTopics( + '0x1234567890', + 0, + 22, + '0x2828383489438934898934893894893895348915893489589348958349589348958934859348958934858394589348958934854385838954893489' + ) + ) + .to.emit(eventWriterTest, 'FourTopics') + .withArgs( + '0x1234567890', + 0, + 22, + '0x2828383489438934898934893894893895348915893489589348958349589348958934859348958934858394589348958934854385838954893489' + ); + }); +}); diff --git a/test/ImmutableSimulator.spec.ts b/test/ImmutableSimulator.spec.ts new file mode 100644 index 00000000..3ba7b034 --- /dev/null +++ b/test/ImmutableSimulator.spec.ts @@ -0,0 +1,64 @@ +import { expect } from 'chai'; +import { ImmutableSimulator } from '../typechain-types'; +import { DEPLOYER_SYSTEM_CONTRACT_ADDRESS } from './shared/constants'; +import { Wallet } from 'zksync-web3'; +import { getWallets, deployContract } from './shared/utils'; +import { network, ethers } from 'hardhat'; + +describe('ImmutableSimulator tests', function () { + let wallet: Wallet; + let immutableSimulator: ImmutableSimulator; + + const RANDOM_ADDRESS = '0xdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef'; + const IMMUTABLES_DATA = [ + { + index: 0, + value: '0xdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef' + }, + { + index: 23, + value: '0x0000000000000000000000000000000000000000000000000000000000000111' + } + ]; + + before(async () => { + wallet = getWallets()[0]; + immutableSimulator = (await deployContract('ImmutableSimulator')) as ImmutableSimulator; + }); + + describe('setImmutables', function () { + it('non-deployer failed to call', async () => { + await expect(immutableSimulator.setImmutables(RANDOM_ADDRESS, IMMUTABLES_DATA)).to.be.revertedWith( + 'Callable only by the deployer system contract' + ); + }); + + it('successfully set', async () => { + await network.provider.request({ + method: 'hardhat_impersonateAccount', + params: [DEPLOYER_SYSTEM_CONTRACT_ADDRESS] + }); + + const deployer_account = await ethers.getSigner(DEPLOYER_SYSTEM_CONTRACT_ADDRESS); + + await immutableSimulator.connect(deployer_account).setImmutables(RANDOM_ADDRESS, IMMUTABLES_DATA); + + await network.provider.request({ + method: 'hardhat_stopImpersonatingAccount', + params: [DEPLOYER_SYSTEM_CONTRACT_ADDRESS] + }); + + for (const immutable of IMMUTABLES_DATA) { + expect(await immutableSimulator.getImmutable(RANDOM_ADDRESS, immutable.index)).to.be.eq( + immutable.value + ); + } + }); + }); + + describe('getImmutable', function () { + it('zero', async () => { + expect(await immutableSimulator.getImmutable(RANDOM_ADDRESS, 333)).to.be.eq(ethers.constants.HashZero); + }); + }); +}); diff --git a/test/KnownCodesStorage.spec.ts b/test/KnownCodesStorage.spec.ts new file mode 100644 index 00000000..d00deb3c --- /dev/null +++ b/test/KnownCodesStorage.spec.ts @@ -0,0 +1,157 @@ +import { expect } from 'chai'; +import { KnownCodesStorage, 
MockL1Messenger, MockL1Messenger__factory } from '../typechain-types'; +import { + BOOTLOADER_FORMAL_ADDRESS, + EMPTY_STRING_KECCAK, + L1_MESSENGER_SYSTEM_CONTRACT_ADDRESS, + COMPRESSOR_CONTRACT_ADDRESS +} from './shared/constants'; +import { Wallet } from 'zksync-web3'; +import { getWallets, deployContract, loadArtifact, setCode, getCode } from './shared/utils'; +import { network, ethers } from 'hardhat'; + +describe('KnownCodesStorage tests', function () { + let wallet: Wallet; + let knownCodesStorage: KnownCodesStorage; + let mockL1Messenger: MockL1Messenger; + let bootloaderAccount: ethers.Signer; + let compressorAccount: ethers.Signer; + + let _l1MessengerCode: string; + + const BYTECODE_HASH_1 = '0x0100FFFFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEF'; + const BYTECODE_HASH_2 = '0x0100FFFFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEE1'; + const BYTECODE_HASH_3 = '0x0100FFFFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEE2'; + const BYTECODE_HASH_4 = '0x0100FFFFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEE3'; + const INCORRECTLY_FORMATTED_HASH = '0x0120FFFFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEF'; + const INVALID_LENGTH_HASH = '0x0100FFFEDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEF'; + + before(async () => { + wallet = (await getWallets())[0]; + knownCodesStorage = (await deployContract('KnownCodesStorage')) as KnownCodesStorage; + + _l1MessengerCode = await getCode(L1_MESSENGER_SYSTEM_CONTRACT_ADDRESS); + let l1MessengerArtifact = await loadArtifact('MockL1Messenger'); + await setCode(L1_MESSENGER_SYSTEM_CONTRACT_ADDRESS, l1MessengerArtifact.bytecode); + mockL1Messenger = MockL1Messenger__factory.connect(L1_MESSENGER_SYSTEM_CONTRACT_ADDRESS, wallet); + + await network.provider.request({ + method: 'hardhat_impersonateAccount', + params: [BOOTLOADER_FORMAL_ADDRESS] + }); + await network.provider.request({ + method: 'hardhat_impersonateAccount', + params: [COMPRESSOR_CONTRACT_ADDRESS] + }); + bootloaderAccount = await ethers.getSigner(BOOTLOADER_FORMAL_ADDRESS); + compressorAccount = await ethers.getSigner(COMPRESSOR_CONTRACT_ADDRESS); + }); + + after(async () => { + await setCode(L1_MESSENGER_SYSTEM_CONTRACT_ADDRESS, _l1MessengerCode); + await network.provider.request({ + method: 'hardhat_stopImpersonatingAccount', + params: [BOOTLOADER_FORMAL_ADDRESS] + }); + await network.provider.request({ + method: 'hardhat_stopImpersonatingAccount', + params: [COMPRESSOR_CONTRACT_ADDRESS] + }); + }); + + describe('markBytecodeAsPublished', function () { + it('non-compressor failed to call', async () => { + await expect(knownCodesStorage.markBytecodeAsPublished(BYTECODE_HASH_1)).to.be.revertedWith( + 'Callable only by the compressor' + ); + }); + + it('incorrectly fomatted bytecode hash failed to call', async () => { + await expect( + knownCodesStorage.connect(compressorAccount).markBytecodeAsPublished(INCORRECTLY_FORMATTED_HASH) + ).to.be.revertedWith('Incorrectly formatted bytecodeHash'); + }); + + it('invalid length bytecode hash failed to call', async () => { + await expect( + knownCodesStorage.connect(compressorAccount).markBytecodeAsPublished(INVALID_LENGTH_HASH) + ).to.be.revertedWith('Code length in words must be odd'); + }); + + it('successfuly marked', async () => { + await expect(knownCodesStorage.connect(compressorAccount).markBytecodeAsPublished(BYTECODE_HASH_1)) + .to.emit(knownCodesStorage, 'MarkedAsKnown') + .withArgs(BYTECODE_HASH_1.toLowerCase(), false) + .not.emit(mockL1Messenger, 'MockBytecodeL1Published'); + 
expect(await knownCodesStorage.getMarker(BYTECODE_HASH_1)).to.be.eq(1); + }); + + it('not marked second time', async () => { + await expect( + knownCodesStorage.connect(compressorAccount).markBytecodeAsPublished(BYTECODE_HASH_1) + ).to.not.emit(knownCodesStorage, 'MarkedAsKnown'); + }); + }); + + describe('markFactoryDeps', function () { + it('non-bootloader failed to call', async () => { + await expect( + knownCodesStorage.markFactoryDeps(false, [BYTECODE_HASH_2, BYTECODE_HASH_3]) + ).to.be.revertedWith('Callable only by the bootloader'); + }); + + it('incorrectly fomatted bytecode hash failed to call', async () => { + await expect( + knownCodesStorage + .connect(bootloaderAccount) + .markFactoryDeps(true, [BYTECODE_HASH_2, INCORRECTLY_FORMATTED_HASH]) + ).to.be.revertedWith('Incorrectly formatted bytecodeHash'); + }); + + it('invalid length bytecode hash failed to call', async () => { + await expect( + knownCodesStorage + .connect(bootloaderAccount) + .markFactoryDeps(false, [INVALID_LENGTH_HASH, BYTECODE_HASH_3]) + ).to.be.revertedWith('Code length in words must be odd'); + }); + + it('successfuly marked', async () => { + await expect( + knownCodesStorage.connect(bootloaderAccount).markFactoryDeps(false, [BYTECODE_HASH_2, BYTECODE_HASH_3]) + ) + .to.emit(knownCodesStorage, 'MarkedAsKnown') + .withArgs(BYTECODE_HASH_2.toLowerCase(), false) + .emit(knownCodesStorage, 'MarkedAsKnown') + .withArgs(BYTECODE_HASH_3.toLowerCase(), false) + .not.emit(mockL1Messenger, 'MockBytecodeL1Published'); + expect(await knownCodesStorage.getMarker(BYTECODE_HASH_2)).to.be.eq(1); + expect(await knownCodesStorage.getMarker(BYTECODE_HASH_3)).to.be.eq(1); + }); + + it('not marked second time', async () => { + await expect( + knownCodesStorage.connect(bootloaderAccount).markFactoryDeps(false, [BYTECODE_HASH_2, BYTECODE_HASH_3]) + ).to.not.emit(knownCodesStorage, 'MarkedAsKnown'); + }); + + it('sent to l1', async () => { + await expect(knownCodesStorage.connect(bootloaderAccount).markFactoryDeps(true, [BYTECODE_HASH_4])) + .to.emit(knownCodesStorage, 'MarkedAsKnown') + .withArgs(BYTECODE_HASH_4.toLowerCase(), true) + .emit(mockL1Messenger, 'MockBytecodeL1Published') + .withArgs(BYTECODE_HASH_4.toLowerCase()); + expect(await knownCodesStorage.getMarker(BYTECODE_HASH_4)).to.be.eq(1); + }); + }); + + describe('getMarker', function () { + it('not known', async () => { + expect(await knownCodesStorage.getMarker(INCORRECTLY_FORMATTED_HASH)).to.be.eq(0); + }); + + it('known', async () => { + expect(await knownCodesStorage.getMarker(BYTECODE_HASH_1)).to.be.eq(1); + }); + }); +}); diff --git a/test/shared/constants.ts b/test/shared/constants.ts new file mode 100644 index 00000000..489259cb --- /dev/null +++ b/test/shared/constants.ts @@ -0,0 +1,14 @@ +import { BigNumber } from 'ethers'; + +export const BOOTLOADER_FORMAL_ADDRESS = '0x0000000000000000000000000000000000008001'; +export const NONCE_HOLDER_SYSTEM_CONTRACT_ADDRESS = '0x0000000000000000000000000000000000008003'; +export const KNOWN_CODE_STORAGE_CONTRACT_ADDRESS = '0x0000000000000000000000000000000000008004'; +export const DEPLOYER_SYSTEM_CONTRACT_ADDRESS = '0x0000000000000000000000000000000000008006'; +export const FORCE_DEPLOYER_ADDRESS = '0x0000000000000000000000000000000000008007'; +export const L1_MESSENGER_SYSTEM_CONTRACT_ADDRESS = '0x0000000000000000000000000000000000008008'; +export const ETH_TOKEN_SYSTEM_CONTRACT_ADDRESS = '0x000000000000000000000000000000000000800a'; +export const EVENT_WRITER_CONTRACT_ADDRESS = 
'0x000000000000000000000000000000000000800d'; +export const COMPRESSOR_CONTRACT_ADDRESS = '0x000000000000000000000000000000000000800e'; +export const EMPTY_STRING_KECCAK = '0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470'; + +export const TWO_IN_256 = BigNumber.from(2).pow(256); diff --git a/test/shared/transactions.ts b/test/shared/transactions.ts new file mode 100644 index 00000000..f6c9d254 --- /dev/null +++ b/test/shared/transactions.ts @@ -0,0 +1,146 @@ +import * as zksync from 'zksync-web3'; +import { ethers, BigNumberish, BytesLike, Transaction } from 'ethers'; + +// Interface encoding the transaction struct used for the AA protocol +export interface TransactionData { + txType: BigNumberish; + from: BigNumberish; + to: BigNumberish; + gasLimit: BigNumberish; + gasPerPubdataByteLimit: BigNumberish; + maxFeePerGas: BigNumberish; + maxPriorityFeePerGas: BigNumberish; + paymaster: BigNumberish; + nonce: BigNumberish; + value: BigNumberish; + // In the future, we might want to add some + // new fields to the struct. The `txData` struct + // is to be passed to the account, and any changes to its structure + // would mean a breaking change to these accounts. In order to prevent this, + // we should keep some fields as "reserved". + // It is also recommended that their length is fixed, since + // it would allow easier proof integration (in case we will need + // some special circuit for preprocessing transactions). + reserved: [BigNumberish, BigNumberish, BigNumberish, BigNumberish]; + data: BytesLike; + signature: BytesLike; + factoryDeps: BytesLike[]; + paymasterInput: BytesLike; + // Reserved dynamic type for future use. Using it should be avoided, + // but it is still here, just in case we want to enable some additional functionality.
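+ // (In practice every encoder below leaves this field empty: each of the *TxToTransactionData
+ // helpers in this file sets `reservedDynamic: '0x'`.)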
+ reservedDynamic: BytesLike; +} + +export function signedTxToTransactionData(tx: Transaction) { + // Transform legacy transaction's `v` part of the signature + // to a single byte used in the packed eth signature + function unpackV(v: number) { + if (v >= 35) { + const chainId = Math.floor((v - 35) / 2); + return v - chainId * 2 - 8; + } else if (v <= 1) { + return 27 + v; + } + + throw new Error('Invalid `v`'); + } + + function legacyTxToTransactionData(tx: any): TransactionData { + return { + txType: 0, + from: tx.from!, + to: tx.to!, + gasLimit: tx.gasLimit!, + gasPerPubdataByteLimit: zksync.utils.DEFAULT_GAS_PER_PUBDATA_LIMIT, + maxFeePerGas: tx.gasPrice!, + maxPriorityFeePerGas: tx.gasPrice!, + paymaster: 0, + nonce: tx.nonce, + value: tx.value || 0, + reserved: [tx.chainId || 0, 0, 0, 0], + data: tx.data!, + signature: ethers.utils.hexConcat([tx.r, tx.s, new Uint8Array([unpackV(tx.v)])]), + factoryDeps: [], + paymasterInput: '0x', + reservedDynamic: '0x' + }; + } + + function eip2930TxToTransactionData(tx: any): TransactionData { + return { + txType: 1, + from: tx.from!, + to: tx.to!, + gasLimit: tx.gasLimit!, + gasPerPubdataByteLimit: zksync.utils.DEFAULT_GAS_PER_PUBDATA_LIMIT, + maxFeePerGas: tx.gasPrice!, + maxPriorityFeePerGas: tx.gasPrice!, + paymaster: 0, + nonce: tx.nonce, + value: tx.value || 0, + reserved: [0, 0, 0, 0], + data: tx.data!, + signature: ethers.utils.hexConcat([tx.r, tx.s, unpackV(tx.v)]), + factoryDeps: [], + paymasterInput: '0x', + reservedDynamic: '0x' + }; + } + + function eip1559TxToTransactionData(tx: any): TransactionData { + return { + txType: 2, + from: tx.from!, + to: tx.to!, + gasLimit: tx.gasLimit!, + gasPerPubdataByteLimit: zksync.utils.DEFAULT_GAS_PER_PUBDATA_LIMIT, + maxFeePerGas: tx.maxFeePerGas, + maxPriorityFeePerGas: tx.maxPriorityFeePerGas, + paymaster: 0, + nonce: tx.nonce, + value: tx.value || 0, + reserved: [0, 0, 0, 0], + data: tx.data!, + signature: ethers.utils.hexConcat([tx.r, tx.s, unpackV(tx.v)]), + factoryDeps: [], + paymasterInput: '0x', + reservedDynamic: '0x' + }; + } + + function eip712TxToTransactionData(tx: any): TransactionData { + return { + txType: 113, + from: tx.from!, + to: tx.to!, + gasLimit: tx.gasLimit!, + gasPerPubdataByteLimit: tx.customData.gasPerPubdata || zksync.utils.DEFAULT_GAS_PER_PUBDATA_LIMIT, + maxFeePerGas: tx.maxFeePerGas, + maxPriorityFeePerGas: tx.maxPriorityFeePerGas, + paymaster: tx.customData.paymasterParams?.paymaster || 0, + nonce: tx.nonce, + value: tx.value || 0, + reserved: [0, 0, 0, 0], + data: tx.data!, + signature: tx.customData.customSignature, + factoryDeps: tx.customData.factoryDeps.map(zksync.utils.hashBytecode), + paymasterInput: tx.customData.paymasterParams?.paymasterInput || '0x', + reservedDynamic: '0x' + }; + } + + const txType = tx.type ?? 
0; + + switch (txType) { + case 0: + return legacyTxToTransactionData(tx); + case 1: + return eip2930TxToTransactionData(tx); + case 2: + return eip1559TxToTransactionData(tx); + case 113: + return eip712TxToTransactionData(tx); + default: + throw new Error('Unsupported tx type'); + } +} diff --git a/test/shared/utils.ts b/test/shared/utils.ts new file mode 100644 index 00000000..79cdf6d6 --- /dev/null +++ b/test/shared/utils.ts @@ -0,0 +1,133 @@ +import { Provider, Contract, Wallet } from 'zksync-web3'; +import { Deployer } from '@matterlabs/hardhat-zksync-deploy'; +import { readYulBytecode } from '../../scripts/utils'; +import { ethers, network } from 'hardhat'; +import { BytesLike } from 'ethers'; +import * as hre from 'hardhat'; +import * as zksync from 'zksync-web3'; +import { ZkSyncArtifact } from '@matterlabs/hardhat-zksync-deploy/dist/types'; +import { DEPLOYER_SYSTEM_CONTRACT_ADDRESS } from './constants'; +import { ContractDeployer__factory } from '../../typechain-types'; +import { Language } from '../../scripts/constants'; + +const RICH_WALLETS = [ + { + address: '0x36615Cf349d7F6344891B1e7CA7C72883F5dc049', + privateKey: '0x7726827caac94a7f9e1b160f7ea819f172f7b6f9d2a97f992c38edeab82d4110' + }, + { + address: '0xa61464658AfeAf65CccaaFD3a512b69A83B77618', + privateKey: '0xac1e735be8536c6534bb4f17f06f6afc73b2b5ba84ac2cfb12f7461b20c0bbe3' + }, + { + address: '0x0D43eB5B8a47bA8900d84AA36656c92024e9772e', + privateKey: '0xd293c684d884d56f8d6abd64fc76757d3664904e309a0645baf8522ab6366d9e' + }, + { + address: '0xA13c10C0D5bd6f79041B9835c63f91de35A15883', + privateKey: '0x850683b40d4a740aa6e745f889a6fdc8327be76e122f5aba645a5b02d0248db8' + } +]; + +export const provider = new Provider((hre.network.config as any).url); + +const wallet = new Wallet(RICH_WALLETS[0].privateKey, provider); +const deployer = new Deployer(hre, wallet); + +export async function callFallback(contract: Contract, data: string) { + // `eth_Call` revert is not parsed by ethers, so we send + // transaction to catch the error and use `eth_Call` to the return data. 
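+ // Illustrative usage (a sketch; `mulCalldata` and `expectedPoint` are hypothetical values,
+ // while `ecMul` and `callFallback` come from the specs above):
+ //   const returnData = await callFallback(ecMul, mulCalldata); // the tx surfaces any revert reason
+ //   expect(returnData).to.be.equal(expectedPoint);             // eth_call then re-reads the return bytes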
+ await contract.fallback({ data }); + return contract.provider.call({ + to: contract.address, + data + }); +} + +export function getWallets(): Wallet[] { + let wallets = []; + for (let i = 0; i < RICH_WALLETS.length; i++) { + wallets[i] = new Wallet(RICH_WALLETS[i].privateKey, provider); + } + return wallets; +} + +export async function loadArtifact(name: string): Promise<ZkSyncArtifact> { + return await deployer.loadArtifact(name); +} + +export async function deployContract(name: string, constructorArguments?: any[] | undefined): Promise<Contract> { + const artifact = await loadArtifact(name); + return await deployer.deploy(artifact, constructorArguments); +} + +export async function deployContractYul(codeName: string, path: string): Promise<Contract> { + const bytecode = readYulBytecode({ + codeName, + path, + lang: Language.Yul, + address: '0x0000000000000000000000000000000000000000' + }); + return await deployer.deploy( + { + bytecode, + factoryDeps: {}, + sourceMapping: '', + _format: '', + contractName: '', + sourceName: '', + abi: [], + deployedBytecode: bytecode, + linkReferences: {}, + deployedLinkReferences: {} + }, + [] + ); +} + +export async function publishBytecode(bytecode: BytesLike) { + await wallet.sendTransaction({ + type: 113, + to: ethers.constants.AddressZero, + data: '0x', + customData: { + factoryDeps: [bytecode], + gasPerPubdata: 50000 + } + }); +} + +export async function getCode(address: string): Promise<string> { + return await provider.getCode(address); +} + +// Force deploy bytecode on the address +export async function setCode(address: string, bytecode: BytesLike) { + // TODO: think about factoryDeps with eth_sendTransaction + try { + // publish bytecode in a separate tx + await publishBytecode(bytecode); + } catch {} + + await network.provider.request({ + method: 'hardhat_impersonateAccount', + params: [DEPLOYER_SYSTEM_CONTRACT_ADDRESS] + }); + + const deployerAccount = await ethers.getSigner(DEPLOYER_SYSTEM_CONTRACT_ADDRESS); + const deployerContract = ContractDeployer__factory.connect(DEPLOYER_SYSTEM_CONTRACT_ADDRESS, deployerAccount); + + const deployment = { + bytecodeHash: zksync.utils.hashBytecode(bytecode), + newAddress: address, + callConstructor: false, + value: 0, + input: '0x' + }; + await deployerContract.forceDeployOnAddress(deployment, ethers.constants.AddressZero); + + await network.provider.request({ + method: 'hardhat_stopImpersonatingAccount', + params: [DEPLOYER_SYSTEM_CONTRACT_ADDRESS] + }); +} diff --git a/test/system-contract-test.test.ts b/test/system-contract-test.test.ts deleted file mode 100644 index 55eabef1..00000000 --- a/test/system-contract-test.test.ts +++ /dev/null @@ -1,51 +0,0 @@ -import { Wallet, utils } from "zksync-web3"; -import * as hre from "hardhat"; -import { Deployer } from "@matterlabs/hardhat-zksync-deploy"; - -import { TestSystemContract } from "../typechain-types/cache-zk/solpp-generated-contracts/test-contracts"; -import { deployContractOnAddress } from "./utils/deployOnAnyAddress"; -import { BigNumber, ethers } from "ethers"; - -const RICH_WALLET_PK = '0x7726827caac94a7f9e1b160f7ea819f172f7b6f9d2a97f992c38edeab82d4110'; - -describe('System contracts tests', function () { - // An example address where our system contracts will be put - const TEST_SYSTEM_CONTRACT_ADDRESS = '0x0000000000000000000000000000000000000101'; - let testContract: TestSystemContract; - let deployer = new Deployer(hre, new Wallet(RICH_WALLET_PK)); - - before('Prepare bootloader and system contracts', async function () { - testContract = (await deployContractOnAddress(
'TestSystemContract', - TEST_SYSTEM_CONTRACT_ADDRESS, - "0x", - deployer - )).connect(deployer.zkWallet) as TestSystemContract; - - await (await deployer.zkWallet.deposit({ - token: utils.ETH_ADDRESS, - amount: ethers.utils.parseEther('10.0') - })).wait(); - }); - - it('Test precompile call', async function () { - await testContract.testPrecompileCall(); - }) - - it('Test mimicCall and setValueForNextCall', async function () { - const whoToMimic = Wallet.createRandom().address; - const value = BigNumber.from(2).pow(128).sub(1); - await (await testContract.testMimicCallAndValue( - whoToMimic, - value - )); - }); - - it('Test onlySystemCall modifier', async function () { - await testContract.testOnlySystemModifier(); - }); - - it('Test system mimicCall', async function () { - await testContract.testSystemMimicCall(); - }); -}); diff --git a/test/utils/DiamonCutFacet.json b/test/utils/DiamonCutFacet.json deleted file mode 100644 index c973d8ec..00000000 --- a/test/utils/DiamonCutFacet.json +++ /dev/null @@ -1,295 +0,0 @@ -[ - { - "inputs": [], - "stateMutability": "nonpayable", - "type": "constructor" - }, - { - "anonymous": false, - "inputs": [ - { - "components": [ - { - "internalType": "address", - "name": "facet", - "type": "address" - }, - { - "internalType": "enum Diamond.Action", - "name": "action", - "type": "uint8" - }, - { - "internalType": "bool", - "name": "isFreezable", - "type": "bool" - }, - { - "internalType": "bytes4[]", - "name": "selectors", - "type": "bytes4[]" - } - ], - "indexed": false, - "internalType": "struct Diamond.FacetCut[]", - "name": "_facetCuts", - "type": "tuple[]" - }, - { - "indexed": false, - "internalType": "address", - "name": "_initAddress", - "type": "address" - } - ], - "name": "DiamondCutProposal", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": false, - "internalType": "uint256", - "name": "currentProposalId", - "type": "uint256" - }, - { - "indexed": true, - "internalType": "bytes32", - "name": "proposedDiamondCutHash", - "type": "bytes32" - } - ], - "name": "DiamondCutProposalCancelation", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "components": [ - { - "components": [ - { - "internalType": "address", - "name": "facet", - "type": "address" - }, - { - "internalType": "enum Diamond.Action", - "name": "action", - "type": "uint8" - }, - { - "internalType": "bool", - "name": "isFreezable", - "type": "bool" - }, - { - "internalType": "bytes4[]", - "name": "selectors", - "type": "bytes4[]" - } - ], - "internalType": "struct Diamond.FacetCut[]", - "name": "facetCuts", - "type": "tuple[]" - }, - { - "internalType": "address", - "name": "initAddress", - "type": "address" - }, - { - "internalType": "bytes", - "name": "initCalldata", - "type": "bytes" - } - ], - "indexed": false, - "internalType": "struct Diamond.DiamondCutData", - "name": "_diamondCut", - "type": "tuple" - } - ], - "name": "DiamondCutProposalExecution", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "internalType": "address", - "name": "_address", - "type": "address" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "currentProposalId", - "type": "uint256" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "securityCouncilEmergencyApprovals", - "type": "uint256" - }, - { - "indexed": true, - "internalType": "bytes32", - "name": "proposedDiamondCutHash", - "type": "bytes32" - } - ], - "name": "EmergencyDiamondCutApproved", - "type": "event" - }, - { - 
"anonymous": false, - "inputs": [], - "name": "EmergencyFreeze", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": false, - "internalType": "uint256", - "name": "lastDiamondFreezeTimestamp", - "type": "uint256" - } - ], - "name": "Unfreeze", - "type": "event" - }, - { - "inputs": [ - { - "internalType": "bytes32", - "name": "_diamondCutHash", - "type": "bytes32" - } - ], - "name": "approveEmergencyDiamondCutAsSecurityCouncilMember", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [], - "name": "cancelDiamondCutProposal", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [], - "name": "emergencyFreezeDiamond", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "components": [ - { - "components": [ - { - "internalType": "address", - "name": "facet", - "type": "address" - }, - { - "internalType": "enum Diamond.Action", - "name": "action", - "type": "uint8" - }, - { - "internalType": "bool", - "name": "isFreezable", - "type": "bool" - }, - { - "internalType": "bytes4[]", - "name": "selectors", - "type": "bytes4[]" - } - ], - "internalType": "struct Diamond.FacetCut[]", - "name": "facetCuts", - "type": "tuple[]" - }, - { - "internalType": "address", - "name": "initAddress", - "type": "address" - }, - { - "internalType": "bytes", - "name": "initCalldata", - "type": "bytes" - } - ], - "internalType": "struct Diamond.DiamondCutData", - "name": "_diamondCut", - "type": "tuple" - } - ], - "name": "executeDiamondCutProposal", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "components": [ - { - "internalType": "address", - "name": "facet", - "type": "address" - }, - { - "internalType": "enum Diamond.Action", - "name": "action", - "type": "uint8" - }, - { - "internalType": "bool", - "name": "isFreezable", - "type": "bool" - }, - { - "internalType": "bytes4[]", - "name": "selectors", - "type": "bytes4[]" - } - ], - "internalType": "struct Diamond.FacetCut[]", - "name": "_facetCuts", - "type": "tuple[]" - }, - { - "internalType": "address", - "name": "_initAddress", - "type": "address" - } - ], - "name": "proposeDiamondCut", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [], - "name": "unfreezeDiamond", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - } -] \ No newline at end of file diff --git a/test/utils/DiamondUpgradeInit.json b/test/utils/DiamondUpgradeInit.json deleted file mode 100644 index acc106bc..00000000 --- a/test/utils/DiamondUpgradeInit.json +++ /dev/null @@ -1,446 +0,0 @@ -[ - { - "anonymous": false, - "inputs": [ - { - "indexed": false, - "internalType": "uint256", - "name": "txId", - "type": "uint256" - }, - { - "indexed": false, - "internalType": "bytes32", - "name": "txHash", - "type": "bytes32" - }, - { - "indexed": false, - "internalType": "uint64", - "name": "expirationBlock", - "type": "uint64" - }, - { - "components": [ - { - "internalType": "uint256", - "name": "txType", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "from", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "to", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "gasLimit", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "gasPerPubdataByteLimit", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "maxFeePerGas", - "type": 
"uint256" - }, - { - "internalType": "uint256", - "name": "maxPriorityFeePerGas", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "paymaster", - "type": "uint256" - }, - { - "internalType": "uint256[6]", - "name": "reserved", - "type": "uint256[6]" - }, - { - "internalType": "bytes", - "name": "data", - "type": "bytes" - }, - { - "internalType": "bytes", - "name": "signature", - "type": "bytes" - }, - { - "internalType": "uint256[]", - "name": "factoryDeps", - "type": "uint256[]" - }, - { - "internalType": "bytes", - "name": "paymasterInput", - "type": "bytes" - }, - { - "internalType": "bytes", - "name": "reservedDynamic", - "type": "bytes" - } - ], - "indexed": false, - "internalType": "struct IMailbox.L2CanonicalTransaction", - "name": "transaction", - "type": "tuple" - }, - { - "indexed": false, - "internalType": "bytes[]", - "name": "factoryDeps", - "type": "bytes[]" - } - ], - "name": "NewPriorityRequest", - "type": "event" - }, - { - "inputs": [ - { - "internalType": "bytes", - "name": "_forceDeployCalldata", - "type": "bytes" - }, - { - "internalType": "bytes[]", - "name": "_factoryDeps", - "type": "bytes[]" - }, - { - "internalType": "uint256", - "name": "_gasLimit", - "type": "uint256" - } - ], - "name": "forceDeployL2Contract", - "outputs": [ - { - "internalType": "bytes32", - "name": "", - "type": "bytes32" - } - ], - "stateMutability": "payable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "", - "type": "uint256" - }, - { - "internalType": "uint32", - "name": "", - "type": "uint32" - } - ], - "name": "l2TransactionBaseCost", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "pure", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "_blockNumber", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "_index", - "type": "uint256" - }, - { - "components": [ - { - "internalType": "uint8", - "name": "l2ShardId", - "type": "uint8" - }, - { - "internalType": "bool", - "name": "isService", - "type": "bool" - }, - { - "internalType": "uint16", - "name": "txNumberInBlock", - "type": "uint16" - }, - { - "internalType": "address", - "name": "sender", - "type": "address" - }, - { - "internalType": "bytes32", - "name": "key", - "type": "bytes32" - }, - { - "internalType": "bytes32", - "name": "value", - "type": "bytes32" - } - ], - "internalType": "struct L2Log", - "name": "_log", - "type": "tuple" - }, - { - "internalType": "bytes32[]", - "name": "_proof", - "type": "bytes32[]" - } - ], - "name": "proveL2LogInclusion", - "outputs": [ - { - "internalType": "bool", - "name": "", - "type": "bool" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "_blockNumber", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "_index", - "type": "uint256" - }, - { - "components": [ - { - "internalType": "uint16", - "name": "txNumberInBlock", - "type": "uint16" - }, - { - "internalType": "address", - "name": "sender", - "type": "address" - }, - { - "internalType": "bytes", - "name": "data", - "type": "bytes" - } - ], - "internalType": "struct L2Message", - "name": "_message", - "type": "tuple" - }, - { - "internalType": "bytes32[]", - "name": "_proof", - "type": "bytes32[]" - } - ], - "name": "proveL2MessageInclusion", - "outputs": [ - { - "internalType": "bool", - "name": 
"", - "type": "bool" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "_contractL2", - "type": "address" - }, - { - "internalType": "uint256", - "name": "_l2Value", - "type": "uint256" - }, - { - "internalType": "bytes", - "name": "_calldata", - "type": "bytes" - }, - { - "internalType": "uint256", - "name": "_gasLimit", - "type": "uint256" - }, - { - "internalType": "bytes[]", - "name": "_factoryDeps", - "type": "bytes[]" - } - ], - "name": "requestL2Transaction", - "outputs": [ - { - "internalType": "bytes32", - "name": "canonicalTxHash", - "type": "bytes32" - } - ], - "stateMutability": "payable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "_txId", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "_l2Value", - "type": "uint256" - }, - { - "internalType": "address", - "name": "_sender", - "type": "address" - }, - { - "internalType": "address", - "name": "_contractAddressL2", - "type": "address" - }, - { - "internalType": "bytes", - "name": "_calldata", - "type": "bytes" - }, - { - "internalType": "uint256", - "name": "_gasLimit", - "type": "uint256" - }, - { - "internalType": "bytes[]", - "name": "_factoryDeps", - "type": "bytes[]" - } - ], - "name": "serializeL2Transaction", - "outputs": [ - { - "components": [ - { - "internalType": "uint256", - "name": "txType", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "from", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "to", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "gasLimit", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "gasPerPubdataByteLimit", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "maxFeePerGas", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "maxPriorityFeePerGas", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "paymaster", - "type": "uint256" - }, - { - "internalType": "uint256[6]", - "name": "reserved", - "type": "uint256[6]" - }, - { - "internalType": "bytes", - "name": "data", - "type": "bytes" - }, - { - "internalType": "bytes", - "name": "signature", - "type": "bytes" - }, - { - "internalType": "uint256[]", - "name": "factoryDeps", - "type": "uint256[]" - }, - { - "internalType": "bytes", - "name": "paymasterInput", - "type": "bytes" - }, - { - "internalType": "bytes", - "name": "reservedDynamic", - "type": "bytes" - } - ], - "internalType": "struct IMailbox.L2CanonicalTransaction", - "name": "", - "type": "tuple" - } - ], - "stateMutability": "pure", - "type": "function" - } -] diff --git a/test/utils/IZkSync.json b/test/utils/IZkSync.json deleted file mode 100644 index 92be1231..00000000 --- a/test/utils/IZkSync.json +++ /dev/null @@ -1,1841 +0,0 @@ -{ - "abi": [ - { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "internalType": "uint256", - "name": "blockNumber", - "type": "uint256" - }, - { - "indexed": true, - "internalType": "bytes32", - "name": "blockHash", - "type": "bytes32" - }, - { - "indexed": true, - "internalType": "bytes32", - "name": "commitment", - "type": "bytes32" - } - ], - "name": "BlockCommit", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "internalType": "uint256", - "name": "blockNumber", - "type": "uint256" - }, - { - "indexed": true, - "internalType": "bytes32", - "name": "blockHash", - "type": "bytes32" - }, - { - "indexed": true, - "internalType": 
"bytes32", - "name": "commitment", - "type": "bytes32" - } - ], - "name": "BlockExecution", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": false, - "internalType": "uint256", - "name": "totalBlocksCommitted", - "type": "uint256" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "totalBlocksVerified", - "type": "uint256" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "totalBlocksExecuted", - "type": "uint256" - } - ], - "name": "BlocksRevert", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "internalType": "uint256", - "name": "previousLastVerifiedBlock", - "type": "uint256" - }, - { - "indexed": true, - "internalType": "uint256", - "name": "currentLastVerifiedBlock", - "type": "uint256" - }, - { - "indexed": false, - "internalType": "enum IExecutor.BlockVerificationMode", - "name": "verificationMode", - "type": "uint8" - } - ], - "name": "BlocksVerification", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "components": [ - { - "internalType": "address", - "name": "facet", - "type": "address" - }, - { - "internalType": "enum Diamond.Action", - "name": "action", - "type": "uint8" - }, - { - "internalType": "bool", - "name": "isFreezable", - "type": "bool" - }, - { - "internalType": "bytes4[]", - "name": "selectors", - "type": "bytes4[]" - } - ], - "indexed": false, - "internalType": "struct Diamond.FacetCut[]", - "name": "_facetCuts", - "type": "tuple[]" - }, - { - "indexed": false, - "internalType": "address", - "name": "_initAddress", - "type": "address" - } - ], - "name": "DiamondCutProposal", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": false, - "internalType": "uint256", - "name": "currentProposalId", - "type": "uint256" - }, - { - "indexed": true, - "internalType": "bytes32", - "name": "proposedDiamondCutHash", - "type": "bytes32" - } - ], - "name": "DiamondCutProposalCancelation", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "components": [ - { - "components": [ - { - "internalType": "address", - "name": "facet", - "type": "address" - }, - { - "internalType": "enum Diamond.Action", - "name": "action", - "type": "uint8" - }, - { - "internalType": "bool", - "name": "isFreezable", - "type": "bool" - }, - { - "internalType": "bytes4[]", - "name": "selectors", - "type": "bytes4[]" - } - ], - "internalType": "struct Diamond.FacetCut[]", - "name": "facetCuts", - "type": "tuple[]" - }, - { - "internalType": "address", - "name": "initAddress", - "type": "address" - }, - { - "internalType": "bytes", - "name": "initCalldata", - "type": "bytes" - } - ], - "indexed": false, - "internalType": "struct Diamond.DiamondCutData", - "name": "_diamondCut", - "type": "tuple" - } - ], - "name": "DiamondCutProposalExecution", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "internalType": "address", - "name": "_address", - "type": "address" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "currentProposalId", - "type": "uint256" - }, - { - "indexed": false, - "internalType": "uint256", - "name": "securityCouncilEmergencyApprovals", - "type": "uint256" - }, - { - "indexed": true, - "internalType": "bytes32", - "name": "proposedDiamondCutHash", - "type": "bytes32" - } - ], - "name": "EmergencyDiamondCutApproved", - "type": "event" - }, - { - "anonymous": false, - "inputs": [], - "name": "EmergencyFreeze", - "type": "event" - }, - { - "anonymous": false, - 
"inputs": [ - { - "indexed": false, - "internalType": "bool", - "name": "isPorterAvailable", - "type": "bool" - } - ], - "name": "IsPorterAvailableStatusUpdate", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "internalType": "address", - "name": "oldGovernor", - "type": "address" - }, - { - "indexed": true, - "internalType": "address", - "name": "newGovernor", - "type": "address" - } - ], - "name": "NewGovernor", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "internalType": "bytes32", - "name": "previousBytecodeHash", - "type": "bytes32" - }, - { - "indexed": true, - "internalType": "bytes32", - "name": "newBytecodeHash", - "type": "bytes32" - } - ], - "name": "NewL2BootloaderBytecodeHash", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "internalType": "bytes32", - "name": "previousBytecodeHash", - "type": "bytes32" - }, - { - "indexed": true, - "internalType": "bytes32", - "name": "newBytecodeHash", - "type": "bytes32" - } - ], - "name": "NewL2DefaultAccountBytecodeHash", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "internalType": "address", - "name": "oldPendingGovernor", - "type": "address" - }, - { - "indexed": true, - "internalType": "address", - "name": "newPendingGovernor", - "type": "address" - } - ], - "name": "NewPendingGovernor", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": false, - "internalType": "uint256", - "name": "txId", - "type": "uint256" - }, - { - "indexed": false, - "internalType": "bytes32", - "name": "txHash", - "type": "bytes32" - }, - { - "indexed": false, - "internalType": "uint64", - "name": "expirationBlock", - "type": "uint64" - }, - { - "components": [ - { - "internalType": "uint256", - "name": "txType", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "from", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "to", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "gasLimit", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "gasPerPubdataByteLimit", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "maxFeePerGas", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "maxPriorityFeePerGas", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "paymaster", - "type": "uint256" - }, - { - "internalType": "uint256[6]", - "name": "reserved", - "type": "uint256[6]" - }, - { - "internalType": "bytes", - "name": "data", - "type": "bytes" - }, - { - "internalType": "bytes", - "name": "signature", - "type": "bytes" - }, - { - "internalType": "uint256[]", - "name": "factoryDeps", - "type": "uint256[]" - }, - { - "internalType": "bytes", - "name": "paymasterInput", - "type": "bytes" - }, - { - "internalType": "bytes", - "name": "reservedDynamic", - "type": "bytes" - } - ], - "indexed": false, - "internalType": "struct IMailbox.L2CanonicalTransaction", - "name": "transaction", - "type": "tuple" - }, - { - "indexed": false, - "internalType": "bytes[]", - "name": "factoryDeps", - "type": "bytes[]" - } - ], - "name": "NewPriorityRequest", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "internalType": "address", - "name": "oldVerifier", - "type": "address" - }, - { - "indexed": true, - "internalType": "address", - "name": "newVerifier", - "type": "address" - } - ], - "name": "NewVerifier", - "type": "event" - }, - { - 
"anonymous": false, - "inputs": [ - { - "components": [ - { - "internalType": "bytes32", - "name": "recursionNodeLevelVkHash", - "type": "bytes32" - }, - { - "internalType": "bytes32", - "name": "recursionLeafLevelVkHash", - "type": "bytes32" - }, - { - "internalType": "bytes32", - "name": "recursionCircuitsSetVksHash", - "type": "bytes32" - } - ], - "indexed": false, - "internalType": "struct VerifierParams", - "name": "oldVerifierParams", - "type": "tuple" - }, - { - "components": [ - { - "internalType": "bytes32", - "name": "recursionNodeLevelVkHash", - "type": "bytes32" - }, - { - "internalType": "bytes32", - "name": "recursionLeafLevelVkHash", - "type": "bytes32" - }, - { - "internalType": "bytes32", - "name": "recursionCircuitsSetVksHash", - "type": "bytes32" - } - ], - "indexed": false, - "internalType": "struct VerifierParams", - "name": "newVerifierParams", - "type": "tuple" - } - ], - "name": "NewVerifierParams", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": false, - "internalType": "uint256", - "name": "lastDiamondFreezeTimestamp", - "type": "uint256" - } - ], - "name": "Unfreeze", - "type": "event" - }, - { - "anonymous": false, - "inputs": [ - { - "indexed": true, - "internalType": "address", - "name": "validatorAddress", - "type": "address" - }, - { - "indexed": false, - "internalType": "bool", - "name": "isActive", - "type": "bool" - } - ], - "name": "ValidatorStatusUpdate", - "type": "event" - }, - { - "inputs": [], - "name": "acceptGovernor", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "bytes32", - "name": "_diamondCutHash", - "type": "bytes32" - } - ], - "name": "approveEmergencyDiamondCutAsSecurityCouncilMember", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [], - "name": "cancelDiamondCutProposal", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "components": [ - { - "internalType": "uint64", - "name": "blockNumber", - "type": "uint64" - }, - { - "internalType": "bytes32", - "name": "blockHash", - "type": "bytes32" - }, - { - "internalType": "uint64", - "name": "indexRepeatedStorageChanges", - "type": "uint64" - }, - { - "internalType": "uint256", - "name": "numberOfLayer1Txs", - "type": "uint256" - }, - { - "internalType": "bytes32", - "name": "priorityOperationsHash", - "type": "bytes32" - }, - { - "internalType": "bytes32", - "name": "l2LogsTreeRoot", - "type": "bytes32" - }, - { - "internalType": "uint256", - "name": "timestamp", - "type": "uint256" - }, - { - "internalType": "bytes32", - "name": "commitment", - "type": "bytes32" - } - ], - "internalType": "struct IExecutor.StoredBlockInfo", - "name": "_lastCommittedBlockData", - "type": "tuple" - }, - { - "components": [ - { - "internalType": "uint64", - "name": "blockNumber", - "type": "uint64" - }, - { - "internalType": "uint64", - "name": "timestamp", - "type": "uint64" - }, - { - "internalType": "uint64", - "name": "indexRepeatedStorageChanges", - "type": "uint64" - }, - { - "internalType": "bytes32", - "name": "newStateRoot", - "type": "bytes32" - }, - { - "internalType": "uint256", - "name": "numberOfLayer1Txs", - "type": "uint256" - }, - { - "internalType": "bytes32", - "name": "l2LogsTreeRoot", - "type": "bytes32" - }, - { - "internalType": "bytes32", - "name": "priorityOperationsHash", - "type": "bytes32" - }, - { - "internalType": "bytes", - "name": "initialStorageChanges", - "type": "bytes" - }, - 
{ - "internalType": "bytes", - "name": "repeatedStorageChanges", - "type": "bytes" - }, - { - "internalType": "bytes", - "name": "l2Logs", - "type": "bytes" - }, - { - "internalType": "bytes[]", - "name": "l2ArbitraryLengthMessages", - "type": "bytes[]" - }, - { - "internalType": "bytes[]", - "name": "factoryDeps", - "type": "bytes[]" - } - ], - "internalType": "struct IExecutor.CommitBlockInfo[]", - "name": "_newBlocksData", - "type": "tuple[]" - } - ], - "name": "commitBlocks", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [], - "name": "emergencyFreezeDiamond", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "components": [ - { - "internalType": "uint64", - "name": "blockNumber", - "type": "uint64" - }, - { - "internalType": "bytes32", - "name": "blockHash", - "type": "bytes32" - }, - { - "internalType": "uint64", - "name": "indexRepeatedStorageChanges", - "type": "uint64" - }, - { - "internalType": "uint256", - "name": "numberOfLayer1Txs", - "type": "uint256" - }, - { - "internalType": "bytes32", - "name": "priorityOperationsHash", - "type": "bytes32" - }, - { - "internalType": "bytes32", - "name": "l2LogsTreeRoot", - "type": "bytes32" - }, - { - "internalType": "uint256", - "name": "timestamp", - "type": "uint256" - }, - { - "internalType": "bytes32", - "name": "commitment", - "type": "bytes32" - } - ], - "internalType": "struct IExecutor.StoredBlockInfo[]", - "name": "_blocksData", - "type": "tuple[]" - } - ], - "name": "executeBlocks", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "components": [ - { - "components": [ - { - "internalType": "address", - "name": "facet", - "type": "address" - }, - { - "internalType": "enum Diamond.Action", - "name": "action", - "type": "uint8" - }, - { - "internalType": "bool", - "name": "isFreezable", - "type": "bool" - }, - { - "internalType": "bytes4[]", - "name": "selectors", - "type": "bytes4[]" - } - ], - "internalType": "struct Diamond.FacetCut[]", - "name": "facetCuts", - "type": "tuple[]" - }, - { - "internalType": "address", - "name": "initAddress", - "type": "address" - }, - { - "internalType": "bytes", - "name": "initCalldata", - "type": "bytes" - } - ], - "internalType": "struct Diamond.DiamondCutData", - "name": "_diamondCut", - "type": "tuple" - } - ], - "name": "executeDiamondCutProposal", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "bytes4", - "name": "_selector", - "type": "bytes4" - } - ], - "name": "facetAddress", - "outputs": [ - { - "internalType": "address", - "name": "facet", - "type": "address" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "facetAddresses", - "outputs": [ - { - "internalType": "address[]", - "name": "facets", - "type": "address[]" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "_facet", - "type": "address" - } - ], - "name": "facetFunctionSelectors", - "outputs": [ - { - "internalType": "bytes4[]", - "name": "", - "type": "bytes4[]" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "facets", - "outputs": [ - { - "components": [ - { - "internalType": "address", - "name": "addr", - "type": "address" - }, - { - "internalType": "bytes4[]", - "name": "selectors", - "type": "bytes4[]" - } - ], - "internalType": "struct 
IGetters.Facet[]", - "name": "", - "type": "tuple[]" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "getCurrentProposalId", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "getFirstUnprocessedPriorityTx", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "getGovernor", - "outputs": [ - { - "internalType": "address", - "name": "", - "type": "address" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "getLastDiamondFreezeTimestamp", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "getPendingGovernor", - "outputs": [ - { - "internalType": "address", - "name": "", - "type": "address" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "getPriorityQueueSize", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "getProposedDiamondCutHash", - "outputs": [ - { - "internalType": "bytes32", - "name": "", - "type": "bytes32" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "getProposedDiamondCutTimestamp", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "getSecurityCouncilEmergencyApprovals", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "_address", - "type": "address" - } - ], - "name": "getSecurityCouncilMemberLastApprovedProposalId", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "getTotalBlocksCommitted", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "getTotalBlocksExecuted", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "getTotalBlocksVerified", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "getTotalPriorityTxs", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "getVerifier", - "outputs": [ - { - "internalType": "address", - "name": "", - "type": "address" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "isDiamondStorageFrozen", - "outputs": [ - { - "internalType": "bool", - "name": "", - "type": "bool" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "bytes4", - "name": "_selector", - "type": "bytes4" - } - 
], - "name": "isFunctionFreezable", - "outputs": [ - { - "internalType": "bool", - "name": "", - "type": "bool" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "_address", - "type": "address" - } - ], - "name": "isSecurityCouncilMember", - "outputs": [ - { - "internalType": "bool", - "name": "", - "type": "bool" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "_address", - "type": "address" - } - ], - "name": "isValidator", - "outputs": [ - { - "internalType": "bool", - "name": "", - "type": "bool" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "_blockNumber", - "type": "uint256" - } - ], - "name": "l2LogsRootHash", - "outputs": [ - { - "internalType": "bytes32", - "name": "hash", - "type": "bytes32" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "_gasPrice", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "_gasLimit", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "_gasPricePerPubdata", - "type": "uint256" - } - ], - "name": "l2TransactionBaseCost", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "priorityQueueFrontOperation", - "outputs": [ - { - "components": [ - { - "internalType": "bytes32", - "name": "canonicalTxHash", - "type": "bytes32" - }, - { - "internalType": "uint64", - "name": "expirationBlock", - "type": "uint64" - }, - { - "internalType": "uint192", - "name": "layer2Tip", - "type": "uint192" - } - ], - "internalType": "struct PriorityOperation", - "name": "", - "type": "tuple" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "components": [ - { - "internalType": "address", - "name": "facet", - "type": "address" - }, - { - "internalType": "enum Diamond.Action", - "name": "action", - "type": "uint8" - }, - { - "internalType": "bool", - "name": "isFreezable", - "type": "bool" - }, - { - "internalType": "bytes4[]", - "name": "selectors", - "type": "bytes4[]" - } - ], - "internalType": "struct Diamond.FacetCut[]", - "name": "_facetCuts", - "type": "tuple[]" - }, - { - "internalType": "address", - "name": "_initAddress", - "type": "address" - } - ], - "name": "proposeDiamondCut", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "components": [ - { - "internalType": "uint64", - "name": "blockNumber", - "type": "uint64" - }, - { - "internalType": "bytes32", - "name": "blockHash", - "type": "bytes32" - }, - { - "internalType": "uint64", - "name": "indexRepeatedStorageChanges", - "type": "uint64" - }, - { - "internalType": "uint256", - "name": "numberOfLayer1Txs", - "type": "uint256" - }, - { - "internalType": "bytes32", - "name": "priorityOperationsHash", - "type": "bytes32" - }, - { - "internalType": "bytes32", - "name": "l2LogsTreeRoot", - "type": "bytes32" - }, - { - "internalType": "uint256", - "name": "timestamp", - "type": "uint256" - }, - { - "internalType": "bytes32", - "name": "commitment", - "type": "bytes32" - } - ], - "internalType": "struct IExecutor.StoredBlockInfo", - "name": "_prevBlock", - "type": "tuple" - }, - { - "components": [ - { - "internalType": "uint64", - "name": "blockNumber", - "type": "uint64" 
- }, - { - "internalType": "bytes32", - "name": "blockHash", - "type": "bytes32" - }, - { - "internalType": "uint64", - "name": "indexRepeatedStorageChanges", - "type": "uint64" - }, - { - "internalType": "uint256", - "name": "numberOfLayer1Txs", - "type": "uint256" - }, - { - "internalType": "bytes32", - "name": "priorityOperationsHash", - "type": "bytes32" - }, - { - "internalType": "bytes32", - "name": "l2LogsTreeRoot", - "type": "bytes32" - }, - { - "internalType": "uint256", - "name": "timestamp", - "type": "uint256" - }, - { - "internalType": "bytes32", - "name": "commitment", - "type": "bytes32" - } - ], - "internalType": "struct IExecutor.StoredBlockInfo[]", - "name": "_committedBlocks", - "type": "tuple[]" - }, - { - "components": [ - { - "internalType": "uint256[]", - "name": "recurisiveAggregationInput", - "type": "uint256[]" - }, - { - "internalType": "uint256[]", - "name": "serializedProof", - "type": "uint256[]" - } - ], - "internalType": "struct IExecutor.ProofInput", - "name": "_proof", - "type": "tuple" - }, - { - "internalType": "enum IExecutor.BlockVerificationMode", - "name": "_verificationMode", - "type": "uint8" - } - ], - "name": "proveBlocks", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "_blockNumber", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "_index", - "type": "uint256" - }, - { - "components": [ - { - "internalType": "uint8", - "name": "l2ShardId", - "type": "uint8" - }, - { - "internalType": "bool", - "name": "isService", - "type": "bool" - }, - { - "internalType": "uint16", - "name": "txNumberInBlock", - "type": "uint16" - }, - { - "internalType": "address", - "name": "sender", - "type": "address" - }, - { - "internalType": "bytes32", - "name": "key", - "type": "bytes32" - }, - { - "internalType": "bytes32", - "name": "value", - "type": "bytes32" - } - ], - "internalType": "struct L2Log", - "name": "_log", - "type": "tuple" - }, - { - "internalType": "bytes32[]", - "name": "_proof", - "type": "bytes32[]" - } - ], - "name": "proveL2LogInclusion", - "outputs": [ - { - "internalType": "bool", - "name": "", - "type": "bool" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "_blockNumber", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "_index", - "type": "uint256" - }, - { - "components": [ - { - "internalType": "uint16", - "name": "txNumberInBlock", - "type": "uint16" - }, - { - "internalType": "address", - "name": "sender", - "type": "address" - }, - { - "internalType": "bytes", - "name": "data", - "type": "bytes" - } - ], - "internalType": "struct L2Message", - "name": "_message", - "type": "tuple" - }, - { - "internalType": "bytes32[]", - "name": "_proof", - "type": "bytes32[]" - } - ], - "name": "proveL2MessageInclusion", - "outputs": [ - { - "internalType": "bool", - "name": "", - "type": "bool" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "_contractL2", - "type": "address" - }, - { - "internalType": "uint256", - "name": "_l2Value", - "type": "uint256" - }, - { - "internalType": "bytes", - "name": "_calldata", - "type": "bytes" - }, - { - "internalType": "uint256", - "name": "_gasLimit", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "_gasPricePerPubdata", - "type": "uint256" - }, - { - "internalType": "bytes[]", - "name": "_factoryDeps", - 
"type": "bytes[]" - } - ], - "name": "requestL2Transaction", - "outputs": [ - { - "internalType": "bytes32", - "name": "canonicalTxHash", - "type": "bytes32" - } - ], - "stateMutability": "payable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "_newLastBlock", - "type": "uint256" - } - ], - "name": "revertBlocks", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "_txId", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "_l2Value", - "type": "uint256" - }, - { - "internalType": "address", - "name": "_sender", - "type": "address" - }, - { - "internalType": "address", - "name": "_contractAddressL2", - "type": "address" - }, - { - "internalType": "bytes", - "name": "_calldata", - "type": "bytes" - }, - { - "internalType": "uint256", - "name": "_gasLimit", - "type": "uint256" - }, - { - "internalType": "bytes[]", - "name": "_factoryDeps", - "type": "bytes[]" - } - ], - "name": "serializeL2Transaction", - "outputs": [ - { - "components": [ - { - "internalType": "uint256", - "name": "txType", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "from", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "to", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "gasLimit", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "gasPerPubdataByteLimit", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "maxFeePerGas", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "maxPriorityFeePerGas", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "paymaster", - "type": "uint256" - }, - { - "internalType": "uint256[6]", - "name": "reserved", - "type": "uint256[6]" - }, - { - "internalType": "bytes", - "name": "data", - "type": "bytes" - }, - { - "internalType": "bytes", - "name": "signature", - "type": "bytes" - }, - { - "internalType": "uint256[]", - "name": "factoryDeps", - "type": "uint256[]" - }, - { - "internalType": "bytes", - "name": "paymasterInput", - "type": "bytes" - }, - { - "internalType": "bytes", - "name": "reservedDynamic", - "type": "bytes" - } - ], - "internalType": "struct IMailbox.L2CanonicalTransaction", - "name": "", - "type": "tuple" - } - ], - "stateMutability": "pure", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "bytes32", - "name": "_l2BootloaderBytecodeHash", - "type": "bytes32" - } - ], - "name": "setL2BootloaderBytecodeHash", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "bytes32", - "name": "_l2DefaultAccountBytecodeHash", - "type": "bytes32" - } - ], - "name": "setL2DefaultAccountBytecodeHash", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "_newPendingGovernor", - "type": "address" - } - ], - "name": "setPendingGovernor", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "bool", - "name": "_isPorterAvailable", - "type": "bool" - } - ], - "name": "setPorterAvailability", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "_validator", - "type": "address" - }, - { - "internalType": "bool", - "name": "_active", - "type": "bool" - } - ], - "name": "setValidator", - "outputs": [], - 
"stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "contract Verifier", - "name": "_newVerifier", - "type": "address" - } - ], - "name": "setVerifier", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "components": [ - { - "internalType": "bytes32", - "name": "recursionNodeLevelVkHash", - "type": "bytes32" - }, - { - "internalType": "bytes32", - "name": "recursionLeafLevelVkHash", - "type": "bytes32" - }, - { - "internalType": "bytes32", - "name": "recursionCircuitsSetVksHash", - "type": "bytes32" - } - ], - "internalType": "struct VerifierParams", - "name": "_newVerifierParams", - "type": "tuple" - } - ], - "name": "setVerifierParams", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "_blockNumber", - "type": "uint256" - } - ], - "name": "storedBlockHash", - "outputs": [ - { - "internalType": "bytes32", - "name": "", - "type": "bytes32" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "unfreezeDiamond", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - } - ] -} diff --git a/test/utils/deployOnAnyAddress.ts b/test/utils/deployOnAnyAddress.ts deleted file mode 100644 index b155e301..00000000 --- a/test/utils/deployOnAnyAddress.ts +++ /dev/null @@ -1,141 +0,0 @@ -import { BigNumber, BytesLike, Contract } from 'ethers'; -import { ethers } from 'ethers'; -import { Provider, types, utils } from 'zksync-web3'; -import { Deployer } from '@matterlabs/hardhat-zksync-deploy'; -import { hashBytecode } from 'zksync-web3/build/src/utils'; -import { expect } from 'chai'; -import * as hre from 'hardhat'; - -const DIAMOND_UPGRADE_INIT_ABI = new ethers.utils.Interface(require('./DiamondUpgradeInit.json')); -const DIAMOND_CUT_FACET_ABI = new ethers.utils.Interface(require('./DiamonCutFacet.json')); -const CONTRACT_DEPLOYER_INTERFACE = new ethers.utils.Interface(hre.artifacts.readArtifactSync('ContractDeployer').abi); -const ZKSYNC_INTERFACE = new ethers.utils.Interface(require('./IZkSync.json')); - -const DEFAULT_GAS_LIMIT = 60000000; -const DIAMOND_UPGRADE_INIT_ADDRESS = '0x2CaF2C21Fa1f6d3180Eb23A0D821c0d9B4cf0553'; - -export interface ForceDeployment { - // The bytecode hash to put on an address - bytecodeHash: BytesLike; - // The address on which to deploy the bytecodehash to - newAddress: string; - // The value with which to initialize a contract - value: BigNumber; - // The constructor calldata - input: BytesLike; -} - -export function diamondCut(facetCuts: any[], initAddress: string, initCalldata: string): any { - return { - facetCuts, - initAddress, - initCalldata - }; -} - -// The same mnemonic as in the etc/test_config/eth.json -const LOCAL_GOV_MNEMONIC = 'fine music test violin matrix prize squirrel panther purchase material script deal'; - -export async function deployOnAnyLocalAddress( - ethProvider: ethers.providers.Provider, - l2Provider: Provider, - deployments: ForceDeployment[], - factoryDeps: BytesLike[] -): Promise { - const govWallet = ethers.Wallet.fromMnemonic( - LOCAL_GOV_MNEMONIC, - "m/44'/60'/0'/0/1" - ).connect(ethProvider); - - const zkSyncContract = await l2Provider.getMainContractAddress(); - - const zkSync = new ethers.Contract( - zkSyncContract, - ZKSYNC_INTERFACE, - govWallet - ); - if(!(await zkSync.getProposedDiamondCutTimestamp()).eq(0)) { - await zkSync.cancelDiamondCutProposal(); - } - - // Encode data for the 
upgrade call - const encodedParams = CONTRACT_DEPLOYER_INTERFACE.encodeFunctionData('forceDeployOnAddresses', [ - deployments - ]); - - // Prepare the diamond cut data - const upgradeInitData = DIAMOND_UPGRADE_INIT_ABI.encodeFunctionData('forceDeployL2Contract', [ - encodedParams, - factoryDeps, - DEFAULT_GAS_LIMIT - ]); - - const upgradeParam = diamondCut([], DIAMOND_UPGRADE_INIT_ADDRESS, upgradeInitData); - - // Get transaction data of the `proposeDiamondCut` - const proposeDiamondCut = DIAMOND_CUT_FACET_ABI.encodeFunctionData('proposeDiamondCut', [ - upgradeParam.facetCuts, - upgradeParam.initAddress - ]); - - // Get transaction data of the `executeDiamondCutProposal` - const executeDiamondCutProposal = DIAMOND_CUT_FACET_ABI.encodeFunctionData( - 'executeDiamondCutProposal', - [upgradeParam] - ); - - // Proposing the upgrade - await (await govWallet.sendTransaction({ - to: zkSyncContract, - data: proposeDiamondCut, - gasLimit: BigNumber.from(10000000) - })).wait(); - - const receipt = await (await govWallet.sendTransaction({ - to: zkSyncContract, - data: executeDiamondCutProposal, - gasLimit: BigNumber.from(10000000) - })).wait(); - - return utils.getL2HashFromPriorityOp(receipt, zkSyncContract); -} - -export async function deployContractOnAddress( - name: string, - address: string, - input: BytesLike, - deployer: Deployer, -): Promise { - const artifact = await deployer.loadArtifact(name); - const bytecodeHash = hashBytecode(artifact.bytecode); - - const factoryDeps = [ - artifact.bytecode, - ...await deployer.extractFactoryDeps(artifact) - ]; - - const deployment: ForceDeployment = { - bytecodeHash, - newAddress: address, - value: BigNumber.from(0), - input - }; - - const txHash = await deployOnAnyLocalAddress( - deployer.ethWallet.provider, - deployer.zkWallet.provider, - [deployment], - factoryDeps - ) - - const receipt = await deployer.zkWallet.provider.waitForTransaction(txHash); - - expect(receipt.status, 'Contract deployment failed').to.eq(1); - - return new ethers.Contract( - address, - artifact.abi, - deployer.zkWallet.provider - ); -} - diff --git a/yarn.lock b/yarn.lock index bb0bda9e..9adafa56 100644 --- a/yarn.lock +++ b/yarn.lock @@ -387,21 +387,31 @@ "@jridgewell/resolve-uri" "^3.0.3" "@jridgewell/sourcemap-codec" "^1.4.10" -"@matterlabs/hardhat-zksync-deploy@^0.6.3": - version "0.6.3" - resolved "https://registry.yarnpkg.com/@matterlabs/hardhat-zksync-deploy/-/hardhat-zksync-deploy-0.6.3.tgz#833b208373e7037bf43671054328d82511444e2a" - integrity sha512-FB+2xFL/80JJwlGna+aHA6dk4ONrMFqThTZATYVJUAKooA0Aw5qmpmM8B3qsNB4LLzHSO/EmVrHIcLaPv8hYwQ== +"@matterlabs/hardhat-zksync-chai-matchers@^0.1.4": + version "0.1.4" + resolved "https://registry.yarnpkg.com/@matterlabs/hardhat-zksync-chai-matchers/-/hardhat-zksync-chai-matchers-0.1.4.tgz#105cb0ec1367c8fcd3ce7e3773f747c71fff675b" + integrity sha512-eGQWiImg51fmayoQ7smIK/T6QZkSu38PK7xjp1RIrewGzw2ZgqFWGp40jb5oomkf8yOQPk52Hu4TwE3Ntp8CtA== + +"@matterlabs/hardhat-zksync-deploy@^0.6.5": + version "0.6.5" + resolved "https://registry.yarnpkg.com/@matterlabs/hardhat-zksync-deploy/-/hardhat-zksync-deploy-0.6.5.tgz#fe56bf30850e71c8d328ac1a06a100c1a0af6e3e" + integrity sha512-EZpvn8pDslfO3UA2obT8FOi5jsHhxYS5ndIR7tjL2zXKbvkbpoJR5rgKoGTJJm0riaCud674sQcxMOybVQ+2gg== dependencies: + "@matterlabs/hardhat-zksync-solc" "0.4.2" chalk "4.1.2" + ts-morph "^19.0.0" -"@matterlabs/hardhat-zksync-solc@^0.3.15": - version "0.3.16" - resolved 
"https://registry.yarnpkg.com/@matterlabs/hardhat-zksync-solc/-/hardhat-zksync-solc-0.3.16.tgz#dd8ed44f1a580f282794a15fee995f418b040158" - integrity sha512-gw46yyiCfj49I/nbUcOlnF5xE80WyeW/i8i9ouHom4KWJNt1kioQIwOPkN7aJURhXpJJxKSdeWBrQHLWTZDnTA== +"@matterlabs/hardhat-zksync-solc@0.4.2", "@matterlabs/hardhat-zksync-solc@^0.4.2": + version "0.4.2" + resolved "https://registry.yarnpkg.com/@matterlabs/hardhat-zksync-solc/-/hardhat-zksync-solc-0.4.2.tgz#64121082e88c5ab22eb4e9594d120e504f6af499" + integrity sha512-6NFWPSZiOAoo7wNuhMg4ztj7mMEH+tLrx09WuCbcURrHPijj/KxYNsJD6Uw5lapKr7G8H7SQISGid1/MTXVmXQ== dependencies: "@nomiclabs/hardhat-docker" "^2.0.0" chalk "4.1.2" dockerode "^3.3.4" + fs-extra "^11.1.1" + proper-lockfile "^4.1.2" + semver "^7.5.1" "@metamask/eth-sig-util@^4.0.0": version "4.0.1" @@ -429,6 +439,27 @@ resolved "https://registry.yarnpkg.com/@noble/secp256k1/-/secp256k1-1.6.3.tgz#7eed12d9f4404b416999d0c87686836c4c5c9b94" integrity sha512-T04e4iTurVy7I8Sw4+c5OSN9/RkPlo1uKxAomtxQNLq8j1uPAqnsqG1bqvY3Jv7c13gyr6dui0zmh/I3+f/JaQ== +"@nodelib/fs.scandir@2.1.5": + version "2.1.5" + resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5" + integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== + dependencies: + "@nodelib/fs.stat" "2.0.5" + run-parallel "^1.1.9" + +"@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": + version "2.0.5" + resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b" + integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== + +"@nodelib/fs.walk@^1.2.3": + version "1.2.8" + resolved "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a" + integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg== + dependencies: + "@nodelib/fs.scandir" "2.1.5" + fastq "^1.6.0" + "@nomicfoundation/ethereumjs-block@^4.0.0": version "4.0.0" resolved "https://registry.yarnpkg.com/@nomicfoundation/ethereumjs-block/-/ethereumjs-block-4.0.0.tgz#fdd5c045e7baa5169abeed0e1202bf94e4481c49" @@ -561,6 +592,17 @@ mcl-wasm "^0.7.1" rustbn.js "~0.2.0" +"@nomicfoundation/hardhat-chai-matchers@^1.0.3": + version "1.0.6" + resolved "https://registry.yarnpkg.com/@nomicfoundation/hardhat-chai-matchers/-/hardhat-chai-matchers-1.0.6.tgz#72a2e312e1504ee5dd73fe302932736432ba96bc" + integrity sha512-f5ZMNmabZeZegEfuxn/0kW+mm7+yV7VNDxLpMOMGXWFJ2l/Ct3QShujzDRF9cOkK9Ui/hbDeOWGZqyQALDXVCQ== + dependencies: + "@ethersproject/abi" "^5.1.2" + "@types/chai-as-promised" "^7.1.3" + chai-as-promised "^7.1.1" + deep-eql "^4.0.1" + ordinal "^1.0.3" + "@nomicfoundation/solidity-analyzer-darwin-arm64@0.0.3": version "0.0.3" resolved "https://registry.yarnpkg.com/@nomicfoundation/solidity-analyzer-darwin-arm64/-/solidity-analyzer-darwin-arm64-0.0.3.tgz#1d49e4ac028831a3011a9f3dca60bd1963185342" @@ -746,6 +788,16 @@ dependencies: antlr4ts "^0.5.0-alpha.4" +"@ts-morph/common@~0.20.0": + version "0.20.0" + resolved "https://registry.yarnpkg.com/@ts-morph/common/-/common-0.20.0.tgz#3f161996b085ba4519731e4d24c35f6cba5b80af" + integrity sha512-7uKjByfbPpwuzkstL3L5MQyuXPSKdoNG93Fmi2JoDcTf3pEP731JdRFAduRVkOs8oqxPsXKA+ScrWkdQ8t/I+Q== + dependencies: + fast-glob "^3.2.12" + minimatch "^7.4.3" + mkdirp "^2.1.6" + path-browserify "^1.0.1" + "@tsconfig/node10@^1.0.7": version "1.0.9" resolved 
"https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.9.tgz#df4907fc07a886922637b15e02d4cebc4c0021b2" @@ -767,13 +819,20 @@ integrity sha512-yOlFc+7UtL/89t2ZhjPvvB/DeAr3r+Dq58IgzsFkOAvVC6NMJXmCGjbptdXdR9qsX7pKcTL+s87FtYREi2dEEQ== "@typechain/ethers-v5@^10.0.0": - version "10.1.0" - resolved "https://registry.yarnpkg.com/@typechain/ethers-v5/-/ethers-v5-10.1.0.tgz#068d7dc7014502354696dab59590a7841091e951" - integrity sha512-3LIb+eUpV3mNCrjUKT5oqp8PBsZYSnVrkfk6pY/ZM0boRs2mKxjFZ7bktx42vfDye8PPz3NxtW4DL5NsNsFqlg== + version "10.2.1" + resolved "https://registry.yarnpkg.com/@typechain/ethers-v5/-/ethers-v5-10.2.1.tgz#50241e6957683281ecfa03fb5a6724d8a3ce2391" + integrity sha512-n3tQmCZjRE6IU4h6lqUGiQ1j866n5MTCBJreNEHHVWXa2u9GJTaeYyU1/k+1qLutkyw+sS6VAN+AbeiTqsxd/A== dependencies: lodash "^4.17.15" ts-essentials "^7.0.1" +"@typechain/hardhat@^7.0.0": + version "7.0.0" + resolved "https://registry.yarnpkg.com/@typechain/hardhat/-/hardhat-7.0.0.tgz#ffa7465328150e793007fee616ae7b76ed20784d" + integrity sha512-XB79i5ewg9Met7gMVGfgVkmypicbnI25T5clJBEooMoW2161p4zvKFpoS2O+lBppQyMrPIZkdvl2M3LMDayVcA== + dependencies: + fs-extra "^9.1.0" + "@types/async-eventemitter@^0.2.1": version "0.2.1" resolved "https://registry.yarnpkg.com/@types/async-eventemitter/-/async-eventemitter-0.2.1.tgz#f8e6280e87e8c60b2b938624b0a3530fb3e24712" @@ -793,6 +852,18 @@ dependencies: "@types/node" "*" +"@types/chai-as-promised@^7.1.3": + version "7.1.6" + resolved "https://registry.yarnpkg.com/@types/chai-as-promised/-/chai-as-promised-7.1.6.tgz#3b08cbe1e7206567a480dc6538bade374b19e4e1" + integrity sha512-cQLhk8fFarRVZAXUQV1xEnZgMoPxqKojBvRkqPCKPQCzEhpbbSKl1Uu75kDng7k5Ln6LQLUmNBjLlFthCgm1NA== + dependencies: + "@types/chai" "*" + +"@types/chai@*": + version "4.3.6" + resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.3.6.tgz#7b489e8baf393d5dd1266fb203ddd4ea941259e6" + integrity sha512-VOVRLM1mBxIRxydiViqPcKn6MIxZytrbMpd6RJLIWKxUNr3zux8no0Oc7kJx0WAPIitgZ0gkrDS+btlqQpubpw== + "@types/chai@^4.3.1": version "4.3.3" resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.3.3.tgz#3c90752792660c4b562ad73b3fbd68bf3bc7ae07" @@ -1024,6 +1095,11 @@ async@^2.4.0: dependencies: lodash "^4.17.14" +at-least-node@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/at-least-node/-/at-least-node-1.0.0.tgz#602cd4b46e844ad4effc92a8011a3c46e0238dc2" + integrity sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg== + axios@^0.21.1: version "0.21.4" resolved "https://registry.yarnpkg.com/axios/-/axios-0.21.4.tgz#c67b90dc0568e5c1cf2b0b858c43ba28e2eda575" @@ -1132,7 +1208,7 @@ brace-expansion@^2.0.1: dependencies: balanced-match "^1.0.0" -braces@~3.0.2: +braces@^3.0.2, braces@~3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== @@ -1266,6 +1342,13 @@ catering@^2.1.0, catering@^2.1.1: resolved "https://registry.yarnpkg.com/catering/-/catering-2.1.1.tgz#66acba06ed5ee28d5286133982a927de9a04b510" integrity sha512-K7Qy8O9p76sL3/3m7/zLKbRkyOlSZAgzEaLhyj2mXS8PsCud2Eo4hAb8aLtZqHh0QGqLcb9dlJSu6lHRVENm1w== +chai-as-promised@^7.1.1: + version "7.1.1" + resolved "https://registry.yarnpkg.com/chai-as-promised/-/chai-as-promised-7.1.1.tgz#08645d825deb8696ee61725dbf590c012eb00ca0" + integrity sha512-azL6xMoi+uxu6z4rhWQ1jbdUhOMhis2PvscD/xjLqNMkv3BPPp2JyyuTHOrf9BOosGpNQ11v6BKv/g57RXbiaA== + dependencies: + 
check-error "^1.0.2" + chai@^4.3.6: version "4.3.6" resolved "https://registry.yarnpkg.com/chai/-/chai-4.3.6.tgz#ffe4ba2d9fa9d6680cc0b370adae709ec9011e9c" @@ -1359,6 +1442,11 @@ cliui@^7.0.2: strip-ansi "^6.0.0" wrap-ansi "^7.0.0" +code-block-writer@^12.0.0: + version "12.0.0" + resolved "https://registry.yarnpkg.com/code-block-writer/-/code-block-writer-12.0.0.tgz#4dd58946eb4234105aff7f0035977b2afdc2a770" + integrity sha512-q4dMFMlXtKR3XNBHyMHt/3pwYNA69EDk00lloMOaaUMKPUXBw6lpXtbu3MMVG6/uOihGnRDOlkyqsONEUj60+w== + color-convert@^1.9.0: version "1.9.3" resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" @@ -1520,6 +1608,13 @@ deep-eql@^3.0.1: dependencies: type-detect "^4.0.0" +deep-eql@^4.0.1: + version "4.1.3" + resolved "https://registry.yarnpkg.com/deep-eql/-/deep-eql-4.1.3.tgz#7c7775513092f7df98d8df9996dd085eb668cc6d" + integrity sha512-WaEtAOpRA1MQ0eohqZjpGD8zdI0Ovsm8mmFhaDN8dvDZzyoUMcYDnf5Y6iu7HTXxf8JDS23qWa4a+hKCDyOPzw== + dependencies: + type-detect "^4.0.0" + deep-extend@~0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac" @@ -1744,6 +1839,24 @@ evp_bytestokey@^1.0.3: md5.js "^1.3.4" safe-buffer "^5.1.1" +fast-glob@^3.2.12: + version "3.3.1" + resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.3.1.tgz#784b4e897340f3dbbef17413b3f11acf03c874c4" + integrity sha512-kNFPyjhh5cKjrUltxs+wFx+ZkbRaxxmZ+X0ZU31SOsxCEtP9VPgtq2teZw1DebupL5GmDaNQ6yKMMVcM41iqDg== + dependencies: + "@nodelib/fs.stat" "^2.0.2" + "@nodelib/fs.walk" "^1.2.3" + glob-parent "^5.1.2" + merge2 "^1.3.0" + micromatch "^4.0.4" + +fastq@^1.6.0: + version "1.15.0" + resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.15.0.tgz#d04d07c6a2a68fe4599fea8d2e103a937fae6b3a" + integrity sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw== + dependencies: + reusify "^1.0.4" + fill-range@^7.0.1: version "7.0.1" resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" @@ -1809,6 +1922,15 @@ fs-extra@^0.30.0: path-is-absolute "^1.0.0" rimraf "^2.2.8" +fs-extra@^11.1.1: + version "11.1.1" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-11.1.1.tgz#da69f7c39f3b002378b0954bb6ae7efdc0876e2d" + integrity sha512-MGIE4HOvQCeUCzmlHs0vXpih4ysz4wg9qiSAu6cd42lVwPbTM1TjV7RusoyQqMmk/95gdQZX72u+YW+c3eEpFQ== + dependencies: + graceful-fs "^4.2.0" + jsonfile "^6.0.1" + universalify "^2.0.0" + fs-extra@^7.0.0, fs-extra@^7.0.1: version "7.0.1" resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-7.0.1.tgz#4f189c44aa123b895f722804f55ea23eadc348e9" @@ -1818,6 +1940,16 @@ fs-extra@^7.0.0, fs-extra@^7.0.1: jsonfile "^4.0.0" universalify "^0.1.0" +fs-extra@^9.1.0: + version "9.1.0" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-9.1.0.tgz#5954460c764a8da2094ba3554bf839e6b9a7c86d" + integrity sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ== + dependencies: + at-least-node "^1.0.0" + graceful-fs "^4.2.0" + jsonfile "^6.0.1" + universalify "^2.0.0" + fs.realpath@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" @@ -1857,7 +1989,7 @@ get-intrinsic@^1.0.2: has "^1.0.3" has-symbols "^1.0.3" -glob-parent@~5.1.2: +glob-parent@^5.1.2, glob-parent@~5.1.2: version "5.1.2" resolved 
"https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== @@ -1905,10 +2037,10 @@ graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.1.9: resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c" integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== -hardhat-typechain@^0.3.5: - version "0.3.5" - resolved "https://registry.yarnpkg.com/hardhat-typechain/-/hardhat-typechain-0.3.5.tgz#8e50616a9da348b33bd001168c8fda9c66b7b4af" - integrity sha512-w9lm8sxqTJACY+V7vijiH+NkPExnmtiQEjsV9JKD1KgMdVk2q8y+RhvU/c4B7+7b1+HylRUCxpOIvFuB3rE4+w== +graceful-fs@^4.2.0, graceful-fs@^4.2.4: + version "4.2.11" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" + integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== hardhat@^2.11.0: version "2.11.2" @@ -2172,6 +2304,15 @@ jsonfile@^4.0.0: optionalDependencies: graceful-fs "^4.1.6" +jsonfile@^6.0.1: + version "6.1.0" + resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-6.1.0.tgz#bc55b2634793c679ec6403094eb13698a6ec0aae" + integrity sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ== + dependencies: + universalify "^2.0.0" + optionalDependencies: + graceful-fs "^4.1.6" + jsonparse@^1.2.0: version "1.3.1" resolved "https://registry.yarnpkg.com/jsonparse/-/jsonparse-1.3.1.tgz#3f4dae4a91fac315f71062f8521cc239f1366280" @@ -2306,6 +2447,19 @@ memorystream@^0.3.1: resolved "https://registry.yarnpkg.com/memorystream/-/memorystream-0.3.1.tgz#86d7090b30ce455d63fbae12dda51a47ddcaf9b2" integrity sha512-S3UwM3yj5mtUSEfP41UZmt/0SCoVYUcU1rkXv+BQ5Ig8ndL4sPoJNBUJERafdPb5jjHJGuMgytgKvKIf58XNBw== +merge2@^1.3.0: + version "1.4.1" + resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" + integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== + +micromatch@^4.0.4: + version "4.0.5" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" + integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA== + dependencies: + braces "^3.0.2" + picomatch "^2.3.1" + minimalistic-assert@^1.0.0, minimalistic-assert@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" @@ -2330,6 +2484,13 @@ minimatch@^3.0.4, minimatch@^3.1.1: dependencies: brace-expansion "^1.1.7" +minimatch@^7.4.3: + version "7.4.6" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-7.4.6.tgz#845d6f254d8f4a5e4fd6baf44d5f10c8448365fb" + integrity sha512-sBz8G/YjVniEz6lKPNpKxXwazJe4c19fEfV2GDMX6AjFz+MX9uDWIZW8XreVhkFW3fkIdTv/gxWr/Kks5FFAVw== + dependencies: + brace-expansion "^2.0.1" + minimist@^1.2.6: version "1.2.6" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44" @@ -2352,6 +2513,11 @@ mkdirp@^1.0.4: resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== 
+mkdirp@^2.1.6: + version "2.1.6" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-2.1.6.tgz#964fbcb12b2d8c5d6fbc62a963ac95a273e2cc19" + integrity sha512-+hEnITedc8LAtIP9u3HJDFIdcLV2vXP33sqLLIzkv1Db1zO/1OxbvYf0Y1OC/S/Qo5dxHXepofhmxL02PsKe+A== + mnemonist@^0.38.0: version "0.38.5" resolved "https://registry.yarnpkg.com/mnemonist/-/mnemonist-0.38.5.tgz#4adc7f4200491237fe0fa689ac0b86539685cade" @@ -2470,6 +2636,11 @@ once@^1.3.0, once@^1.3.1, once@^1.4.0: dependencies: wrappy "1" +ordinal@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/ordinal/-/ordinal-1.0.3.tgz#1a3c7726a61728112f50944ad7c35c06ae3a0d4d" + integrity sha512-cMddMgb2QElm8G7vdaa02jhUNbTSrhsgAGUz1OokD83uJTwSUn+nKoNoKVVaRa08yF6sgfO7Maou1+bgLd9rdQ== + os-tmpdir@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" @@ -2522,6 +2693,11 @@ p-try@^1.0.0: resolved "https://registry.yarnpkg.com/p-try/-/p-try-1.0.0.tgz#cbc79cdbaf8fd4228e13f621f2b1a237c1b207b3" integrity sha512-U1etNYuMJoIz3ZXSrrySFjsXQTWOx2/jdi86L+2pRvph/qMKL6sbcCYdH23fqsbm8TH2Gn0OybpT4eSFlCVHww== +path-browserify@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/path-browserify/-/path-browserify-1.0.1.tgz#d98454a9c3753d5790860f16f68867b9e46be1fd" + integrity sha512-b7uo2UCUOYZcnF/3ID0lulOJi/bafxa1xPe7ZPsammBSpjSWQkjNxlt635YGS2MiR9GjvuXCtz2emr3jbsz98g== + path-exists@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" @@ -2563,7 +2739,7 @@ pbkdf2@^3.0.17: safe-buffer "^5.0.1" sha.js "^2.4.8" -picomatch@^2.0.4, picomatch@^2.2.1: +picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.3.1: version "2.3.1" resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== @@ -2587,16 +2763,30 @@ prettier-plugin-solidity@^1.0.0-alpha.27: solidity-comments-extractor "^0.0.7" string-width "^4.2.3" -prettier@^2.1.2, prettier@^2.3.0, prettier@^2.3.1: +prettier@^2.1.2, prettier@^2.3.0: version "2.7.1" resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.7.1.tgz#e235806850d057f97bb08368a4f7d899f7760c64" integrity sha512-ujppO+MkdPqoVINuDFDRLClm7D78qbDt0/NR+wp5FqEZOoTNAjPHWj17QRhu7geIHJfcNhRk1XVQmF8Bp3ye+g== +prettier@^2.3.1: + version "2.8.8" + resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.8.8.tgz#e8c5d7e98a4305ffe3de2e1fc4aca1a71c28b1da" + integrity sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q== + process-nextick-args@~2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== +proper-lockfile@^4.1.2: + version "4.1.2" + resolved "https://registry.yarnpkg.com/proper-lockfile/-/proper-lockfile-4.1.2.tgz#c8b9de2af6b2f1601067f98e01ac66baa223141f" + integrity sha512-TjNPblN4BwAWMXU8s9AEz4JmQxnD1NNL7bNOY/AKUzyamc379FWASUhc/K1pL2noVb+XmZKLL68cjzLsiOAMaA== + dependencies: + graceful-fs "^4.2.4" + retry "^0.12.0" + signal-exit "^3.0.2" + pump@^1.0.0: version "1.0.3" resolved "https://registry.yarnpkg.com/pump/-/pump-1.0.3.tgz#5dfe8311c33bbf6fc18261f9f34702c47c08a954" @@ -2712,6 +2902,16 @@ resolve@^1.10.0, resolve@^1.8.1: path-parse "^1.0.7" 
supports-preserve-symlinks-flag "^1.0.0" +retry@^0.12.0: + version "0.12.0" + resolved "https://registry.yarnpkg.com/retry/-/retry-0.12.0.tgz#1b42a6266a21f07421d1b0b54b7dc167b01c013b" + integrity sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow== + +reusify@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" + integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== + rimraf@^2.2.8: version "2.7.1" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec" @@ -2741,6 +2941,13 @@ run-parallel-limit@^1.1.0: dependencies: queue-microtask "^1.2.2" +run-parallel@^1.1.9: + version "1.2.0" + resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" + integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== + dependencies: + queue-microtask "^1.2.2" + rustbn.js@~0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/rustbn.js/-/rustbn.js-0.2.0.tgz#8082cb886e707155fd1cb6f23bd591ab8d55d0ca" @@ -2792,6 +2999,13 @@ semver@^7.3.7: dependencies: lru-cache "^6.0.0" +semver@^7.5.1: + version "7.5.4" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.4.tgz#483986ec4ed38e1c6c48c34894a9182dbff68a6e" + integrity sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA== + dependencies: + lru-cache "^6.0.0" + serialize-javascript@6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-6.0.0.tgz#efae5d88f45d7924141da8b5c3a7a7e663fefeb8" @@ -2826,6 +3040,11 @@ side-channel@^1.0.4: get-intrinsic "^1.0.2" object-inspect "^1.9.0" +signal-exit@^3.0.2: + version "3.0.7" + resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" + integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== + solc@0.7.3: version "0.7.3" resolved "https://registry.yarnpkg.com/solc/-/solc-0.7.3.tgz#04646961bd867a744f63d2b4e3c0701ffdc7d78a" @@ -3098,9 +3317,9 @@ tr46@~0.0.3: integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw== ts-command-line-args@^2.2.0: - version "2.3.1" - resolved "https://registry.yarnpkg.com/ts-command-line-args/-/ts-command-line-args-2.3.1.tgz#b6188e42efc6cf7a8898e438a873fbb15505ddd6" - integrity sha512-FR3y7pLl/fuUNSmnPhfLArGqRrpojQgIEEOVzYx9DhTmfIN7C9RWSfpkJEF4J+Gk7aVx5pak8I7vWZsaN4N84g== + version "2.5.1" + resolved "https://registry.yarnpkg.com/ts-command-line-args/-/ts-command-line-args-2.5.1.tgz#e64456b580d1d4f6d948824c274cf6fa5f45f7f0" + integrity sha512-H69ZwTw3rFHb5WYpQya40YAX2/w7Ut75uUECbgBIsLmM+BNuYnxsltfyyLMxy6sEeKxgijLTnQtLd0nKd6+IYw== dependencies: chalk "^4.1.0" command-line-args "^5.1.1" @@ -3132,6 +3351,14 @@ ts-generator@^0.1.1: resolve "^1.8.1" ts-essentials "^1.0.0" +ts-morph@^19.0.0: + version "19.0.0" + resolved "https://registry.yarnpkg.com/ts-morph/-/ts-morph-19.0.0.tgz#43e95fb0156c3fe3c77c814ac26b7d0be2f93169" + integrity sha512-D6qcpiJdn46tUqV45vr5UGM2dnIEuTGNxVhg0sk5NX11orcouwj6i1bMqZIz2mZTZB1Hcgy7C3oEVhAT+f6mbQ== + dependencies: + "@ts-morph/common" "~0.20.0" + code-block-writer "^12.0.0" + ts-node@^10.7.0: version "10.9.1" resolved 
"https://registry.yarnpkg.com/ts-node/-/ts-node-10.9.1.tgz#e73de9102958af9e1f0b168a6ff320e25adcff4b" @@ -3192,9 +3419,9 @@ type-fest@^0.7.1: integrity sha512-Ne2YiiGN8bmrmJJEuTWTLJR32nh/JdL1+PSicowtNb0WFpn59GK8/lfD61bVtzguz7b3PBt74nxpv/Pw5po5Rg== typechain@^8.1.1: - version "8.1.1" - resolved "https://registry.yarnpkg.com/typechain/-/typechain-8.1.1.tgz#9c2e8012c2c4c586536fc18402dcd7034c4ff0bd" - integrity sha512-uF/sUvnXTOVF2FHKhQYnxHk4su4JjZR8vr4mA2mBaRwHTbwh0jIlqARz9XJr1tA0l7afJGvEa1dTSi4zt039LQ== + version "8.3.1" + resolved "https://registry.yarnpkg.com/typechain/-/typechain-8.3.1.tgz#dccbc839b94877997536c356380eff7325395cfb" + integrity sha512-fA7clol2IP/56yq6vkMTR+4URF1nGjV82Wx6Rf09EsqD4tkzMAvEaqYxVFCavJm/1xaRga/oD55K+4FtuXwQOQ== dependencies: "@types/prettier" "^2.1.1" debug "^4.3.1" @@ -3239,6 +3466,11 @@ universalify@^0.1.0: resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66" integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg== +universalify@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/universalify/-/universalify-2.0.0.tgz#75a4984efedc4b08975c5aeb73f530d02df25717" + integrity sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ== + unpipe@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" @@ -3382,7 +3614,7 @@ yocto-queue@^1.0.0: resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-1.0.0.tgz#7f816433fb2cbc511ec8bf7d263c3b58a1a3c251" integrity sha512-9bnSc/HEW2uRy67wc+T8UwauLuPJVn28jb+GtJY16iiKWyvmYJRXVT4UamsAEGQfPohgr2q4Tq0sQbQlxTfi1g== -zksync-web3@^0.13.0: - version "0.13.0" - resolved "https://registry.yarnpkg.com/zksync-web3/-/zksync-web3-0.13.0.tgz#979633eb507c8501185ebacbaa543e91c8ab423c" - integrity sha512-7E16RMVTi+6+AyjeRNn3e6CNbQ29UCoFO2osTjkPBgQjTortA0aqjrVAyAEi7o4g22Q2iLsPD2T7llUmTI8bBw== +zksync-web3@^0.14.3: + version "0.14.3" + resolved "https://registry.yarnpkg.com/zksync-web3/-/zksync-web3-0.14.3.tgz#64ac2a16d597464c3fc4ae07447a8007631c57c9" + integrity sha512-hT72th4AnqyLW1d5Jlv8N2B/qhEnl2NePK2A3org7tAa24niem/UAaHMkEvmWI3SF9waYUPtqAtjpf+yvQ9zvQ== From ba9835ab02b0427512b8f16dde0174870df0516a Mon Sep 17 00:00:00 2001 From: Bence Haromi <56651250+benceharomi@users.noreply.github.com> Date: Thu, 5 Oct 2023 16:32:03 +0100 Subject: [PATCH 2/2] chore: Upgrade to Node v18 (#20) --- .github/workflows/nodejs-license.yaml | 2 +- .nvmrc | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 .nvmrc diff --git a/.github/workflows/nodejs-license.yaml b/.github/workflows/nodejs-license.yaml index cdfc22ce..7ab1a48c 100644 --- a/.github/workflows/nodejs-license.yaml +++ b/.github/workflows/nodejs-license.yaml @@ -49,7 +49,7 @@ jobs: - name: Use Node.js uses: actions/setup-node@v3 with: - node-version: 16.15.1 + node-version: 18.18.0 - name: Install yarn run: npm install -g yarn license-checker diff --git a/.nvmrc b/.nvmrc new file mode 100644 index 00000000..6aab9b43 --- /dev/null +++ b/.nvmrc @@ -0,0 +1 @@ +v18.18.0