From 3704cfab400ca561f4f40b1839292521961125e6 Mon Sep 17 00:00:00 2001
From: Liam Monninger <79056955+l-monninger@users.noreply.github.com>
Date: Fri, 14 Jul 2023 18:07:40 -0700
Subject: [PATCH 01/13] Update mirror.yml

---
 .github/workflows/mirror.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/mirror.yml b/.github/workflows/mirror.yml
index a1f9b49f..a7700c0b 100644
--- a/.github/workflows/mirror.yml
+++ b/.github/workflows/mirror.yml
@@ -22,4 +22,4 @@ jobs:
           destination-github-username: 'movemntdev'
           destination-repository-name: 'movement-framework-mirror'
           user-email: l.mak.monninger@gmail.com
-          target-branch: main
+          target-branch: main # maybe this should be changed

From bb647da9e8e4f48f33b113b941bb4f723c40120f Mon Sep 17 00:00:00 2001
From: Liam Monninger
Date: Wed, 26 Jul 2023 22:58:29 -0700
Subject: [PATCH 02/13] orphan.

---
 .github/mirror.sh | 22 + .github/workflows/check.yml | 27 + .github/workflows/coverage.yml | 52 + .github/workflows/mirror.yml | 25 + .github/workflows/platform/dev.yml | 0 .github/workflows/platform/prod.yml | 0 .github/workflows/platform/test.yml | 0 .github/workflows/release.yml | 111 + .github/workflows/test.yml | 36 + .gitignore | 62 + .gitmodules | 14 + LICENSE | 29 + LICENSE.header | 2 + README.md | 71 + aptos-core | 1 + aptos-pre-core | 1 + ed25519-dalek | 1 + m1/.gitignore | 2 + m1/Cargo.toml | 168 + m1/JavaScript-client/.eslintignore | 4 + m1/JavaScript-client/.eslintrc.js | 36 + m1/JavaScript-client/.gitignore | 17 + m1/JavaScript-client/.npmignore | 4 + m1/JavaScript-client/.nvmrc | 1 + m1/JavaScript-client/.prettierignore | 2 + m1/JavaScript-client/.versionrc.json | 19 + m1/JavaScript-client/CHANGELOG.md | 225 + m1/JavaScript-client/CONTRIBUTING.md | 18 + m1/JavaScript-client/README.md | 9 + m1/JavaScript-client/examples/README.md | 5 + .../examples/javascript/index.js | 82 + .../examples/javascript/package.json | 16 + .../examples/javascript/pnpm-lock.yaml | 104 + .../examples/typescript-esm/index.ts | 137 + .../examples/typescript-esm/package.json | 20 + .../examples/typescript-esm/pnpm-lock.yaml | 107 + .../examples/typescript-esm/tsconfig.json | 23 + .../examples/typescript/bcs_transaction.ts | 98 + .../examples/typescript/call_aptos_cli.ts | 28 + .../examples/typescript/common.ts | 9 + .../examples/typescript/multisig_account.ts | 210 + .../typescript/multisig_transaction.ts | 136 + .../examples/typescript/package.json | 32 + .../examples/typescript/pnpm-lock.yaml | 1076 +++ .../examples/typescript/simple_nft.ts | 188 + .../examples/typescript/transfer_coin.ts | 67 + .../examples/typescript/tsconfig.json | 23 + .../examples/typescript/your_coin.ts | 137 + m1/JavaScript-client/jest.config.js | 20 + m1/JavaScript-client/package.json | 84 + m1/JavaScript-client/pnpm-lock.yaml | 6984 +++++++++++++++++ m1/JavaScript-client/scripts/check.sh | 40 + .../scripts/checked_publish.sh | 16 + .../scripts/generate_ts_docs.sh | 32 + .../scripts/publish_ans_contract.ts | 63 + .../src/account/aptos_account.ts | 191 + m1/JavaScript-client/src/account/index.ts | 1 + m1/JavaScript-client/src/aptos_types/abi.ts | 137 + .../src/aptos_types/account_address.ts | 88 + .../src/aptos_types/authentication_key.ts | 73 + .../src/aptos_types/authenticator.ts | 153 + .../src/aptos_types/ed25519.ts | 49 + .../src/aptos_types/identifier.ts | 17 + m1/JavaScript-client/src/aptos_types/index.ts | 15 + .../src/aptos_types/multi_ed25519.ts | 158 + .../aptos_types/rotation_proof_challenge.ts | 27 + .../src/aptos_types/token_types.ts | 88 +
.../src/aptos_types/transaction.ts | 700 ++ .../src/aptos_types/type_tag.ts | 458 ++ m1/JavaScript-client/src/bcs/consts.ts | 12 + m1/JavaScript-client/src/bcs/deserializer.ts | 188 + m1/JavaScript-client/src/bcs/helper.ts | 106 + m1/JavaScript-client/src/bcs/index.ts | 7 + m1/JavaScript-client/src/bcs/serializer.ts | 240 + m1/JavaScript-client/src/bcs/types.ts | 13 + .../src/generated/AptosGeneratedClient.ts | 52 + .../src/generated/core/ApiError.ts | 24 + .../src/generated/core/ApiRequestOptions.ts | 16 + .../src/generated/core/ApiResult.ts | 10 + .../src/generated/core/AxiosHttpRequest.ts | 25 + .../src/generated/core/BaseHttpRequest.ts | 13 + .../src/generated/core/CancelablePromise.ts | 128 + .../src/generated/core/OpenAPI.ts | 31 + .../src/generated/core/request.ts | 418 + m1/JavaScript-client/src/generated/index.ts | 214 + .../src/generated/models/AccountData.ts | 17 + .../src/generated/models/AccountSignature.ts | 17 + .../AccountSignature_Ed25519Signature.ts | 10 + .../AccountSignature_MultiEd25519Signature.ts | 10 + .../src/generated/models/Address.ts | 14 + .../src/generated/models/AptosError.ts | 22 + .../src/generated/models/AptosErrorCode.ts | 30 + .../src/generated/models/Block.ts | 26 + .../models/BlockMetadataTransaction.ts | 55 + .../src/generated/models/DecodedTableData.ts | 26 + .../src/generated/models/DeleteModule.ts | 19 + .../src/generated/models/DeleteResource.ts | 19 + .../src/generated/models/DeleteTableItem.ts | 17 + .../src/generated/models/DeletedTableData.ts | 18 + .../src/generated/models/DirectWriteSet.ts | 12 + .../src/generated/models/Ed25519Signature.ts | 14 + .../models/EncodeSubmissionRequest.ts | 24 + .../src/generated/models/EntryFunctionId.ts | 13 + .../generated/models/EntryFunctionPayload.ts | 22 + .../src/generated/models/Event.ts | 21 + .../src/generated/models/EventGuid.ts | 12 + .../src/generated/models/GasEstimation.ts | 22 + .../src/generated/models/GenesisPayload.ts | 11 + .../models/GenesisPayload_WriteSetPayload.ts | 10 + .../generated/models/GenesisTransaction.ts | 42 + .../src/generated/models/HashValue.ts | 5 + .../generated/models/HealthCheckSuccess.ts | 11 + .../src/generated/models/HexEncodedBytes.ts | 12 + .../src/generated/models/IdentifierWrapper.ts | 5 + .../src/generated/models/IndexResponse.ts | 30 + .../generated/models/ModuleBundlePayload.ts | 10 + .../src/generated/models/MoveAbility.ts | 5 + .../src/generated/models/MoveFunction.ts | 37 + .../models/MoveFunctionGenericTypeParam.ts | 16 + .../models/MoveFunctionVisibility.ts | 12 + .../src/generated/models/MoveModule.ts | 30 + .../generated/models/MoveModuleBytecode.ts | 15 + .../src/generated/models/MoveModuleId.ts | 15 + .../src/generated/models/MoveResource.ts | 15 + .../generated/models/MoveScriptBytecode.ts | 15 + .../src/generated/models/MoveStruct.ts | 32 + .../src/generated/models/MoveStructField.ts | 15 + .../models/MoveStructGenericTypeParam.ts | 16 + .../src/generated/models/MoveStructTag.ts | 24 + .../src/generated/models/MoveStructValue.ts | 49 + .../src/generated/models/MoveType.ts | 34 + .../src/generated/models/MoveValue.ts | 16 + .../generated/models/MultiAgentSignature.ts | 24 + .../generated/models/MultiEd25519Signature.ts | 27 + .../src/generated/models/MultisigPayload.ts | 16 + .../models/MultisigTransactionPayload.ts | 8 + .../generated/models/PendingTransaction.ts | 24 + .../generated/models/RawTableItemRequest.ts | 13 + .../src/generated/models/RoleType.ts | 8 + .../src/generated/models/ScriptPayload.ts | 22 + 
.../src/generated/models/ScriptWriteSet.ts | 12 + .../models/StateCheckpointTransaction.ts | 34 + .../src/generated/models/StateKeyWrapper.ts | 9 + .../models/SubmitTransactionRequest.ts | 24 + .../src/generated/models/TableItemRequest.ts | 18 + .../src/generated/models/Transaction.ts | 15 + .../generated/models/TransactionPayload.ts | 14 + ...TransactionPayload_EntryFunctionPayload.ts | 10 + .../TransactionPayload_ModuleBundlePayload.ts | 10 + .../TransactionPayload_MultisigPayload.ts | 10 + .../TransactionPayload_ScriptPayload.ts | 10 + .../generated/models/TransactionSignature.ts | 13 + .../TransactionSignature_Ed25519Signature.ts | 10 + ...ransactionSignature_MultiAgentSignature.ts | 10 + ...nsactionSignature_MultiEd25519Signature.ts | 10 + .../Transaction_BlockMetadataTransaction.ts | 10 + .../models/Transaction_GenesisTransaction.ts | 10 + .../models/Transaction_PendingTransaction.ts | 10 + .../Transaction_StateCheckpointTransaction.ts | 10 + .../models/Transaction_UserTransaction.ts | 10 + ...ransactionsBatchSingleSubmissionFailure.ts | 17 + .../TransactionsBatchSubmissionResult.ts | 18 + .../src/generated/models/U128.ts | 12 + .../src/generated/models/U256.ts | 12 + .../src/generated/models/U64.ts | 12 + .../src/generated/models/UserTransaction.ts | 49 + .../src/generated/models/VersionedEvent.ts | 22 + .../src/generated/models/ViewRequest.ts | 22 + .../src/generated/models/WriteModule.ts | 19 + .../src/generated/models/WriteResource.ts | 19 + .../src/generated/models/WriteSet.ts | 12 + .../src/generated/models/WriteSetChange.ts | 16 + .../models/WriteSetChange_DeleteModule.ts | 10 + .../models/WriteSetChange_DeleteResource.ts | 10 + .../models/WriteSetChange_DeleteTableItem.ts | 10 + .../models/WriteSetChange_WriteModule.ts | 10 + .../models/WriteSetChange_WriteResource.ts | 10 + .../models/WriteSetChange_WriteTableItem.ts | 10 + .../src/generated/models/WriteSetPayload.ts | 13 + .../models/WriteSet_DirectWriteSet.ts | 10 + .../models/WriteSet_ScriptWriteSet.ts | 10 + .../src/generated/models/WriteTableItem.ts | 18 + .../src/generated/schemas/$AccountData.ts | 18 + .../generated/schemas/$AccountSignature.ts | 17 + .../$AccountSignature_Ed25519Signature.ts | 16 + ...$AccountSignature_MultiEd25519Signature.ts | 16 + .../src/generated/schemas/$Address.ts | 14 + .../src/generated/schemas/$AptosError.ts | 23 + .../src/generated/schemas/$AptosErrorCode.ts | 6 + .../src/generated/schemas/$Block.ts | 37 + .../schemas/$BlockMetadataTransaction.ts | 98 + .../generated/schemas/$DecodedTableData.ts | 30 + .../src/generated/schemas/$DeleteModule.ts | 21 + .../src/generated/schemas/$DeleteResource.ts | 21 + .../src/generated/schemas/$DeleteTableItem.ts | 23 + .../generated/schemas/$DeletedTableData.ts | 19 + .../src/generated/schemas/$DirectWriteSet.ts | 21 + .../generated/schemas/$Ed25519Signature.ts | 16 + .../schemas/$EncodeSubmissionRequest.ts | 38 + .../src/generated/schemas/$EntryFunctionId.ts | 12 + .../schemas/$EntryFunctionPayload.ts | 27 + .../src/generated/schemas/$Event.ts | 26 + .../src/generated/schemas/$EventGuid.ts | 15 + .../src/generated/schemas/$GasEstimation.ts | 24 + .../src/generated/schemas/$GenesisPayload.ts | 10 + .../$GenesisPayload_WriteSetPayload.ts | 16 + .../generated/schemas/$GenesisTransaction.ts | 65 + .../src/generated/schemas/$HashValue.ts | 6 + .../generated/schemas/$HealthCheckSuccess.ts | 12 + .../src/generated/schemas/$HexEncodedBytes.ts | 12 + .../generated/schemas/$IdentifierWrapper.ts | 6 + .../src/generated/schemas/$IndexResponse.ts | 48 + 
.../generated/schemas/$ModuleBundlePayload.ts | 14 + .../src/generated/schemas/$MoveAbility.ts | 6 + .../src/generated/schemas/$MoveFunction.ts | 47 + .../schemas/$MoveFunctionGenericTypeParam.ts | 15 + .../schemas/$MoveFunctionVisibility.ts | 6 + .../src/generated/schemas/$MoveModule.ts | 37 + .../generated/schemas/$MoveModuleBytecode.ts | 15 + .../src/generated/schemas/$MoveModuleId.ts | 14 + .../src/generated/schemas/$MoveResource.ts | 16 + .../generated/schemas/$MoveScriptBytecode.ts | 15 + .../src/generated/schemas/$MoveStruct.ts | 38 + .../src/generated/schemas/$MoveStructField.ts | 16 + .../schemas/$MoveStructGenericTypeParam.ts | 15 + .../src/generated/schemas/$MoveStructTag.ts | 24 + .../src/generated/schemas/$MoveStructValue.ts | 47 + .../src/generated/schemas/$MoveType.ts | 34 + .../src/generated/schemas/$MoveValue.ts | 38 + .../generated/schemas/$MultiAgentSignature.ts | 28 + .../schemas/$MultiEd25519Signature.ts | 34 + .../src/generated/schemas/$MultisigPayload.ts | 16 + .../schemas/$MultisigTransactionPayload.ts | 9 + .../generated/schemas/$PendingTransaction.ts | 39 + .../generated/schemas/$RawTableItemRequest.ts | 12 + .../src/generated/schemas/$RoleType.ts | 6 + .../src/generated/schemas/$ScriptPayload.ts | 27 + .../src/generated/schemas/$ScriptWriteSet.ts | 15 + .../schemas/$StateCheckpointTransaction.ts | 56 + .../src/generated/schemas/$StateKeyWrapper.ts | 8 + .../schemas/$SubmitTransactionRequest.ts | 38 + .../generated/schemas/$TableItemRequest.ts | 22 + .../src/generated/schemas/$Transaction.ts | 18 + .../generated/schemas/$TransactionPayload.ts | 16 + ...TransactionPayload_EntryFunctionPayload.ts | 16 + ...$TransactionPayload_ModuleBundlePayload.ts | 16 + .../$TransactionPayload_MultisigPayload.ts | 16 + .../$TransactionPayload_ScriptPayload.ts | 16 + .../schemas/$TransactionSignature.ts | 14 + .../$TransactionSignature_Ed25519Signature.ts | 16 + ...ransactionSignature_MultiAgentSignature.ts | 16 + ...nsactionSignature_MultiEd25519Signature.ts | 16 + .../$Transaction_BlockMetadataTransaction.ts | 16 + .../$Transaction_GenesisTransaction.ts | 16 + .../$Transaction_PendingTransaction.ts | 16 + ...$Transaction_StateCheckpointTransaction.ts | 16 + .../schemas/$Transaction_UserTransaction.ts | 16 + ...ransactionsBatchSingleSubmissionFailure.ts | 18 + .../$TransactionsBatchSubmissionResult.ts | 17 + .../src/generated/schemas/$U128.ts | 12 + .../src/generated/schemas/$U256.ts | 12 + .../src/generated/schemas/$U64.ts | 12 + .../src/generated/schemas/$UserTransaction.ts | 90 + .../src/generated/schemas/$VersionedEvent.ts | 30 + .../src/generated/schemas/$ViewRequest.ts | 27 + .../src/generated/schemas/$WriteModule.ts | 21 + .../src/generated/schemas/$WriteResource.ts | 21 + .../src/generated/schemas/$WriteSet.ts | 12 + .../src/generated/schemas/$WriteSetChange.ts | 20 + .../schemas/$WriteSetChange_DeleteModule.ts | 16 + .../schemas/$WriteSetChange_DeleteResource.ts | 16 + .../$WriteSetChange_DeleteTableItem.ts | 16 + .../schemas/$WriteSetChange_WriteModule.ts | 16 + .../schemas/$WriteSetChange_WriteResource.ts | 16 + .../schemas/$WriteSetChange_WriteTableItem.ts | 16 + .../src/generated/schemas/$WriteSetPayload.ts | 12 + .../schemas/$WriteSet_DirectWriteSet.ts | 16 + .../schemas/$WriteSet_ScriptWriteSet.ts | 16 + .../src/generated/schemas/$WriteTableItem.ts | 27 + .../src/generated/services/AccountsService.ts | 200 + .../src/generated/services/BlocksService.ts | 79 + .../src/generated/services/EventsService.ts | 100 + .../src/generated/services/GeneralService.ts | 69 + 
.../src/generated/services/TablesService.ts | 93 + .../generated/services/TransactionsService.ts | 302 + .../src/generated/services/ViewService.ts | 43 + m1/JavaScript-client/src/index.ts | 18 + m1/JavaScript-client/src/indexer/codegen.yml | 22 + .../src/indexer/generated/operations.ts | 113 + .../src/indexer/generated/queries.ts | 265 + .../src/indexer/generated/types.ts | 4975 ++++++++++++ .../queries/getAccountCoinsData.graphql | 11 + .../queries/getAccountCurrentTokens.graphql | 38 + .../queries/getAccountTokensCount.graphql | 7 + .../getAccountTransactionsCount.graphql | 7 + .../getAccountTransactionsData.graphql | 11 + .../getCurrentDelegatorBalancesCount.graphql | 10 + .../getDelegatedStakingActivities.graphql | 12 + .../src/indexer/queries/getLedgerInfo.graphql | 5 + .../queries/getTokenActivities.graphql | 22 + .../queries/getTokenActivitiesCount.graphql | 7 + .../src/indexer/queries/getTokenData.graphql | 16 + .../queries/getTokenOwnersData.graphql | 7 + .../queries/getTopUserTransactions.graphql | 5 + .../queries/getUserTransactions.graphql | 10 + .../src/plugins/ans_client.ts | 232 + .../src/plugins/aptos_token.ts | 490 ++ .../src/plugins/coin_client.ts | 99 + .../src/plugins/faucet_client.ts | 69 + m1/JavaScript-client/src/plugins/index.ts | 4 + .../src/plugins/token_client.ts | 672 ++ .../src/providers/aptos_client.ts | 1001 +++ m1/JavaScript-client/src/providers/index.ts | 3 + m1/JavaScript-client/src/providers/indexer.ts | 313 + .../src/providers/provider.ts | 95 + .../src/tests/e2e/ans_client.test.ts | 149 + .../src/tests/e2e/aptos_client.test.ts | 658 ++ .../src/tests/e2e/aptos_token.test.ts | 203 + .../src/tests/e2e/coin_client.test.ts | 35 + .../src/tests/e2e/faucet_client.test.ts | 78 + .../src/tests/e2e/indexer.test.ts | 195 + .../src/tests/e2e/provider.test.ts | 99 + .../src/tests/e2e/token_client.test.ts | 153 + .../src/tests/unit/abi.test.ts | 67 + .../src/tests/unit/account_address.test.ts | 80 + .../src/tests/unit/aptos_account.test.ts | 89 + .../src/tests/unit/builder.test.ts | 234 + .../src/tests/unit/builder_utils.test.ts | 399 + .../src/tests/unit/deserializer.test.ts | 132 + .../src/tests/unit/helper.test.ts | 96 + .../src/tests/unit/hex_string.test.ts | 47 + .../src/tests/unit/misc.test.ts | 12 + .../src/tests/unit/multi_ed25519.test.ts | 111 + .../src/tests/unit/property_map_serde.test.ts | 55 + .../src/tests/unit/serializer.test.ts | 179 + .../src/tests/unit/test_helper.test.ts | 35 + .../tests/unit/transaction_builder.test.ts | 295 + .../src/tests/unit/transaction_vector.test.ts | 226 + .../src/tests/unit/type_tag.test.ts | 139 + .../src/transaction_builder/builder.ts | 437 ++ .../src/transaction_builder/builder_utils.ts | 196 + .../src/transaction_builder/index.ts | 5 + .../src/utils/api-endpoints.ts | 22 + m1/JavaScript-client/src/utils/hd-key.ts | 79 + m1/JavaScript-client/src/utils/hex_string.ts | 122 + m1/JavaScript-client/src/utils/index.ts | 5 + .../src/utils/memoize-decorator.ts | 151 + m1/JavaScript-client/src/utils/misc.ts | 32 + .../src/utils/pagination_helpers.ts | 40 + .../src/utils/property_map_serde.ts | 119 + m1/JavaScript-client/src/version.ts | 2 + m1/JavaScript-client/tsconfig.json | 19 + m1/JavaScript-client/tsup.config.js | 8 + m1/JavaScript-client/yarn.lock | 6110 ++++++++++++++ m1/README.md | 2 + m1/movement-benchmark/Cargo.toml | 29 + .../benches/transaction_benches.rs | 34 + m1/movement-benchmark/src/lib.rs | 8 + m1/movement-benchmark/src/main.rs | 90 + m1/movement-benchmark/src/measurement.rs | 14 + 
m1/movement-benchmark/src/transactions.rs | 268 + m1/movement/CHANGELOG.md | 51 + m1/movement/Cargo.toml | 99 + m1/movement/README.md | 5 + m1/movement/build.rs | 6 + m1/movement/debug-move-example/Move.toml | 9 + .../debug-move-example/sources/DebugDemo.move | 32 + m1/movement/e2e/README.md | 58 + m1/movement/e2e/cases/__init__.py | 0 m1/movement/e2e/cases/account.py | 62 + m1/movement/e2e/cases/init.py | 43 + m1/movement/e2e/common.py | 49 + m1/movement/e2e/local_testnet.py | 100 + m1/movement/e2e/main.py | 161 + m1/movement/e2e/poetry.lock | 665 ++ m1/movement/e2e/pyproject.toml | 19 + m1/movement/e2e/test_helpers.py | 174 + m1/movement/e2e/test_results.py | 48 + m1/movement/homebrew/README.md | 210 + m1/movement/homebrew/aptos.rb | 35 + m1/movement/src/account/create.rs | 41 + .../src/account/create_resource_account.rs | 91 + .../src/account/derive_resource_account.rs | 114 + m1/movement/src/account/fund.rs | 62 + m1/movement/src/account/key_rotation.rs | 338 + m1/movement/src/account/list.rs | 124 + m1/movement/src/account/mod.rs | 69 + m1/movement/src/account/multisig_account.rs | 257 + m1/movement/src/account/transfer.rs | 115 + m1/movement/src/common/init.rs | 374 + m1/movement/src/common/mod.rs | 6 + m1/movement/src/common/types.rs | 1753 +++++ m1/movement/src/common/utils.rs | 507 ++ m1/movement/src/config/mod.rs | 360 + m1/movement/src/faucet/mod.rs | 41 + m1/movement/src/ffi.rs | 85 + m1/movement/src/genesis/git.rs | 264 + m1/movement/src/genesis/keys.rs | 344 + m1/movement/src/genesis/mod.rs | 926 +++ m1/movement/src/genesis/tests.rs | 434 + m1/movement/src/genesis/tools.rs | 100 + m1/movement/src/governance/mod.rs | 1061 +++ m1/movement/src/lib.rs | 94 + m1/movement/src/main.rs | 31 + .../src/move_tool/aptos_debug_natives.rs | 26 + .../src/move_tool/aptos_dep_example/README.md | 24 + .../aptos_dep_example/pack1/Move.toml | 6 + .../pack1/sources/hello.move | 7 + .../aptos_dep_example/pack2/Move.toml | 3 + .../aptos_dep_example/pack2/sources/m.move | 3 + m1/movement/src/move_tool/coverage.rs | 184 + m1/movement/src/move_tool/disassembler.rs | 148 + m1/movement/src/move_tool/manifest.rs | 90 + m1/movement/src/move_tool/mod.rs | 1606 ++++ m1/movement/src/move_tool/package_hooks.rs | 54 + m1/movement/src/move_tool/show.rs | 109 + m1/movement/src/move_tool/stored_package.rs | 214 + .../move_tool/transactional_tests_runner.rs | 345 + .../src/node/analyze/analyze_validators.rs | 540 ++ .../src/node/analyze/fetch_metadata.rs | 337 + m1/movement/src/node/analyze/mod.rs | 5 + m1/movement/src/node/mod.rs | 1730 ++++ m1/movement/src/op/key.rs | 396 + m1/movement/src/op/mod.rs | 4 + m1/movement/src/stake/mod.rs | 668 ++ m1/movement/src/test/mod.rs | 1221 +++ m1/movement/src/test/tests.rs | 136 + m1/movement/src/update/helpers.rs | 77 + m1/movement/src/update/mod.rs | 8 + m1/movement/src/update/tool.rs | 145 + m1/subnet/Cargo.toml | 49 + m1/subnet/src/api/chain_handlers.rs | 463 ++ m1/subnet/src/api/mod.rs | 30 + m1/subnet/src/api/static_handlers.rs | 69 + m1/subnet/src/block/mod.rs | 296 + m1/subnet/src/main.rs | 19 + m1/subnet/src/state/mod.rs | 182 + m1/subnet/src/vm/mod.rs | 1743 ++++ rsc/movement_logo.png | Bin 0 -> 18016 bytes x25519-dalek | 1 + 440 files changed, 59469 insertions(+) create mode 100755 .github/mirror.sh create mode 100644 .github/workflows/check.yml create mode 100644 .github/workflows/coverage.yml create mode 100644 .github/workflows/mirror.yml create mode 100644 .github/workflows/platform/dev.yml create mode 100644 .github/workflows/platform/prod.yml create mode 
100644 .github/workflows/platform/test.yml create mode 100644 .github/workflows/release.yml create mode 100644 .github/workflows/test.yml create mode 100644 .gitignore create mode 100644 .gitmodules create mode 100644 LICENSE create mode 100644 LICENSE.header create mode 100644 README.md create mode 160000 aptos-core create mode 160000 aptos-pre-core create mode 160000 ed25519-dalek create mode 100644 m1/.gitignore create mode 100644 m1/Cargo.toml create mode 100644 m1/JavaScript-client/.eslintignore create mode 100644 m1/JavaScript-client/.eslintrc.js create mode 100644 m1/JavaScript-client/.gitignore create mode 100644 m1/JavaScript-client/.npmignore create mode 100644 m1/JavaScript-client/.nvmrc create mode 100644 m1/JavaScript-client/.prettierignore create mode 100644 m1/JavaScript-client/.versionrc.json create mode 100644 m1/JavaScript-client/CHANGELOG.md create mode 100644 m1/JavaScript-client/CONTRIBUTING.md create mode 100644 m1/JavaScript-client/README.md create mode 100644 m1/JavaScript-client/examples/README.md create mode 100644 m1/JavaScript-client/examples/javascript/index.js create mode 100644 m1/JavaScript-client/examples/javascript/package.json create mode 100644 m1/JavaScript-client/examples/javascript/pnpm-lock.yaml create mode 100644 m1/JavaScript-client/examples/typescript-esm/index.ts create mode 100644 m1/JavaScript-client/examples/typescript-esm/package.json create mode 100644 m1/JavaScript-client/examples/typescript-esm/pnpm-lock.yaml create mode 100644 m1/JavaScript-client/examples/typescript-esm/tsconfig.json create mode 100644 m1/JavaScript-client/examples/typescript/bcs_transaction.ts create mode 100644 m1/JavaScript-client/examples/typescript/call_aptos_cli.ts create mode 100644 m1/JavaScript-client/examples/typescript/common.ts create mode 100644 m1/JavaScript-client/examples/typescript/multisig_account.ts create mode 100644 m1/JavaScript-client/examples/typescript/multisig_transaction.ts create mode 100644 m1/JavaScript-client/examples/typescript/package.json create mode 100644 m1/JavaScript-client/examples/typescript/pnpm-lock.yaml create mode 100644 m1/JavaScript-client/examples/typescript/simple_nft.ts create mode 100644 m1/JavaScript-client/examples/typescript/transfer_coin.ts create mode 100644 m1/JavaScript-client/examples/typescript/tsconfig.json create mode 100644 m1/JavaScript-client/examples/typescript/your_coin.ts create mode 100644 m1/JavaScript-client/jest.config.js create mode 100644 m1/JavaScript-client/package.json create mode 100644 m1/JavaScript-client/pnpm-lock.yaml create mode 100644 m1/JavaScript-client/scripts/check.sh create mode 100644 m1/JavaScript-client/scripts/checked_publish.sh create mode 100644 m1/JavaScript-client/scripts/generate_ts_docs.sh create mode 100644 m1/JavaScript-client/scripts/publish_ans_contract.ts create mode 100644 m1/JavaScript-client/src/account/aptos_account.ts create mode 100644 m1/JavaScript-client/src/account/index.ts create mode 100644 m1/JavaScript-client/src/aptos_types/abi.ts create mode 100644 m1/JavaScript-client/src/aptos_types/account_address.ts create mode 100644 m1/JavaScript-client/src/aptos_types/authentication_key.ts create mode 100644 m1/JavaScript-client/src/aptos_types/authenticator.ts create mode 100644 m1/JavaScript-client/src/aptos_types/ed25519.ts create mode 100644 m1/JavaScript-client/src/aptos_types/identifier.ts create mode 100644 m1/JavaScript-client/src/aptos_types/index.ts create mode 100644 m1/JavaScript-client/src/aptos_types/multi_ed25519.ts create mode 100644 
m1/JavaScript-client/src/aptos_types/rotation_proof_challenge.ts create mode 100644 m1/JavaScript-client/src/aptos_types/token_types.ts create mode 100644 m1/JavaScript-client/src/aptos_types/transaction.ts create mode 100644 m1/JavaScript-client/src/aptos_types/type_tag.ts create mode 100644 m1/JavaScript-client/src/bcs/consts.ts create mode 100644 m1/JavaScript-client/src/bcs/deserializer.ts create mode 100644 m1/JavaScript-client/src/bcs/helper.ts create mode 100644 m1/JavaScript-client/src/bcs/index.ts create mode 100644 m1/JavaScript-client/src/bcs/serializer.ts create mode 100644 m1/JavaScript-client/src/bcs/types.ts create mode 100644 m1/JavaScript-client/src/generated/AptosGeneratedClient.ts create mode 100644 m1/JavaScript-client/src/generated/core/ApiError.ts create mode 100644 m1/JavaScript-client/src/generated/core/ApiRequestOptions.ts create mode 100644 m1/JavaScript-client/src/generated/core/ApiResult.ts create mode 100644 m1/JavaScript-client/src/generated/core/AxiosHttpRequest.ts create mode 100644 m1/JavaScript-client/src/generated/core/BaseHttpRequest.ts create mode 100644 m1/JavaScript-client/src/generated/core/CancelablePromise.ts create mode 100644 m1/JavaScript-client/src/generated/core/OpenAPI.ts create mode 100644 m1/JavaScript-client/src/generated/core/request.ts create mode 100644 m1/JavaScript-client/src/generated/index.ts create mode 100644 m1/JavaScript-client/src/generated/models/AccountData.ts create mode 100644 m1/JavaScript-client/src/generated/models/AccountSignature.ts create mode 100644 m1/JavaScript-client/src/generated/models/AccountSignature_Ed25519Signature.ts create mode 100644 m1/JavaScript-client/src/generated/models/AccountSignature_MultiEd25519Signature.ts create mode 100644 m1/JavaScript-client/src/generated/models/Address.ts create mode 100644 m1/JavaScript-client/src/generated/models/AptosError.ts create mode 100644 m1/JavaScript-client/src/generated/models/AptosErrorCode.ts create mode 100644 m1/JavaScript-client/src/generated/models/Block.ts create mode 100644 m1/JavaScript-client/src/generated/models/BlockMetadataTransaction.ts create mode 100644 m1/JavaScript-client/src/generated/models/DecodedTableData.ts create mode 100644 m1/JavaScript-client/src/generated/models/DeleteModule.ts create mode 100644 m1/JavaScript-client/src/generated/models/DeleteResource.ts create mode 100644 m1/JavaScript-client/src/generated/models/DeleteTableItem.ts create mode 100644 m1/JavaScript-client/src/generated/models/DeletedTableData.ts create mode 100644 m1/JavaScript-client/src/generated/models/DirectWriteSet.ts create mode 100644 m1/JavaScript-client/src/generated/models/Ed25519Signature.ts create mode 100644 m1/JavaScript-client/src/generated/models/EncodeSubmissionRequest.ts create mode 100644 m1/JavaScript-client/src/generated/models/EntryFunctionId.ts create mode 100644 m1/JavaScript-client/src/generated/models/EntryFunctionPayload.ts create mode 100644 m1/JavaScript-client/src/generated/models/Event.ts create mode 100644 m1/JavaScript-client/src/generated/models/EventGuid.ts create mode 100644 m1/JavaScript-client/src/generated/models/GasEstimation.ts create mode 100644 m1/JavaScript-client/src/generated/models/GenesisPayload.ts create mode 100644 m1/JavaScript-client/src/generated/models/GenesisPayload_WriteSetPayload.ts create mode 100644 m1/JavaScript-client/src/generated/models/GenesisTransaction.ts create mode 100644 m1/JavaScript-client/src/generated/models/HashValue.ts create mode 100644 
m1/JavaScript-client/src/generated/models/HealthCheckSuccess.ts create mode 100644 m1/JavaScript-client/src/generated/models/HexEncodedBytes.ts create mode 100644 m1/JavaScript-client/src/generated/models/IdentifierWrapper.ts create mode 100644 m1/JavaScript-client/src/generated/models/IndexResponse.ts create mode 100644 m1/JavaScript-client/src/generated/models/ModuleBundlePayload.ts create mode 100644 m1/JavaScript-client/src/generated/models/MoveAbility.ts create mode 100644 m1/JavaScript-client/src/generated/models/MoveFunction.ts create mode 100644 m1/JavaScript-client/src/generated/models/MoveFunctionGenericTypeParam.ts create mode 100644 m1/JavaScript-client/src/generated/models/MoveFunctionVisibility.ts create mode 100644 m1/JavaScript-client/src/generated/models/MoveModule.ts create mode 100644 m1/JavaScript-client/src/generated/models/MoveModuleBytecode.ts create mode 100644 m1/JavaScript-client/src/generated/models/MoveModuleId.ts create mode 100644 m1/JavaScript-client/src/generated/models/MoveResource.ts create mode 100644 m1/JavaScript-client/src/generated/models/MoveScriptBytecode.ts create mode 100644 m1/JavaScript-client/src/generated/models/MoveStruct.ts create mode 100644 m1/JavaScript-client/src/generated/models/MoveStructField.ts create mode 100644 m1/JavaScript-client/src/generated/models/MoveStructGenericTypeParam.ts create mode 100644 m1/JavaScript-client/src/generated/models/MoveStructTag.ts create mode 100644 m1/JavaScript-client/src/generated/models/MoveStructValue.ts create mode 100644 m1/JavaScript-client/src/generated/models/MoveType.ts create mode 100644 m1/JavaScript-client/src/generated/models/MoveValue.ts create mode 100644 m1/JavaScript-client/src/generated/models/MultiAgentSignature.ts create mode 100644 m1/JavaScript-client/src/generated/models/MultiEd25519Signature.ts create mode 100644 m1/JavaScript-client/src/generated/models/MultisigPayload.ts create mode 100644 m1/JavaScript-client/src/generated/models/MultisigTransactionPayload.ts create mode 100644 m1/JavaScript-client/src/generated/models/PendingTransaction.ts create mode 100644 m1/JavaScript-client/src/generated/models/RawTableItemRequest.ts create mode 100644 m1/JavaScript-client/src/generated/models/RoleType.ts create mode 100644 m1/JavaScript-client/src/generated/models/ScriptPayload.ts create mode 100644 m1/JavaScript-client/src/generated/models/ScriptWriteSet.ts create mode 100644 m1/JavaScript-client/src/generated/models/StateCheckpointTransaction.ts create mode 100644 m1/JavaScript-client/src/generated/models/StateKeyWrapper.ts create mode 100644 m1/JavaScript-client/src/generated/models/SubmitTransactionRequest.ts create mode 100644 m1/JavaScript-client/src/generated/models/TableItemRequest.ts create mode 100644 m1/JavaScript-client/src/generated/models/Transaction.ts create mode 100644 m1/JavaScript-client/src/generated/models/TransactionPayload.ts create mode 100644 m1/JavaScript-client/src/generated/models/TransactionPayload_EntryFunctionPayload.ts create mode 100644 m1/JavaScript-client/src/generated/models/TransactionPayload_ModuleBundlePayload.ts create mode 100644 m1/JavaScript-client/src/generated/models/TransactionPayload_MultisigPayload.ts create mode 100644 m1/JavaScript-client/src/generated/models/TransactionPayload_ScriptPayload.ts create mode 100644 m1/JavaScript-client/src/generated/models/TransactionSignature.ts create mode 100644 m1/JavaScript-client/src/generated/models/TransactionSignature_Ed25519Signature.ts create mode 100644 
m1/JavaScript-client/src/generated/models/TransactionSignature_MultiAgentSignature.ts create mode 100644 m1/JavaScript-client/src/generated/models/TransactionSignature_MultiEd25519Signature.ts create mode 100644 m1/JavaScript-client/src/generated/models/Transaction_BlockMetadataTransaction.ts create mode 100644 m1/JavaScript-client/src/generated/models/Transaction_GenesisTransaction.ts create mode 100644 m1/JavaScript-client/src/generated/models/Transaction_PendingTransaction.ts create mode 100644 m1/JavaScript-client/src/generated/models/Transaction_StateCheckpointTransaction.ts create mode 100644 m1/JavaScript-client/src/generated/models/Transaction_UserTransaction.ts create mode 100644 m1/JavaScript-client/src/generated/models/TransactionsBatchSingleSubmissionFailure.ts create mode 100644 m1/JavaScript-client/src/generated/models/TransactionsBatchSubmissionResult.ts create mode 100644 m1/JavaScript-client/src/generated/models/U128.ts create mode 100644 m1/JavaScript-client/src/generated/models/U256.ts create mode 100644 m1/JavaScript-client/src/generated/models/U64.ts create mode 100644 m1/JavaScript-client/src/generated/models/UserTransaction.ts create mode 100644 m1/JavaScript-client/src/generated/models/VersionedEvent.ts create mode 100644 m1/JavaScript-client/src/generated/models/ViewRequest.ts create mode 100644 m1/JavaScript-client/src/generated/models/WriteModule.ts create mode 100644 m1/JavaScript-client/src/generated/models/WriteResource.ts create mode 100644 m1/JavaScript-client/src/generated/models/WriteSet.ts create mode 100644 m1/JavaScript-client/src/generated/models/WriteSetChange.ts create mode 100644 m1/JavaScript-client/src/generated/models/WriteSetChange_DeleteModule.ts create mode 100644 m1/JavaScript-client/src/generated/models/WriteSetChange_DeleteResource.ts create mode 100644 m1/JavaScript-client/src/generated/models/WriteSetChange_DeleteTableItem.ts create mode 100644 m1/JavaScript-client/src/generated/models/WriteSetChange_WriteModule.ts create mode 100644 m1/JavaScript-client/src/generated/models/WriteSetChange_WriteResource.ts create mode 100644 m1/JavaScript-client/src/generated/models/WriteSetChange_WriteTableItem.ts create mode 100644 m1/JavaScript-client/src/generated/models/WriteSetPayload.ts create mode 100644 m1/JavaScript-client/src/generated/models/WriteSet_DirectWriteSet.ts create mode 100644 m1/JavaScript-client/src/generated/models/WriteSet_ScriptWriteSet.ts create mode 100644 m1/JavaScript-client/src/generated/models/WriteTableItem.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$AccountData.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$AccountSignature.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$AccountSignature_Ed25519Signature.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$AccountSignature_MultiEd25519Signature.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$Address.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$AptosError.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$AptosErrorCode.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$Block.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$BlockMetadataTransaction.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$DecodedTableData.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$DeleteModule.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$DeleteResource.ts create mode 100644 
m1/JavaScript-client/src/generated/schemas/$DeleteTableItem.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$DeletedTableData.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$DirectWriteSet.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$Ed25519Signature.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$EncodeSubmissionRequest.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$EntryFunctionId.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$EntryFunctionPayload.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$Event.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$EventGuid.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$GasEstimation.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$GenesisPayload.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$GenesisPayload_WriteSetPayload.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$GenesisTransaction.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$HashValue.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$HealthCheckSuccess.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$HexEncodedBytes.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$IdentifierWrapper.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$IndexResponse.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$ModuleBundlePayload.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$MoveAbility.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$MoveFunction.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$MoveFunctionGenericTypeParam.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$MoveFunctionVisibility.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$MoveModule.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$MoveModuleBytecode.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$MoveModuleId.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$MoveResource.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$MoveScriptBytecode.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$MoveStruct.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$MoveStructField.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$MoveStructGenericTypeParam.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$MoveStructTag.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$MoveStructValue.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$MoveType.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$MoveValue.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$MultiAgentSignature.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$MultiEd25519Signature.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$MultisigPayload.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$MultisigTransactionPayload.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$PendingTransaction.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$RawTableItemRequest.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$RoleType.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$ScriptPayload.ts create mode 
100644 m1/JavaScript-client/src/generated/schemas/$ScriptWriteSet.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$StateCheckpointTransaction.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$StateKeyWrapper.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$SubmitTransactionRequest.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$TableItemRequest.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$Transaction.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$TransactionPayload.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$TransactionPayload_EntryFunctionPayload.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$TransactionPayload_ModuleBundlePayload.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$TransactionPayload_MultisigPayload.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$TransactionPayload_ScriptPayload.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$TransactionSignature.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$TransactionSignature_Ed25519Signature.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$TransactionSignature_MultiAgentSignature.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$TransactionSignature_MultiEd25519Signature.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$Transaction_BlockMetadataTransaction.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$Transaction_GenesisTransaction.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$Transaction_PendingTransaction.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$Transaction_StateCheckpointTransaction.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$Transaction_UserTransaction.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$TransactionsBatchSingleSubmissionFailure.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$TransactionsBatchSubmissionResult.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$U128.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$U256.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$U64.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$UserTransaction.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$VersionedEvent.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$ViewRequest.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$WriteModule.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$WriteResource.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$WriteSet.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$WriteSetChange.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$WriteSetChange_DeleteModule.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$WriteSetChange_DeleteResource.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$WriteSetChange_DeleteTableItem.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$WriteSetChange_WriteModule.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$WriteSetChange_WriteResource.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$WriteSetChange_WriteTableItem.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$WriteSetPayload.ts create mode 100644 
m1/JavaScript-client/src/generated/schemas/$WriteSet_DirectWriteSet.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$WriteSet_ScriptWriteSet.ts create mode 100644 m1/JavaScript-client/src/generated/schemas/$WriteTableItem.ts create mode 100644 m1/JavaScript-client/src/generated/services/AccountsService.ts create mode 100644 m1/JavaScript-client/src/generated/services/BlocksService.ts create mode 100644 m1/JavaScript-client/src/generated/services/EventsService.ts create mode 100644 m1/JavaScript-client/src/generated/services/GeneralService.ts create mode 100644 m1/JavaScript-client/src/generated/services/TablesService.ts create mode 100644 m1/JavaScript-client/src/generated/services/TransactionsService.ts create mode 100644 m1/JavaScript-client/src/generated/services/ViewService.ts create mode 100644 m1/JavaScript-client/src/index.ts create mode 100644 m1/JavaScript-client/src/indexer/codegen.yml create mode 100644 m1/JavaScript-client/src/indexer/generated/operations.ts create mode 100644 m1/JavaScript-client/src/indexer/generated/queries.ts create mode 100644 m1/JavaScript-client/src/indexer/generated/types.ts create mode 100644 m1/JavaScript-client/src/indexer/queries/getAccountCoinsData.graphql create mode 100644 m1/JavaScript-client/src/indexer/queries/getAccountCurrentTokens.graphql create mode 100644 m1/JavaScript-client/src/indexer/queries/getAccountTokensCount.graphql create mode 100644 m1/JavaScript-client/src/indexer/queries/getAccountTransactionsCount.graphql create mode 100644 m1/JavaScript-client/src/indexer/queries/getAccountTransactionsData.graphql create mode 100644 m1/JavaScript-client/src/indexer/queries/getCurrentDelegatorBalancesCount.graphql create mode 100644 m1/JavaScript-client/src/indexer/queries/getDelegatedStakingActivities.graphql create mode 100644 m1/JavaScript-client/src/indexer/queries/getLedgerInfo.graphql create mode 100644 m1/JavaScript-client/src/indexer/queries/getTokenActivities.graphql create mode 100644 m1/JavaScript-client/src/indexer/queries/getTokenActivitiesCount.graphql create mode 100644 m1/JavaScript-client/src/indexer/queries/getTokenData.graphql create mode 100644 m1/JavaScript-client/src/indexer/queries/getTokenOwnersData.graphql create mode 100644 m1/JavaScript-client/src/indexer/queries/getTopUserTransactions.graphql create mode 100644 m1/JavaScript-client/src/indexer/queries/getUserTransactions.graphql create mode 100644 m1/JavaScript-client/src/plugins/ans_client.ts create mode 100644 m1/JavaScript-client/src/plugins/aptos_token.ts create mode 100644 m1/JavaScript-client/src/plugins/coin_client.ts create mode 100644 m1/JavaScript-client/src/plugins/faucet_client.ts create mode 100644 m1/JavaScript-client/src/plugins/index.ts create mode 100644 m1/JavaScript-client/src/plugins/token_client.ts create mode 100644 m1/JavaScript-client/src/providers/aptos_client.ts create mode 100644 m1/JavaScript-client/src/providers/index.ts create mode 100644 m1/JavaScript-client/src/providers/indexer.ts create mode 100644 m1/JavaScript-client/src/providers/provider.ts create mode 100644 m1/JavaScript-client/src/tests/e2e/ans_client.test.ts create mode 100644 m1/JavaScript-client/src/tests/e2e/aptos_client.test.ts create mode 100644 m1/JavaScript-client/src/tests/e2e/aptos_token.test.ts create mode 100644 m1/JavaScript-client/src/tests/e2e/coin_client.test.ts create mode 100644 m1/JavaScript-client/src/tests/e2e/faucet_client.test.ts create mode 100644 m1/JavaScript-client/src/tests/e2e/indexer.test.ts create mode 100644 
m1/JavaScript-client/src/tests/e2e/provider.test.ts create mode 100644 m1/JavaScript-client/src/tests/e2e/token_client.test.ts create mode 100644 m1/JavaScript-client/src/tests/unit/abi.test.ts create mode 100644 m1/JavaScript-client/src/tests/unit/account_address.test.ts create mode 100644 m1/JavaScript-client/src/tests/unit/aptos_account.test.ts create mode 100644 m1/JavaScript-client/src/tests/unit/builder.test.ts create mode 100644 m1/JavaScript-client/src/tests/unit/builder_utils.test.ts create mode 100644 m1/JavaScript-client/src/tests/unit/deserializer.test.ts create mode 100644 m1/JavaScript-client/src/tests/unit/helper.test.ts create mode 100644 m1/JavaScript-client/src/tests/unit/hex_string.test.ts create mode 100644 m1/JavaScript-client/src/tests/unit/misc.test.ts create mode 100644 m1/JavaScript-client/src/tests/unit/multi_ed25519.test.ts create mode 100644 m1/JavaScript-client/src/tests/unit/property_map_serde.test.ts create mode 100644 m1/JavaScript-client/src/tests/unit/serializer.test.ts create mode 100644 m1/JavaScript-client/src/tests/unit/test_helper.test.ts create mode 100644 m1/JavaScript-client/src/tests/unit/transaction_builder.test.ts create mode 100644 m1/JavaScript-client/src/tests/unit/transaction_vector.test.ts create mode 100644 m1/JavaScript-client/src/tests/unit/type_tag.test.ts create mode 100644 m1/JavaScript-client/src/transaction_builder/builder.ts create mode 100644 m1/JavaScript-client/src/transaction_builder/builder_utils.ts create mode 100644 m1/JavaScript-client/src/transaction_builder/index.ts create mode 100644 m1/JavaScript-client/src/utils/api-endpoints.ts create mode 100644 m1/JavaScript-client/src/utils/hd-key.ts create mode 100644 m1/JavaScript-client/src/utils/hex_string.ts create mode 100644 m1/JavaScript-client/src/utils/index.ts create mode 100644 m1/JavaScript-client/src/utils/memoize-decorator.ts create mode 100644 m1/JavaScript-client/src/utils/misc.ts create mode 100644 m1/JavaScript-client/src/utils/pagination_helpers.ts create mode 100644 m1/JavaScript-client/src/utils/property_map_serde.ts create mode 100644 m1/JavaScript-client/src/version.ts create mode 100644 m1/JavaScript-client/tsconfig.json create mode 100644 m1/JavaScript-client/tsup.config.js create mode 100644 m1/JavaScript-client/yarn.lock create mode 100644 m1/README.md create mode 100644 m1/movement-benchmark/Cargo.toml create mode 100644 m1/movement-benchmark/benches/transaction_benches.rs create mode 100644 m1/movement-benchmark/src/lib.rs create mode 100644 m1/movement-benchmark/src/main.rs create mode 100644 m1/movement-benchmark/src/measurement.rs create mode 100644 m1/movement-benchmark/src/transactions.rs create mode 100644 m1/movement/CHANGELOG.md create mode 100644 m1/movement/Cargo.toml create mode 100644 m1/movement/README.md create mode 100644 m1/movement/build.rs create mode 100644 m1/movement/debug-move-example/Move.toml create mode 100644 m1/movement/debug-move-example/sources/DebugDemo.move create mode 100644 m1/movement/e2e/README.md create mode 100644 m1/movement/e2e/cases/__init__.py create mode 100644 m1/movement/e2e/cases/account.py create mode 100644 m1/movement/e2e/cases/init.py create mode 100644 m1/movement/e2e/common.py create mode 100644 m1/movement/e2e/local_testnet.py create mode 100644 m1/movement/e2e/main.py create mode 100644 m1/movement/e2e/poetry.lock create mode 100644 m1/movement/e2e/pyproject.toml create mode 100644 m1/movement/e2e/test_helpers.py create mode 100644 m1/movement/e2e/test_results.py create mode 100644 
m1/movement/homebrew/README.md create mode 100644 m1/movement/homebrew/aptos.rb create mode 100644 m1/movement/src/account/create.rs create mode 100644 m1/movement/src/account/create_resource_account.rs create mode 100644 m1/movement/src/account/derive_resource_account.rs create mode 100644 m1/movement/src/account/fund.rs create mode 100644 m1/movement/src/account/key_rotation.rs create mode 100644 m1/movement/src/account/list.rs create mode 100644 m1/movement/src/account/mod.rs create mode 100644 m1/movement/src/account/multisig_account.rs create mode 100644 m1/movement/src/account/transfer.rs create mode 100644 m1/movement/src/common/init.rs create mode 100644 m1/movement/src/common/mod.rs create mode 100644 m1/movement/src/common/types.rs create mode 100644 m1/movement/src/common/utils.rs create mode 100644 m1/movement/src/config/mod.rs create mode 100644 m1/movement/src/faucet/mod.rs create mode 100644 m1/movement/src/ffi.rs create mode 100644 m1/movement/src/genesis/git.rs create mode 100644 m1/movement/src/genesis/keys.rs create mode 100644 m1/movement/src/genesis/mod.rs create mode 100644 m1/movement/src/genesis/tests.rs create mode 100644 m1/movement/src/genesis/tools.rs create mode 100644 m1/movement/src/governance/mod.rs create mode 100644 m1/movement/src/lib.rs create mode 100644 m1/movement/src/main.rs create mode 100644 m1/movement/src/move_tool/aptos_debug_natives.rs create mode 100644 m1/movement/src/move_tool/aptos_dep_example/README.md create mode 100644 m1/movement/src/move_tool/aptos_dep_example/pack1/Move.toml create mode 100644 m1/movement/src/move_tool/aptos_dep_example/pack1/sources/hello.move create mode 100644 m1/movement/src/move_tool/aptos_dep_example/pack2/Move.toml create mode 100644 m1/movement/src/move_tool/aptos_dep_example/pack2/sources/m.move create mode 100644 m1/movement/src/move_tool/coverage.rs create mode 100644 m1/movement/src/move_tool/disassembler.rs create mode 100644 m1/movement/src/move_tool/manifest.rs create mode 100644 m1/movement/src/move_tool/mod.rs create mode 100644 m1/movement/src/move_tool/package_hooks.rs create mode 100644 m1/movement/src/move_tool/show.rs create mode 100644 m1/movement/src/move_tool/stored_package.rs create mode 100644 m1/movement/src/move_tool/transactional_tests_runner.rs create mode 100644 m1/movement/src/node/analyze/analyze_validators.rs create mode 100644 m1/movement/src/node/analyze/fetch_metadata.rs create mode 100644 m1/movement/src/node/analyze/mod.rs create mode 100644 m1/movement/src/node/mod.rs create mode 100644 m1/movement/src/op/key.rs create mode 100644 m1/movement/src/op/mod.rs create mode 100644 m1/movement/src/stake/mod.rs create mode 100644 m1/movement/src/test/mod.rs create mode 100644 m1/movement/src/test/tests.rs create mode 100644 m1/movement/src/update/helpers.rs create mode 100644 m1/movement/src/update/mod.rs create mode 100644 m1/movement/src/update/tool.rs create mode 100644 m1/subnet/Cargo.toml create mode 100644 m1/subnet/src/api/chain_handlers.rs create mode 100644 m1/subnet/src/api/mod.rs create mode 100644 m1/subnet/src/api/static_handlers.rs create mode 100644 m1/subnet/src/block/mod.rs create mode 100644 m1/subnet/src/main.rs create mode 100644 m1/subnet/src/state/mod.rs create mode 100644 m1/subnet/src/vm/mod.rs create mode 100644 rsc/movement_logo.png create mode 160000 x25519-dalek diff --git a/.github/mirror.sh b/.github/mirror.sh new file mode 100755 index 00000000..9b826131 --- /dev/null +++ b/.github/mirror.sh @@ -0,0 +1,22 @@ +#!/bin/bash + +git config --global user.email 
"l.mak.monninger@gmail.com" +git config --global user.name "CI Bot" + +# Clone the Movement Framework Mirror repository +git clone https://github.com/movemntdev/movement-framework-mirror.git + +# Copy the aptos-move directory to the Movement Framework Mirror repository +cp -r vm/aptos-vm/aptos-move movement-framework-mirror/ + +# Change directory to the Movement Framework Mirror repository +cd movement-framework-mirror/ + +# Add all changes to Git +git add -A + +# Commit the changes +git commit -m "Mirror aptos-move directory from current repo" + +# Push the changes with force +git push --force diff --git a/.github/workflows/check.yml b/.github/workflows/check.yml new file mode 100644 index 00000000..9ee4c8d2 --- /dev/null +++ b/.github/workflows/check.yml @@ -0,0 +1,27 @@ +name: Cargo Check + +on: + push: + branches: + - main + +jobs: + check: + runs-on: ubuntu-latest + + steps: + - name: Checkout Repository + uses: actions/checkout@v2 + with: + submodules: 'recursive' + + - name: Set up Rust + uses: actions-rs/toolchain@v1 + with: + profile: minimal + toolchain: stable + override: true + + - name: Run Cargo Check + working-directory: ./subnet # Ensures we're in the correct directory + run: cargo check diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml new file mode 100644 index 00000000..b983ae85 --- /dev/null +++ b/.github/workflows/coverage.yml @@ -0,0 +1,52 @@ +name: Code Coverage + +on: + push: + branches: + - main + +jobs: + coverage: + runs-on: ubuntu-latest + steps: + - name: Checkout Repository + uses: actions/checkout@v2 + with: + submodules: 'recursive' + + - name: Set up Rust + uses: actions-rs/toolchain@v1 + with: + profile: minimal + toolchain: stable + components: rustfmt + override: true + + - name: Install kcov + run: | + sudo apt-get update + sudo apt-get install -y pkg-config libcurl4-openssl-dev libelf-dev libdw-dev cmake gcc binutils-dev libiberty-dev + wget https://github.com/SimonKagstrom/kcov/archive/master.tar.gz + tar xzf master.tar.gz + mkdir kcov-master/build + cd kcov-master/build + cmake .. 
+ make + sudo make install + + - name: Build for Coverage + run: | + cargo build + + - name: Test & Generate Coverage Report + run: | + for file in target/debug/deps/*; do + mkdir -p "target/cov/$(basename $file)"; + kcov --exclude-pattern=/.cargo,/usr/lib --verify "target/cov/$(basename $file)" "$file"; + done + + - name: Upload Coverage to Codecov + uses: codecov/codecov-action@v1 + with: + token: ${{ secrets.CODECOV_TOKEN }} # Add your Codecov token here + directory: ./target/cov/ diff --git a/.github/workflows/mirror.yml b/.github/workflows/mirror.yml new file mode 100644 index 00000000..a7700c0b --- /dev/null +++ b/.github/workflows/mirror.yml @@ -0,0 +1,25 @@ +name: Force Push to Movement Framework Mirror + +on: + push: + branches: + - main # Change this to the branch you want to trigger the action on + +jobs: + force-push: + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v2 + + - name: Pushes to another repository + uses: cpina/github-action-push-to-another-repository@main + env: + API_TOKEN_GITHUB: ${{ secrets.API_TOKEN_GITHUB }} + with: + source-directory: 'vm/aptos-vm/aptos-move' + destination-github-username: 'movemntdev' + destination-repository-name: 'movement-framework-mirror' + user-email: l.mak.monninger@gmail.com + target-branch: main # maybe this should be changed diff --git a/.github/workflows/platform/dev.yml b/.github/workflows/platform/dev.yml new file mode 100644 index 00000000..e69de29b diff --git a/.github/workflows/platform/prod.yml b/.github/workflows/platform/prod.yml new file mode 100644 index 00000000..e69de29b diff --git a/.github/workflows/platform/test.yml b/.github/workflows/platform/test.yml new file mode 100644 index 00000000..e69de29b diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 00000000..4cec4e8e --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,111 @@ +name: Binary Release + +on: + push: + branches: + - main + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - name: Checkout Repository + uses: actions/checkout@v2 + with: + submodules: 'recursive' # Ensures submodules are fetched + + - name: Set up Rust + uses: actions-rs/toolchain@v1 + with: + profile: minimal + toolchain: stable + override: true + + - name: Install Dependencies + run: | + sudo apt-get update + sudo apt-get install -y build-essential + + - name: Run dev setup script + run: | + cd "$GITHUB_WORKSPACE/aptos-pre-core" + chmod -R 755 ./scripts + echo "yes" | ./scripts/dev_setup.sh + + - name: Build subnet binaries + run: | + cd "$GITHUB_WORKSPACE/subnet" + cargo build --release + + - name: Archive binaries + uses: actions/upload-artifact@v2 + with: + name: binaries + path: | + $GITHUB_WORKSPACE/subnet/target/release/* + + - name: Set up cross-compilation environment + uses: crazy-max/ghaction-docker-buildx@v3 + with: + version: latest + + - name: Build binaries for macOS + run: | + cd "$GITHUB_WORKSPACE/subnet" + docker buildx create --name mybuilder + docker buildx use mybuilder + docker buildx inspect --bootstrap + cargo build --release --target=x86_64-apple-darwin + + - name: Archive macOS binaries + uses: actions/upload-artifact@v2 + with: + name: macos-binaries + path: | + $GITHUB_WORKSPACE/subnet/target/x86_64-apple-darwin/release/* + + - name: Build binaries for Linux arm64 + run: | + cd "$GITHUB_WORKSPACE/subnet" + docker buildx create --name mybuilder + docker buildx use mybuilder + docker buildx inspect --bootstrap + cargo build --release 
--target=aarch64-unknown-linux-gnu + + - name: Archive Linux arm64 binaries + uses: actions/upload-artifact@v2 + with: + name: linux-arm64-binaries + path: | + $GITHUB_WORKSPACE/subnet/target/aarch64-unknown-linux-gnu/release/* + + - name: Build binaries for Linux x86_64 + run: | + cd "$GITHUB_WORKSPACE/subnet" + docker buildx create --name mybuilder + docker buildx use mybuilder + docker buildx inspect --bootstrap + cargo build --release --target=x86_64-unknown-linux-gnu + + - name: Archive Linux x86_64 binaries + uses: actions/upload-artifact@v2 + with: + name: linux-x86_64-binaries + path: | + $GITHUB_WORKSPACE/subnet/target/x86_64-unknown-linux-gnu/release/* + + - name: Build binaries for Windows + run: | + cd "$GITHUB_WORKSPACE/subnet" + docker buildx create --name mybuilder + docker buildx use mybuilder + docker buildx inspect --bootstrap + cargo build --release --target=x86_64-pc-windows-gnu + + - name: Archive Windows binaries + uses: actions/upload-artifact@v2 + with: + name: windows-binaries + path: | + $GITHUB_WORKSPACE/subnet/target/x86_64-pc-windows-gnu/release/*.exe diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 00000000..4a662d6c --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,36 @@ +name: Rust Tests + +on: + push: + branches: + - main + pull_request: + +jobs: + test: + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v2 + + - name: Set up Rust + uses: actions-rs/toolchain@v1 + with: + profile: minimal + toolchain: nightly + + - name: Build and Test Subnet + run: | + cd ./subnet + RUSTFLAGS="--cfg tokio_unstable" cargo build --all --all-targets + RUSTFLAGS="--cfg tokio_unstable" cargo test --all + + - name: Update Badge + run: | + if [ $? -eq 0 ]; then + sed -i 's/badge\/tests-[a-zA-Z]*/badge\/tests-Passing-brightgreen/g' README.md + else + sed -i 's/badge\/tests-[a-zA-Z]*/badge\/tests-Failing-red/g' README.md + fi + if: ${{ always() }} diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..cff6b599 --- /dev/null +++ b/.gitignore @@ -0,0 +1,62 @@ +./main + +*.log +*~ +.DS_Store + +awscpu + +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib +*.profile + +# Test binary, build with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# ignore GoLand metafiles directory +.idea/ + +*logs/ + +.vscode* + +*.pb* + +*cpu[0-9]* +*mem[0-9]* +*lock[0-9]* +*.profile +*.txt +*.swp +*.aux +*.fdb* +*.fls +*.gz +*.pdf + +.coverage + +bin/ +build/ +vendor + +genesis.json +*.test + +dist/ + +cmd/simulator/simulator-keys/* +cmd/simulator/simulator-keys-temp/* + +.ipfs/ +*.pk +.viewer/ +.searcher/ +.replayer/ diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 00000000..64321b04 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,14 @@ +[submodule "aptos-core"] + path = aptos-core + url = https://github.com/movemntdev/aptos-core + branch = m1 +[submodule "x25519-dalek"] + path = x25519-dalek + url = https://github.com/movemntdev/x25519-dalek +[submodule "ed25519-dalek"] + path = ed25519-dalek + url = https://github.com/movemntdev/ed25519-dalek +[submodule "aptos-pre-core"] + path = aptos-pre-core + url = https://github.com/movemntdev/aptos-core + branch = m1-pre-core diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000..dc33b08c --- /dev/null +++ b/LICENSE @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2022, Ava Labs, Inc. +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/LICENSE.header b/LICENSE.header new file mode 100644 index 00000000..6a6e5707 --- /dev/null +++ b/LICENSE.header @@ -0,0 +1,2 @@ +Copyright (C) 2023, Ava Labs, Inc. All rights reserved. +See the file LICENSE for licensing terms. diff --git a/README.md b/README.md new file mode 100644 index 00000000..af8a4dc8 --- /dev/null +++ b/README.md @@ -0,0 +1,71 @@ +
+ Project Logo + +# M1 + +[![License](https://img.shields.io/badge/license-MIT-blue.svg)](https://opensource.org/licenses/MIT) +[![Tests](https://img.shields.io/badge/tests-Passing-brightgreen)](#) +[![Build Status](https://img.shields.io/badge/build-Passing-brightgreen)](#) +[![Coverage](https://img.shields.io/codecov/c/github/username/project.svg)](https://codecov.io/gh/username/project) +[![Windows](https://img.shields.io/badge/Windows-Download-blue)](https://example.com/releases/windows) +[![macOS](https://img.shields.io/badge/macOS-Download-blue)](https://example.com/releases/macos) +[![Linux](https://img.shields.io/badge/Linux-Download-blue)](https://example.com/releases/linux) + +**An L1 for Move VM built on Avalanche.** + +
+
+
+## Table of Contents
+
+- [Introduction](#introduction)
+- [Features](#features)
+- [Installation](#installation)
+- [Usage](#usage)
+- [Contributing](#contributing)
+- [License](#license)
+
+---
+
+## Introduction
+
+The Move programming language offers numerous benefits to builders, including direct interaction with digital assets through custom resource types, flexibility in transaction script declaration, on-chain verification, and bytecode safety privileges.
+
+Movement M1 is designed for the Avalanche subnet, allowing users to seamlessly interact with and build with the Move language on a high-performance, modular, scalable, and interoperable Layer 1.
+
+- Movement will be able to hit 160,000+ theoretical TPS as the project scales to provide much-needed performance to protocols.
+- Move bytecode verifiers and interpreters provide native safeguards against the reentrancy attacks and security woes that have plagued Solidity developers for years, resulting in $3 billion lost last year.
+
+This repository contains the code and contributor documentation for M1. If you would like to learn how to use and develop for the platform, please visit [docs.movementlabs.xyz](https://docs.movementlabs.xyz).
+
+## Features
+
+Currently, M1 consists of:
+- A testnet with bootstrap nodes at `https://seed1-node.movementlabs.xyz`
+- An Aptos-compatible client called `movement`.
+- A fork of the Aptos framework.
+
+## Installation
+
+See [docs.movementlabs.xyz](https://docs.movementlabs.xyz) for a more complete installation guide. We recommend working with our Docker containers or using our installer.
+
+## Usage
+
+Once you've installed our platform, the easiest way to get started developing is to use the CLI to test code locally and publish to our testnet. A minimal TypeScript SDK connection sketch is also included at the end of this README.
+
+```bash
+# test
+movement move test
+
+# compile and publish
+movement move compile && movement move publish
+```
+
+## Contributing
+
+Please submit and review/comment on issues before contributing. Review [CONTRIBUTING.md](./CONTRIBUTING.md).
+
+## License
+
+This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
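+
+## TypeScript SDK Quickstart
+
+For a quick end-to-end check from TypeScript (referenced in the Usage section above), the minimal sketch below assumes the SDK in `m1/JavaScript-client` is installed under its published package name, `movement-sdk`, and that it keeps the Aptos SDK surface used by its bundled examples. The node URL and the `M1_NODE_URL` variable are placeholders for the REST endpoint of a testnet bootstrap node or a node you run locally.
+
+```ts
+// Minimal sketch, assuming `movement-sdk` exposes the Aptos-compatible client API
+// used throughout the examples in m1/JavaScript-client.
+import { AptosClient } from "movement-sdk";
+
+// Placeholder: M1_NODE_URL is not a standard variable; substitute your node's REST URL.
+const NODE_URL = process.env.M1_NODE_URL || "http://127.0.0.1:8080/v1";
+
+(async () => {
+  const client = new AptosClient(NODE_URL);
+
+  // Chain id and current ledger version of the network you are connected to.
+  const { chain_id, ledger_version } = await client.getLedgerInfo();
+  console.log(`chain id: ${chain_id}, ledger version: ${ledger_version}`);
+
+  // The framework account (0x1) always exists, so this doubles as a connectivity smoke test.
+  const resources = await client.getAccountResources("0x1");
+  console.log(`0x1 holds ${resources.length} resources`);
+})();
+```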
+ diff --git a/aptos-core b/aptos-core new file mode 160000 index 00000000..0c4e264c --- /dev/null +++ b/aptos-core @@ -0,0 +1 @@ +Subproject commit 0c4e264c85e8ee905a4f96a29834d200f37d35ef diff --git a/aptos-pre-core b/aptos-pre-core new file mode 160000 index 00000000..fee58a3d --- /dev/null +++ b/aptos-pre-core @@ -0,0 +1 @@ +Subproject commit fee58a3d5bd3552c777c4ccf051e86ff3919ddd1 diff --git a/ed25519-dalek b/ed25519-dalek new file mode 160000 index 00000000..ee09712b --- /dev/null +++ b/ed25519-dalek @@ -0,0 +1 @@ +Subproject commit ee09712b387912b6a28d3f3c58a9140e6fd94763 diff --git a/m1/.gitignore b/m1/.gitignore new file mode 100644 index 00000000..f2f9e58e --- /dev/null +++ b/m1/.gitignore @@ -0,0 +1,2 @@ +target +Cargo.lock \ No newline at end of file diff --git a/m1/Cargo.toml b/m1/Cargo.toml new file mode 100644 index 00000000..5dd762f1 --- /dev/null +++ b/m1/Cargo.toml @@ -0,0 +1,168 @@ +[workspace] +resolver = "2" +members = [ + "subnet", + # "movement", + # "movement-benchmark", +] + +[workspace.package] +version = "0.1.0" +edition = "2021" +license = "MIT OR Apache-2.0" +authors = ["Liam Monninger "] +homepage = "https://www.movementlabs.xyz" +publish = false +repository = "https://github.com/movemntdev/m2" +rust-version = "1.66" + +[workspace.dependencies] +anyhow = "1.0.62" +futures = "0.3.28" +rand = { version = "0.7.3" } +bcs = { git = "https://github.com/aptos-labs/bcs.git", rev = "d31fab9d81748e2594be5cd5cdf845786a30562d" } + +# aptos +# TODO: pleas remove unused dependencies +aptos = { path = "../aptos-pre-core/crates/aptos" } +aptos-accumulator = { path = "../aptos-pre-core/storage/accumulator" } +aptos-aggregator = { path = "../aptos-pre-core/aptos-move/aptos-aggregator" } +aptos-api = { path = "../aptos-pre-core/api" } +aptos-api-test-context = { path = "../aptos-pre-core/api/test-context" } +aptos-api-types = { path = "../aptos-pre-core/api/types" } +aptos-backup-cli = { path = "../aptos-pre-core/storage/backup/backup-cli" } +aptos-backup-service = { path = "../aptos-pre-core/storage/backup/backup-service" } +aptos-bounded-executor = { path = "../aptos-pre-core/crates/bounded-executor" } +aptos-block-executor = { path = "../aptos-pre-core/aptos-move/block-executor" } +aptos-bitvec = { path = "../aptos-pre-core/crates/aptos-bitvec" } +aptos-build-info = { path = "../aptos-pre-core/crates/aptos-build-info" } +aptos-cached-packages = { path = "../aptos-pre-core/aptos-move/framework/cached-packages" } +aptos-channels = { path = "../aptos-pre-core/crates/channel" } +aptos-cli-common = { path = "../aptos-pre-core/crates/aptos-cli-common" } +aptos-compression = { path = "../aptos-pre-core/crates/aptos-compression" } +aptos-consensus = { path = "../aptos-pre-core/consensus" } +aptos-consensus-notifications = { path = "../aptos-pre-core/state-sync/inter-component/consensus-notifications" } +aptos-consensus-types = { path = "../aptos-pre-core/consensus/consensus-types" } +aptos-config = { path = "../aptos-pre-core/config" } +aptos-crash-handler = { path = "../aptos-pre-core/crates/crash-handler" } +aptos-crypto = { path = "../aptos-pre-core/crates/aptos-crypto" } +aptos-crypto-derive = { path = "../aptos-pre-core/crates/aptos-crypto-derive" } +aptos-data-client = { path = "../aptos-pre-core/state-sync/aptos-data-client" } +aptos-data-streaming-service = { path = "../aptos-pre-core/state-sync/state-sync-v2/data-streaming-service" } +aptos-db = { path = "../aptos-pre-core/storage/aptosdb" } +aptos-db-indexer = { path = "../aptos-pre-core/storage/indexer" } +aptos-db-tool = 
{ path = "../aptos-pre-core/storage/db-tool" } +aptos-debugger = { path = "../aptos-pre-core/aptos-move/aptos-debugger" } +aptos-event-notifications = { path = "../aptos-pre-core/state-sync/inter-component/event-notifications" } +aptos-executable-store = { path = "../aptos-pre-core/storage/executable-store" } +aptos-executor = { path = "../aptos-pre-core/execution/executor" } +aptos-block-partitioner = { path = "../aptos-pre-core/execution/block-partitioner" } +aptos-enum-conversion-derive = { path = "../aptos-pre-core/crates/aptos-enum-conversion-derive" } +aptos-executor-service = { path = "../aptos-pre-core/execution/executor-service" } +aptos-executor-test-helpers = { path = "../aptos-pre-core/execution/executor-test-helpers" } +aptos-executor-types = { path = "../aptos-pre-core/execution/executor-types" } +aptos-faucet-cli = { path = "../aptos-pre-core/crates/aptos-faucet/cli" } +aptos-faucet-core = { path = "../aptos-pre-core/crates/aptos-faucet/core" } +aptos-faucet-service = { path = "../aptos-pre-core/crates/aptos-faucet/service" } +aptos-faucet-metrics-server = { path = "../aptos-pre-core/crates/aptos-faucet/metrics-server" } +aptos-fallible = { path = "../aptos-pre-core/crates/fallible" } +aptos-forge = { path = "../aptos-pre-core/testsuite/forge" } +aptos-framework = { path = "../aptos-pre-core/aptos-move/framework" } +aptos-fuzzer = { path = "../aptos-pre-core/testsuite/aptos-fuzzer" } +fuzzer = { path = "../aptos-pre-core/testsuite/fuzzer" } +aptos-gas = { path = "../aptos-pre-core/aptos-move/aptos-gas" } +aptos-gas-algebra-ext = { path = "../aptos-pre-core/aptos-move/gas-algebra-ext" } +aptos-gas-profiling = { path = "../aptos-pre-core/aptos-move/aptos-gas-profiling" } +aptos-genesis = { path = "../aptos-pre-core/crates/aptos-genesis" } +aptos-github-client = { path = "../aptos-pre-core/crates/aptos-github-client" } +aptos-global-constants = { path = "../aptos-pre-core/config/global-constants" } +aptos-id-generator = { path = "../aptos-pre-core/crates/aptos-id-generator" } +aptos-indexer = { path = "../aptos-pre-core/crates/indexer" } +aptos-indexer-grpc-cache-worker = { path = "../aptos-pre-core/ecosystem/indexer-grpc/indexer-grpc-cache-worker" } +aptos-indexer-grpc-data-service = { path = "../aptos-pre-core/ecosystem/indexer-grpc/indexer-grpc-data-service" } +aptos-indexer-grpc-file-store = { path = "../aptos-pre-core/ecosystem/indexer-grpc/indexer-grpc-file-store" } +aptos-indexer-grpc-post-processor = { path = "../aptos-pre-core/ecosystem/indexer-grpc/indexer-grpc-post-processor" } +aptos-indexer-grpc-fullnode = { path = "../aptos-pre-core/ecosystem/indexer-grpc/indexer-grpc-fullnode" } +aptos-indexer-grpc-utils = { path = "../aptos-pre-core/ecosystem/indexer-grpc/indexer-grpc-utils" } +aptos-indexer-grpc-parser = { path = "../aptos-pre-core/ecosystem/indexer-grpc/indexer-grpc-parser" } +aptos-indexer-grpc-server-framework = { path = "../aptos-pre-core/ecosystem/indexer-grpc/indexer-grpc-server-framework" } +aptos-infallible = { path = "../aptos-pre-core/crates/aptos-infallible" } +aptos-inspection-service = { path = "../aptos-pre-core/crates/aptos-inspection-service" } +aptos-jellyfish-merkle = { path = "../aptos-pre-core/storage/jellyfish-merkle" } +aptos-keygen = { path = "../aptos-pre-core/crates/aptos-keygen" } +aptos-language-e2e-tests = { path = "../aptos-pre-core/aptos-move/e2e-tests" } +aptos-ledger = { path = "../aptos-pre-core/crates/aptos-ledger" } +aptos-log-derive = { path = "../aptos-pre-core/crates/aptos-log-derive" } +aptos-logger = { path = 
"../aptos-pre-core/crates/aptos-logger" } +aptos-memory-usage-tracker = { path = "../aptos-pre-core/aptos-move/aptos-memory-usage-tracker" } +aptos-mempool = { path = "../aptos-pre-core/mempool" } +aptos-mempool-notifications = { path = "../aptos-pre-core/state-sync/inter-component/mempool-notifications" } +aptos-memsocket = { path = "../aptos-pre-core/network/memsocket" } +aptos-metrics-core = { path = "../aptos-pre-core/crates/aptos-metrics-core" } +aptos-move-examples = { path = "../aptos-pre-core/aptos-move/move-examples" } +aptos-moving-average = { path = "../aptos-pre-core/crates/moving-average" } +aptos-mvhashmap = { path = "../aptos-pre-core/aptos-move/mvhashmap" } +aptos-netcore = { path = "../aptos-pre-core/network/netcore" } +aptos-network = { path = "../aptos-pre-core/network" } +aptos-network-builder = { path = "../aptos-pre-core/network/builder" } +aptos-network-checker = { path = "../aptos-pre-core/crates/aptos-network-checker" } +aptos-network-discovery = { path = "../aptos-pre-core/network/discovery" } +aptos-node = { path = "../aptos-pre-core/aptos-node" } +aptos-node-checker = { path = "../aptos-pre-core/ecosystem/node-checker" } +aptos-node-identity = { path = "../aptos-pre-core/crates/aptos-node-identity" } +aptos-node-resource-metrics = { path = "../aptos-pre-core/crates/node-resource-metrics" } +aptos-num-variants = { path = "../aptos-pre-core/crates/num-variants" } +aptos-openapi = { path = "../aptos-pre-core/crates/aptos-openapi" } +aptos-package-builder = { path = "../aptos-pre-core/aptos-move/package-builder" } +aptos-peer-monitoring-service-client = { path = "../aptos-pre-core/network/peer-monitoring-service/client" } +aptos-peer-monitoring-service-server = { path = "../aptos-pre-core/network/peer-monitoring-service/server" } +aptos-peer-monitoring-service-types = { path = "../aptos-pre-core/network/peer-monitoring-service/types" } +aptos-proptest-helpers = { path = "../aptos-pre-core/crates/aptos-proptest-helpers" } +aptos-protos = { path = "../aptos-pre-core/crates/aptos-protos" } +aptos-proxy = { path = "../aptos-pre-core/crates/proxy" } +aptos-push-metrics = { path = "../aptos-pre-core/crates/aptos-push-metrics" } +aptos-rate-limiter = { path = "../aptos-pre-core/crates/aptos-rate-limiter" } +aptos-release-builder = { path = "../aptos-pre-core/aptos-move/aptos-release-builder" } +aptos-resource-viewer = { path = "../aptos-pre-core/aptos-move/aptos-resource-viewer" } +aptos-rest-client = { path = "../aptos-pre-core/crates/aptos-rest-client" } +aptos-retrier = { path = "../aptos-pre-core/crates/aptos-retrier" } +aptos-rocksdb-options = { path = "../aptos-pre-core/storage/rocksdb-options" } +aptos-rosetta = { path = "../aptos-pre-core/crates/aptos-rosetta" } +aptos-runtimes = { path = "../aptos-pre-core/crates/aptos-runtimes" } +aptos-safety-rules = { path = "../aptos-pre-core/consensus/safety-rules" } +aptos-schemadb = { path = "../aptos-pre-core/storage/schemadb" } +aptos-scratchpad = { path = "../aptos-pre-core/storage/scratchpad" } +aptos-sdk = { path = "../aptos-pre-core/sdk" } +aptos-sdk-builder = { path = "../aptos-pre-core/aptos-move/aptos-sdk-builder" } +aptos-secure-net = { path = "../aptos-pre-core/secure/net" } +aptos-secure-storage = { path = "../aptos-pre-core/secure/storage" } +aptos-short-hex-str = { path = "../aptos-pre-core/crates/short-hex-str" } +aptos-speculative-state-helper = { path = "../aptos-pre-core/crates/aptos-speculative-state-helper" } +aptos-state-sync-driver = { path = 
"../aptos-pre-core/state-sync/state-sync-v2/state-sync-driver" } +aptos-state-view = { path = "../aptos-pre-core/storage/state-view" } +aptos-storage-interface = { path = "../aptos-pre-core/storage/storage-interface" } +aptos-storage-service-client = { path = "../aptos-pre-core/state-sync/storage-service/client" } +aptos-storage-service-notifications = { path = "../aptos-pre-core/state-sync/inter-component/storage-service-notifications" } +aptos-storage-service-types = { path = "../aptos-pre-core/state-sync/storage-service/types" } +aptos-storage-service-server = { path = "../aptos-pre-core/state-sync/storage-service/server" } +aptos-telemetry = { path = "../aptos-pre-core/crates/aptos-telemetry" } +aptos-telemetry-service = { path = "../aptos-pre-core/crates/aptos-telemetry-service" } +aptos-temppath = { path = "../aptos-pre-core/crates/aptos-temppath" } +aptos-testcases = { path = "../aptos-pre-core/testsuite/testcases" } +aptos-time-service = { path = "../aptos-pre-core/crates/aptos-time-service", features = [ + "async", +] } +aptos-transaction-emitter-lib = { path = "../aptos-pre-core/crates/transaction-emitter-lib" } +aptos-transaction-generator-lib = { path = "../aptos-pre-core/crates/transaction-generator-lib" } +aptos-transactional-test-harness = { path = "../aptos-pre-core/aptos-move/aptos-transactional-test-harness" } +aptos-types = { path = "../aptos-pre-core/types" } +aptos-utils = { path = "../aptos-pre-core/aptos-utils" } +aptos-validator-interface = { path = "../aptos-pre-core/aptos-move/aptos-validator-interface" } +aptos-vault-client = { path = "../aptos-pre-core/secure/storage/vault" } +aptos-vm = { path = "../aptos-pre-core/aptos-move/aptos-vm" } +aptos-vm-logging = { path = "../aptos-pre-core/aptos-move/aptos-vm-logging" } +aptos-vm-genesis = { path = "../aptos-pre-core/aptos-move/vm-genesis" } +aptos-vm-types = { path = "../aptos-pre-core/aptos-move/aptos-vm-types" } +aptos-vm-validator = { path = "../aptos-pre-core/vm-validator" } +aptos-warp-webserver = { path = "../aptos-pre-core/crates/aptos-warp-webserver" } +aptos-writeset-generator = { path = "../aptos-pre-core/aptos-move/writeset-transaction-generator" } \ No newline at end of file diff --git a/m1/JavaScript-client/.eslintignore b/m1/JavaScript-client/.eslintignore new file mode 100644 index 00000000..4123c09b --- /dev/null +++ b/m1/JavaScript-client/.eslintignore @@ -0,0 +1,4 @@ +node_modules +packages/**/node_modules/ +dist/** +**/*.test.ts diff --git a/m1/JavaScript-client/.eslintrc.js b/m1/JavaScript-client/.eslintrc.js new file mode 100644 index 00000000..86745e95 --- /dev/null +++ b/m1/JavaScript-client/.eslintrc.js @@ -0,0 +1,36 @@ +module.exports = { + env: { + browser: true, + es2021: true, + node: true, + }, + ignorePatterns: ["*.js", "examples/*", "src/indexer/generated/**", "scripts/publish_ans_contract.ts"], + extends: ["airbnb-base", "airbnb-typescript/base", "prettier"], + parser: "@typescript-eslint/parser", + parserOptions: { + tsconfigRootDir: __dirname, + project: ["tsconfig.json"], + ecmaVersion: "latest", + sourceType: "module", + }, + plugins: ["@typescript-eslint"], + rules: { + quotes: ["error", "double"], + "max-len": ["error", 120], + "import/extensions": ["error", "never"], + "max-classes-per-file": ["error", 10], + "import/prefer-default-export": "off", + "object-curly-newline": "off", + "no-use-before-define": "off", + "no-unused-vars": "off", + "@typescript-eslint/no-use-before-define": ["error", { functions: false, classes: false }], + "@typescript-eslint/no-unused-vars": 
["error"], + }, + settings: { + "import/resolver": { + node: { + extensions: [".js", ".jsx", ".ts", ".tsx"], + }, + }, + }, +}; diff --git a/m1/JavaScript-client/.gitignore b/m1/JavaScript-client/.gitignore new file mode 100644 index 00000000..afdee0d2 --- /dev/null +++ b/m1/JavaScript-client/.gitignore @@ -0,0 +1,17 @@ +.env +.DS_Store +*/**/.DS_Store +npm-debug.log +.npm/ +/coverage +/tmp +node_modules +.idea/ +.history/ +.vscode/ +dist/ +.nyc_output/ +build/ + +# Doc generation output +docs/ \ No newline at end of file diff --git a/m1/JavaScript-client/.npmignore b/m1/JavaScript-client/.npmignore new file mode 100644 index 00000000..5f7ebc02 --- /dev/null +++ b/m1/JavaScript-client/.npmignore @@ -0,0 +1,4 @@ +coverage +node_modules +.aptos +.env diff --git a/m1/JavaScript-client/.nvmrc b/m1/JavaScript-client/.nvmrc new file mode 100644 index 00000000..15a54169 --- /dev/null +++ b/m1/JavaScript-client/.nvmrc @@ -0,0 +1 @@ +v16.14.0n diff --git a/m1/JavaScript-client/.prettierignore b/m1/JavaScript-client/.prettierignore new file mode 100644 index 00000000..9543bddf --- /dev/null +++ b/m1/JavaScript-client/.prettierignore @@ -0,0 +1,2 @@ +src/generated/* +src/indexer/generated/** diff --git a/m1/JavaScript-client/.versionrc.json b/m1/JavaScript-client/.versionrc.json new file mode 100644 index 00000000..94df5f41 --- /dev/null +++ b/m1/JavaScript-client/.versionrc.json @@ -0,0 +1,19 @@ +{ + "types": [ + { "type": "feat", "section": "Features" }, + { "type": "fix", "section": "Bug Fixes" }, + { "type": "chore", "hidden": true }, + { "type": "docs", "hidden": true }, + { "type": "style", "hidden": true }, + { "type": "refactor", "hidden": true }, + { "type": "perf", "hidden": true }, + { "type": "test", "hidden": true } + ], + "skip": { + "bump": true, + "commit": true, + "tag": true + }, + "path": ".", + "header": "# Changelog\n\nAll notable changes to this project will be documented in this file.\n\nNote: This changelog is generated automatically.\n\n" +} diff --git a/m1/JavaScript-client/CHANGELOG.md b/m1/JavaScript-client/CHANGELOG.md new file mode 100644 index 00000000..6509817c --- /dev/null +++ b/m1/JavaScript-client/CHANGELOG.md @@ -0,0 +1,225 @@ +# Aptos TS SDK Changelog + +All notable changes to the Aptos Node SDK will be captured in this file. This changelog is written by hand for now. It adheres to the format set out by [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). + +**Note:** The Aptos TS SDK does not follow semantic version while we are in active development. Instead, breaking changes will be announced with each devnet cut. Once we launch our mainnet, the SDK will follow semantic versioning closely. + +## Unreleased + +## 1.8.5 (2023-04-29) + +- Add local tests for `AnsClient` +- Add `AptosToken` plugin to support tokenv2 +- Add generic support to input params in move entry functions +- Add signature verification method to AptosAccount. 
+
+## 1.8.4 (2023-04-13)
+
+- Move `TypeTagParser` from `builder_utils.ts` to `type_tag.ts`
+- Update `StructTag.fromString()` to use and rely on TypeTagParser
+
+## 1.8.3 (2023-04-10)
+
+- Add `publish-ans-contract` script and pnpm command for tests
+- Revert User-Agent header from both `AptosClient` and `IndexerClient` due to a browser error
+
+## 1.8.2 (2023-04-06)
+
+- Introduce `AnsClient` class to support ANS (Aptos Names Service) data fetching queries
+- Add `User-Agent` header to `AptosClient` and `IndexerClient` queries
+- Add Indexer queries to `IndexerClient` - `getAccountCoinsData`, `getAccountTokensCount`, `getAccountTransactionsCount`, `getAccountTransactionsData`, `getCurrentDelegatorBalancesCount`, `getDelegatedStakingActivities`, `getTokensActivitiesCount`, `getTokenData`, `getTokenOwnersData`, `getTopUserTransactions`, `getUserTransactions`
+- Add conversion layer to `IndexerClient` queries to handle missing `0x`
+- Add validation layer to `IndexerClient` to validate queried account address is in the long format, i.e. 66 chars long (0x<64 chars>)
+- Change `queryIndexer` function in `IndexerClient` class visibility to public
+- Add mint Aptos Name function `mintAptosName()` to `AnsClient` class
+
+## 1.7.2 (2023-03-13)
+
+- `CoinClient` and `TokenClient` to use remote ABI instead of local ABIs
+- Reorganize the SDK file structure for better readability and maintainability
+- Add `getIndexerLedgerInfo` query to `IndexerClient`
+
+## 1.7.1 (2023-03-02)
+
+- Fix IndexerClient error parsing using JSON.stringify() to display the error message correctly on the console
+
+## 1.7.0 (2023-03-01)
+
+- Add Indexer support. We introduce a new class `IndexerClient` that queries our Indexer to support data shaping and fetching, providing users with a seamless experience.
+- Introduce a `Provider` class that we can initialize and use to query our blockchain, hiding the underlying implementation (fullnode vs indexer)
+
+## 1.6.0 (2023-01-20)
+
+- Add support for Move view functions
+
+## 1.5.0 (2023-01-05)
+
+- Export classes from property_map_serde
+- User can specify token string property type using "string", "String" or "0x1::string::String" to serde the string token property on-chain
+- Use `getAccountResource` to replace `getAccountResources` in `CoinClient#checkBalance`, which can reduce network load.
+
+## 1.4.0 (2022-11-30)
+
+- Add missing fields to TokenData class
+- Add PropertyMap and PropertyValue type to match on-chain data
+- Support token property map deserializer to read the property map in the original data format.
+- Allow `checkBalance` in `CoinClient` to take in a `MaybeHexString` as well as `AptosAccount`, since users might want to check the balance of accounts they don't own (which is generally how you use `AptosAccount`).
+- Similar to `checkBalance`, allow `transfer` in `CoinClient` to take in a `MaybeHexString` for the `receiver` argument.
+- Add a new `createReceiverIfMissing` argument to `transfer` in `CoinClient`. If set, the `0x1::aptos_account::transfer` function will be called instead of `0x1::coin::transfer`, which will create the account on chain if it doesn't exist instead of failing.
+
+## 1.3.17 (2022-11-08)
+
+- Support computing resource account address based off a source address and a seed
+- Exported ABI types
+- `getAccountModules` and `getAccountResources` now use pagination under the hood. This addresses the issue raised here: https://github.com/aptos-labs/aptos-core/issues/5298.
The changes are non-breaking; if you use these functions with an older node that hasn't updated to include the relevant support in its API service, it will still work as it did before.
+- To support the above, the generated client has been updated to attach the headers to the response object, as per the changes here: https://github.com/aptos-labs/openapi-typescript-codegen/compare/v0.23.0...aptos-labs:openapi-typescript-codegen:0.24.0?expand=1. Consider this an implementation detail, not a supported part of the SDK interface.
+- Add functions to the token client to support:
+  - direct transfer with opt-in
+  - burn token by owner
+  - burn token by creator
+  - mutate token properties
+- Add a property map serializer to serialize inputs to BCS encoding
+
+## 1.3.16 (2022-10-12)
+
+- Add `estimatePrioritizedGasUnitPrice` to the simulation interface. If set to true, the estimated gas unit price is higher than the original estimate. Therefore, transactions have a higher chance to be executed during congestion periods.
+- `estimateGasPrice` now returns `deprioritized_gas_estimate` and `prioritized_gas_estimate` along with `gas_estimate`. `deprioritized_gas_estimate` is a conservative price estimate. Users might end up paying less gas eventually, but the transaction execution is deprioritized by the blockchain. On the other hand, `prioritized_gas_estimate` is a higher price estimate. Transactions that need to be executed sooner can use `prioritized_gas_estimate`.
+
+## 1.3.15 (2022-09-30)
+
+- **[Breaking Changes]** Following the deprecation notice in the release notes of 1.3.13, the following breaking changes have landed in this release. Please see the notes from the last release for information on the new endpoints you must migrate to:
+  - The `getEventsByEventKey` function has been removed.
+  - The `key` field in the `Event` struct has been removed.
+- Turn on `strict` in tsconfig
+
+## 1.3.14 (2022-09-20)
+
+- Enable SDK axios client to carry cookies for both the browser and node environments.
+- Added new functions `getBlockByHeight` and `getBlockByVersion`.
+
+## 1.3.13 (2022-09-15)
+
+- Increase the default wait time for `waitForTransactionWithResult` to 20s.
+- A new function called `getEventsByCreationNumber` has been added, corresponding to the new endpoint on the API. For more information on this change, see the [API changelog](https://github.com/aptos-labs/aptos-core/blob/main/api/doc/CHANGELOG.md) for API version 1.1.0.
+- **[Deprecated]** The `getEventsByEventKey` function is now deprecated. In the next release it will be removed entirely. You must migrate to the new function, `getEventsByCreationNumber`, by then.
+- Included in the `Event` struct (which is what the events endpoints return) is a new field called `guid`. This is a more easily interpretable representation of an event identifier than the `key` field. See the [API changelog](https://github.com/aptos-labs/aptos-core/blob/main/api/doc/CHANGELOG.md) for an example of the new field.
+- **[Deprecated]** The `key` field in the `Event` struct is now deprecated. In the next release it will be removed entirely. You must migrate to using the `guid` field by then.
+- Removed NPM dependencies ed25519-hd-key and typescript-memoize.
+- Added IIFE bundle that can be served from a CDN. No NPM is required to use the SDK in a browser environment.
+
+## 1.3.12 (2022-09-08)
+
+- Feature to rotate the auth key for single-signature accounts
+
+## 1.3.11 (2022-08-31)
+
+- Upgraded typescript version from 4.7.4 to 4.8.2, as well as linter package versions.
+- **[Breaking Change]** ModuleBundle transaction support is removed. Instead, SDK users should use `AptosClient.publishPackage` to publish Move packages.
+- Expose detailed API errors.
+- Accept stringified values as transaction payload parameters.
+
+## 1.3.10 (2022-08-26)
+
+- Fix the bug in `waitForTransactionWithResult`. When the API returns `404`, the function should continue waiting rather than returning early. The reason is that the txn might not be committed promptly. `waitForTransactionWithResult` should either time out or get an error in such a case.
+
+## 1.3.9 (2022-08-25)
+
+- **[Breaking Change]** Reimplemented the JSON transaction submission interfaces with BCS. This is a breaking change. `createSigningMessage` is removed. Before the changes, the transaction payloads took string arguments. But now, TypeScript payload arguments have to match the smart contract argument types. e.g. `number` matches `u8`, `number | bigint` matches `u64` and `u128`, etc.
+- **[Breaking Change]** `getTokenBalance` and `getTokenBalanceForAccount` have been renamed to `getToken` and `getTokenForAccount`, since they were never getting just the balance, but the full token.
+- Added `CoinClient` to help working with coins. This contains common operations such as `transfer`, `checkBalance`, etc. A brief usage sketch appears below, after the 1.3.3 notes.
+- Added `generateSignSubmitWaitForTransaction`, a function that provides a simple way to execute the full end-to-end transaction submission flow. You may also leverage `generateSignSubmit`, a helper that does the same but without waiting, instead returning the transaction hash.
+- Added `fromDerivePath` to `AptosAccount`. You can use this to create an `AptosAccount` (which is a local representation of an account) using a bip44 path and mnemonics.
+
+## 1.3.7 (2022-08-17)
+
+- Add a transaction builder that is able to serialize transaction arguments with remote ABIs. Remote ABIs are fetchable through REST APIs. With the remote ABI transaction builder, developers can build BCS transactions by only providing the native JS values.
+- Make all functions that accept `BigInt` parameters accept `BigInt | number` instead.
+
+## 1.3.6 (2022-08-10)
+
+- Switch back to representing certain move types (MoveModuleId, MoveStructTag, ScriptFunctionId) as strings, for both requests and responses. This reverts the change made in 1.3.2. See [#2663](https://github.com/aptos-labs/aptos-core/pull/2663) for more.
+- Represent certain fields with slightly different snake casing, e.g. `ed25519_signature` now instead of `ed_25519_signature`.
+- Add generated types for healthcheck endpoint.
+- If the given URL is missing `/v1`, the `AptosClient` constructor will add it for you. You can opt out of this behavior by setting `doNotFixNodeUrl` to true when calling the constructor.
+
+## 1.3.5 (2022-08-08)
+
+- Re-expose BCS and items from `transaction_builder/builder` from the root of the module.
+
+## 1.3.4 (2022-08-07)
+
+- Downscaled the default value for `max_gas`.
+
+## 1.3.3 (2022-08-05)
+
+- Update the token clients to submit transactions through the BCS interface. The new token client doesn't hex-code "name", "description" and "uri" anymore. String properties are passed and saved just as strings.
+- Expose `buildTransactionPayload` from the ABI transaction builder. In some scenarios, developers just want to get a TransactionPayload rather than a RawTransaction.
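+
+For reference, a minimal sketch of the `CoinClient` flow introduced in 1.3.9 above, combined with the `createReceiverIfMissing` option added in 1.4.0; it assumes the default devnet node and faucet URLs used by the bundled examples, and the exact `extraArgs` shape may differ between SDK versions.
+
+```ts
+import { AptosAccount, AptosClient, CoinClient, FaucetClient } from "aptos";
+
+// Same fallbacks as the bundled examples; override via environment variables.
+const NODE_URL = process.env.APTOS_NODE_URL || "https://fullnode.devnet.aptoslabs.com";
+const FAUCET_URL = process.env.APTOS_FAUCET_URL || "https://faucet.devnet.aptoslabs.com";
+
+(async () => {
+  const client = new AptosClient(NODE_URL);
+  const faucet = new FaucetClient(NODE_URL, FAUCET_URL);
+  const coinClient = new CoinClient(client);
+
+  const sender = new AptosAccount();
+  const receiver = new AptosAccount();
+  await faucet.fundAccount(sender.address(), 100_000_000); // fund the sender so it can pay gas
+
+  // transfer() builds, signs, and submits the transaction, returning its hash.
+  const txnHash = await coinClient.transfer(sender, receiver, 1_000, { createReceiverIfMissing: true });
+  await client.waitForTransaction(txnHash, { checkSuccess: true });
+
+  console.log(`receiver balance: ${await coinClient.checkBalance(receiver)}`);
+})();
+```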
+
+## 1.3.2 (2022-08-04)
+
+This special entry does not conform to the format set out by [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) as there are noteworthy breaking changes with necessary rationale. Future entries will follow this format.
+
+This release updates the SDK to work with V1 of the Aptos Node API. There are some key changes between V0 and V1 that you can read about in the [API changelog](https://github.com/aptos-labs/aptos-core/blob/main/api/doc/v1/CHANGELOG.md); refer to the notes for version 1.0.0. Accordingly, this SDK version represents breaking changes compared to 1.2.1.
+
+- The SDK now communicates by default with the `/v1` path of the API. It will not work correctly with the v0 API. If you provide a path yourself when instantiating a client, make sure you include `/v1`, e.g. http://fullnode.devnet.aptoslabs.com/v1.
+- As of this release, the API, API spec, client generated from that spec, SDK wrapper, and examples are all tested together in CI. Previously it was possible for these to be out of sync, or in some cases, they would test against a different deployment entirely, such as devnet. Now we make the guarantee that all these pieces from the same commit work together. Notably this means exactly that; there is no guarantee that the latest version of the SDK will work with a particular Aptos network, such as devnet, except for a network built from the same commit as the SDK.
+- The generated client within the SDK is generated using a different tool, [openapi-typescript-codegen](https://www.npmjs.com/package/openapi-typescript-codegen). Most of these changes are transparent to the user, as we continue to wrap the generated client, but some of the generated types are different, which we mention here.
+- Token types are no longer exposed from the generated client (under `Types`) as they are no longer part of the API (indeed, they never truly were). Instead, you can find these definitions exposed at `TokenTypes`.
+- Some functions, such as for getting account resources and events, no longer accept resource types as concatenated strings. For example:
+
+```tsx
+// Before:
+const aptosCoin = "0x1::coin::CoinStore<0x1::aptos_coin::AptosCoin>";
+// After:
+const aptosCoin = {
+  address: "0x1",
+  module: "coin",
+  name: "CoinStore",
+  generic_type_params: ["0x1::aptos_coin::AptosCoin"],
+};
+```
+
+- Similarly, some endpoints no longer return this data as a string, but in a structured format, e.g. `MoveStructTag`. Remember to use something like `lodash.isEqual` to do equality checks with these structs.
+- To help work with these different formats, functions for converting between them have been added to `utils`.
+- A new function, `waitForTransactionWithResult`, has been added to help wait for a transaction and then get access to the response from the server once the function exits.
+
+For help with migration, we recommend you see the updated examples under `examples/`; they demonstrate how to deal with some of these changes, such as the more structured responses. We are also available to assist in the [Aptos Discord](https://discord.com/invite/aptoslabs).
+
+**Deprecation Notice**: On September 1st we will remove the v0 API from the running nodes. As a user of the TS SDK, the best way you can migrate prior to this is by upgrading to version 1.3.2 or higher of the SDK. We will repeatedly remind developers of this upcoming deprecation as we approach that date.
+
+## 1.3.1 (2022-08-04)
+
+See release notes for 1.3.2.
+ +## 1.3.0 (2022-08-03) + +See release notes for 1.3.2. + +## 1.2.1 (2022-07-23) + +**Note:** This entry and earlier do not conform to the format set out by [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). + +### Features + +- Deprecate getTokenBalance api in SDK ([2ec554e](https://github.com/aptos-labs/aptos-core/commit/2ec554e6e40a81cee4e760f6f84ef7362c570240)) +- Memoize chain id in aptos client ([#1589](https://github.com/aptos-labs/aptos-core/issues/1589)) ([4a6453b](https://github.com/aptos-labs/aptos-core/commit/4a6453bf0e620247557854053b661446bff807a7)) +- **Multiagent:** Support multiagent transaction submission ([#1543](https://github.com/aptos-labs/aptos-core/issues/1543)) ([0f0c70e](https://github.com/aptos-labs/aptos-core/commit/0f0c70e8ed2fefa952f0c89b7edb78edc174cb49)) +- Support retrieving token balance for any account ([7f93c21](https://github.com/aptos-labs/aptos-core/commit/7f93c2100f8b8e848461a0b5a395bfb76ade8667)) + +### Bug Fixes + +- Get rid of "natual" calls ([#1678](https://github.com/aptos-labs/aptos-core/issues/1678)) ([54601f7](https://github.com/aptos-labs/aptos-core/commit/54601f79206ea0f8b8b1b0d6599d31832fc4d195)) + +## 1.2.0 (2022-06-28) + +### Features + +- Vector tests for transaction signing ([6210c10](https://github.com/aptos-labs/aptos-core/commit/6210c10d3192fd0417b35709545fae850099e4d4)) +- Add royalty support for NFT tokens ([93a2cd0](https://github.com/aptos-labs/aptos-core/commit/93a2cd0bfd644725ac524f419e94077e0b16343b)) +- Add transaction builder examples ([a710a50](https://github.com/aptos-labs/aptos-core/commit/a710a50e8177258d9c0766762b3c2959fc231259)) +- Support transaction simulation ([93073bf](https://github.com/aptos-labs/aptos-core/commit/93073bf1b508d00cfa1f8bb441ed57085fd08a82)) + +### Bug Fixes + +- Fix a typo, natual now becomes natural ([1b7d295](https://github.com/aptos-labs/aptos-core/commit/1b7d2957b79a5d2821ada0c5096cf43c412e0c2d)), closes [#1526](https://github.com/aptos-labs/aptos-core/issues/1526) +- Fix Javascript example ([5781fee](https://github.com/aptos-labs/aptos-core/commit/5781fee74b8f2b065e7f04c2f76952026860751d)), closes [#1405](https://github.com/aptos-labs/aptos-core/issues/1405) diff --git a/m1/JavaScript-client/CONTRIBUTING.md b/m1/JavaScript-client/CONTRIBUTING.md new file mode 100644 index 00000000..bb0a9040 --- /dev/null +++ b/m1/JavaScript-client/CONTRIBUTING.md @@ -0,0 +1,18 @@ +# Contribution Guidelines for Typescript SDK + +- Coding Styles + - File names must use Snake case. For example, `aptos_account.ts` . + - Class names must use Pascal case. For example, `class AuthenticationKey` . + - Function and method names must use Camel case. For example, `derivedAddress(): HexString` . + - Constants must use all caps (upper case) words separated by `_`. For example, `MAX_U8_NUMBER` . +- Comments + - Comments are required for new classes and functions. + - Comments should follow the TSDoc standard, [https://tsdoc.org/](https://tsdoc.org/). +- Lints and Formats + - ESlint (eslint) and Prettier (prettier) should be used for code checking and code formatting. Make sure to run `pnpm lint` and `pnpm fmt` after making changes to the code. +- Tests + - Unit tests are required for any non-trivial changes you make. + - The Jest testing framework is used in the repo and we recommend you use it. See Jest: [https://jestjs.io/](https://jestjs.io/). + - Make sure to run `pnpm test` after making changes. 
+- Commits + - Commit messages follow the [Angular convention](https://www.conventionalcommits.org/en/v1.0.0-beta.4/#summary). diff --git a/m1/JavaScript-client/README.md b/m1/JavaScript-client/README.md new file mode 100644 index 00000000..d5c674e3 --- /dev/null +++ b/m1/JavaScript-client/README.md @@ -0,0 +1,9 @@ +# SDK for Movement Node API + +## Quickstart + +The public SDK downloaded from [npmjs](https://www.npmjs.com/package/movement-sdk) is compatible with the [movement testnet](https://seed-node1.movementlabs.xyz). To start building, run below command in your project directory: + +```bash +pnpm add movement-sdk +``` diff --git a/m1/JavaScript-client/examples/README.md b/m1/JavaScript-client/examples/README.md new file mode 100644 index 00000000..bfd9ff25 --- /dev/null +++ b/m1/JavaScript-client/examples/README.md @@ -0,0 +1,5 @@ +**NOTE**: These examples are tested to work with the [latest SDK published to npmjs](https://www.npmjs.com/package/aptos), not the SDK in the parent directory. Accordingly, these examples should be run against devnet. This is how the examples work by default. + +If you'd like to learn more about how these examples work, please see the following tutorials: +- [Your first transaction](https://aptos.dev/tutorials/your-first-transaction-sdk) +- [Your first NFT](https://aptos.dev/tutorials/your-first-nft-sdk) diff --git a/m1/JavaScript-client/examples/javascript/index.js b/m1/JavaScript-client/examples/javascript/index.js new file mode 100644 index 00000000..1711417d --- /dev/null +++ b/m1/JavaScript-client/examples/javascript/index.js @@ -0,0 +1,82 @@ +require("dotenv").config(); + +const aptos = require("aptos"); + +const NODE_URL = process.env.APTOS_NODE_URL || "https://fullnode.devnet.aptoslabs.com"; +const FAUCET_URL = process.env.APTOS_FAUCET_URL || "https://faucet.devnet.aptoslabs.com"; + +const aptosCoin = "0x1::coin::CoinStore<0x1::aptos_coin::AptosCoin>"; + +(async () => { + const client = new aptos.AptosClient(NODE_URL); + const faucetClient = new aptos.FaucetClient(NODE_URL, FAUCET_URL, null); + const tokenClient = new aptos.TokenClient(client); + + const account1 = new aptos.AptosAccount(); + await faucetClient.fundAccount(account1.address(), 100_000_000); + let resources = await client.getAccountResources(account1.address()); + let accountResource = resources.find((r) => r.type === aptosCoin); + console.log(`account1 coins: ${accountResource.data.coin.value}. Should be 100_000_000!`); + + const account2 = new aptos.AptosAccount(); + await faucetClient.fundAccount(account2.address(), 0); + resources = await client.getAccountResources(account2.address()); + accountResource = resources.find((r) => r.type === aptosCoin); + console.log(`account2 coins: ${accountResource.data.coin.value}. Should be 0!`); + + const payload = { + type: "entry_function_payload", + function: "0x1::coin::transfer", + type_arguments: ["0x1::aptos_coin::AptosCoin"], + arguments: [account2.address().hex(), 717], + }; + const txnRequest = await client.generateTransaction(account1.address(), payload); + const signedTxn = await client.signTransaction(account1, txnRequest); + const transactionRes = await client.submitTransaction(signedTxn); + await client.waitForTransaction(transactionRes.hash); + + resources = await client.getAccountResources(account2.address()); + accountResource = resources.find((r) => r.type === aptosCoin); + console.log(`account2 coins: ${accountResource.data.coin.value}. 
Should be 717!`); + + const provider = new aptos.Provider(aptos.Network.DEVNET); + console.log("\n=== Checking if indexer devnet chainId same as fullnode chainId ==="); + const indexerLedgerInfo = await provider.getIndexerLedgerInfo(); + const fullNodeChainId = await provider.getChainId(); + + console.log(`\n fullnode chain id is: ${fullNodeChainId}, indexer chain id is: ${indexerLedgerInfo}`); + if (indexerLedgerInfo.ledger_infos[0].chain_id !== fullNodeChainId) { + console.log(`\n fullnode chain id and indexer chain id are not synced, skipping rest of tests`); + return; + } + + console.log("=== Creating account1's NFT Collection and Token ==="); + + const collectionName = "Alice's"; + const tokenName = "Alice's first token"; + + // Create the collection. + // :!:>section_4 + const txnHash1 = await tokenClient.createCollection( + account1, + collectionName, + "Alice's simple collection", + "https://alice.com", + ); // <:!:section_4 + await client.waitForTransaction(txnHash1, { checkSuccess: true }); + + // Create a token in that collection. + // :!:>section_5 + const txnHash2 = await tokenClient.createToken( + account1, + collectionName, + tokenName, + "Alice's simple token", + 1, + "https://aptos.dev/img/nyan.jpeg", + ); // <:!:section_5 + await client.waitForTransaction(txnHash2, { checkSuccess: true }); + + const nfts = await provider.getAccountNFTs(account1.address().hex()); + console.log(`account1 current token ownership: ${nfts.current_token_ownerships[0].amount}. Should be 1`); +})(); diff --git a/m1/JavaScript-client/examples/javascript/package.json b/m1/JavaScript-client/examples/javascript/package.json new file mode 100644 index 00000000..615f91e6 --- /dev/null +++ b/m1/JavaScript-client/examples/javascript/package.json @@ -0,0 +1,16 @@ +{ + "name": "js-test", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "node index.js" + }, + "keywords": [], + "author": "", + "license": "ISC", + "dependencies": { + "aptos": "latest", + "dotenv": "16.0.2" + } +} diff --git a/m1/JavaScript-client/examples/javascript/pnpm-lock.yaml b/m1/JavaScript-client/examples/javascript/pnpm-lock.yaml new file mode 100644 index 00000000..c853ae47 --- /dev/null +++ b/m1/JavaScript-client/examples/javascript/pnpm-lock.yaml @@ -0,0 +1,104 @@ +lockfileVersion: '6.0' + +dependencies: + aptos: + specifier: latest + version: 1.7.2 + dotenv: + specifier: 16.0.2 + version: 16.0.2 + +packages: + + /@noble/hashes@1.1.3: + resolution: {integrity: sha512-CE0FCR57H2acVI5UOzIGSSIYxZ6v/HOhDR0Ro9VLyhnzLwx0o8W1mmgaqlEUx4049qJDlIBRztv5k+MM8vbO3A==} + dev: false + + /@scure/base@1.1.1: + resolution: {integrity: sha512-ZxOhsSyxYwLJj3pLZCefNitxsj093tb2vq90mp2txoYeBqbcjDjqFhyM8eUjq/uFm6zJ+mUuqxlS2FkuSY1MTA==} + dev: false + + /@scure/bip39@1.1.0: + resolution: {integrity: sha512-pwrPOS16VeTKg98dYXQyIjJEcWfz7/1YJIwxUEPFfQPtc86Ym/1sVgQ2RLoD43AazMk2l/unK4ITySSpW2+82w==} + dependencies: + '@noble/hashes': 1.1.3 + '@scure/base': 1.1.1 + dev: false + + /aptos@1.7.2: + resolution: {integrity: sha512-unM7bPbu3UGoVB/EhTvA+QDo8nqb6pDfqttsKwC7nYavQnl4t5dxCoFfIFcbijBtSOTfo4is5ldi4Uz4cY9ESA==} + engines: {node: '>=11.0.0'} + dependencies: + '@noble/hashes': 1.1.3 + '@scure/bip39': 1.1.0 + axios: 0.27.2 + form-data: 4.0.0 + tweetnacl: 1.0.3 + transitivePeerDependencies: + - debug + dev: false + + /asynckit@0.4.0: + resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} + dev: false + + /axios@0.27.2: + resolution: {integrity: 
sha512-t+yRIyySRTp/wua5xEr+z1q60QmLq8ABsS5O9Me1AsE5dfKqgnCFzwiCZZ/cGNd1lq4/7akDWMxdhVlucjmnOQ==} + dependencies: + follow-redirects: 1.15.2 + form-data: 4.0.0 + transitivePeerDependencies: + - debug + dev: false + + /combined-stream@1.0.8: + resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==} + engines: {node: '>= 0.8'} + dependencies: + delayed-stream: 1.0.0 + dev: false + + /delayed-stream@1.0.0: + resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} + engines: {node: '>=0.4.0'} + dev: false + + /dotenv@16.0.2: + resolution: {integrity: sha512-JvpYKUmzQhYoIFgK2MOnF3bciIZoItIIoryihy0rIA+H4Jy0FmgyKYAHCTN98P5ybGSJcIFbh6QKeJdtZd1qhA==} + engines: {node: '>=12'} + dev: false + + /follow-redirects@1.15.2: + resolution: {integrity: sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==} + engines: {node: '>=4.0'} + peerDependencies: + debug: '*' + peerDependenciesMeta: + debug: + optional: true + dev: false + + /form-data@4.0.0: + resolution: {integrity: sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==} + engines: {node: '>= 6'} + dependencies: + asynckit: 0.4.0 + combined-stream: 1.0.8 + mime-types: 2.1.35 + dev: false + + /mime-db@1.52.0: + resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} + engines: {node: '>= 0.6'} + dev: false + + /mime-types@2.1.35: + resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} + engines: {node: '>= 0.6'} + dependencies: + mime-db: 1.52.0 + dev: false + + /tweetnacl@1.0.3: + resolution: {integrity: sha512-6rt+RN7aOi1nGMyC4Xa5DdYiukl2UWCbcJft7YhxReBGQD7OAM8Pbxw6YMo4r2diNEA8FEmu32YOn9rhaiE5yw==} + dev: false diff --git a/m1/JavaScript-client/examples/typescript-esm/index.ts b/m1/JavaScript-client/examples/typescript-esm/index.ts new file mode 100644 index 00000000..42d68089 --- /dev/null +++ b/m1/JavaScript-client/examples/typescript-esm/index.ts @@ -0,0 +1,137 @@ +/* eslint-disable no-console */ +import { AptosClient, AptosAccount, FaucetClient, BCS, TxnBuilderTypes, Provider, Network, TokenClient } from "aptos"; +import assert from "assert"; + +const NODE_URL = process.env.APTOS_NODE_URL || "https://fullnode.devnet.aptoslabs.com"; +const FAUCET_URL = process.env.APTOS_FAUCET_URL || "https://faucet.devnet.aptoslabs.com"; + +export const aptosCoinStore = "0x1::coin::CoinStore<0x1::aptos_coin::AptosCoin>"; + +const { + AccountAddress, + TypeTagStruct, + EntryFunction, + StructTag, + TransactionPayloadEntryFunction, + RawTransaction, + ChainId, +} = TxnBuilderTypes; + +/** + * This code example demonstrates the process of moving test coins from one account to another. + */ +(async () => { + const client = new AptosClient(NODE_URL); + const faucetClient = new FaucetClient(NODE_URL, FAUCET_URL); + const tokenClient = new TokenClient(client); + + // Generates key pair for a new account + const account1 = new AptosAccount(); + await faucetClient.fundAccount(account1.address(), 100_000_000); + let resources = await client.getAccountResources(account1.address()); + let accountResource = resources.find((r: any) => r.type === aptosCoinStore); + let balance = parseInt((accountResource?.data as any).coin.value); + assert(balance === 100_000_000); + console.log(`account1 coins: ${balance}. 
Should be 100_000_000!`); + + const account2 = new AptosAccount(); + // Creates the second account and fund the account with 0 AptosCoin + await faucetClient.fundAccount(account2.address(), 0); + resources = await client.getAccountResources(account2.address()); + accountResource = resources.find((r: any) => r.type === aptosCoinStore); + balance = parseInt((accountResource?.data as any).coin.value); + assert(balance === 0); + console.log(`account2 coins: ${balance}. Should be 0!`); + + const token = new TypeTagStruct(StructTag.fromString("0x1::aptos_coin::AptosCoin")); + + // TS SDK support 3 types of transaction payloads: `EntryFunction`, `Script` and `Module`. + // See https://aptos-labs.github.io/ts-sdk-doc/ for the details. + const entryFunctionPayload = new TransactionPayloadEntryFunction( + EntryFunction.natural( + // Fully qualified module name, `AccountAddress::ModuleName` + "0x1::coin", + // Module function + "transfer", + // The coin type to transfer + [token], + // Arguments for function `transfer`: receiver account address and amount to transfer + [BCS.bcsToBytes(AccountAddress.fromHex(account2.address())), BCS.bcsSerializeUint64(717)], + ), + ); + + const [{ sequence_number: sequenceNumber }, chainId] = await Promise.all([ + client.getAccount(account1.address()), + client.getChainId(), + ]); + + // See class definiton here + // https://aptos-labs.github.io/ts-sdk-doc/classes/TxnBuilderTypes.RawTransaction.html#constructor. + const rawTxn = new RawTransaction( + // Transaction sender account address + AccountAddress.fromHex(account1.address()), + BigInt(sequenceNumber), + entryFunctionPayload, + // Max gas unit to spend + BigInt(10000), + // Gas price per unit + BigInt(100), + // Expiration timestamp. Transaction is discarded if it is not executed within 10 seconds from now. + BigInt(Math.floor(Date.now() / 1000) + 10), + new ChainId(chainId), + ); + + // Sign the raw transaction with account1's private key + const bcsTxn = AptosClient.generateBCSTransaction(account1, rawTxn); + + const transactionRes = await client.submitSignedBCSTransaction(bcsTxn); + + await client.waitForTransaction(transactionRes.hash); + + resources = await client.getAccountResources(account2.address()); + accountResource = resources.find((r: any) => r.type === aptosCoinStore); + balance = parseInt((accountResource?.data as any).coin.value); + assert(balance === 717); + console.log(`account2 coins: ${balance}. Should be 717!`); + + const provider = new Provider(Network.DEVNET); + console.log("\n=== Checking if indexer devnet chainId same as fullnode chainId ==="); + const indexerLedgerInfo = await provider.getIndexerLedgerInfo(); + const fullNodeChainId = await provider.getChainId(); + + console.log(`\n fullnode chain id is: ${fullNodeChainId}, indexer chain id is: ${indexerLedgerInfo}`); + if (indexerLedgerInfo.ledger_infos[0].chain_id !== fullNodeChainId) { + console.log(`\n fullnode chain id and indexer chain id are not synced, skipping rest of tests`); + return; + } + + console.log("=== Creating account1's NFT Collection and Token ==="); + + const collectionName = "Alice's"; + const tokenName = "Alice's first token"; + + // Create the collection. + // :!:>section_4 + const txnHash1 = await tokenClient.createCollection( + account1, + collectionName, + "Alice's simple collection", + "https://alice.com", + ); // <:!:section_4 + await client.waitForTransaction(txnHash1, { checkSuccess: true }); + + // Create a token in that collection. 
+ // :!:>section_5 + const txnHash2 = await tokenClient.createToken( + account1, + collectionName, + tokenName, + "Alice's simple token", + 1, + "https://aptos.dev/img/nyan.jpeg", + ); // <:!:section_5 + await client.waitForTransaction(txnHash2, { checkSuccess: true }); + + const nfts = await provider.getAccountNFTs(account1.address().hex()); + console.log(`account1 current token ownership: ${nfts.current_token_ownerships[0].amount}. Should be 1`); +})(); diff --git a/m1/JavaScript-client/examples/typescript-esm/package.json b/m1/JavaScript-client/examples/typescript-esm/package.json new file mode 100644 index 00000000..d4a7b9bf --- /dev/null +++ b/m1/JavaScript-client/examples/typescript-esm/package.json @@ -0,0 +1,20 @@ +{ + "name": "ts-test-esm", + "version": "1.0.0", + "description": "", + "main": "./dist/index.js", + "type": "module", + "scripts": { + "build": "rm -rf dist/* && tsc -p .", + "test": "pnpm build && node ./dist/index.js" + }, + "keywords": [], + "author": "", + "license": "ISC", + "dependencies": { + "aptos": "latest" + }, + "devDependencies": { + "typescript": "4.8.2" + } +} diff --git a/m1/JavaScript-client/examples/typescript-esm/pnpm-lock.yaml b/m1/JavaScript-client/examples/typescript-esm/pnpm-lock.yaml new file mode 100644 index 00000000..f0c6b759 --- /dev/null +++ b/m1/JavaScript-client/examples/typescript-esm/pnpm-lock.yaml @@ -0,0 +1,107 @@ +lockfileVersion: '6.0' + +dependencies: + aptos: + specifier: latest + version: 1.7.2 + +devDependencies: + typescript: + specifier: 4.8.2 + version: 4.8.2 + +packages: + + /@noble/hashes@1.1.3: + resolution: {integrity: sha512-CE0FCR57H2acVI5UOzIGSSIYxZ6v/HOhDR0Ro9VLyhnzLwx0o8W1mmgaqlEUx4049qJDlIBRztv5k+MM8vbO3A==} + dev: false + + /@scure/base@1.1.1: + resolution: {integrity: sha512-ZxOhsSyxYwLJj3pLZCefNitxsj093tb2vq90mp2txoYeBqbcjDjqFhyM8eUjq/uFm6zJ+mUuqxlS2FkuSY1MTA==} + dev: false + + /@scure/bip39@1.1.0: + resolution: {integrity: sha512-pwrPOS16VeTKg98dYXQyIjJEcWfz7/1YJIwxUEPFfQPtc86Ym/1sVgQ2RLoD43AazMk2l/unK4ITySSpW2+82w==} + dependencies: + '@noble/hashes': 1.1.3 + '@scure/base': 1.1.1 + dev: false + + /aptos@1.7.2: + resolution: {integrity: sha512-unM7bPbu3UGoVB/EhTvA+QDo8nqb6pDfqttsKwC7nYavQnl4t5dxCoFfIFcbijBtSOTfo4is5ldi4Uz4cY9ESA==} + engines: {node: '>=11.0.0'} + dependencies: + '@noble/hashes': 1.1.3 + '@scure/bip39': 1.1.0 + axios: 0.27.2 + form-data: 4.0.0 + tweetnacl: 1.0.3 + transitivePeerDependencies: + - debug + dev: false + + /asynckit@0.4.0: + resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} + dev: false + + /axios@0.27.2: + resolution: {integrity: sha512-t+yRIyySRTp/wua5xEr+z1q60QmLq8ABsS5O9Me1AsE5dfKqgnCFzwiCZZ/cGNd1lq4/7akDWMxdhVlucjmnOQ==} + dependencies: + follow-redirects: 1.15.2 + form-data: 4.0.0 + transitivePeerDependencies: + - debug + dev: false + + /combined-stream@1.0.8: + resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==} + engines: {node: '>= 0.8'} + dependencies: + delayed-stream: 1.0.0 + dev: false + + /delayed-stream@1.0.0: + resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} + engines: {node: '>=0.4.0'} + dev: false + + /follow-redirects@1.15.2: + resolution: {integrity: sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==} + engines: {node: '>=4.0'} + peerDependencies: + debug: '*' + peerDependenciesMeta: + debug: + optional: true + 
dev: false + + /form-data@4.0.0: + resolution: {integrity: sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==} + engines: {node: '>= 6'} + dependencies: + asynckit: 0.4.0 + combined-stream: 1.0.8 + mime-types: 2.1.35 + dev: false + + /mime-db@1.52.0: + resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} + engines: {node: '>= 0.6'} + dev: false + + /mime-types@2.1.35: + resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} + engines: {node: '>= 0.6'} + dependencies: + mime-db: 1.52.0 + dev: false + + /tweetnacl@1.0.3: + resolution: {integrity: sha512-6rt+RN7aOi1nGMyC4Xa5DdYiukl2UWCbcJft7YhxReBGQD7OAM8Pbxw6YMo4r2diNEA8FEmu32YOn9rhaiE5yw==} + dev: false + + /typescript@4.8.2: + resolution: {integrity: sha512-C0I1UsrrDHo2fYI5oaCGbSejwX4ch+9Y5jTQELvovfmFkK3HHSZJB8MSJcWLmCUBzQBchCrZ9rMRV6GuNrvGtw==} + engines: {node: '>=4.2.0'} + hasBin: true + dev: true diff --git a/m1/JavaScript-client/examples/typescript-esm/tsconfig.json b/m1/JavaScript-client/examples/typescript-esm/tsconfig.json new file mode 100644 index 00000000..4acc21c3 --- /dev/null +++ b/m1/JavaScript-client/examples/typescript-esm/tsconfig.json @@ -0,0 +1,23 @@ +{ + "compilerOptions": { + "allowSyntheticDefaultImports": true, + "allowJs": true, + "declaration": true, + "declarationMap": true, + "esModuleInterop": true, + "experimentalDecorators": true, + "module": "ESNext", + "moduleResolution": "node", + "noImplicitAny": true, + "outDir": "./dist", + "sourceMap": true, + "target": "es2020", + "pretty": true + }, + "include": [ + "*.ts" + ], + "paths": { + "@aptos/*": "../..", + } +} diff --git a/m1/JavaScript-client/examples/typescript/bcs_transaction.ts b/m1/JavaScript-client/examples/typescript/bcs_transaction.ts new file mode 100644 index 00000000..13f2b21f --- /dev/null +++ b/m1/JavaScript-client/examples/typescript/bcs_transaction.ts @@ -0,0 +1,98 @@ +/* eslint-disable no-console */ + +import dotenv from "dotenv"; +dotenv.config(); + +import { AptosClient, AptosAccount, FaucetClient, BCS, TxnBuilderTypes } from "aptos"; +import { aptosCoinStore } from "./common"; +import assert from "assert"; + +const NODE_URL = process.env.APTOS_NODE_URL || "https://fullnode.devnet.aptoslabs.com"; +const FAUCET_URL = process.env.APTOS_FAUCET_URL || "https://faucet.devnet.aptoslabs.com"; + +const { + AccountAddress, + TypeTagStruct, + EntryFunction, + StructTag, + TransactionPayloadEntryFunction, + RawTransaction, + ChainId, +} = TxnBuilderTypes; + +/** + * This code example demonstrates the process of moving test coins from one account to another. + */ +(async () => { + const client = new AptosClient(NODE_URL); + const faucetClient = new FaucetClient(NODE_URL, FAUCET_URL); + + // Generates key pair for a new account + const account1 = new AptosAccount(); + await faucetClient.fundAccount(account1.address(), 100_000_000); + let resources = await client.getAccountResources(account1.address()); + let accountResource = resources.find((r) => r.type === aptosCoinStore); + let balance = parseInt((accountResource?.data as any).coin.value); + assert(balance === 100_000_000); + console.log(`account1 coins: ${balance}. 
Should be 100000000!`);
+
+  const account2 = new AptosAccount();
+  // Create the second account and fund it with 0 AptosCoin
+  await faucetClient.fundAccount(account2.address(), 0);
+  resources = await client.getAccountResources(account2.address());
+  accountResource = resources.find((r) => r.type === aptosCoinStore);
+  balance = parseInt((accountResource?.data as any).coin.value);
+  assert(balance === 0);
+  console.log(`account2 coins: ${balance}. Should be 0!`);
+
+  const token = new TypeTagStruct(StructTag.fromString("0x1::aptos_coin::AptosCoin"));
+
+  // The TS SDK supports 3 types of transaction payloads: `EntryFunction`, `Script` and `Module`.
+  // See https://aptos-labs.github.io/ts-sdk-doc/ for the details.
+  const entryFunctionPayload = new TransactionPayloadEntryFunction(
+    EntryFunction.natural(
+      // Fully qualified module name, `AccountAddress::ModuleName`
+      "0x1::coin",
+      // Module function
+      "transfer",
+      // The coin type to transfer
+      [token],
+      // Arguments for function `transfer`: receiver account address and amount to transfer
+      [BCS.bcsToBytes(AccountAddress.fromHex(account2.address())), BCS.bcsSerializeUint64(717)],
+    ),
+  );
+
+  const [{ sequence_number: sequenceNumber }, chainId] = await Promise.all([
+    client.getAccount(account1.address()),
+    client.getChainId(),
+  ]);
+
+  // See the class definition here
+  // https://aptos-labs.github.io/ts-sdk-doc/classes/TxnBuilderTypes.RawTransaction.html#constructor.
+  const rawTxn = new RawTransaction(
+    // Transaction sender account address
+    AccountAddress.fromHex(account1.address()),
+    BigInt(sequenceNumber),
+    entryFunctionPayload,
+    // Max gas units to spend
+    BigInt(2000),
+    // Gas price per unit
+    BigInt(100),
+    // Expiration timestamp. Transaction is discarded if it is not executed within 10 seconds from now.
+    BigInt(Math.floor(Date.now() / 1000) + 10),
+    new ChainId(chainId),
+  );
+
+  // Sign the raw transaction with account1's private key
+  const bcsTxn = AptosClient.generateBCSTransaction(account1, rawTxn);
+
+  const transactionRes = await client.submitSignedBCSTransaction(bcsTxn);
+
+  await client.waitForTransaction(transactionRes.hash);
+
+  resources = await client.getAccountResources(account2.address());
+  accountResource = resources.find((r) => r.type === aptosCoinStore);
+  balance = parseInt((accountResource?.data as any).coin.value);
+  assert(balance === 717);
+  console.log(`account2 coins: ${balance}. 
Should be 717!`); +})(); diff --git a/m1/JavaScript-client/examples/typescript/call_aptos_cli.ts b/m1/JavaScript-client/examples/typescript/call_aptos_cli.ts new file mode 100644 index 00000000..d50ee6e2 --- /dev/null +++ b/m1/JavaScript-client/examples/typescript/call_aptos_cli.ts @@ -0,0 +1,28 @@ +const ffi = require("ffi-napi"); + +const lib = ffi.Library("../../../../../target/release/libaptos", { + run_aptos_sync: ["char *", ["string"]], // run the aptos CLI synchronously + run_aptos_async: ["char *", ["string"]], // run the aptos CLI asynchronously + free_cstring: ["void", ["char *"]], // free the return pointer memory allocated by the aptos CLI +}); + +const args_run_local_testnet = ["aptos", "node", "run-local-testnet", "--with-faucet"]; +const args_aptos_info = ["aptos", "info"]; + +(async () => { + console.log("Running aptos CLI from Typescript"); + const aptos_info = lib.run_aptos_sync(args_aptos_info.join(" ")); + const run_local_testnet = lib.run_aptos_async(args_run_local_testnet.join(" ")); + try { + console.log(`Aptos Info: ${aptos_info.readCString()}`); + console.log(`Run Local Testnet: ${run_local_testnet.readCString()}`); + } catch (error) { + console.error(error); + } finally { + // free the string pointer memory allocated by the aptos CLI + lib.free_cstring(aptos_info); + lib.free_cstring(run_local_testnet); + } + + console.log("Finish aptos CLI"); +})(); diff --git a/m1/JavaScript-client/examples/typescript/common.ts b/m1/JavaScript-client/examples/typescript/common.ts new file mode 100644 index 00000000..e11898af --- /dev/null +++ b/m1/JavaScript-client/examples/typescript/common.ts @@ -0,0 +1,9 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +//:!:>section_1 +export const NODE_URL = process.env.APTOS_NODE_URL || "https://fullnode.devnet.aptoslabs.com"; +export const FAUCET_URL = process.env.APTOS_FAUCET_URL || "https://faucet.devnet.aptoslabs.com"; +//<:!:section_1 + +export const aptosCoinStore = "0x1::coin::CoinStore<0x1::aptos_coin::AptosCoin>"; diff --git a/m1/JavaScript-client/examples/typescript/multisig_account.ts b/m1/JavaScript-client/examples/typescript/multisig_account.ts new file mode 100644 index 00000000..81518f97 --- /dev/null +++ b/m1/JavaScript-client/examples/typescript/multisig_account.ts @@ -0,0 +1,210 @@ +/* eslint-disable no-console */ + +import dotenv from "dotenv"; +dotenv.config(); + +import { AptosClient, AptosAccount, FaucetClient, BCS, TxnBuilderTypes } from "aptos"; +import { sha3_256 as sha3Hash } from "@noble/hashes/sha3"; +import { aptosCoinStore, FAUCET_URL, NODE_URL } from "./common"; +import assert from "assert"; + +const { AccountAddress, EntryFunction, MultiSig, MultiSigTransactionPayload, TransactionPayloadMultisig } = + TxnBuilderTypes; + +/** + * This code example demonstrates the new multisig account module and transaction execution flow. + */ +(async () => { + const client = new AptosClient(NODE_URL); + const faucetClient = new FaucetClient(NODE_URL, FAUCET_URL); + + // Create and fund 3 accounts that will be the owners of the multisig account. 
+ const owner1 = new AptosAccount(); + const owner2 = new AptosAccount(); + const owner3 = new AptosAccount(); + const owner4 = new AptosAccount(); + await faucetClient.fundAccount(owner1.address(), 100_000_000); + await faucetClient.fundAccount(owner2.address(), 100_000_000); + await faucetClient.fundAccount(owner3.address(), 100_000_000); + + // Step 1: Setup a 2-of-3 multisig account + // =========================================================================================== + const createMultisig = await client.generateTransaction(owner1.address(), { + function: "0x1::multisig_account::create_with_owners", + type_arguments: [], + arguments: [[owner2.address().hex(), owner3.address().hex()], 2, ["Shaka"], [BCS.bcsSerializeStr("Bruh")]], + }); + await client.generateSignSubmitWaitForTransaction(owner1, createMultisig.payload); + // Find the multisig account address. + let ownedMultisigAccounts = await client.getAccountResource( + owner1.address(), + "0x1::multisig_account::OwnedMultisigAccounts", + ); + const multisigAddress = (ownedMultisigAccounts?.data as any).multisig_accounts[0]; + assert((await getSignatureThreshold(client, multisigAddress)) == 2); + assert((await getNumberOfOwners(client, multisigAddress)) == 3); + + // Fund the multisig account for transfers. + await faucetClient.fundAccount(multisigAddress, 100_000_000); + + // Step 2: Create a multisig transaction to send 1_000_000 coins to an account. + // We'll be including the full payload to be stored on chain. + // =========================================================================================== + const recipient = new AptosAccount(); + const transferTxPayload = new MultiSigTransactionPayload( + EntryFunction.natural( + "0x1::aptos_account", + "transfer", + [], + [BCS.bcsToBytes(AccountAddress.fromHex(recipient.address())), BCS.bcsSerializeUint64(1_000_000)], + ), + ); + const multisigTxExecution = new TransactionPayloadMultisig( + new MultiSig(AccountAddress.fromHex(multisigAddress), transferTxPayload), + ); + const [simulationResp] = await client.simulateTransaction( + owner2, + await client.generateRawTransaction(owner2.address(), multisigTxExecution), + ); + assert(simulationResp.success); + + // Create the multisig tx on chain. + const createMultisigTx = await client.generateTransaction(owner2.address(), { + function: "0x1::multisig_account::create_transaction", + type_arguments: [], + arguments: [multisigAddress, BCS.bcsToBytes(transferTxPayload)], + }); + await client.generateSignSubmitWaitForTransaction(owner2, createMultisigTx.payload); + + // Owner 1 rejects but owner 3 approves. + await rejectAndApprove(client, owner1, owner3, multisigAddress, 1); + + // Owner 2 can now execute the transactions as it already has 2 approvals (from owners 2 and 3). + await client.generateSignSubmitWaitForTransaction(owner2, multisigTxExecution); + let accountResource = await client.getAccountResource(recipient.address(), aptosCoinStore); + let balance = parseInt((accountResource?.data as any).coin.value); + assert(balance === 1_000_000); + + // Step 3: Create another multisig transaction to send 1_000_000 coins but use payload hash instead. 
+ // =========================================================================================== + const transferTxPayloadHash = sha3Hash.create(); + transferTxPayloadHash.update(BCS.bcsToBytes(transferTxPayload)); + const createMultisigTxWithHash = await client.generateTransaction(owner2.address(), { + function: "0x1::multisig_account::create_transaction_with_hash", + type_arguments: [], + arguments: [multisigAddress, transferTxPayloadHash.digest()], + }); + await client.generateSignSubmitWaitForTransaction(owner2, createMultisigTxWithHash.payload); + await rejectAndApprove(client, owner1, owner3, multisigAddress, 2); + + const multisigTxExecution2 = new TransactionPayloadMultisig( + new MultiSig(AccountAddress.fromHex(multisigAddress), transferTxPayload), + ); + await client.generateSignSubmitWaitForTransaction(owner2, multisigTxExecution2); + accountResource = await client.getAccountResource(recipient.address(), aptosCoinStore); + balance = parseInt((accountResource?.data as any).coin.value); + assert(balance === 2_000_000); + + // Step 4: Create 2 multisig transactions: one to add a new owner and another one to remove it. + // =========================================================================================== + const owner_4 = new AptosAccount(); + const addOwnerPayload = new MultiSigTransactionPayload( + EntryFunction.natural( + "0x1::multisig_account", + "add_owner", + [], + [BCS.bcsToBytes(AccountAddress.fromHex(owner_4.address()))], + ), + ); + const addOwnerTx = await client.generateTransaction(owner2.address(), { + function: "0x1::multisig_account::create_transaction", + type_arguments: [], + arguments: [multisigAddress, BCS.bcsToBytes(addOwnerPayload)], + }); + await client.generateSignSubmitWaitForTransaction(owner2, addOwnerTx.payload); + await rejectAndApprove(client, owner1, owner3, multisigAddress, 3); + await client.generateSignSubmitWaitForTransaction( + owner2, + new TransactionPayloadMultisig(new MultiSig(AccountAddress.fromHex(multisigAddress))), + ); + // The multisig account should now have 4 owners. + assert((await getNumberOfOwners(client, multisigAddress)) == 4); + + const removeOwnerPayload = new MultiSigTransactionPayload( + EntryFunction.natural( + "0x1::multisig_account", + "remove_owner", + [], + [BCS.bcsToBytes(AccountAddress.fromHex(owner_4.address()))], + ), + ); + const removeOwnerTx = await client.generateTransaction(owner2.address(), { + function: "0x1::multisig_account::create_transaction", + type_arguments: [], + arguments: [multisigAddress, BCS.bcsToBytes(removeOwnerPayload)], + }); + await client.generateSignSubmitWaitForTransaction(owner2, removeOwnerTx.payload); + await rejectAndApprove(client, owner1, owner3, multisigAddress, 4); + await client.generateSignSubmitWaitForTransaction( + owner2, + new TransactionPayloadMultisig(new MultiSig(AccountAddress.fromHex(multisigAddress))), + ); + // The multisig account should now have 3 owners. + assert((await getNumberOfOwners(client, multisigAddress)) == 3); + + // Step 5: Create a multisig transactions to change the signature threshold to 3-of-3. 
+  // ===========================================================================================
+  const changeSigThresholdPayload = new MultiSigTransactionPayload(
+    EntryFunction.natural("0x1::multisig_account", "update_signatures_required", [], [BCS.bcsSerializeUint64(3)]),
+  );
+  const changeSigThresholdTx = await client.generateTransaction(owner2.address(), {
+    function: "0x1::multisig_account::create_transaction",
+    type_arguments: [],
+    arguments: [multisigAddress, BCS.bcsToBytes(changeSigThresholdPayload)],
+  });
+  await client.generateSignSubmitWaitForTransaction(owner2, changeSigThresholdTx.payload);
+  await rejectAndApprove(client, owner1, owner3, multisigAddress, 5);
+  await client.generateSignSubmitWaitForTransaction(
+    owner2,
+    new TransactionPayloadMultisig(new MultiSig(AccountAddress.fromHex(multisigAddress))),
+  );
+  // The multisig account should now be 3-of-3.
+  assert((await getSignatureThreshold(client, multisigAddress)) == 3);
+})();
+
+const rejectAndApprove = async (
+  client: AptosClient,
+  owner1: AptosAccount,
+  owner2: AptosAccount,
+  multisigAddress: string,
+  transactionId: number,
+) => {
+  let rejectTx = await client.generateTransaction(owner1.address(), {
+    function: "0x1::multisig_account::reject_transaction",
+    type_arguments: [],
+    arguments: [multisigAddress, transactionId],
+  });
+  await client.generateSignSubmitWaitForTransaction(owner1, rejectTx.payload);
+  let approveTx = await client.generateTransaction(owner2.address(), {
+    function: "0x1::multisig_account::approve_transaction",
+    type_arguments: [],
+    arguments: [multisigAddress, transactionId],
+  });
+  await client.generateSignSubmitWaitForTransaction(owner2, approveTx.payload);
+};
+
+const getNumberOfOwners = async (client: AptosClient, multisigAddress: string): Promise<number> => {
+  const multisigAccountResource = await client.getAccountResource(
+    multisigAddress,
+    "0x1::multisig_account::MultisigAccount",
+  );
+  return Number((multisigAccountResource.data as any).owners.length);
+};
+
+const getSignatureThreshold = async (client: AptosClient, multisigAddress: string): Promise<number> => {
+  const multisigAccountResource = await client.getAccountResource(
+    multisigAddress,
+    "0x1::multisig_account::MultisigAccount",
+  );
+  return Number((multisigAccountResource.data as any).num_signatures_required);
+};
diff --git a/m1/JavaScript-client/examples/typescript/multisig_transaction.ts b/m1/JavaScript-client/examples/typescript/multisig_transaction.ts
new file mode 100644
index 00000000..17a59b93
--- /dev/null
+++ b/m1/JavaScript-client/examples/typescript/multisig_transaction.ts
@@ -0,0 +1,136 @@
+/* eslint-disable no-console */
+
+import dotenv from "dotenv";
+dotenv.config();
+
+import { AptosClient, AptosAccount, FaucetClient, BCS, TransactionBuilderMultiEd25519, TxnBuilderTypes } from "aptos";
+import { aptosCoinStore } from "./common";
+import assert from "assert";
+
+const NODE_URL = process.env.APTOS_NODE_URL || "https://fullnode.devnet.aptoslabs.com";
+const FAUCET_URL = process.env.APTOS_FAUCET_URL || "https://faucet.devnet.aptoslabs.com";
+
+/**
+ * This code example demonstrates the process of moving test coins from one multisig
+ * account to a single signature account. 
+ */
+(async () => {
+  const client = new AptosClient(NODE_URL);
+  const faucetClient = new FaucetClient(NODE_URL, FAUCET_URL);
+
+  // Generate 3 key pairs and account instances
+  const account1 = new AptosAccount();
+  const account2 = new AptosAccount();
+  const account3 = new AptosAccount();
+
+  // Create a 2 out of 3 MultiEd25519PublicKey. '2 out of 3' means that for a multisig transaction
+  // to be executed, at least 2 of the 3 accounts must have signed the transaction.
+  // See https://aptos-labs.github.io/ts-sdk-doc/classes/TxnBuilderTypes.MultiEd25519PublicKey.html#constructor
+  const multiSigPublicKey = new TxnBuilderTypes.MultiEd25519PublicKey(
+    [
+      new TxnBuilderTypes.Ed25519PublicKey(account1.signingKey.publicKey),
+      new TxnBuilderTypes.Ed25519PublicKey(account2.signingKey.publicKey),
+      new TxnBuilderTypes.Ed25519PublicKey(account3.signingKey.publicKey),
+    ],
+    // Threshold
+    2,
+  );
+
+  // Each Aptos account stores an auth key. The initial account address can be derived from the auth key.
+  // See https://aptos.dev/concepts/accounts for more details.
+  const authKey = TxnBuilderTypes.AuthenticationKey.fromMultiEd25519PublicKey(multiSigPublicKey);
+
+  // Derive the multisig account address and fund it with 100_000_000 AptosCoin.
+  const mutisigAccountAddress = authKey.derivedAddress();
+  await faucetClient.fundAccount(mutisigAccountAddress, 100_000_000);
+
+  let resources = await client.getAccountResources(mutisigAccountAddress);
+  let accountResource = resources.find((r) => r.type === aptosCoinStore);
+  let balance = parseInt((accountResource?.data as any).coin.value);
+  assert(balance === 100_000_000);
+  console.log(`multisig account coins: ${balance}. Should be 100000000!`);
+
+  const account4 = new AptosAccount();
+  // Create a receiver account and fund it with 0 AptosCoin
+  await faucetClient.fundAccount(account4.address(), 0);
+  resources = await client.getAccountResources(account4.address());
+  accountResource = resources.find((r) => r.type === aptosCoinStore);
+  balance = parseInt((accountResource?.data as any).coin.value);
+  assert(balance === 0);
+  console.log(`account4 coins: ${balance}. Should be 0!`);
+
+  const token = new TxnBuilderTypes.TypeTagStruct(TxnBuilderTypes.StructTag.fromString("0x1::aptos_coin::AptosCoin"));
+
+  // The TS SDK supports 3 types of transaction payloads: `EntryFunction`, `Script` and `Module`.
+  // See https://aptos-labs.github.io/ts-sdk-doc/ for the details.
+  const entryFunctionPayload = new TxnBuilderTypes.TransactionPayloadEntryFunction(
+    TxnBuilderTypes.EntryFunction.natural(
+      // Fully qualified module name, `AccountAddress::ModuleName`
+      "0x1::coin",
+      // Module function
+      "transfer",
+      // The coin type to transfer
+      [token],
+      // Arguments for function `transfer`: receiver account address and amount to transfer
+      [BCS.bcsToBytes(TxnBuilderTypes.AccountAddress.fromHex(account4.address())), BCS.bcsSerializeUint64(123)],
+    ),
+  );
+
+  const [{ sequence_number: sequenceNumber }, chainId] = await Promise.all([
+    client.getAccount(mutisigAccountAddress),
+    client.getChainId(),
+  ]);
+
+  // See the class definition here
+  // https://aptos-labs.github.io/ts-sdk-doc/classes/TxnBuilderTypes.RawTransaction.html#constructor.
+  const rawTxn = new TxnBuilderTypes.RawTransaction(
+    // Transaction sender account address
+    TxnBuilderTypes.AccountAddress.fromHex(mutisigAccountAddress),
+    BigInt(sequenceNumber),
+    entryFunctionPayload,
+    // Max gas units to spend
+    BigInt(10000),
+    // Gas price per unit
+    BigInt(100),
+    // Expiration timestamp. 
Transaction is discarded if it is not executed within 10 seconds from now. + BigInt(Math.floor(Date.now() / 1000) + 10), + new TxnBuilderTypes.ChainId(chainId), + ); + + // account1 and account3 sign the transaction + const txnBuilder = new TransactionBuilderMultiEd25519((signingMessage: TxnBuilderTypes.SigningMessage) => { + const sigHexStr1 = account1.signBuffer(signingMessage); + const sigHexStr3 = account3.signBuffer(signingMessage); + + // Bitmap masks which public key has signed transaction. + // See https://aptos-labs.github.io/ts-sdk-doc/classes/TxnBuilderTypes.MultiEd25519Signature.html#createBitmap + const bitmap = TxnBuilderTypes.MultiEd25519Signature.createBitmap([0, 2]); + + // See https://aptos-labs.github.io/ts-sdk-doc/classes/TxnBuilderTypes.MultiEd25519Signature.html#constructor + const muliEd25519Sig = new TxnBuilderTypes.MultiEd25519Signature( + [ + new TxnBuilderTypes.Ed25519Signature(sigHexStr1.toUint8Array()), + new TxnBuilderTypes.Ed25519Signature(sigHexStr3.toUint8Array()), + ], + bitmap, + ); + + return muliEd25519Sig; + }, multiSigPublicKey); + + const bcsTxn = txnBuilder.sign(rawTxn); + const transactionRes = await client.submitSignedBCSTransaction(bcsTxn); + + await client.waitForTransaction(transactionRes.hash); + + resources = await client.getAccountResources(mutisigAccountAddress); + accountResource = resources.find((r) => r.type === aptosCoinStore); + balance = parseInt((accountResource?.data as any).coin.value); + console.log(`multisig account coins: ${balance}.`); + + resources = await client.getAccountResources(account4.address()); + accountResource = resources.find((r) => r.type === aptosCoinStore); + balance = parseInt((accountResource?.data as any).coin.value); + assert(balance === 123); + console.log(`account4 coins: ${balance}. 
Should be 123!`); +})(); diff --git a/m1/JavaScript-client/examples/typescript/package.json b/m1/JavaScript-client/examples/typescript/package.json new file mode 100644 index 00000000..46e700c5 --- /dev/null +++ b/m1/JavaScript-client/examples/typescript/package.json @@ -0,0 +1,32 @@ +{ + "name": "ts-test", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "bcs_transaction": "ts-node bcs_transaction.ts", + "multisig_account": "ts-node multisig_account.ts", + "multisig_transaction": "ts-node multisig_transaction.ts", + "simple_nft": "ts-node simple_nft.ts", + "transfer_coin": "ts-node transfer_coin.ts", + "test": "run-s bcs_transaction multisig_transaction simple_nft transfer_coin", + "your_coin": "ts-node your_coin.ts", + "call_aptos_cli": "ts-node call_aptos_cli.ts" + }, + "keywords": [], + "author": "", + "license": "ISC", + "dependencies": { + "aptos": "latest", + "dotenv": "16.0.1", + "ffi-napi": "^4.0.3", + "ref-array-di": "^1.2.2" + }, + "devDependencies": { + "@types/ffi-napi": "^4.0.7", + "@types/node": "18.6.2", + "npm-run-all": "4.1.5", + "ts-node": "10.9.1", + "typescript": "4.8.2" + } +} diff --git a/m1/JavaScript-client/examples/typescript/pnpm-lock.yaml b/m1/JavaScript-client/examples/typescript/pnpm-lock.yaml new file mode 100644 index 00000000..71fba9cf --- /dev/null +++ b/m1/JavaScript-client/examples/typescript/pnpm-lock.yaml @@ -0,0 +1,1076 @@ +lockfileVersion: '6.0' + +dependencies: + aptos: + specifier: latest + version: 1.8.4 + dotenv: + specifier: 16.0.1 + version: 16.0.1 + ffi-napi: + specifier: ^4.0.3 + version: 4.0.3 + ref-array-di: + specifier: ^1.2.2 + version: 1.2.2 + +devDependencies: + '@types/ffi-napi': + specifier: ^4.0.7 + version: 4.0.7 + '@types/node': + specifier: 18.6.2 + version: 18.6.2 + npm-run-all: + specifier: 4.1.5 + version: 4.1.5 + ts-node: + specifier: 10.9.1 + version: 10.9.1(@types/node@18.6.2)(typescript@4.8.2) + typescript: + specifier: 4.8.2 + version: 4.8.2 + +packages: + + /@cspotcode/source-map-support@0.8.1: + resolution: {integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==} + engines: {node: '>=12'} + dependencies: + '@jridgewell/trace-mapping': 0.3.9 + dev: true + + /@jridgewell/resolve-uri@3.1.0: + resolution: {integrity: sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==} + engines: {node: '>=6.0.0'} + dev: true + + /@jridgewell/sourcemap-codec@1.4.14: + resolution: {integrity: sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==} + dev: true + + /@jridgewell/trace-mapping@0.3.9: + resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==} + dependencies: + '@jridgewell/resolve-uri': 3.1.0 + '@jridgewell/sourcemap-codec': 1.4.14 + dev: true + + /@noble/hashes@1.1.3: + resolution: {integrity: sha512-CE0FCR57H2acVI5UOzIGSSIYxZ6v/HOhDR0Ro9VLyhnzLwx0o8W1mmgaqlEUx4049qJDlIBRztv5k+MM8vbO3A==} + dev: false + + /@scure/base@1.1.1: + resolution: {integrity: sha512-ZxOhsSyxYwLJj3pLZCefNitxsj093tb2vq90mp2txoYeBqbcjDjqFhyM8eUjq/uFm6zJ+mUuqxlS2FkuSY1MTA==} + dev: false + + /@scure/bip39@1.1.0: + resolution: {integrity: sha512-pwrPOS16VeTKg98dYXQyIjJEcWfz7/1YJIwxUEPFfQPtc86Ym/1sVgQ2RLoD43AazMk2l/unK4ITySSpW2+82w==} + dependencies: + '@noble/hashes': 1.1.3 + '@scure/base': 1.1.1 + dev: false + + /@tsconfig/node10@1.0.9: + resolution: {integrity: 
sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA==} + dev: true + + /@tsconfig/node12@1.0.11: + resolution: {integrity: sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==} + dev: true + + /@tsconfig/node14@1.0.3: + resolution: {integrity: sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==} + dev: true + + /@tsconfig/node16@1.0.3: + resolution: {integrity: sha512-yOlFc+7UtL/89t2ZhjPvvB/DeAr3r+Dq58IgzsFkOAvVC6NMJXmCGjbptdXdR9qsX7pKcTL+s87FtYREi2dEEQ==} + dev: true + + /@types/ffi-napi@4.0.7: + resolution: {integrity: sha512-2CvLfgxCUUSj7qVab6/uFLyVpgVd2gEV4H/TQEHHn6kZTV8iTesz9uo0bckhwzsh71atutOv8P3JmvRX2ZvpZg==} + dependencies: + '@types/node': 18.6.2 + '@types/ref-napi': 3.0.7 + '@types/ref-struct-di': 1.1.9 + dev: true + + /@types/node@18.6.2: + resolution: {integrity: sha512-KcfkBq9H4PI6Vpu5B/KoPeuVDAbmi+2mDBqGPGUgoL7yXQtcWGu2vJWmmRkneWK3Rh0nIAX192Aa87AqKHYChQ==} + dev: true + + /@types/ref-napi@3.0.7: + resolution: {integrity: sha512-CzPwr36VkezSpaJGdQX/UrczMSDsDgsWQQFEfQkS799Ft7n/s183a53lsql7RwVq+Ik4yLEgI84pRnLC0XXRlA==} + dependencies: + '@types/node': 18.6.2 + dev: true + + /@types/ref-struct-di@1.1.9: + resolution: {integrity: sha512-B1FsB1BhG1VLx0+IqBaAPXEPH0wCOb+Glaaw/i+nRUwDKFtSqWOziGnTRw05RyrBbrDsMiM0tVWmaujrs016Sw==} + dependencies: + '@types/ref-napi': 3.0.7 + dev: true + + /acorn-walk@8.2.0: + resolution: {integrity: sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==} + engines: {node: '>=0.4.0'} + dev: true + + /acorn@8.8.2: + resolution: {integrity: sha512-xjIYgE8HBrkpd/sJqOGNspf8uHG+NOHGOw6a/Urj8taM2EXfdNAH2oFcPeIFfsv3+kz/mJrS5VuMqbNLjCa2vw==} + engines: {node: '>=0.4.0'} + hasBin: true + dev: true + + /ansi-styles@3.2.1: + resolution: {integrity: sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==} + engines: {node: '>=4'} + dependencies: + color-convert: 1.9.3 + dev: true + + /aptos@1.8.4: + resolution: {integrity: sha512-LWasWcz8+SMj4nCGQzB8kC0P/b2PRraUSjIQmeQH6jJ4O2WqS4MASzQZdk3vkG+i5O2dgLRgDK2QUZaxHqfydQ==} + engines: {node: '>=11.0.0'} + dependencies: + '@noble/hashes': 1.1.3 + '@scure/bip39': 1.1.0 + axios: 0.27.2 + form-data: 4.0.0 + tweetnacl: 1.0.3 + transitivePeerDependencies: + - debug + dev: false + + /arg@4.1.3: + resolution: {integrity: sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==} + dev: true + + /array-index@1.0.0: + resolution: {integrity: sha512-jesyNbBkLQgGZMSwA1FanaFjalb1mZUGxGeUEkSDidzgrbjBGhvizJkaItdhkt8eIHFOJC7nDsrXk+BaehTdRw==} + dependencies: + debug: 2.6.9 + es6-symbol: 3.1.3 + transitivePeerDependencies: + - supports-color + dev: false + + /asynckit@0.4.0: + resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} + dev: false + + /available-typed-arrays@1.0.5: + resolution: {integrity: sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw==} + engines: {node: '>= 0.4'} + dev: true + + /axios@0.27.2: + resolution: {integrity: sha512-t+yRIyySRTp/wua5xEr+z1q60QmLq8ABsS5O9Me1AsE5dfKqgnCFzwiCZZ/cGNd1lq4/7akDWMxdhVlucjmnOQ==} + dependencies: + follow-redirects: 1.15.2 + form-data: 4.0.0 + transitivePeerDependencies: + - debug + dev: false + + /balanced-match@1.0.2: + resolution: {integrity: 
sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} + dev: true + + /brace-expansion@1.1.11: + resolution: {integrity: sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==} + dependencies: + balanced-match: 1.0.2 + concat-map: 0.0.1 + dev: true + + /call-bind@1.0.2: + resolution: {integrity: sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==} + dependencies: + function-bind: 1.1.1 + get-intrinsic: 1.2.0 + dev: true + + /chalk@2.4.2: + resolution: {integrity: sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==} + engines: {node: '>=4'} + dependencies: + ansi-styles: 3.2.1 + escape-string-regexp: 1.0.5 + supports-color: 5.5.0 + dev: true + + /color-convert@1.9.3: + resolution: {integrity: sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==} + dependencies: + color-name: 1.1.3 + dev: true + + /color-name@1.1.3: + resolution: {integrity: sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==} + dev: true + + /combined-stream@1.0.8: + resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==} + engines: {node: '>= 0.8'} + dependencies: + delayed-stream: 1.0.0 + dev: false + + /concat-map@0.0.1: + resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} + dev: true + + /create-require@1.1.1: + resolution: {integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==} + dev: true + + /cross-spawn@6.0.5: + resolution: {integrity: sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==} + engines: {node: '>=4.8'} + dependencies: + nice-try: 1.0.5 + path-key: 2.0.1 + semver: 5.7.1 + shebang-command: 1.2.0 + which: 1.3.1 + dev: true + + /d@1.0.1: + resolution: {integrity: sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA==} + dependencies: + es5-ext: 0.10.62 + type: 1.2.0 + dev: false + + /debug@2.6.9: + resolution: {integrity: sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + dependencies: + ms: 2.0.0 + dev: false + + /debug@3.2.7: + resolution: {integrity: sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + dependencies: + ms: 2.1.3 + dev: false + + /debug@4.3.4: + resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + dependencies: + ms: 2.1.2 + dev: false + + /define-properties@1.2.0: + resolution: {integrity: sha512-xvqAVKGfT1+UAvPwKTVw/njhdQ8ZhXK4lI0bCIuCMrp2up9nPnaDftrLtmpTazqd1o+UY4zgzU+avtMbDP+ldA==} + engines: {node: '>= 0.4'} + dependencies: + has-property-descriptors: 1.0.0 + object-keys: 1.1.1 + dev: true + + /delayed-stream@1.0.0: + resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} + engines: {node: '>=0.4.0'} + dev: false + 
+ /diff@4.0.2: + resolution: {integrity: sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==} + engines: {node: '>=0.3.1'} + dev: true + + /dotenv@16.0.1: + resolution: {integrity: sha512-1K6hR6wtk2FviQ4kEiSjFiH5rpzEVi8WW0x96aztHVMhEspNpc4DVOUTEHtEva5VThQ8IaBX1Pe4gSzpVVUsKQ==} + engines: {node: '>=12'} + dev: false + + /error-ex@1.3.2: + resolution: {integrity: sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==} + dependencies: + is-arrayish: 0.2.1 + dev: true + + /es-abstract@1.21.1: + resolution: {integrity: sha512-QudMsPOz86xYz/1dG1OuGBKOELjCh99IIWHLzy5znUB6j8xG2yMA7bfTV86VSqKF+Y/H08vQPR+9jyXpuC6hfg==} + engines: {node: '>= 0.4'} + dependencies: + available-typed-arrays: 1.0.5 + call-bind: 1.0.2 + es-set-tostringtag: 2.0.1 + es-to-primitive: 1.2.1 + function-bind: 1.1.1 + function.prototype.name: 1.1.5 + get-intrinsic: 1.2.0 + get-symbol-description: 1.0.0 + globalthis: 1.0.3 + gopd: 1.0.1 + has: 1.0.3 + has-property-descriptors: 1.0.0 + has-proto: 1.0.1 + has-symbols: 1.0.3 + internal-slot: 1.0.5 + is-array-buffer: 3.0.2 + is-callable: 1.2.7 + is-negative-zero: 2.0.2 + is-regex: 1.1.4 + is-shared-array-buffer: 1.0.2 + is-string: 1.0.7 + is-typed-array: 1.1.10 + is-weakref: 1.0.2 + object-inspect: 1.12.3 + object-keys: 1.1.1 + object.assign: 4.1.4 + regexp.prototype.flags: 1.4.3 + safe-regex-test: 1.0.0 + string.prototype.trimend: 1.0.6 + string.prototype.trimstart: 1.0.6 + typed-array-length: 1.0.4 + unbox-primitive: 1.0.2 + which-typed-array: 1.1.9 + dev: true + + /es-set-tostringtag@2.0.1: + resolution: {integrity: sha512-g3OMbtlwY3QewlqAiMLI47KywjWZoEytKr8pf6iTC8uJq5bIAH52Z9pnQ8pVL6whrCto53JZDuUIsifGeLorTg==} + engines: {node: '>= 0.4'} + dependencies: + get-intrinsic: 1.2.0 + has: 1.0.3 + has-tostringtag: 1.0.0 + dev: true + + /es-to-primitive@1.2.1: + resolution: {integrity: sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==} + engines: {node: '>= 0.4'} + dependencies: + is-callable: 1.2.7 + is-date-object: 1.0.5 + is-symbol: 1.0.4 + dev: true + + /es5-ext@0.10.62: + resolution: {integrity: sha512-BHLqn0klhEpnOKSrzn/Xsz2UIW8j+cGmo9JLzr8BiUapV8hPL9+FliFqjwr9ngW7jWdnxv6eO+/LqyhJVqgrjA==} + engines: {node: '>=0.10'} + requiresBuild: true + dependencies: + es6-iterator: 2.0.3 + es6-symbol: 3.1.3 + next-tick: 1.1.0 + dev: false + + /es6-iterator@2.0.3: + resolution: {integrity: sha512-zw4SRzoUkd+cl+ZoE15A9o1oQd920Bb0iOJMQkQhl3jNc03YqVjAhG7scf9C5KWRU/R13Orf588uCC6525o02g==} + dependencies: + d: 1.0.1 + es5-ext: 0.10.62 + es6-symbol: 3.1.3 + dev: false + + /es6-symbol@3.1.3: + resolution: {integrity: sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA==} + dependencies: + d: 1.0.1 + ext: 1.7.0 + dev: false + + /escape-string-regexp@1.0.5: + resolution: {integrity: sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==} + engines: {node: '>=0.8.0'} + dev: true + + /ext@1.7.0: + resolution: {integrity: sha512-6hxeJYaL110a9b5TEJSj0gojyHQAmA2ch5Os+ySCiA1QGdS697XWY1pzsrSjqA9LDEEgdB/KypIlR59RcLuHYw==} + dependencies: + type: 2.7.2 + dev: false + + /ffi-napi@4.0.3: + resolution: {integrity: sha512-PMdLCIvDY9mS32RxZ0XGb95sonPRal8aqRhLbeEtWKZTe2A87qRFG9HjOhvG8EX2UmQw5XNRMIOT+1MYlWmdeg==} + engines: {node: '>=10'} + requiresBuild: true + dependencies: + debug: 4.3.4 + get-uv-event-loop-napi-h: 1.0.6 + node-addon-api: 3.2.1 + node-gyp-build: 4.6.0 + ref-napi: 3.0.3 + ref-struct-di: 
1.1.1 + transitivePeerDependencies: + - supports-color + dev: false + + /follow-redirects@1.15.2: + resolution: {integrity: sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==} + engines: {node: '>=4.0'} + peerDependencies: + debug: '*' + peerDependenciesMeta: + debug: + optional: true + dev: false + + /for-each@0.3.3: + resolution: {integrity: sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==} + dependencies: + is-callable: 1.2.7 + dev: true + + /form-data@4.0.0: + resolution: {integrity: sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==} + engines: {node: '>= 6'} + dependencies: + asynckit: 0.4.0 + combined-stream: 1.0.8 + mime-types: 2.1.35 + dev: false + + /function-bind@1.1.1: + resolution: {integrity: sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==} + dev: true + + /function.prototype.name@1.1.5: + resolution: {integrity: sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.2 + define-properties: 1.2.0 + es-abstract: 1.21.1 + functions-have-names: 1.2.3 + dev: true + + /functions-have-names@1.2.3: + resolution: {integrity: sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==} + dev: true + + /get-intrinsic@1.2.0: + resolution: {integrity: sha512-L049y6nFOuom5wGyRc3/gdTLO94dySVKRACj1RmJZBQXlbTMhtNIgkWkUHq+jYmZvKf14EW1EoJnnjbmoHij0Q==} + dependencies: + function-bind: 1.1.1 + has: 1.0.3 + has-symbols: 1.0.3 + dev: true + + /get-symbol-description@1.0.0: + resolution: {integrity: sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.2 + get-intrinsic: 1.2.0 + dev: true + + /get-symbol-from-current-process-h@1.0.2: + resolution: {integrity: sha512-syloC6fsCt62ELLrr1VKBM1ggOpMdetX9hTrdW77UQdcApPHLmf7CI7OKcN1c9kYuNxKcDe4iJ4FY9sX3aw2xw==} + dev: false + + /get-uv-event-loop-napi-h@1.0.6: + resolution: {integrity: sha512-t5c9VNR84nRoF+eLiz6wFrEp1SE2Acg0wS+Ysa2zF0eROes+LzOfuTaVHxGy8AbS8rq7FHEJzjnCZo1BupwdJg==} + dependencies: + get-symbol-from-current-process-h: 1.0.2 + dev: false + + /globalthis@1.0.3: + resolution: {integrity: sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA==} + engines: {node: '>= 0.4'} + dependencies: + define-properties: 1.2.0 + dev: true + + /gopd@1.0.1: + resolution: {integrity: sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==} + dependencies: + get-intrinsic: 1.2.0 + dev: true + + /graceful-fs@4.2.10: + resolution: {integrity: sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==} + dev: true + + /has-bigints@1.0.2: + resolution: {integrity: sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==} + dev: true + + /has-flag@3.0.0: + resolution: {integrity: sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==} + engines: {node: '>=4'} + dev: true + + /has-property-descriptors@1.0.0: + resolution: {integrity: sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==} + dependencies: + get-intrinsic: 1.2.0 + dev: true + + /has-proto@1.0.1: + resolution: {integrity: 
sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg==} + engines: {node: '>= 0.4'} + dev: true + + /has-symbols@1.0.3: + resolution: {integrity: sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==} + engines: {node: '>= 0.4'} + dev: true + + /has-tostringtag@1.0.0: + resolution: {integrity: sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==} + engines: {node: '>= 0.4'} + dependencies: + has-symbols: 1.0.3 + dev: true + + /has@1.0.3: + resolution: {integrity: sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==} + engines: {node: '>= 0.4.0'} + dependencies: + function-bind: 1.1.1 + dev: true + + /hosted-git-info@2.8.9: + resolution: {integrity: sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==} + dev: true + + /internal-slot@1.0.5: + resolution: {integrity: sha512-Y+R5hJrzs52QCG2laLn4udYVnxsfny9CpOhNhUvk/SSSVyF6T27FzRbF0sroPidSu3X8oEAkOn2K804mjpt6UQ==} + engines: {node: '>= 0.4'} + dependencies: + get-intrinsic: 1.2.0 + has: 1.0.3 + side-channel: 1.0.4 + dev: true + + /is-array-buffer@3.0.2: + resolution: {integrity: sha512-y+FyyR/w8vfIRq4eQcM1EYgSTnmHXPqaF+IgzgraytCFq5Xh8lllDVmAZolPJiZttZLeFSINPYMaEJ7/vWUa1w==} + dependencies: + call-bind: 1.0.2 + get-intrinsic: 1.2.0 + is-typed-array: 1.1.10 + dev: true + + /is-arrayish@0.2.1: + resolution: {integrity: sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==} + dev: true + + /is-bigint@1.0.4: + resolution: {integrity: sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==} + dependencies: + has-bigints: 1.0.2 + dev: true + + /is-boolean-object@1.1.2: + resolution: {integrity: sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.2 + has-tostringtag: 1.0.0 + dev: true + + /is-callable@1.2.7: + resolution: {integrity: sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==} + engines: {node: '>= 0.4'} + dev: true + + /is-core-module@2.11.0: + resolution: {integrity: sha512-RRjxlvLDkD1YJwDbroBHMb+cukurkDWNyHx7D3oNB5x9rb5ogcksMC5wHCadcXoo67gVr/+3GFySh3134zi6rw==} + dependencies: + has: 1.0.3 + dev: true + + /is-date-object@1.0.5: + resolution: {integrity: sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==} + engines: {node: '>= 0.4'} + dependencies: + has-tostringtag: 1.0.0 + dev: true + + /is-negative-zero@2.0.2: + resolution: {integrity: sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==} + engines: {node: '>= 0.4'} + dev: true + + /is-number-object@1.0.7: + resolution: {integrity: sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==} + engines: {node: '>= 0.4'} + dependencies: + has-tostringtag: 1.0.0 + dev: true + + /is-regex@1.1.4: + resolution: {integrity: sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.2 + has-tostringtag: 1.0.0 + dev: true + + /is-shared-array-buffer@1.0.2: + resolution: {integrity: sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==} + dependencies: + call-bind: 1.0.2 + dev: true + + 
/is-string@1.0.7: + resolution: {integrity: sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==} + engines: {node: '>= 0.4'} + dependencies: + has-tostringtag: 1.0.0 + dev: true + + /is-symbol@1.0.4: + resolution: {integrity: sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==} + engines: {node: '>= 0.4'} + dependencies: + has-symbols: 1.0.3 + dev: true + + /is-typed-array@1.1.10: + resolution: {integrity: sha512-PJqgEHiWZvMpaFZ3uTc8kHPM4+4ADTlDniuQL7cU/UDA0Ql7F70yGfHph3cLNe+c9toaigv+DFzTJKhc2CtO6A==} + engines: {node: '>= 0.4'} + dependencies: + available-typed-arrays: 1.0.5 + call-bind: 1.0.2 + for-each: 0.3.3 + gopd: 1.0.1 + has-tostringtag: 1.0.0 + dev: true + + /is-weakref@1.0.2: + resolution: {integrity: sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==} + dependencies: + call-bind: 1.0.2 + dev: true + + /isexe@2.0.0: + resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} + dev: true + + /json-parse-better-errors@1.0.2: + resolution: {integrity: sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==} + dev: true + + /load-json-file@4.0.0: + resolution: {integrity: sha512-Kx8hMakjX03tiGTLAIdJ+lL0htKnXjEZN6hk/tozf/WOuYGdZBJrZ+rCJRbVCugsjB3jMLn9746NsQIf5VjBMw==} + engines: {node: '>=4'} + dependencies: + graceful-fs: 4.2.10 + parse-json: 4.0.0 + pify: 3.0.0 + strip-bom: 3.0.0 + dev: true + + /make-error@1.3.6: + resolution: {integrity: sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==} + dev: true + + /memorystream@0.3.1: + resolution: {integrity: sha512-S3UwM3yj5mtUSEfP41UZmt/0SCoVYUcU1rkXv+BQ5Ig8ndL4sPoJNBUJERafdPb5jjHJGuMgytgKvKIf58XNBw==} + engines: {node: '>= 0.10.0'} + dev: true + + /mime-db@1.52.0: + resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} + engines: {node: '>= 0.6'} + dev: false + + /mime-types@2.1.35: + resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} + engines: {node: '>= 0.6'} + dependencies: + mime-db: 1.52.0 + dev: false + + /minimatch@3.1.2: + resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==} + dependencies: + brace-expansion: 1.1.11 + dev: true + + /ms@2.0.0: + resolution: {integrity: sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==} + dev: false + + /ms@2.1.2: + resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==} + dev: false + + /ms@2.1.3: + resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} + dev: false + + /next-tick@1.1.0: + resolution: {integrity: sha512-CXdUiJembsNjuToQvxayPZF9Vqht7hewsvy2sOWafLvi2awflj9mOC6bHIg50orX8IJvWKY9wYQ/zB2kogPslQ==} + dev: false + + /nice-try@1.0.5: + resolution: {integrity: sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==} + dev: true + + /node-addon-api@3.2.1: + resolution: {integrity: sha512-mmcei9JghVNDYydghQmeDX8KoAm0FAiYyIcUt/N4nhyAipB17pllZQDOJD2fotxABnt4Mdz+dKTO7eftLg4d0A==} + dev: false + + /node-gyp-build@4.6.0: + resolution: {integrity: 
sha512-NTZVKn9IylLwUzaKjkas1e4u2DLNcV4rdYagA4PWdPwW87Bi7z+BznyKSRwS/761tV/lzCGXplWsiaMjLqP2zQ==} + hasBin: true + dev: false + + /normalize-package-data@2.5.0: + resolution: {integrity: sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==} + dependencies: + hosted-git-info: 2.8.9 + resolve: 1.22.1 + semver: 5.7.1 + validate-npm-package-license: 3.0.4 + dev: true + + /npm-run-all@4.1.5: + resolution: {integrity: sha512-Oo82gJDAVcaMdi3nuoKFavkIHBRVqQ1qvMb+9LHk/cF4P6B2m8aP04hGf7oL6wZ9BuGwX1onlLhpuoofSyoQDQ==} + engines: {node: '>= 4'} + hasBin: true + dependencies: + ansi-styles: 3.2.1 + chalk: 2.4.2 + cross-spawn: 6.0.5 + memorystream: 0.3.1 + minimatch: 3.1.2 + pidtree: 0.3.1 + read-pkg: 3.0.0 + shell-quote: 1.8.0 + string.prototype.padend: 3.1.4 + dev: true + + /object-inspect@1.12.3: + resolution: {integrity: sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g==} + dev: true + + /object-keys@1.1.1: + resolution: {integrity: sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==} + engines: {node: '>= 0.4'} + dev: true + + /object.assign@4.1.4: + resolution: {integrity: sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.2 + define-properties: 1.2.0 + has-symbols: 1.0.3 + object-keys: 1.1.1 + dev: true + + /parse-json@4.0.0: + resolution: {integrity: sha512-aOIos8bujGN93/8Ox/jPLh7RwVnPEysynVFE+fQZyg6jKELEHwzgKdLRFHUgXJL6kylijVSBC4BvN9OmsB48Rw==} + engines: {node: '>=4'} + dependencies: + error-ex: 1.3.2 + json-parse-better-errors: 1.0.2 + dev: true + + /path-key@2.0.1: + resolution: {integrity: sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==} + engines: {node: '>=4'} + dev: true + + /path-parse@1.0.7: + resolution: {integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==} + dev: true + + /path-type@3.0.0: + resolution: {integrity: sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==} + engines: {node: '>=4'} + dependencies: + pify: 3.0.0 + dev: true + + /pidtree@0.3.1: + resolution: {integrity: sha512-qQbW94hLHEqCg7nhby4yRC7G2+jYHY4Rguc2bjw7Uug4GIJuu1tvf2uHaZv5Q8zdt+WKJ6qK1FOI6amaWUo5FA==} + engines: {node: '>=0.10'} + hasBin: true + dev: true + + /pify@3.0.0: + resolution: {integrity: sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==} + engines: {node: '>=4'} + dev: true + + /read-pkg@3.0.0: + resolution: {integrity: sha512-BLq/cCO9two+lBgiTYNqD6GdtK8s4NpaWrl6/rCO9w0TUS8oJl7cmToOZfRYllKTISY6nt1U7jQ53brmKqY6BA==} + engines: {node: '>=4'} + dependencies: + load-json-file: 4.0.0 + normalize-package-data: 2.5.0 + path-type: 3.0.0 + dev: true + + /ref-array-di@1.2.2: + resolution: {integrity: sha512-jhCmhqWa7kvCVrWhR/d7RemkppqPUdxEil1CtTtm7FkZV8LcHHCK3Or9GinUiFP5WY3k0djUkMvhBhx49Jb2iA==} + dependencies: + array-index: 1.0.0 + debug: 3.2.7 + transitivePeerDependencies: + - supports-color + dev: false + + /ref-napi@3.0.3: + resolution: {integrity: sha512-LiMq/XDGcgodTYOMppikEtJelWsKQERbLQsYm0IOOnzhwE9xYZC7x8txNnFC9wJNOkPferQI4vD4ZkC0mDyrOA==} + engines: {node: '>= 10.0'} + requiresBuild: true + dependencies: + debug: 4.3.4 + get-symbol-from-current-process-h: 1.0.2 + node-addon-api: 3.2.1 + node-gyp-build: 4.6.0 + transitivePeerDependencies: + - supports-color + dev: false + + 
/ref-struct-di@1.1.1: + resolution: {integrity: sha512-2Xyn/0Qgz89VT+++WP0sTosdm9oeowLP23wRJYhG4BFdMUrLj3jhwHZNEytYNYgtPKLNTP3KJX4HEgBvM1/Y2g==} + dependencies: + debug: 3.2.7 + transitivePeerDependencies: + - supports-color + dev: false + + /regexp.prototype.flags@1.4.3: + resolution: {integrity: sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.2 + define-properties: 1.2.0 + functions-have-names: 1.2.3 + dev: true + + /resolve@1.22.1: + resolution: {integrity: sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw==} + hasBin: true + dependencies: + is-core-module: 2.11.0 + path-parse: 1.0.7 + supports-preserve-symlinks-flag: 1.0.0 + dev: true + + /safe-regex-test@1.0.0: + resolution: {integrity: sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA==} + dependencies: + call-bind: 1.0.2 + get-intrinsic: 1.2.0 + is-regex: 1.1.4 + dev: true + + /semver@5.7.1: + resolution: {integrity: sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==} + hasBin: true + dev: true + + /shebang-command@1.2.0: + resolution: {integrity: sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==} + engines: {node: '>=0.10.0'} + dependencies: + shebang-regex: 1.0.0 + dev: true + + /shebang-regex@1.0.0: + resolution: {integrity: sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==} + engines: {node: '>=0.10.0'} + dev: true + + /shell-quote@1.8.0: + resolution: {integrity: sha512-QHsz8GgQIGKlRi24yFc6a6lN69Idnx634w49ay6+jA5yFh7a1UY+4Rp6HPx/L/1zcEDPEij8cIsiqR6bQsE5VQ==} + dev: true + + /side-channel@1.0.4: + resolution: {integrity: sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==} + dependencies: + call-bind: 1.0.2 + get-intrinsic: 1.2.0 + object-inspect: 1.12.3 + dev: true + + /spdx-correct@3.2.0: + resolution: {integrity: sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA==} + dependencies: + spdx-expression-parse: 3.0.1 + spdx-license-ids: 3.0.13 + dev: true + + /spdx-exceptions@2.3.0: + resolution: {integrity: sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A==} + dev: true + + /spdx-expression-parse@3.0.1: + resolution: {integrity: sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==} + dependencies: + spdx-exceptions: 2.3.0 + spdx-license-ids: 3.0.13 + dev: true + + /spdx-license-ids@3.0.13: + resolution: {integrity: sha512-XkD+zwiqXHikFZm4AX/7JSCXA98U5Db4AFd5XUg/+9UNtnH75+Z9KxtpYiJZx36mUDVOwH83pl7yvCer6ewM3w==} + dev: true + + /string.prototype.padend@3.1.4: + resolution: {integrity: sha512-67otBXoksdjsnXXRUq+KMVTdlVRZ2af422Y0aTyTjVaoQkGr3mxl2Bc5emi7dOQ3OGVVQQskmLEWwFXwommpNw==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.2 + define-properties: 1.2.0 + es-abstract: 1.21.1 + dev: true + + /string.prototype.trimend@1.0.6: + resolution: {integrity: sha512-JySq+4mrPf9EsDBEDYMOb/lM7XQLulwg5R/m1r0PXEFqrV0qHvl58sdTilSXtKOflCsK2E8jxf+GKC0T07RWwQ==} + dependencies: + call-bind: 1.0.2 + define-properties: 1.2.0 + es-abstract: 1.21.1 + dev: true + + /string.prototype.trimstart@1.0.6: + resolution: {integrity: sha512-omqjMDaY92pbn5HOX7f9IccLA+U1tA9GvtU4JrodiXFfYB7jPzzHpRzpglLAjtUV6bB557zwClJezTqnAiYnQA==} + dependencies: + 
call-bind: 1.0.2 + define-properties: 1.2.0 + es-abstract: 1.21.1 + dev: true + + /strip-bom@3.0.0: + resolution: {integrity: sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==} + engines: {node: '>=4'} + dev: true + + /supports-color@5.5.0: + resolution: {integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==} + engines: {node: '>=4'} + dependencies: + has-flag: 3.0.0 + dev: true + + /supports-preserve-symlinks-flag@1.0.0: + resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} + engines: {node: '>= 0.4'} + dev: true + + /ts-node@10.9.1(@types/node@18.6.2)(typescript@4.8.2): + resolution: {integrity: sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==} + hasBin: true + peerDependencies: + '@swc/core': '>=1.2.50' + '@swc/wasm': '>=1.2.50' + '@types/node': '*' + typescript: '>=2.7' + peerDependenciesMeta: + '@swc/core': + optional: true + '@swc/wasm': + optional: true + dependencies: + '@cspotcode/source-map-support': 0.8.1 + '@tsconfig/node10': 1.0.9 + '@tsconfig/node12': 1.0.11 + '@tsconfig/node14': 1.0.3 + '@tsconfig/node16': 1.0.3 + '@types/node': 18.6.2 + acorn: 8.8.2 + acorn-walk: 8.2.0 + arg: 4.1.3 + create-require: 1.1.1 + diff: 4.0.2 + make-error: 1.3.6 + typescript: 4.8.2 + v8-compile-cache-lib: 3.0.1 + yn: 3.1.1 + dev: true + + /tweetnacl@1.0.3: + resolution: {integrity: sha512-6rt+RN7aOi1nGMyC4Xa5DdYiukl2UWCbcJft7YhxReBGQD7OAM8Pbxw6YMo4r2diNEA8FEmu32YOn9rhaiE5yw==} + dev: false + + /type@1.2.0: + resolution: {integrity: sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg==} + dev: false + + /type@2.7.2: + resolution: {integrity: sha512-dzlvlNlt6AXU7EBSfpAscydQ7gXB+pPGsPnfJnZpiNJBDj7IaJzQlBZYGdEi4R9HmPdBv2XmWJ6YUtoTa7lmCw==} + dev: false + + /typed-array-length@1.0.4: + resolution: {integrity: sha512-KjZypGq+I/H7HI5HlOoGHkWUUGq+Q0TPhQurLbyrVrvnKTBgzLhIJ7j6J/XTQOi0d1RjyZ0wdas8bKs2p0x3Ng==} + dependencies: + call-bind: 1.0.2 + for-each: 0.3.3 + is-typed-array: 1.1.10 + dev: true + + /typescript@4.8.2: + resolution: {integrity: sha512-C0I1UsrrDHo2fYI5oaCGbSejwX4ch+9Y5jTQELvovfmFkK3HHSZJB8MSJcWLmCUBzQBchCrZ9rMRV6GuNrvGtw==} + engines: {node: '>=4.2.0'} + hasBin: true + dev: true + + /unbox-primitive@1.0.2: + resolution: {integrity: sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==} + dependencies: + call-bind: 1.0.2 + has-bigints: 1.0.2 + has-symbols: 1.0.3 + which-boxed-primitive: 1.0.2 + dev: true + + /v8-compile-cache-lib@3.0.1: + resolution: {integrity: sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==} + dev: true + + /validate-npm-package-license@3.0.4: + resolution: {integrity: sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==} + dependencies: + spdx-correct: 3.2.0 + spdx-expression-parse: 3.0.1 + dev: true + + /which-boxed-primitive@1.0.2: + resolution: {integrity: sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==} + dependencies: + is-bigint: 1.0.4 + is-boolean-object: 1.1.2 + is-number-object: 1.0.7 + is-string: 1.0.7 + is-symbol: 1.0.4 + dev: true + + /which-typed-array@1.1.9: + resolution: {integrity: sha512-w9c4xkx6mPidwp7180ckYWfMmvxpjlZuIudNtDf4N/tTAUB8VJbX25qZoAsrtGuYNnGw3pa0AXgbGKRB8/EceA==} + engines: {node: '>= 0.4'} + dependencies: + 
available-typed-arrays: 1.0.5 + call-bind: 1.0.2 + for-each: 0.3.3 + gopd: 1.0.1 + has-tostringtag: 1.0.0 + is-typed-array: 1.1.10 + dev: true + + /which@1.3.1: + resolution: {integrity: sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==} + hasBin: true + dependencies: + isexe: 2.0.0 + dev: true + + /yn@3.1.1: + resolution: {integrity: sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==} + engines: {node: '>=6'} + dev: true diff --git a/m1/JavaScript-client/examples/typescript/simple_nft.ts b/m1/JavaScript-client/examples/typescript/simple_nft.ts new file mode 100644 index 00000000..1ef57629 --- /dev/null +++ b/m1/JavaScript-client/examples/typescript/simple_nft.ts @@ -0,0 +1,188 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +/* eslint-disable no-console */ + +import dotenv from "dotenv"; +dotenv.config(); + +import { AptosClient, AptosAccount, FaucetClient, TokenClient, CoinClient, Network, Provider } from "aptos"; +import { NODE_URL, FAUCET_URL } from "./common"; + +(async () => { + // Create API and faucet clients. + // :!:>section_1a + const client = new AptosClient(NODE_URL); + const faucetClient = new FaucetClient(NODE_URL, FAUCET_URL); // <:!:section_1a + + // Create client for working with the token module. + // :!:>section_1b + const tokenClient = new TokenClient(client); // <:!:section_1b + + // Create a coin client for checking account balances. + const coinClient = new CoinClient(client); + + // Create accounts. + // :!:>section_2 + const alice = new AptosAccount(); + const bob = new AptosAccount(); // <:!:section_2 + + console.log("=== network ==="); + console.log(NODE_URL); + + // Print out account addresses. + console.log("=== Addresses ==="); + console.log(`Alice: ${alice.address()}`); + console.log(`Bob: ${bob.address()}`); + console.log(""); + + // Fund accounts. + // :!:>section_3 + await faucetClient.fundAccount(alice.address(), 100_000_000); + await faucetClient.fundAccount(bob.address(), 100_000_000); // <:!:section_3 + + console.log("=== Initial Coin Balances ==="); + console.log(`Alice: ${await coinClient.checkBalance(alice)}`); + console.log(`Bob: ${await coinClient.checkBalance(bob)}`); + console.log(""); + + console.log("=== Creating Collection and Token ==="); + + const collectionName = "Alice's"; + const tokenName = "Alice's first token"; + const tokenPropertyVersion = 0; + + const tokenId = { + token_data_id: { + creator: alice.address().hex(), + collection: collectionName, + name: tokenName, + }, + property_version: `${tokenPropertyVersion}`, + }; + + // Create the collection. + // :!:>section_4 + const txnHash1 = await tokenClient.createCollection( + alice, + collectionName, + "Alice's simple collection", + "https://alice.com", + ); // <:!:section_4 + await client.waitForTransaction(txnHash1, { checkSuccess: true }); + + // Create a token in that collection. + // :!:>section_5 + const txnHash2 = await tokenClient.createToken( + alice, + collectionName, + tokenName, + "Alice's simple token", + 1, + "https://aptos.dev/img/nyan.jpeg", + ); // <:!:section_5 + await client.waitForTransaction(txnHash2, { checkSuccess: true }); + + // Print the collection data. + // :!:>section_6 + const collectionData = await tokenClient.getCollectionData(alice.address(), collectionName); + console.log(`Alice's collection: ${JSON.stringify(collectionData, null, 4)}`); // <:!:section_6 + + // Get the token balance. 
+ // :!:>section_7 + const aliceBalance1 = await tokenClient.getToken( + alice.address(), + collectionName, + tokenName, + `${tokenPropertyVersion}`, + ); + console.log(`Alice's token balance: ${aliceBalance1["amount"]}`); // <:!:section_7 + + // Get the token data. + // :!:>section_8 + const tokenData = await tokenClient.getTokenData(alice.address(), collectionName, tokenName); + console.log(`Alice's token data: ${JSON.stringify(tokenData, null, 4)}`); // <:!:section_8 + + // Alice offers one token to Bob. + console.log("\n=== Transferring the token to Bob ==="); + // :!:>section_9 + const txnHash3 = await tokenClient.offerToken( + alice, + bob.address(), + alice.address(), + collectionName, + tokenName, + 1, + tokenPropertyVersion, + ); // <:!:section_9 + await client.waitForTransaction(txnHash3, { checkSuccess: true }); + + // Bob claims the token Alice offered him. + // :!:>section_10 + const txnHash4 = await tokenClient.claimToken( + bob, + alice.address(), + alice.address(), + collectionName, + tokenName, + tokenPropertyVersion, + ); // <:!:section_10 + await client.waitForTransaction(txnHash4, { checkSuccess: true }); + + // Print their balances. + const aliceBalance2 = await tokenClient.getToken( + alice.address(), + collectionName, + tokenName, + `${tokenPropertyVersion}`, + ); + const bobBalance2 = await tokenClient.getTokenForAccount(bob.address(), tokenId); + console.log(`Alice's token balance: ${aliceBalance2["amount"]}`); + console.log(`Bob's token balance: ${bobBalance2["amount"]}`); + + console.log("\n=== Transferring the token back to Alice using MultiAgent ==="); + // :!:>section_11 + let txnHash5 = await tokenClient.directTransferToken( + bob, + alice, + alice.address(), + collectionName, + tokenName, + 1, + tokenPropertyVersion, + ); // <:!:section_11 + await client.waitForTransaction(txnHash5, { checkSuccess: true }); + + // Print out their balances one last time. + const aliceBalance3 = await tokenClient.getToken( + alice.address(), + collectionName, + tokenName, + `${tokenPropertyVersion}`, + ); + const bobBalance3 = await tokenClient.getTokenForAccount(bob.address(), tokenId); + console.log(`Alice's token balance: ${aliceBalance3["amount"]}`); + console.log(`Bob's token balance: ${bobBalance3["amount"]}`); + + const provider = new Provider(Network.DEVNET); + console.log("\n=== Checking if indexer devnet chainId same as fullnode chainId ==="); + const indexerLedgerInfo = await provider.getIndexerLedgerInfo(); + const fullNodeChainId = await provider.getChainId(); + + console.log( + `\n fullnode chain id is: ${fullNodeChainId}, indexer chain id is: ${indexerLedgerInfo.ledger_infos[0].chain_id}`, + ); + + if (indexerLedgerInfo.ledger_infos[0].chain_id !== fullNodeChainId) { + console.log(`\n fullnode chain id and indexer chain id are not synced, skipping rest of tests`); + return; + } + + console.log("\n=== Getting Alices's NFTs ==="); + const aliceNfts = await provider.getAccountNFTs(alice.address().hex()); + console.log(`Alice current token ownership: ${aliceNfts.current_token_ownerships[0].amount}. Should be 1`); + + console.log("\n=== Getting Bob's NFTs ==="); + const bobNfts = await provider.getAccountNFTs(bob.address().hex()); + console.log(`Bob current token ownership: ${bobNfts.current_token_ownerships.length}. 
Should be 0`); +})(); diff --git a/m1/JavaScript-client/examples/typescript/transfer_coin.ts b/m1/JavaScript-client/examples/typescript/transfer_coin.ts new file mode 100644 index 00000000..20fb899d --- /dev/null +++ b/m1/JavaScript-client/examples/typescript/transfer_coin.ts @@ -0,0 +1,67 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +/* eslint-disable no-console */ + +import dotenv from "dotenv"; +dotenv.config(); + +import { AptosClient, AptosAccount, CoinClient, FaucetClient } from "aptos"; +import { NODE_URL, FAUCET_URL } from "./common"; + +(async () => { + // Create API and faucet clients. + // :!:>section_1 + const client = new AptosClient(NODE_URL); + const faucetClient = new FaucetClient(NODE_URL, FAUCET_URL); // <:!:section_1 + + // Create client for working with the coin module. + // :!:>section_1a + const coinClient = new CoinClient(client); // <:!:section_1a + + // Create accounts. + // :!:>section_2 + const alice = new AptosAccount(); + const bob = new AptosAccount(); // <:!:section_2 + + // Print out account addresses. + console.log("=== Addresses ==="); + console.log(`Alice: ${alice.address()}`); + console.log(`Bob: ${bob.address()}`); + console.log(""); + + // Fund accounts. + // :!:>section_3 + await faucetClient.fundAccount(alice.address(), 100_000_000); + await faucetClient.fundAccount(bob.address(), 0); // <:!:section_3 + + // Print out initial balances. + console.log("=== Initial Balances ==="); + // :!:>section_4 + console.log(`Alice: ${await coinClient.checkBalance(alice)}`); + console.log(`Bob: ${await coinClient.checkBalance(bob)}`); // <:!:section_4 + console.log(""); + + // Have Alice send Bob some AptosCoins. + // :!:>section_5 + let txnHash = await coinClient.transfer(alice, bob, 1_000, { gasUnitPrice: BigInt(100) }); // <:!:section_5 + // :!:>section_6a + await client.waitForTransaction(txnHash); // <:!:section_6a + + // Print out intermediate balances. + console.log("=== Intermediate Balances ==="); + console.log(`Alice: ${await coinClient.checkBalance(alice)}`); + console.log(`Bob: ${await coinClient.checkBalance(bob)}`); + console.log(""); + + // Have Alice send Bob some more AptosCoins. + txnHash = await coinClient.transfer(alice, bob, 1_000, { gasUnitPrice: BigInt(100) }); + // :!:>section_6b + await client.waitForTransaction(txnHash, { checkSuccess: true }); // <:!:section_6b + + // Print out final balances. 
+ console.log("=== Final Balances ==="); + console.log(`Alice: ${await coinClient.checkBalance(alice)}`); + console.log(`Bob: ${await coinClient.checkBalance(bob)}`); + console.log(""); +})(); diff --git a/m1/JavaScript-client/examples/typescript/tsconfig.json b/m1/JavaScript-client/examples/typescript/tsconfig.json new file mode 100644 index 00000000..b6381f2d --- /dev/null +++ b/m1/JavaScript-client/examples/typescript/tsconfig.json @@ -0,0 +1,23 @@ +{ + "compilerOptions": { + "allowSyntheticDefaultImports": true, + "allowJs": true, + "declaration": true, + "declarationMap": true, + "esModuleInterop": true, + "experimentalDecorators": true, + "module": "commonjs", + "noImplicitAny": true, + "outDir": "./dist", + "sourceMap": true, + "target": "es2020", + "pretty": true + }, + "include": [ + "src", + "*.ts" + ], + "paths": { + "@aptos/*": "../..", + } +} diff --git a/m1/JavaScript-client/examples/typescript/your_coin.ts b/m1/JavaScript-client/examples/typescript/your_coin.ts new file mode 100644 index 00000000..9460b863 --- /dev/null +++ b/m1/JavaScript-client/examples/typescript/your_coin.ts @@ -0,0 +1,137 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +import assert from "assert"; +import fs from "fs"; +import path from "path"; +import { NODE_URL, FAUCET_URL } from "./common"; +import { AptosAccount, AptosClient, TxnBuilderTypes, MaybeHexString, HexString, FaucetClient } from "aptos"; +/** + This example depends on the MoonCoin.move module having already been published to the destination blockchain. + + One method to do so is to use the CLI: + * Acquire the Aptos CLI, see https://aptos.dev/cli-tools/aptos-cli-tool/install-aptos-cli + * `pnpm your_coin ~/aptos-core/aptos-move/move-examples/moon_coin`. + * Open another terminal and `aptos move compile --package-dir ~/aptos-core/aptos-move/move-examples/moon_coin --save-metadata --named-addresses MoonCoin=<Alice's address>`. + * Return to the first terminal and press enter. + */ + +const readline = require("readline").createInterface({ + input: process.stdin, + output: process.stdout, +}); + +class CoinClient extends AptosClient { + constructor() { + super(NODE_URL); + } + + /** Register the receiver account to receive transfers for the new coin.
*/ + async registerCoin(coinTypeAddress: HexString, coinReceiver: AptosAccount): Promise<string> { + const rawTxn = await this.generateTransaction(coinReceiver.address(), { + function: "0x1::managed_coin::register", + type_arguments: [`${coinTypeAddress.hex()}::moon_coin::MoonCoin`], + arguments: [], + }); + + const bcsTxn = await this.signTransaction(coinReceiver, rawTxn); + const pendingTxn = await this.submitTransaction(bcsTxn); + + return pendingTxn.hash; + } + + /** Transfers the newly created coin to a specified receiver address */ + async transferCoin(sender: AptosAccount, receiverAddress: HexString, amount: number | bigint): Promise<string> { + const rawTxn = await this.generateTransaction(sender.address(), { + function: "0x1::aptos_account::transfer_coins", + type_arguments: [`${sender.address()}::moon_coin::MoonCoin`], + arguments: [receiverAddress.hex(), amount], + }); + + const bcsTxn = await this.signTransaction(sender, rawTxn); + const pendingTxn = await this.submitTransaction(bcsTxn); + + return pendingTxn.hash; + } + + /** Mints the newly created coin to a specified receiver address */ + async mintCoin(minter: AptosAccount, receiverAddress: HexString, amount: number | bigint): Promise<string> { + const rawTxn = await this.generateTransaction(minter.address(), { + function: "0x1::managed_coin::mint", + type_arguments: [`${minter.address()}::moon_coin::MoonCoin`], + arguments: [receiverAddress.hex(), amount], + }); + + const bcsTxn = await this.signTransaction(minter, rawTxn); + const pendingTxn = await this.submitTransaction(bcsTxn); + + return pendingTxn.hash; + } + + /** Return the balance of the newly created coin */ + async getBalance(accountAddress: MaybeHexString, coinTypeAddress: HexString): Promise<string | number> { + try { + const resource = await this.getAccountResource( + accountAddress, + `0x1::coin::CoinStore<${coinTypeAddress.hex()}::moon_coin::MoonCoin>`, + ); + + return parseInt((resource.data as any)["coin"]["value"]); + } catch (_) { + return 0; + } + } + } + + /** run our demo!
*/ + async function main() { + assert(process.argv.length == 3, "Expecting an argument that points to the moon_coin directory."); + + const client = new CoinClient(); + const faucetClient = new FaucetClient(NODE_URL, FAUCET_URL); + + // Create two accounts, Alice and Bob, and fund Alice but not Bob + const alice = new AptosAccount(); + const bob = new AptosAccount(); + + console.log("\n=== Addresses ==="); + console.log(`Alice: ${alice.address()}`); + console.log(`Bob: ${bob.address()}`); + + await faucetClient.fundAccount(alice.address(), 100_000_000); + await faucetClient.fundAccount(bob.address(), 100_000_000); + + await new Promise<void>((resolve) => { + readline.question("Update the module with Alice's address, compile, and press enter.", () => { + resolve(); + readline.close(); + }); + }); + + // :!:>publish + const modulePath = process.argv[2]; + const packageMetadata = fs.readFileSync(path.join(modulePath, "build", "Examples", "package-metadata.bcs")); + const moduleData = fs.readFileSync(path.join(modulePath, "build", "Examples", "bytecode_modules", "moon_coin.mv")); + + console.log("Publishing MoonCoin package."); + let txnHash = await client.publishPackage(alice, new HexString(packageMetadata.toString("hex")).toUint8Array(), [ + new TxnBuilderTypes.Module(new HexString(moduleData.toString("hex")).toUint8Array()), + ]); + await client.waitForTransaction(txnHash, { checkSuccess: true }); // <:!:publish + + console.log(`Bob's initial MoonCoin balance: ${await client.getBalance(bob.address(), alice.address())}.`); + console.log("Alice mints herself some of the new coin."); + txnHash = await client.registerCoin(alice.address(), alice); + await client.waitForTransaction(txnHash, { checkSuccess: true }); + txnHash = await client.mintCoin(alice, alice.address(), 100); + await client.waitForTransaction(txnHash, { checkSuccess: true }); + + console.log("Alice transfers the newly minted coins to Bob."); + txnHash = await client.transferCoin(alice, bob.address(), 100); + await client.waitForTransaction(txnHash, { checkSuccess: true }); + console.log(`Bob's updated MoonCoin balance: ${await client.getBalance(bob.address(), alice.address())}.`); +} + +if (require.main === module) { + main().then((resp) => console.log(resp)); +} diff --git a/m1/JavaScript-client/jest.config.js b/m1/JavaScript-client/jest.config.js new file mode 100644 index 00000000..88581619 --- /dev/null +++ b/m1/JavaScript-client/jest.config.js @@ -0,0 +1,20 @@ +/** @type {import("ts-jest/dist/types").InitialOptionsTsJest} */ +module.exports = { + preset: "ts-jest", + moduleNameMapper: { + "^(\\.{1,2}/.*)\\.js$": "$1", + }, + testEnvironment: "node", + coveragePathIgnorePatterns: ["generated/*", "./aptos_types/*", "utils/memoize-decorator.ts", "utils/hd-key.ts"], + testPathIgnorePatterns: ["dist/*"], + collectCoverage: true, + setupFiles: ["dotenv/config"], + coverageThreshold: { + global: { + branches: 50, // 90, + functions: 50, // 95, + lines: 50, // 95, + statements: 50, // 95, + }, + }, +}; diff --git a/m1/JavaScript-client/package.json b/m1/JavaScript-client/package.json new file mode 100644 index 00000000..12af11d1 --- /dev/null +++ b/m1/JavaScript-client/package.json @@ -0,0 +1,84 @@ +{ + "name": "movement-sdk", + "description": "Movement SDK", + "packageManager": "pnpm@8.3.1", + "license": "Apache-2.0", + "engines": { + "node": ">=11.0.0" + }, + "main": "./dist/index.js", + "module": "./dist/index.mjs", + "types": "./dist/index.d.ts", + "exports": { + ".": { + "import": "./dist/index.mjs", + "require": "./dist/index.js",
+ "types": "./dist/index.d.ts" + } + }, + "scripts": { + "prepack": "pnpm build", + "build": "pnpm build:clean && pnpm _build:node && pnpm _build:browser", + "build:clean": "rm -rf dist", + "_build:browser": "tsup --platform browser --format iife --global-name movementSDK --minify", + "_build:node": "tsup --format cjs,esm --dts", + "lint": "eslint \"**/*.ts\"", + "test": "pnpm run publish-ans-contract && jest", + "_fmt": "prettier 'scripts/**/*.ts' 'src/**/*.ts' 'examples/**/*.js' 'examples/**/*.ts' '.eslintrc.js'", + "fmt": "pnpm _fmt --write", + "fmt:check": "pnpm _fmt --check", + "cov:clean": "rm -rf coverage", + "generate-client": "openapi -i ../../../api/doc/spec.yaml -o ./src/generated -c axios --name AptosGeneratedClient --exportSchemas true", + "checked-publish": "scripts/checked_publish.sh", + "generate-ts-docs": "scripts/generate_ts_docs.sh", + "indexer-codegen": "graphql-codegen --config ./src/indexer/codegen.yml", + "publish-ans-contract": "ts-node ./scripts/publish_ans_contract.ts" + }, + "repository": { + "type": "git", + "url": "https://github.com/movemntdev/movement-subnet" + }, + "homepage": "https://docs.movementlabs.xyz/", + "author": "movementdev", + "keywords": [ + "Movement", + "Movement Labs", + "Move" + ], + "dependencies": { + "@noble/hashes": "1.1.3", + "@scure/bip39": "1.1.0", + "axios": "0.27.2", + "form-data": "4.0.0", + "tweetnacl": "1.0.3" + }, + "devDependencies": { + "@graphql-codegen/cli": "^2.13.5", + "@graphql-codegen/import-types-preset": "^2.2.3", + "@graphql-codegen/typescript": "^2.7.3", + "@graphql-codegen/typescript-graphql-request": "^4.5.8", + "@graphql-codegen/typescript-operations": "^2.5.3", + "@types/jest": "28.1.8", + "@types/node": "18.6.2", + "@typescript-eslint/eslint-plugin": "5.36.2", + "@typescript-eslint/parser": "5.36.2", + "dotenv": "16.0.2", + "eslint": "8.23.0", + "eslint-config-airbnb-base": "15.0.0", + "eslint-config-airbnb-typescript": "17.0.0", + "eslint-config-prettier": "8.5.0", + "eslint-plugin-import": "2.26.0", + "graphql": "^16.5.0", + "graphql-request": "^5.1.0", + "jest": "28.1.3", + "openapi-typescript-codegen": "https://github.com/aptos-labs/openapi-typescript-codegen/releases/download/v0.24.0-p1/openapi-typescript-codegen-v0.24.0-p1.tgz", + "prettier": "2.6.2", + "ts-jest": "28.0.8", + "ts-loader": "9.3.1", + "ts-node": "10.9.1", + "tsup": "6.2.3", + "typedoc": "^0.23.20", + "typescript": "4.8.2" + }, + "version": "1.0.3" +} diff --git a/m1/JavaScript-client/pnpm-lock.yaml b/m1/JavaScript-client/pnpm-lock.yaml new file mode 100644 index 00000000..7efcb039 --- /dev/null +++ b/m1/JavaScript-client/pnpm-lock.yaml @@ -0,0 +1,6984 @@ +lockfileVersion: '6.0' + +dependencies: + '@noble/hashes': + specifier: 1.1.3 + version: 1.1.3 + '@scure/bip39': + specifier: 1.1.0 + version: 1.1.0 + axios: + specifier: 0.27.2 + version: 0.27.2 + form-data: + specifier: 4.0.0 + version: 4.0.0 + tweetnacl: + specifier: 1.0.3 + version: 1.0.3 + +devDependencies: + '@graphql-codegen/cli': + specifier: ^2.13.5 + version: 2.16.5(@babel/core@7.19.6)(@types/node@18.6.2)(graphql@16.6.0)(typescript@4.8.2) + '@graphql-codegen/import-types-preset': + specifier: ^2.2.3 + version: 2.2.6(graphql@16.6.0) + '@graphql-codegen/typescript': + specifier: ^2.7.3 + version: 2.8.8(graphql@16.6.0) + '@graphql-codegen/typescript-graphql-request': + specifier: ^4.5.8 + version: 4.5.8(graphql-request@5.1.0)(graphql-tag@2.12.6)(graphql@16.6.0) + '@graphql-codegen/typescript-operations': + specifier: ^2.5.3 + version: 2.5.13(graphql@16.6.0) + '@types/jest': + 
specifier: 28.1.8 + version: 28.1.8 + '@types/node': + specifier: 18.6.2 + version: 18.6.2 + '@typescript-eslint/eslint-plugin': + specifier: 5.36.2 + version: 5.36.2(@typescript-eslint/parser@5.36.2)(eslint@8.23.0)(typescript@4.8.2) + '@typescript-eslint/parser': + specifier: 5.36.2 + version: 5.36.2(eslint@8.23.0)(typescript@4.8.2) + dotenv: + specifier: 16.0.2 + version: 16.0.2 + eslint: + specifier: 8.23.0 + version: 8.23.0 + eslint-config-airbnb-base: + specifier: 15.0.0 + version: 15.0.0(eslint-plugin-import@2.26.0)(eslint@8.23.0) + eslint-config-airbnb-typescript: + specifier: 17.0.0 + version: 17.0.0(@typescript-eslint/eslint-plugin@5.36.2)(@typescript-eslint/parser@5.36.2)(eslint-plugin-import@2.26.0)(eslint@8.23.0) + eslint-config-prettier: + specifier: 8.5.0 + version: 8.5.0(eslint@8.23.0) + eslint-plugin-import: + specifier: 2.26.0 + version: 2.26.0(@typescript-eslint/parser@5.36.2)(eslint@8.23.0) + graphql: + specifier: ^16.5.0 + version: 16.6.0 + graphql-request: + specifier: ^5.1.0 + version: 5.1.0(graphql@16.6.0) + jest: + specifier: 28.1.3 + version: 28.1.3(@types/node@18.6.2)(ts-node@10.9.1) + openapi-typescript-codegen: + specifier: https://github.com/aptos-labs/openapi-typescript-codegen/releases/download/v0.24.0-p1/openapi-typescript-codegen-v0.24.0-p1.tgz + version: '@github.com/aptos-labs/openapi-typescript-codegen/releases/download/v0.24.0-p1/openapi-typescript-codegen-v0.24.0-p1.tgz' + prettier: + specifier: 2.6.2 + version: 2.6.2 + ts-jest: + specifier: 28.0.8 + version: 28.0.8(@babel/core@7.19.6)(esbuild@0.15.13)(jest@28.1.3)(typescript@4.8.2) + ts-loader: + specifier: 9.3.1 + version: 9.3.1(typescript@4.8.2)(webpack@5.80.0) + ts-node: + specifier: 10.9.1 + version: 10.9.1(@types/node@18.6.2)(typescript@4.8.2) + tsup: + specifier: 6.2.3 + version: 6.2.3(ts-node@10.9.1)(typescript@4.8.2) + typedoc: + specifier: ^0.23.20 + version: 0.23.20(typescript@4.8.2) + typescript: + specifier: 4.8.2 + version: 4.8.2 + +packages: + + /@ampproject/remapping@2.2.0: + resolution: {integrity: sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w==} + engines: {node: '>=6.0.0'} + dependencies: + '@jridgewell/gen-mapping': 0.1.1 + '@jridgewell/trace-mapping': 0.3.17 + dev: true + + /@apidevtools/json-schema-ref-parser@9.0.9: + resolution: {integrity: sha512-GBD2Le9w2+lVFoc4vswGI/TjkNIZSVp7+9xPf+X3uidBfWnAeUWmquteSyt0+VCrhNMWj/FTABISQrD3Z/YA+w==} + dependencies: + '@jsdevtools/ono': 7.1.3 + '@types/json-schema': 7.0.11 + call-me-maybe: 1.0.2 + js-yaml: 4.1.0 + dev: true + + /@ardatan/relay-compiler@12.0.0(graphql@16.6.0): + resolution: {integrity: sha512-9anThAaj1dQr6IGmzBMcfzOQKTa5artjuPmw8NYK/fiGEMjADbSguBY2FMDykt+QhilR3wc9VA/3yVju7JHg7Q==} + hasBin: true + peerDependencies: + graphql: '*' + dependencies: + '@babel/core': 7.19.6 + '@babel/generator': 7.20.14 + '@babel/parser': 7.20.15 + '@babel/runtime': 7.20.13 + '@babel/traverse': 7.20.13 + '@babel/types': 7.20.7 + babel-preset-fbjs: 3.4.0(@babel/core@7.19.6) + chalk: 4.1.2 + fb-watchman: 2.0.2 + fbjs: 3.0.4 + glob: 7.2.3 + graphql: 16.6.0 + immutable: 3.7.6 + invariant: 2.2.4 + nullthrows: 1.1.1 + relay-runtime: 12.0.0 + signedsource: 1.0.0 + yargs: 15.4.1 + transitivePeerDependencies: + - encoding + - supports-color + dev: true + + /@ardatan/sync-fetch@0.0.1: + resolution: {integrity: sha512-xhlTqH0m31mnsG0tIP4ETgfSB6gXDaYYsUWTrlUV93fFQPI9dd8hE0Ot6MHLCtqgB32hwJAC3YZMWlXZw7AleA==} + engines: {node: '>=14'} + dependencies: + node-fetch: 2.6.9 + transitivePeerDependencies: + - encoding + 
dev: true + + /@babel/code-frame@7.18.6: + resolution: {integrity: sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/highlight': 7.18.6 + dev: true + + /@babel/compat-data@7.20.1: + resolution: {integrity: sha512-EWZ4mE2diW3QALKvDMiXnbZpRvlj+nayZ112nK93SnhqOtpdsbVD4W+2tEoT3YNBAG9RBR0ISY758ZkOgsn6pQ==} + engines: {node: '>=6.9.0'} + dev: true + + /@babel/compat-data@7.20.14: + resolution: {integrity: sha512-0YpKHD6ImkWMEINCyDAD0HLLUH/lPCefG8ld9it8DJB2wnApraKuhgYTvTY1z7UFIfBTGy5LwncZ+5HWWGbhFw==} + engines: {node: '>=6.9.0'} + dev: true + + /@babel/core@7.19.6: + resolution: {integrity: sha512-D2Ue4KHpc6Ys2+AxpIx1BZ8+UegLLLE2p3KJEuJRKmokHOtl49jQ5ny1773KsGLZs8MQvBidAF6yWUJxRqtKtg==} + engines: {node: '>=6.9.0'} + dependencies: + '@ampproject/remapping': 2.2.0 + '@babel/code-frame': 7.18.6 + '@babel/generator': 7.20.1 + '@babel/helper-compilation-targets': 7.20.0(@babel/core@7.19.6) + '@babel/helper-module-transforms': 7.19.6 + '@babel/helpers': 7.20.1 + '@babel/parser': 7.20.1 + '@babel/template': 7.18.10 + '@babel/traverse': 7.20.1 + '@babel/types': 7.20.0 + convert-source-map: 1.9.0 + debug: 4.3.4 + gensync: 1.0.0-beta.2 + json5: 2.2.1 + semver: 6.3.0 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/generator@7.20.1: + resolution: {integrity: sha512-u1dMdBUmA7Z0rBB97xh8pIhviK7oItYOkjbsCxTWMknyvbQRBwX7/gn4JXurRdirWMFh+ZtYARqkA6ydogVZpg==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.20.0 + '@jridgewell/gen-mapping': 0.3.2 + jsesc: 2.5.2 + dev: true + + /@babel/generator@7.20.14: + resolution: {integrity: sha512-AEmuXHdcD3A52HHXxaTmYlb8q/xMEhoRP67B3T4Oq7lbmSoqroMZzjnGj3+i1io3pdnF8iBYVu4Ilj+c4hBxYg==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.20.7 + '@jridgewell/gen-mapping': 0.3.2 + jsesc: 2.5.2 + dev: true + + /@babel/helper-annotate-as-pure@7.18.6: + resolution: {integrity: sha512-duORpUiYrEpzKIop6iNbjnwKLAKnJ47csTyRACyEmWj0QdUrm5aqNJGHSSEQSUAvNW0ojX0dOmK9dZduvkfeXA==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.20.7 + dev: true + + /@babel/helper-compilation-targets@7.20.0(@babel/core@7.19.6): + resolution: {integrity: sha512-0jp//vDGp9e8hZzBc6N/KwA5ZK3Wsm/pfm4CrY7vzegkVxc65SgSn6wYOnwHe9Js9HRQ1YTCKLGPzDtaS3RoLQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + dependencies: + '@babel/compat-data': 7.20.1 + '@babel/core': 7.19.6 + '@babel/helper-validator-option': 7.18.6 + browserslist: 4.21.4 + semver: 6.3.0 + dev: true + + /@babel/helper-compilation-targets@7.20.7(@babel/core@7.19.6): + resolution: {integrity: sha512-4tGORmfQcrc+bvrjb5y3dG9Mx1IOZjsHqQVUz7XCNHO+iTmqxWnVg3KRygjGmpRLJGdQSKuvFinbIb0CnZwHAQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + dependencies: + '@babel/compat-data': 7.20.14 + '@babel/core': 7.19.6 + '@babel/helper-validator-option': 7.18.6 + browserslist: 4.21.4 + lru-cache: 5.1.1 + semver: 6.3.0 + dev: true + + /@babel/helper-create-class-features-plugin@7.20.12(@babel/core@7.19.6): + resolution: {integrity: sha512-9OunRkbT0JQcednL0UFvbfXpAsUXiGjUk0a7sN8fUXX7Mue79cUSMjHGDRRi/Vz9vYlpIhLV5fMD5dKoMhhsNQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + dependencies: + '@babel/core': 7.19.6 + '@babel/helper-annotate-as-pure': 7.18.6 + '@babel/helper-environment-visitor': 7.18.9 + '@babel/helper-function-name': 7.19.0 + '@babel/helper-member-expression-to-functions': 7.20.7 + 
'@babel/helper-optimise-call-expression': 7.18.6 + '@babel/helper-replace-supers': 7.20.7 + '@babel/helper-skip-transparent-expression-wrappers': 7.20.0 + '@babel/helper-split-export-declaration': 7.18.6 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/helper-environment-visitor@7.18.9: + resolution: {integrity: sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg==} + engines: {node: '>=6.9.0'} + dev: true + + /@babel/helper-function-name@7.19.0: + resolution: {integrity: sha512-WAwHBINyrpqywkUH0nTnNgI5ina5TFn85HKS0pbPDfxFfhyR/aNQEn4hGi1P1JyT//I0t4OgXUlofzWILRvS5w==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/template': 7.18.10 + '@babel/types': 7.20.0 + dev: true + + /@babel/helper-hoist-variables@7.18.6: + resolution: {integrity: sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.20.0 + dev: true + + /@babel/helper-member-expression-to-functions@7.20.7: + resolution: {integrity: sha512-9J0CxJLq315fEdi4s7xK5TQaNYjZw+nDVpVqr1axNGKzdrdwYBD5b4uKv3n75aABG0rCCTK8Im8Ww7eYfMrZgw==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.20.7 + dev: true + + /@babel/helper-module-imports@7.18.6: + resolution: {integrity: sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.20.0 + dev: true + + /@babel/helper-module-transforms@7.19.6: + resolution: {integrity: sha512-fCmcfQo/KYr/VXXDIyd3CBGZ6AFhPFy1TfSEJ+PilGVlQT6jcbqtHAM4C1EciRqMza7/TpOUZliuSH+U6HAhJw==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/helper-environment-visitor': 7.18.9 + '@babel/helper-module-imports': 7.18.6 + '@babel/helper-simple-access': 7.19.4 + '@babel/helper-split-export-declaration': 7.18.6 + '@babel/helper-validator-identifier': 7.19.1 + '@babel/template': 7.18.10 + '@babel/traverse': 7.20.1 + '@babel/types': 7.20.0 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/helper-module-transforms@7.20.11: + resolution: {integrity: sha512-uRy78kN4psmji1s2QtbtcCSaj/LILFDp0f/ymhpQH5QY3nljUZCaNWz9X1dEj/8MBdBEFECs7yRhKn8i7NjZgg==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/helper-environment-visitor': 7.18.9 + '@babel/helper-module-imports': 7.18.6 + '@babel/helper-simple-access': 7.20.2 + '@babel/helper-split-export-declaration': 7.18.6 + '@babel/helper-validator-identifier': 7.19.1 + '@babel/template': 7.20.7 + '@babel/traverse': 7.20.13 + '@babel/types': 7.20.7 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/helper-optimise-call-expression@7.18.6: + resolution: {integrity: sha512-HP59oD9/fEHQkdcbgFCnbmgH5vIQTJbxh2yf+CdM89/glUNnuzr87Q8GIjGEnOktTROemO0Pe0iPAYbqZuOUiA==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.20.7 + dev: true + + /@babel/helper-plugin-utils@7.19.0: + resolution: {integrity: sha512-40Ryx7I8mT+0gaNxm8JGTZFUITNqdLAgdg0hXzeVZxVD6nFsdhQvip6v8dqkRHzsz1VFpFAaOCHNn0vKBL7Czw==} + engines: {node: '>=6.9.0'} + dev: true + + /@babel/helper-plugin-utils@7.20.2: + resolution: {integrity: sha512-8RvlJG2mj4huQ4pZ+rU9lqKi9ZKiRmuvGuM2HlWmkmgOhbs6zEAw6IEiJ5cQqGbDzGZOhwuOQNtZMi/ENLjZoQ==} + engines: {node: '>=6.9.0'} + dev: true + + /@babel/helper-replace-supers@7.20.7: + resolution: {integrity: sha512-vujDMtB6LVfNW13jhlCrp48QNslK6JXi7lQG736HVbHz/mbf4Dc7tIRh1Xf5C0rF7BP8iiSxGMCmY6Ci1ven3A==} + engines: {node: '>=6.9.0'} + dependencies: + 
'@babel/helper-environment-visitor': 7.18.9 + '@babel/helper-member-expression-to-functions': 7.20.7 + '@babel/helper-optimise-call-expression': 7.18.6 + '@babel/template': 7.20.7 + '@babel/traverse': 7.20.13 + '@babel/types': 7.20.7 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/helper-simple-access@7.19.4: + resolution: {integrity: sha512-f9Xq6WqBFqaDfbCzn2w85hwklswz5qsKlh7f08w4Y9yhJHpnNC0QemtSkK5YyOY8kPGvyiwdzZksGUhnGdaUIg==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.20.0 + dev: true + + /@babel/helper-simple-access@7.20.2: + resolution: {integrity: sha512-+0woI/WPq59IrqDYbVGfshjT5Dmk/nnbdpcF8SnMhhXObpTq2KNBdLFRFrkVdbDOyUmHBCxzm5FHV1rACIkIbA==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.20.7 + dev: true + + /@babel/helper-skip-transparent-expression-wrappers@7.20.0: + resolution: {integrity: sha512-5y1JYeNKfvnT8sZcK9DVRtpTbGiomYIHviSP3OQWmDPU3DeH4a1ZlT/N2lyQ5P8egjcRaT/Y9aNqUxK0WsnIIg==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.20.7 + dev: true + + /@babel/helper-split-export-declaration@7.18.6: + resolution: {integrity: sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.20.0 + dev: true + + /@babel/helper-string-parser@7.19.4: + resolution: {integrity: sha512-nHtDoQcuqFmwYNYPz3Rah5ph2p8PFeFCsZk9A/48dPc/rGocJ5J3hAAZ7pb76VWX3fZKu+uEr/FhH5jLx7umrw==} + engines: {node: '>=6.9.0'} + dev: true + + /@babel/helper-validator-identifier@7.19.1: + resolution: {integrity: sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w==} + engines: {node: '>=6.9.0'} + dev: true + + /@babel/helper-validator-option@7.18.6: + resolution: {integrity: sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw==} + engines: {node: '>=6.9.0'} + dev: true + + /@babel/helpers@7.20.1: + resolution: {integrity: sha512-J77mUVaDTUJFZ5BpP6mMn6OIl3rEWymk2ZxDBQJUG3P+PbmyMcF3bYWvz0ma69Af1oobDqT/iAsvzhB58xhQUg==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/template': 7.18.10 + '@babel/traverse': 7.20.1 + '@babel/types': 7.20.0 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/highlight@7.18.6: + resolution: {integrity: sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/helper-validator-identifier': 7.19.1 + chalk: 2.4.2 + js-tokens: 4.0.0 + dev: true + + /@babel/parser@7.20.1: + resolution: {integrity: sha512-hp0AYxaZJhxULfM1zyp7Wgr+pSUKBcP3M+PHnSzWGdXOzg/kHWIgiUWARvubhUKGOEw3xqY4x+lyZ9ytBVcELw==} + engines: {node: '>=6.0.0'} + hasBin: true + dependencies: + '@babel/types': 7.20.0 + dev: true + + /@babel/parser@7.20.15: + resolution: {integrity: sha512-DI4a1oZuf8wC+oAJA9RW6ga3Zbe8RZFt7kD9i4qAspz3I/yHet1VvC3DiSy/fsUvv5pvJuNPh0LPOdCcqinDPg==} + engines: {node: '>=6.0.0'} + hasBin: true + dependencies: + '@babel/types': 7.20.7 + dev: true + + /@babel/plugin-proposal-class-properties@7.18.6(@babel/core@7.19.6): + resolution: {integrity: sha512-cumfXOF0+nzZrrN8Rf0t7M+tF6sZc7vhQwYQck9q1/5w2OExlD+b4v4RpMJFaV1Z7WcDRgO6FqvxqxGlwo+RHQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.6 + '@babel/helper-create-class-features-plugin': 7.20.12(@babel/core@7.19.6) + '@babel/helper-plugin-utils': 7.20.2 + transitivePeerDependencies: + - supports-color + dev: true + + 
/@babel/plugin-proposal-object-rest-spread@7.20.7(@babel/core@7.19.6): + resolution: {integrity: sha512-d2S98yCiLxDVmBmE8UjGcfPvNEUbA1U5q5WxaWFUGRzJSVAZqm5W6MbPct0jxnegUZ0niLeNX+IOzEs7wYg9Dg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/compat-data': 7.20.14 + '@babel/core': 7.19.6 + '@babel/helper-compilation-targets': 7.20.7(@babel/core@7.19.6) + '@babel/helper-plugin-utils': 7.20.2 + '@babel/plugin-syntax-object-rest-spread': 7.8.3(@babel/core@7.19.6) + '@babel/plugin-transform-parameters': 7.20.7(@babel/core@7.19.6) + dev: true + + /@babel/plugin-syntax-async-generators@7.8.4(@babel/core@7.19.6): + resolution: {integrity: sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.6 + '@babel/helper-plugin-utils': 7.19.0 + dev: true + + /@babel/plugin-syntax-bigint@7.8.3(@babel/core@7.19.6): + resolution: {integrity: sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.6 + '@babel/helper-plugin-utils': 7.19.0 + dev: true + + /@babel/plugin-syntax-class-properties@7.12.13(@babel/core@7.19.6): + resolution: {integrity: sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.6 + '@babel/helper-plugin-utils': 7.19.0 + dev: true + + /@babel/plugin-syntax-flow@7.18.6(@babel/core@7.19.6): + resolution: {integrity: sha512-LUbR+KNTBWCUAqRG9ex5Gnzu2IOkt8jRJbHHXFT9q+L9zm7M/QQbEqXyw1n1pohYvOyWC8CjeyjrSaIwiYjK7A==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.6 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-syntax-import-assertions@7.20.0(@babel/core@7.19.6): + resolution: {integrity: sha512-IUh1vakzNoWalR8ch/areW7qFopR2AEw03JlG7BbrDqmQ4X3q9uuipQwSGrUn7oGiemKjtSLDhNtQHzMHr1JdQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.6 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-syntax-import-meta@7.10.4(@babel/core@7.19.6): + resolution: {integrity: sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.6 + '@babel/helper-plugin-utils': 7.19.0 + dev: true + + /@babel/plugin-syntax-json-strings@7.8.3(@babel/core@7.19.6): + resolution: {integrity: sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.6 + '@babel/helper-plugin-utils': 7.19.0 + dev: true + + /@babel/plugin-syntax-jsx@7.18.6(@babel/core@7.19.6): + resolution: {integrity: sha512-6mmljtAedFGTWu2p/8WIORGwy+61PLgOMPOdazc7YoJ9ZCWUyFy3A6CpPkRKLKD1ToAesxX8KGEViAiLo9N+7Q==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.6 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-syntax-logical-assignment-operators@7.10.4(@babel/core@7.19.6): + resolution: {integrity: sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: 
+ '@babel/core': 7.19.6 + '@babel/helper-plugin-utils': 7.19.0 + dev: true + + /@babel/plugin-syntax-nullish-coalescing-operator@7.8.3(@babel/core@7.19.6): + resolution: {integrity: sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.6 + '@babel/helper-plugin-utils': 7.19.0 + dev: true + + /@babel/plugin-syntax-numeric-separator@7.10.4(@babel/core@7.19.6): + resolution: {integrity: sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.6 + '@babel/helper-plugin-utils': 7.19.0 + dev: true + + /@babel/plugin-syntax-object-rest-spread@7.8.3(@babel/core@7.19.6): + resolution: {integrity: sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.6 + '@babel/helper-plugin-utils': 7.19.0 + dev: true + + /@babel/plugin-syntax-optional-catch-binding@7.8.3(@babel/core@7.19.6): + resolution: {integrity: sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.6 + '@babel/helper-plugin-utils': 7.19.0 + dev: true + + /@babel/plugin-syntax-optional-chaining@7.8.3(@babel/core@7.19.6): + resolution: {integrity: sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.6 + '@babel/helper-plugin-utils': 7.19.0 + dev: true + + /@babel/plugin-syntax-top-level-await@7.14.5(@babel/core@7.19.6): + resolution: {integrity: sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.6 + '@babel/helper-plugin-utils': 7.19.0 + dev: true + + /@babel/plugin-syntax-typescript@7.20.0(@babel/core@7.19.6): + resolution: {integrity: sha512-rd9TkG+u1CExzS4SM1BlMEhMXwFLKVjOAFFCDx9PbX5ycJWDoWMcwdJH9RhkPu1dOgn5TrxLot/Gx6lWFuAUNQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.6 + '@babel/helper-plugin-utils': 7.19.0 + dev: true + + /@babel/plugin-transform-arrow-functions@7.20.7(@babel/core@7.19.6): + resolution: {integrity: sha512-3poA5E7dzDomxj9WXWwuD6A5F3kc7VXwIJO+E+J8qtDtS+pXPAhrgEyh+9GBwBgPq1Z+bB+/JD60lp5jsN7JPQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.6 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-transform-block-scoped-functions@7.18.6(@babel/core@7.19.6): + resolution: {integrity: sha512-ExUcOqpPWnliRcPqves5HJcJOvHvIIWfuS4sroBUenPuMdmW+SMHDakmtS7qOo13sVppmUijqeTv7qqGsvURpQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.6 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-transform-block-scoping@7.20.15(@babel/core@7.19.6): + resolution: {integrity: sha512-Vv4DMZ6MiNOhu/LdaZsT/bsLRxgL94d269Mv4R/9sp6+Mp++X/JqypZYypJXLlM4mlL352/Egzbzr98iABH1CA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.6 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + 
+ /@babel/plugin-transform-classes@7.20.7(@babel/core@7.19.6): + resolution: {integrity: sha512-LWYbsiXTPKl+oBlXUGlwNlJZetXD5Am+CyBdqhPsDVjM9Jc8jwBJFrKhHf900Kfk2eZG1y9MAG3UNajol7A4VQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.6 + '@babel/helper-annotate-as-pure': 7.18.6 + '@babel/helper-compilation-targets': 7.20.7(@babel/core@7.19.6) + '@babel/helper-environment-visitor': 7.18.9 + '@babel/helper-function-name': 7.19.0 + '@babel/helper-optimise-call-expression': 7.18.6 + '@babel/helper-plugin-utils': 7.20.2 + '@babel/helper-replace-supers': 7.20.7 + '@babel/helper-split-export-declaration': 7.18.6 + globals: 11.12.0 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/plugin-transform-computed-properties@7.20.7(@babel/core@7.19.6): + resolution: {integrity: sha512-Lz7MvBK6DTjElHAmfu6bfANzKcxpyNPeYBGEafyA6E5HtRpjpZwU+u7Qrgz/2OR0z+5TvKYbPdphfSaAcZBrYQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.6 + '@babel/helper-plugin-utils': 7.20.2 + '@babel/template': 7.20.7 + dev: true + + /@babel/plugin-transform-destructuring@7.20.7(@babel/core@7.19.6): + resolution: {integrity: sha512-Xwg403sRrZb81IVB79ZPqNQME23yhugYVqgTxAhT99h485F4f+GMELFhhOsscDUB7HCswepKeCKLn/GZvUKoBA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.6 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-transform-flow-strip-types@7.19.0(@babel/core@7.19.6): + resolution: {integrity: sha512-sgeMlNaQVbCSpgLSKP4ZZKfsJVnFnNQlUSk6gPYzR/q7tzCgQF2t8RBKAP6cKJeZdveei7Q7Jm527xepI8lNLg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.6 + '@babel/helper-plugin-utils': 7.20.2 + '@babel/plugin-syntax-flow': 7.18.6(@babel/core@7.19.6) + dev: true + + /@babel/plugin-transform-for-of@7.18.8(@babel/core@7.19.6): + resolution: {integrity: sha512-yEfTRnjuskWYo0k1mHUqrVWaZwrdq8AYbfrpqULOJOaucGSp4mNMVps+YtA8byoevxS/urwU75vyhQIxcCgiBQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.6 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-transform-function-name@7.18.9(@babel/core@7.19.6): + resolution: {integrity: sha512-WvIBoRPaJQ5yVHzcnJFor7oS5Ls0PYixlTYE63lCj2RtdQEl15M68FXQlxnG6wdraJIXRdR7KI+hQ7q/9QjrCQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.6 + '@babel/helper-compilation-targets': 7.20.7(@babel/core@7.19.6) + '@babel/helper-function-name': 7.19.0 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-transform-literals@7.18.9(@babel/core@7.19.6): + resolution: {integrity: sha512-IFQDSRoTPnrAIrI5zoZv73IFeZu2dhu6irxQjY9rNjTT53VmKg9fenjvoiOWOkJ6mm4jKVPtdMzBY98Fp4Z4cg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.6 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-transform-member-expression-literals@7.18.6(@babel/core@7.19.6): + resolution: {integrity: sha512-qSF1ihLGO3q+/g48k85tUjD033C29TNTVB2paCwZPVmOsjn9pClvYYrM2VeJpBY2bcNkuny0YUyTNRyRxJ54KA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.6 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-transform-modules-commonjs@7.20.11(@babel/core@7.19.6): 
+ resolution: {integrity: sha512-S8e1f7WQ7cimJQ51JkAaDrEtohVEitXjgCGAS2N8S31Y42E+kWwfSz83LYz57QdBm7q9diARVqanIaH2oVgQnw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.6 + '@babel/helper-module-transforms': 7.20.11 + '@babel/helper-plugin-utils': 7.20.2 + '@babel/helper-simple-access': 7.20.2 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/plugin-transform-object-super@7.18.6(@babel/core@7.19.6): + resolution: {integrity: sha512-uvGz6zk+pZoS1aTZrOvrbj6Pp/kK2mp45t2B+bTDre2UgsZZ8EZLSJtUg7m/no0zOJUWgFONpB7Zv9W2tSaFlA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.6 + '@babel/helper-plugin-utils': 7.20.2 + '@babel/helper-replace-supers': 7.20.7 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/plugin-transform-parameters@7.20.7(@babel/core@7.19.6): + resolution: {integrity: sha512-WiWBIkeHKVOSYPO0pWkxGPfKeWrCJyD3NJ53+Lrp/QMSZbsVPovrVl2aWZ19D/LTVnaDv5Ap7GJ/B2CTOZdrfA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.6 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-transform-property-literals@7.18.6(@babel/core@7.19.6): + resolution: {integrity: sha512-cYcs6qlgafTud3PAzrrRNbQtfpQ8+y/+M5tKmksS9+M1ckbH6kzY8MrexEM9mcA6JDsukE19iIRvAyYl463sMg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.6 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-transform-react-display-name@7.18.6(@babel/core@7.19.6): + resolution: {integrity: sha512-TV4sQ+T013n61uMoygyMRm+xf04Bd5oqFpv2jAEQwSZ8NwQA7zeRPg1LMVg2PWi3zWBz+CLKD+v5bcpZ/BS0aA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.6 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-transform-react-jsx@7.20.13(@babel/core@7.19.6): + resolution: {integrity: sha512-MmTZx/bkUrfJhhYAYt3Urjm+h8DQGrPrnKQ94jLo7NLuOU+T89a7IByhKmrb8SKhrIYIQ0FN0CHMbnFRen4qNw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.6 + '@babel/helper-annotate-as-pure': 7.18.6 + '@babel/helper-module-imports': 7.18.6 + '@babel/helper-plugin-utils': 7.20.2 + '@babel/plugin-syntax-jsx': 7.18.6(@babel/core@7.19.6) + '@babel/types': 7.20.7 + dev: true + + /@babel/plugin-transform-shorthand-properties@7.18.6(@babel/core@7.19.6): + resolution: {integrity: sha512-eCLXXJqv8okzg86ywZJbRn19YJHU4XUa55oz2wbHhaQVn/MM+XhukiT7SYqp/7o00dg52Rj51Ny+Ecw4oyoygw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.6 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/plugin-transform-spread@7.20.7(@babel/core@7.19.6): + resolution: {integrity: sha512-ewBbHQ+1U/VnH1fxltbJqDeWBU1oNLG8Dj11uIv3xVf7nrQu0bPGe5Rf716r7K5Qz+SqtAOVswoVunoiBtGhxw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + '@babel/core': 7.19.6 + '@babel/helper-plugin-utils': 7.20.2 + '@babel/helper-skip-transparent-expression-wrappers': 7.20.0 + dev: true + + /@babel/plugin-transform-template-literals@7.18.9(@babel/core@7.19.6): + resolution: {integrity: sha512-S8cOWfT82gTezpYOiVaGHrCbhlHgKhQt8XH5ES46P2XWmX92yisoZywf5km75wv5sYcXDUCLMmMxOLCtthDgMA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + dependencies: + 
'@babel/core': 7.19.6 + '@babel/helper-plugin-utils': 7.20.2 + dev: true + + /@babel/runtime@7.20.13: + resolution: {integrity: sha512-gt3PKXs0DBoL9xCvOIIZ2NEqAGZqHjAnmVbfQtB620V0uReIQutpel14KcneZuer7UioY8ALKZ7iocavvzTNFA==} + engines: {node: '>=6.9.0'} + dependencies: + regenerator-runtime: 0.13.11 + dev: true + + /@babel/template@7.18.10: + resolution: {integrity: sha512-TI+rCtooWHr3QJ27kJxfjutghu44DLnasDMwpDqCXVTal9RLp3RSYNh4NdBrRP2cQAoG9A8juOQl6P6oZG4JxA==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/code-frame': 7.18.6 + '@babel/parser': 7.20.1 + '@babel/types': 7.20.0 + dev: true + + /@babel/template@7.20.7: + resolution: {integrity: sha512-8SegXApWe6VoNw0r9JHpSteLKTpTiLZ4rMlGIm9JQ18KiCtyQiAMEazujAHrUS5flrcqYZa75ukev3P6QmUwUw==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/code-frame': 7.18.6 + '@babel/parser': 7.20.15 + '@babel/types': 7.20.7 + dev: true + + /@babel/traverse@7.20.1: + resolution: {integrity: sha512-d3tN8fkVJwFLkHkBN479SOsw4DMZnz8cdbL/gvuDuzy3TS6Nfw80HuQqhw1pITbIruHyh7d1fMA47kWzmcUEGA==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/code-frame': 7.18.6 + '@babel/generator': 7.20.1 + '@babel/helper-environment-visitor': 7.18.9 + '@babel/helper-function-name': 7.19.0 + '@babel/helper-hoist-variables': 7.18.6 + '@babel/helper-split-export-declaration': 7.18.6 + '@babel/parser': 7.20.1 + '@babel/types': 7.20.0 + debug: 4.3.4 + globals: 11.12.0 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/traverse@7.20.13: + resolution: {integrity: sha512-kMJXfF0T6DIS9E8cgdLCSAL+cuCK+YEZHWiLK0SXpTo8YRj5lpJu3CDNKiIBCne4m9hhTIqUg6SYTAI39tAiVQ==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/code-frame': 7.18.6 + '@babel/generator': 7.20.14 + '@babel/helper-environment-visitor': 7.18.9 + '@babel/helper-function-name': 7.19.0 + '@babel/helper-hoist-variables': 7.18.6 + '@babel/helper-split-export-declaration': 7.18.6 + '@babel/parser': 7.20.15 + '@babel/types': 7.20.7 + debug: 4.3.4 + globals: 11.12.0 + transitivePeerDependencies: + - supports-color + dev: true + + /@babel/types@7.20.0: + resolution: {integrity: sha512-Jlgt3H0TajCW164wkTOTzHkZb075tMQMULzrLUoUeKmO7eFL96GgDxf7/Axhc5CAuKE3KFyVW1p6ysKsi2oXAg==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/helper-string-parser': 7.19.4 + '@babel/helper-validator-identifier': 7.19.1 + to-fast-properties: 2.0.0 + dev: true + + /@babel/types@7.20.7: + resolution: {integrity: sha512-69OnhBxSSgK0OzTJai4kyPDiKTIe3j+ctaHdIGVbRahTLAT7L3R9oeXHC2aVSuGYt3cVnoAMDmOCgJ2yaiLMvg==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/helper-string-parser': 7.19.4 + '@babel/helper-validator-identifier': 7.19.1 + to-fast-properties: 2.0.0 + dev: true + + /@bcoe/v8-coverage@0.2.3: + resolution: {integrity: sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==} + dev: true + + /@cspotcode/source-map-support@0.8.1: + resolution: {integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==} + engines: {node: '>=12'} + dependencies: + '@jridgewell/trace-mapping': 0.3.9 + dev: true + + /@esbuild/android-arm@0.15.13: + resolution: {integrity: sha512-RY2fVI8O0iFUNvZirXaQ1vMvK0xhCcl0gqRj74Z6yEiO1zAUa7hbsdwZM1kzqbxHK7LFyMizipfXT3JME+12Hw==} + engines: {node: '>=12'} + cpu: [arm] + os: [android] + requiresBuild: true + dev: true + optional: true + + /@esbuild/linux-loong64@0.15.13: + resolution: {integrity: sha512-+BoyIm4I8uJmH/QDIH0fu7MG0AEx9OXEDXnqptXCwKOlOqZiS4iraH1Nr7/ObLMokW3sOCeBNyD68ATcV9b9Ag==} + 
engines: {node: '>=12'} + cpu: [loong64] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /@eslint/eslintrc@1.3.3: + resolution: {integrity: sha512-uj3pT6Mg+3t39fvLrj8iuCIJ38zKO9FpGtJ4BBJebJhEwjoT+KLVNCcHT5QC9NGRIEi7fZ0ZR8YRb884auB4Lg==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + dependencies: + ajv: 6.12.6 + debug: 4.3.4 + espree: 9.4.0 + globals: 13.17.0 + ignore: 5.2.0 + import-fresh: 3.3.0 + js-yaml: 4.1.0 + minimatch: 3.1.2 + strip-json-comments: 3.1.1 + transitivePeerDependencies: + - supports-color + dev: true + + /@graphql-codegen/add@3.2.3(graphql@16.6.0): + resolution: {integrity: sha512-sQOnWpMko4JLeykwyjFTxnhqjd/3NOG2OyMuvK76Wnnwh8DRrNf2VEs2kmSvLl7MndMlOj7Kh5U154dVcvhmKQ==} + peerDependencies: + graphql: ^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0 + dependencies: + '@graphql-codegen/plugin-helpers': 3.1.2(graphql@16.6.0) + graphql: 16.6.0 + tslib: 2.4.1 + dev: true + + /@graphql-codegen/cli@2.16.5(@babel/core@7.19.6)(@types/node@18.6.2)(graphql@16.6.0)(typescript@4.8.2): + resolution: {integrity: sha512-XYPIp+q7fB0xAGSAoRykiTe4oY80VU+z+dw5nuv4mLY0+pv7+pa2C6Nwhdw7a65lXOhFviBApWCCZeqd54SMnA==} + hasBin: true + peerDependencies: + graphql: ^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0 + dependencies: + '@babel/generator': 7.20.14 + '@babel/template': 7.20.7 + '@babel/types': 7.20.7 + '@graphql-codegen/core': 2.6.8(graphql@16.6.0) + '@graphql-codegen/plugin-helpers': 3.1.2(graphql@16.6.0) + '@graphql-tools/apollo-engine-loader': 7.3.23(@types/node@18.6.2)(graphql@16.6.0) + '@graphql-tools/code-file-loader': 7.3.18(@babel/core@7.19.6)(graphql@16.6.0) + '@graphql-tools/git-loader': 7.2.17(@babel/core@7.19.6)(graphql@16.6.0) + '@graphql-tools/github-loader': 7.3.24(@babel/core@7.19.6)(@types/node@18.6.2)(graphql@16.6.0) + '@graphql-tools/graphql-file-loader': 7.5.15(graphql@16.6.0) + '@graphql-tools/json-file-loader': 7.4.16(graphql@16.6.0) + '@graphql-tools/load': 7.8.11(graphql@16.6.0) + '@graphql-tools/prisma-loader': 7.2.60(@types/node@18.6.2)(graphql@16.6.0) + '@graphql-tools/url-loader': 7.17.9(@types/node@18.6.2)(graphql@16.6.0) + '@graphql-tools/utils': 9.2.0(graphql@16.6.0) + '@whatwg-node/fetch': 0.6.6(@types/node@18.6.2) + chalk: 4.1.2 + chokidar: 3.5.3 + cosmiconfig: 7.1.0 + cosmiconfig-typescript-loader: 4.3.0(@types/node@18.6.2)(cosmiconfig@7.1.0)(ts-node@10.9.1)(typescript@4.8.2) + debounce: 1.2.1 + detect-indent: 6.1.0 + graphql: 16.6.0 + graphql-config: 4.4.1(@types/node@18.6.2)(cosmiconfig-typescript-loader@4.3.0)(graphql@16.6.0) + inquirer: 8.2.5 + is-glob: 4.0.3 + json-to-pretty-yaml: 1.2.2 + listr2: 4.0.5 + log-symbols: 4.1.0 + shell-quote: 1.8.0 + string-env-interpolation: 1.0.1 + ts-log: 2.2.5 + ts-node: 10.9.1(@types/node@18.6.2)(typescript@4.8.2) + tslib: 2.5.0 + yaml: 1.10.2 + yargs: 17.6.2 + transitivePeerDependencies: + - '@babel/core' + - '@swc/core' + - '@swc/wasm' + - '@types/node' + - bufferutil + - cosmiconfig-toml-loader + - encoding + - enquirer + - supports-color + - typescript + - utf-8-validate + dev: true + + /@graphql-codegen/core@2.6.8(graphql@16.6.0): + resolution: {integrity: sha512-JKllNIipPrheRgl+/Hm/xuWMw9++xNQ12XJR/OHHgFopOg4zmN3TdlRSyYcv/K90hCFkkIwhlHFUQTfKrm8rxQ==} + peerDependencies: + graphql: ^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0 + dependencies: + '@graphql-codegen/plugin-helpers': 3.1.2(graphql@16.6.0) + '@graphql-tools/schema': 9.0.15(graphql@16.6.0) 
+ '@graphql-tools/utils': 9.2.0(graphql@16.6.0) + graphql: 16.6.0 + tslib: 2.4.1 + dev: true + + /@graphql-codegen/import-types-preset@2.2.6(graphql@16.6.0): + resolution: {integrity: sha512-Lo2ITOln3UVdyyEPiijj8bVhVg0Ghp/JzHXA2LXxrJVCRbXizQhVC2vjiaWTjMskPt9Zub0yIoce4+RrbsXKcg==} + peerDependencies: + graphql: ^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0 + dependencies: + '@graphql-codegen/add': 3.2.3(graphql@16.6.0) + '@graphql-codegen/plugin-helpers': 2.7.2(graphql@16.6.0) + '@graphql-codegen/visitor-plugin-common': 2.13.1(graphql@16.6.0) + graphql: 16.6.0 + tslib: 2.4.1 + transitivePeerDependencies: + - encoding + - supports-color + dev: true + + /@graphql-codegen/plugin-helpers@2.7.2(graphql@16.6.0): + resolution: {integrity: sha512-kln2AZ12uii6U59OQXdjLk5nOlh1pHis1R98cDZGFnfaiAbX9V3fxcZ1MMJkB7qFUymTALzyjZoXXdyVmPMfRg==} + peerDependencies: + graphql: ^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0 + dependencies: + '@graphql-tools/utils': 8.13.1(graphql@16.6.0) + change-case-all: 1.0.14 + common-tags: 1.8.2 + graphql: 16.6.0 + import-from: 4.0.0 + lodash: 4.17.21 + tslib: 2.4.1 + dev: true + + /@graphql-codegen/plugin-helpers@3.1.2(graphql@16.6.0): + resolution: {integrity: sha512-emOQiHyIliVOIjKVKdsI5MXj312zmRDwmHpyUTZMjfpvxq/UVAHUJIVdVf+lnjjrI+LXBTgMlTWTgHQfmICxjg==} + peerDependencies: + graphql: ^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0 + dependencies: + '@graphql-tools/utils': 9.2.0(graphql@16.6.0) + change-case-all: 1.0.15 + common-tags: 1.8.2 + graphql: 16.6.0 + import-from: 4.0.0 + lodash: 4.17.21 + tslib: 2.4.1 + dev: true + + /@graphql-codegen/schema-ast@2.6.1(graphql@16.6.0): + resolution: {integrity: sha512-5TNW3b1IHJjCh07D2yQNGDQzUpUl2AD+GVe1Dzjqyx/d2Fn0TPMxLsHsKPS4Plg4saO8FK/QO70wLsP7fdbQ1w==} + peerDependencies: + graphql: ^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0 + dependencies: + '@graphql-codegen/plugin-helpers': 3.1.2(graphql@16.6.0) + '@graphql-tools/utils': 9.2.0(graphql@16.6.0) + graphql: 16.6.0 + tslib: 2.4.1 + dev: true + + /@graphql-codegen/typescript-graphql-request@4.5.8(graphql-request@5.1.0)(graphql-tag@2.12.6)(graphql@16.6.0): + resolution: {integrity: sha512-XsuAA35Ou03LsklNgnIWXZ5HOHsJ5w1dBuDKtvqM9rD0cAI8x0f4TY0n6O1EraSBSvyHLP3npb1lOTPZzG2TjA==} + peerDependencies: + graphql: ^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0 + graphql-request: ^3.4.0 || ^4.0.0 || ^5.0.0 + graphql-tag: ^2.0.0 + dependencies: + '@graphql-codegen/plugin-helpers': 2.7.2(graphql@16.6.0) + '@graphql-codegen/visitor-plugin-common': 2.13.1(graphql@16.6.0) + auto-bind: 4.0.0 + graphql: 16.6.0 + graphql-request: 5.1.0(graphql@16.6.0) + graphql-tag: 2.12.6(graphql@16.6.0) + tslib: 2.4.1 + transitivePeerDependencies: + - encoding + - supports-color + dev: true + + /@graphql-codegen/typescript-operations@2.5.13(graphql@16.6.0): + resolution: {integrity: sha512-3vfR6Rx6iZU0JRt29GBkFlrSNTM6t+MSLF86ChvL4d/Jfo/JYAGuB3zNzPhirHYzJPCvLOAx2gy9ID1ltrpYiw==} + peerDependencies: + graphql: ^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0 + dependencies: + '@graphql-codegen/plugin-helpers': 3.1.2(graphql@16.6.0) + '@graphql-codegen/typescript': 2.8.8(graphql@16.6.0) + '@graphql-codegen/visitor-plugin-common': 2.13.8(graphql@16.6.0) + auto-bind: 4.0.0 + graphql: 16.6.0 + tslib: 2.4.1 + transitivePeerDependencies: 
+ - encoding + - supports-color + dev: true + + /@graphql-codegen/typescript@2.8.8(graphql@16.6.0): + resolution: {integrity: sha512-A0oUi3Oy6+DormOlrTC4orxT9OBZkIglhbJBcDmk34jAKKUgesukXRd4yOhmTrnbchpXz2T8IAOFB3FWIaK4Rw==} + peerDependencies: + graphql: ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0 + dependencies: + '@graphql-codegen/plugin-helpers': 3.1.2(graphql@16.6.0) + '@graphql-codegen/schema-ast': 2.6.1(graphql@16.6.0) + '@graphql-codegen/visitor-plugin-common': 2.13.8(graphql@16.6.0) + auto-bind: 4.0.0 + graphql: 16.6.0 + tslib: 2.4.1 + transitivePeerDependencies: + - encoding + - supports-color + dev: true + + /@graphql-codegen/visitor-plugin-common@2.13.1(graphql@16.6.0): + resolution: {integrity: sha512-mD9ufZhDGhyrSaWQGrU1Q1c5f01TeWtSWy/cDwXYjJcHIj1Y/DG2x0tOflEfCvh5WcnmHNIw4lzDsg1W7iFJEg==} + peerDependencies: + graphql: ^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0 + dependencies: + '@graphql-codegen/plugin-helpers': 2.7.2(graphql@16.6.0) + '@graphql-tools/optimize': 1.3.1(graphql@16.6.0) + '@graphql-tools/relay-operation-optimizer': 6.5.16(graphql@16.6.0) + '@graphql-tools/utils': 8.13.1(graphql@16.6.0) + auto-bind: 4.0.0 + change-case-all: 1.0.14 + dependency-graph: 0.11.0 + graphql: 16.6.0 + graphql-tag: 2.12.6(graphql@16.6.0) + parse-filepath: 1.0.2 + tslib: 2.4.1 + transitivePeerDependencies: + - encoding + - supports-color + dev: true + + /@graphql-codegen/visitor-plugin-common@2.13.8(graphql@16.6.0): + resolution: {integrity: sha512-IQWu99YV4wt8hGxIbBQPtqRuaWZhkQRG2IZKbMoSvh0vGeWb3dB0n0hSgKaOOxDY+tljtOf9MTcUYvJslQucMQ==} + peerDependencies: + graphql: ^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0 + dependencies: + '@graphql-codegen/plugin-helpers': 3.1.2(graphql@16.6.0) + '@graphql-tools/optimize': 1.3.1(graphql@16.6.0) + '@graphql-tools/relay-operation-optimizer': 6.5.16(graphql@16.6.0) + '@graphql-tools/utils': 9.2.0(graphql@16.6.0) + auto-bind: 4.0.0 + change-case-all: 1.0.15 + dependency-graph: 0.11.0 + graphql: 16.6.0 + graphql-tag: 2.12.6(graphql@16.6.0) + parse-filepath: 1.0.2 + tslib: 2.4.1 + transitivePeerDependencies: + - encoding + - supports-color + dev: true + + /@graphql-tools/apollo-engine-loader@7.3.23(@types/node@18.6.2)(graphql@16.6.0): + resolution: {integrity: sha512-OGS0fGUeqBn2NNSfDBVIV7mjch6/7M4JCxvA7fpvVUAmdjjnQ6Z/CGyLIH2bv1eNv75gX/Kkj3baI0lwAWzsXw==} + peerDependencies: + graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + dependencies: + '@ardatan/sync-fetch': 0.0.1 + '@graphql-tools/utils': 9.2.0(graphql@16.6.0) + '@whatwg-node/fetch': 0.6.6(@types/node@18.6.2) + graphql: 16.6.0 + tslib: 2.5.0 + transitivePeerDependencies: + - '@types/node' + - encoding + dev: true + + /@graphql-tools/batch-execute@8.5.16(graphql@16.6.0): + resolution: {integrity: sha512-x/gXA6R1Q/qigT5LDesZYemErzFYvBBuTaVgiIJuE2wG6oMV1cln0O35Z7WVQw6H3I4vF7cCG7c7wKSoC+3z4Q==} + peerDependencies: + graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + dependencies: + '@graphql-tools/utils': 9.2.0(graphql@16.6.0) + dataloader: 2.1.0 + graphql: 16.6.0 + tslib: 2.5.0 + value-or-promise: 1.0.12 + dev: true + + /@graphql-tools/code-file-loader@7.3.18(@babel/core@7.19.6)(graphql@16.6.0): + resolution: {integrity: sha512-DK0YjsJWKkLF6HQYuuqiDwMr9rwRojm8yR/T+J8vXCOR4ndYa1EvUm9wRHPhxHVOYeptO2u+APoWNEhuMN9Hbw==} + peerDependencies: + graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + dependencies: + '@graphql-tools/graphql-tag-pluck': 7.4.4(@babel/core@7.19.6)(graphql@16.6.0) + 
'@graphql-tools/utils': 9.2.0(graphql@16.6.0) + globby: 11.1.0 + graphql: 16.6.0 + tslib: 2.5.0 + unixify: 1.0.0 + transitivePeerDependencies: + - '@babel/core' + - supports-color + dev: true + + /@graphql-tools/delegate@9.0.25(graphql@16.6.0): + resolution: {integrity: sha512-M7DMrPx8uEjXUshkki0ufcL//9Dj12eR3vykvteFB6odYL9cX5dhZC9l1D2IdQRuHzLMskhkhRtfnXRoa82KTA==} + peerDependencies: + graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + dependencies: + '@graphql-tools/batch-execute': 8.5.16(graphql@16.6.0) + '@graphql-tools/executor': 0.0.13(graphql@16.6.0) + '@graphql-tools/schema': 9.0.15(graphql@16.6.0) + '@graphql-tools/utils': 9.2.0(graphql@16.6.0) + dataloader: 2.1.0 + graphql: 16.6.0 + tslib: 2.5.0 + value-or-promise: 1.0.12 + dev: true + + /@graphql-tools/executor-graphql-ws@0.0.9(graphql@16.6.0): + resolution: {integrity: sha512-S323OGzc8TQHOw8n7pFSl1+oG5pzhQhXRmgW6sAvA1F79FLjQ95TltEa6jSH7Jqw+tZobMyylJ13CQ1zFDjBPg==} + peerDependencies: + graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + dependencies: + '@graphql-tools/utils': 9.2.0(graphql@16.6.0) + '@repeaterjs/repeater': 3.0.4 + '@types/ws': 8.5.4 + graphql: 16.6.0 + graphql-ws: 5.11.3(graphql@16.6.0) + isomorphic-ws: 5.0.0(ws@8.12.0) + tslib: 2.5.0 + ws: 8.12.0 + transitivePeerDependencies: + - bufferutil + - utf-8-validate + dev: true + + /@graphql-tools/executor-http@0.1.4(@types/node@18.6.2)(graphql@16.6.0): + resolution: {integrity: sha512-6NGxLA9Z/cSOLExxfgddXqoS9JHr0QzvC4YmrjeMz533eW/SDnCf+4803PxkLi0j5CUTUPBnt9hC79l1AD2rZQ==} + peerDependencies: + graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + dependencies: + '@graphql-tools/utils': 9.2.0(graphql@16.6.0) + '@repeaterjs/repeater': 3.0.4 + '@whatwg-node/fetch': 0.6.5(@types/node@18.6.2) + dset: 3.1.2 + extract-files: 11.0.0 + graphql: 16.6.0 + meros: 1.2.1(@types/node@18.6.2) + tslib: 2.5.0 + value-or-promise: 1.0.12 + transitivePeerDependencies: + - '@types/node' + dev: true + + /@graphql-tools/executor-legacy-ws@0.0.7(graphql@16.6.0): + resolution: {integrity: sha512-tSBJE/uv/r0iQjsU16QZkRLLCT0cmVWPqn8NVuAp3yqEeYlU7bzf38L4wNYTn46OHIYElFxXBFsGgMdyvrQLzg==} + peerDependencies: + graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + dependencies: + '@graphql-tools/utils': 9.2.0(graphql@16.6.0) + '@types/ws': 8.5.4 + graphql: 16.6.0 + isomorphic-ws: 5.0.0(ws@8.12.0) + tslib: 2.5.0 + ws: 8.12.0 + transitivePeerDependencies: + - bufferutil + - utf-8-validate + dev: true + + /@graphql-tools/executor@0.0.13(graphql@16.6.0): + resolution: {integrity: sha512-bZ7QdUV5URLCjD/WuDkvyROYoDVoueTN5W1PatkcN949lwIwEKXUW6y3gRSpiZjXw8IH4/NmN3xPk10OT1emRw==} + peerDependencies: + graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + dependencies: + '@graphql-tools/utils': 9.2.0(graphql@16.6.0) + '@graphql-typed-document-node/core': 3.1.1(graphql@16.6.0) + '@repeaterjs/repeater': 3.0.4 + graphql: 16.6.0 + tslib: 2.5.0 + value-or-promise: 1.0.12 + dev: true + + /@graphql-tools/git-loader@7.2.17(@babel/core@7.19.6)(graphql@16.6.0): + resolution: {integrity: sha512-VbJQEgjy3oH0IQvkCJFKsIatep9Qv8mToBf0QSMXvS9fZkLM5wwTM4KPtw0Loim/1BAAnomBpHy6I4kiwqYU4A==} + peerDependencies: + graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + dependencies: + '@graphql-tools/graphql-tag-pluck': 7.4.4(@babel/core@7.19.6)(graphql@16.6.0) + '@graphql-tools/utils': 9.2.0(graphql@16.6.0) + graphql: 16.6.0 + is-glob: 4.0.3 + micromatch: 4.0.5 + tslib: 2.5.0 + unixify: 1.0.0 + transitivePeerDependencies: + - '@babel/core' + - supports-color + dev: true + + 
/@graphql-tools/github-loader@7.3.24(@babel/core@7.19.6)(@types/node@18.6.2)(graphql@16.6.0): + resolution: {integrity: sha512-URlH4tJFk/a97tIFTzAZuQTiFiQrwKjr0fKGohbyKMMycBf82XZ6F199PZP3GtigNmzTqV/vTkf1VLTJU97jRw==} + peerDependencies: + graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + dependencies: + '@ardatan/sync-fetch': 0.0.1 + '@graphql-tools/graphql-tag-pluck': 7.4.4(@babel/core@7.19.6)(graphql@16.6.0) + '@graphql-tools/utils': 9.2.0(graphql@16.6.0) + '@whatwg-node/fetch': 0.6.6(@types/node@18.6.2) + graphql: 16.6.0 + tslib: 2.5.0 + transitivePeerDependencies: + - '@babel/core' + - '@types/node' + - encoding + - supports-color + dev: true + + /@graphql-tools/graphql-file-loader@7.5.15(graphql@16.6.0): + resolution: {integrity: sha512-K6yOfKkQdXQRBl+UY4FzGdoSzGG09GLPZv4q7OFp8do16CXhaxAI+kmJvsvrutSyBfLETPTkCHtzFmdRmTeLpg==} + peerDependencies: + graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + dependencies: + '@graphql-tools/import': 6.7.16(graphql@16.6.0) + '@graphql-tools/utils': 9.2.0(graphql@16.6.0) + globby: 11.1.0 + graphql: 16.6.0 + tslib: 2.5.0 + unixify: 1.0.0 + dev: true + + /@graphql-tools/graphql-tag-pluck@7.4.4(@babel/core@7.19.6)(graphql@16.6.0): + resolution: {integrity: sha512-yHIEcapR/kVSrn4W4Nf3FYpJKPcoGvJbdbye8TnW3dD5GkG4UqVnKuyqFvQPOhgqXKbloFZqUhNqEuyqxqIPRw==} + peerDependencies: + graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + dependencies: + '@babel/parser': 7.20.15 + '@babel/plugin-syntax-import-assertions': 7.20.0(@babel/core@7.19.6) + '@babel/traverse': 7.20.13 + '@babel/types': 7.20.7 + '@graphql-tools/utils': 9.2.0(graphql@16.6.0) + graphql: 16.6.0 + tslib: 2.5.0 + transitivePeerDependencies: + - '@babel/core' + - supports-color + dev: true + + /@graphql-tools/import@6.7.16(graphql@16.6.0): + resolution: {integrity: sha512-m07u+8YsBtKg5w5BG04KFTd59PCAPMAy5Dv/NlR4zCiH/Zbpy5PoetokCZKDrFHYUzjPlm8r//vfCG+JTvHw7g==} + peerDependencies: + graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + dependencies: + '@graphql-tools/utils': 9.2.0(graphql@16.6.0) + graphql: 16.6.0 + resolve-from: 5.0.0 + tslib: 2.5.0 + dev: true + + /@graphql-tools/json-file-loader@7.4.16(graphql@16.6.0): + resolution: {integrity: sha512-9MsqpwIrCx0l880V0dud01DhkwYwqCIlZlCA3bN+TExWa9U3aZhyPO/5BWQU6W52wKk61TvyN6agUa+f4R7jVQ==} + peerDependencies: + graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + dependencies: + '@graphql-tools/utils': 9.2.0(graphql@16.6.0) + globby: 11.1.0 + graphql: 16.6.0 + tslib: 2.5.0 + unixify: 1.0.0 + dev: true + + /@graphql-tools/load@7.8.11(graphql@16.6.0): + resolution: {integrity: sha512-pVn3fYP/qZ3m2NE86gSexyZpEmvTSUe+OIRfWBM60a4L/SycMxgVfYB5+PyDCzruFZg/didIG3v7RfPlZ7zNTQ==} + peerDependencies: + graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + dependencies: + '@graphql-tools/schema': 9.0.15(graphql@16.6.0) + '@graphql-tools/utils': 9.2.0(graphql@16.6.0) + graphql: 16.6.0 + p-limit: 3.1.0 + tslib: 2.5.0 + dev: true + + /@graphql-tools/merge@8.3.17(graphql@16.6.0): + resolution: {integrity: sha512-CLzz49lc6BavPhH9gPRm0sJeNA7kC/tF/jLUTQsyef6xj82Jw3rqIJ9PE+bk1cqPCOG01WLOfquBu445OMDO2g==} + peerDependencies: + graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + dependencies: + '@graphql-tools/utils': 9.2.0(graphql@16.6.0) + graphql: 16.6.0 + tslib: 2.5.0 + dev: true + + /@graphql-tools/optimize@1.3.1(graphql@16.6.0): + resolution: {integrity: sha512-5j5CZSRGWVobt4bgRRg7zhjPiSimk+/zIuColih8E8DxuFOaJ+t0qu7eZS5KXWBkjcd4BPNuhUPpNlEmHPqVRQ==} + peerDependencies: + graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + dependencies: + graphql: 16.6.0 + tslib: 
2.5.0 + dev: true + + /@graphql-tools/prisma-loader@7.2.60(@types/node@18.6.2)(graphql@16.6.0): + resolution: {integrity: sha512-6C/Hicwu/luLlaIqSud3YHJ1HbrIsZ0jHfxWju9aWs3dJLSwRv8Lgw1eHSoWFDEZjc+zNETYNe9GgUwt4BBZzQ==} + peerDependencies: + graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + dependencies: + '@graphql-tools/url-loader': 7.17.9(@types/node@18.6.2)(graphql@16.6.0) + '@graphql-tools/utils': 9.2.0(graphql@16.6.0) + '@types/js-yaml': 4.0.5 + '@types/json-stable-stringify': 1.0.34 + '@types/jsonwebtoken': 9.0.1 + chalk: 4.1.2 + debug: 4.3.4 + dotenv: 16.0.2 + graphql: 16.6.0 + graphql-request: 5.1.0(graphql@16.6.0) + http-proxy-agent: 5.0.0 + https-proxy-agent: 5.0.1 + isomorphic-fetch: 3.0.0 + js-yaml: 4.1.0 + json-stable-stringify: 1.0.2 + jsonwebtoken: 9.0.0 + lodash: 4.17.21 + scuid: 1.1.0 + tslib: 2.5.0 + yaml-ast-parser: 0.0.43 + transitivePeerDependencies: + - '@types/node' + - bufferutil + - encoding + - supports-color + - utf-8-validate + dev: true + + /@graphql-tools/relay-operation-optimizer@6.5.16(graphql@16.6.0): + resolution: {integrity: sha512-g7P11WqrU6h/sRSe6KJULsNUt+5rdwD7mQpnjpKouhXAz/iNKwiUS0BEkkLjkneDkRVvrX0oqBB43VaMaW+gpQ==} + peerDependencies: + graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + dependencies: + '@ardatan/relay-compiler': 12.0.0(graphql@16.6.0) + '@graphql-tools/utils': 9.2.0(graphql@16.6.0) + graphql: 16.6.0 + tslib: 2.5.0 + transitivePeerDependencies: + - encoding + - supports-color + dev: true + + /@graphql-tools/schema@9.0.15(graphql@16.6.0): + resolution: {integrity: sha512-p2DbpkOBcsi+yCEjwoS+r4pJ5z+3JjlJdhbPkCwC4q8lGf5r93dVYrExOrqGKTU5kxLXI/mxabSxcunjNIsDIg==} + peerDependencies: + graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + dependencies: + '@graphql-tools/merge': 8.3.17(graphql@16.6.0) + '@graphql-tools/utils': 9.2.0(graphql@16.6.0) + graphql: 16.6.0 + tslib: 2.5.0 + value-or-promise: 1.0.12 + dev: true + + /@graphql-tools/url-loader@7.17.9(@types/node@18.6.2)(graphql@16.6.0): + resolution: {integrity: sha512-qAXQ9Tr/Am2hEelGVLCfO/YOyCMzCd4FyWMRRqcoMYIaK91arIb5X13pgILD28SUN+6H3NDsx7fgq/Z/OhwGrQ==} + peerDependencies: + graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + dependencies: + '@ardatan/sync-fetch': 0.0.1 + '@graphql-tools/delegate': 9.0.25(graphql@16.6.0) + '@graphql-tools/executor-graphql-ws': 0.0.9(graphql@16.6.0) + '@graphql-tools/executor-http': 0.1.4(@types/node@18.6.2)(graphql@16.6.0) + '@graphql-tools/executor-legacy-ws': 0.0.7(graphql@16.6.0) + '@graphql-tools/utils': 9.2.0(graphql@16.6.0) + '@graphql-tools/wrap': 9.3.4(graphql@16.6.0) + '@types/ws': 8.5.4 + '@whatwg-node/fetch': 0.6.6(@types/node@18.6.2) + graphql: 16.6.0 + isomorphic-ws: 5.0.0(ws@8.12.0) + tslib: 2.5.0 + value-or-promise: 1.0.12 + ws: 8.12.0 + transitivePeerDependencies: + - '@types/node' + - bufferutil + - encoding + - utf-8-validate + dev: true + + /@graphql-tools/utils@8.13.1(graphql@16.6.0): + resolution: {integrity: sha512-qIh9yYpdUFmctVqovwMdheVNJqFh+DQNWIhX87FJStfXYnmweBUDATok9fWPleKeFwxnW8IapKmY8m8toJEkAw==} + peerDependencies: + graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + dependencies: + graphql: 16.6.0 + tslib: 2.5.0 + dev: true + + /@graphql-tools/utils@9.2.0(graphql@16.6.0): + resolution: {integrity: sha512-s3lEG1iYkyYEnKCWrIFECX3XH2wmZvbg6Ir3udCvIDynq+ydaO7JQXobclpPtwSJtjlS353haF//6V7mnBQ4bg==} + peerDependencies: + graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + dependencies: + '@graphql-typed-document-node/core': 3.1.1(graphql@16.6.0) + graphql: 16.6.0 + tslib: 2.5.0 + dev: true + + 
/@graphql-tools/wrap@9.3.4(graphql@16.6.0): + resolution: {integrity: sha512-MJY6lZqi+j96izjOYOLk5fys34oiLt7U34SQ4Wd6V/sy1utVMbh2H7XiZAuDJ38JIKmr72qN7kLgNcnNOxXjHQ==} + peerDependencies: + graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + dependencies: + '@graphql-tools/delegate': 9.0.25(graphql@16.6.0) + '@graphql-tools/schema': 9.0.15(graphql@16.6.0) + '@graphql-tools/utils': 9.2.0(graphql@16.6.0) + graphql: 16.6.0 + tslib: 2.5.0 + value-or-promise: 1.0.12 + dev: true + + /@graphql-typed-document-node/core@3.1.1(graphql@16.6.0): + resolution: {integrity: sha512-NQ17ii0rK1b34VZonlmT2QMJFI70m0TRwbknO/ihlbatXyaktDhN/98vBiUU6kNBPljqGqyIrl2T4nY2RpFANg==} + peerDependencies: + graphql: ^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0 + dependencies: + graphql: 16.6.0 + dev: true + + /@humanwhocodes/config-array@0.10.7: + resolution: {integrity: sha512-MDl6D6sBsaV452/QSdX+4CXIjZhIcI0PELsxUjk4U828yd58vk3bTIvk/6w5FY+4hIy9sLW0sfrV7K7Kc++j/w==} + engines: {node: '>=10.10.0'} + dependencies: + '@humanwhocodes/object-schema': 1.2.1 + debug: 4.3.4 + minimatch: 3.1.2 + transitivePeerDependencies: + - supports-color + dev: true + + /@humanwhocodes/gitignore-to-minimatch@1.0.2: + resolution: {integrity: sha512-rSqmMJDdLFUsyxR6FMtD00nfQKKLFb1kv+qBbOVKqErvloEIJLo5bDTJTQNTYgeyp78JsA7u/NPi5jT1GR/MuA==} + dev: true + + /@humanwhocodes/module-importer@1.0.1: + resolution: {integrity: sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==} + engines: {node: '>=12.22'} + dev: true + + /@humanwhocodes/object-schema@1.2.1: + resolution: {integrity: sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==} + dev: true + + /@istanbuljs/load-nyc-config@1.1.0: + resolution: {integrity: sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==} + engines: {node: '>=8'} + dependencies: + camelcase: 5.3.1 + find-up: 4.1.0 + get-package-type: 0.1.0 + js-yaml: 3.14.1 + resolve-from: 5.0.0 + dev: true + + /@istanbuljs/schema@0.1.3: + resolution: {integrity: sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==} + engines: {node: '>=8'} + dev: true + + /@jest/console@28.1.3: + resolution: {integrity: sha512-QPAkP5EwKdK/bxIr6C1I4Vs0rm2nHiANzj/Z5X2JQkrZo6IqvC4ldZ9K95tF0HdidhA8Bo6egxSzUFPYKcEXLw==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + dependencies: + '@jest/types': 28.1.3 + '@types/node': 18.6.2 + chalk: 4.1.2 + jest-message-util: 28.1.3 + jest-util: 28.1.3 + slash: 3.0.0 + dev: true + + /@jest/core@28.1.3(ts-node@10.9.1): + resolution: {integrity: sha512-CIKBrlaKOzA7YG19BEqCw3SLIsEwjZkeJzf5bdooVnW4bH5cktqe3JX+G2YV1aK5vP8N9na1IGWFzYaTp6k6NA==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + peerDependencies: + node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 + peerDependenciesMeta: + node-notifier: + optional: true + dependencies: + '@jest/console': 28.1.3 + '@jest/reporters': 28.1.3 + '@jest/test-result': 28.1.3 + '@jest/transform': 28.1.3 + '@jest/types': 28.1.3 + '@types/node': 18.6.2 + ansi-escapes: 4.3.2 + chalk: 4.1.2 + ci-info: 3.5.0 + exit: 0.1.2 + graceful-fs: 4.2.10 + jest-changed-files: 28.1.3 + jest-config: 28.1.3(@types/node@18.6.2)(ts-node@10.9.1) + jest-haste-map: 28.1.3 + jest-message-util: 28.1.3 + jest-regex-util: 28.0.2 + jest-resolve: 28.1.3 + jest-resolve-dependencies: 28.1.3 + jest-runner: 28.1.3 + jest-runtime: 28.1.3 + jest-snapshot: 28.1.3 + jest-util: 28.1.3 
+ jest-validate: 28.1.3 + jest-watcher: 28.1.3 + micromatch: 4.0.5 + pretty-format: 28.1.3 + rimraf: 3.0.2 + slash: 3.0.0 + strip-ansi: 6.0.1 + transitivePeerDependencies: + - supports-color + - ts-node + dev: true + + /@jest/environment@28.1.3: + resolution: {integrity: sha512-1bf40cMFTEkKyEf585R9Iz1WayDjHoHqvts0XFYEqyKM3cFWDpeMoqKKTAF9LSYQModPUlh8FKptoM2YcMWAXA==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + dependencies: + '@jest/fake-timers': 28.1.3 + '@jest/types': 28.1.3 + '@types/node': 18.6.2 + jest-mock: 28.1.3 + dev: true + + /@jest/expect-utils@28.1.3: + resolution: {integrity: sha512-wvbi9LUrHJLn3NlDW6wF2hvIMtd4JUl2QNVrjq+IBSHirgfrR3o9RnVtxzdEGO2n9JyIWwHnLfby5KzqBGg2YA==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + dependencies: + jest-get-type: 28.0.2 + dev: true + + /@jest/expect@28.1.3: + resolution: {integrity: sha512-lzc8CpUbSoE4dqT0U+g1qODQjBRHPpCPXissXD4mS9+sWQdmmpeJ9zSH1rS1HEkrsMN0fb7nKrJ9giAR1d3wBw==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + dependencies: + expect: 28.1.3 + jest-snapshot: 28.1.3 + transitivePeerDependencies: + - supports-color + dev: true + + /@jest/fake-timers@28.1.3: + resolution: {integrity: sha512-D/wOkL2POHv52h+ok5Oj/1gOG9HSywdoPtFsRCUmlCILXNn5eIWmcnd3DIiWlJnpGvQtmajqBP95Ei0EimxfLw==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + dependencies: + '@jest/types': 28.1.3 + '@sinonjs/fake-timers': 9.1.2 + '@types/node': 18.6.2 + jest-message-util: 28.1.3 + jest-mock: 28.1.3 + jest-util: 28.1.3 + dev: true + + /@jest/globals@28.1.3: + resolution: {integrity: sha512-XFU4P4phyryCXu1pbcqMO0GSQcYe1IsalYCDzRNyhetyeyxMcIxa11qPNDpVNLeretItNqEmYYQn1UYz/5x1NA==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + dependencies: + '@jest/environment': 28.1.3 + '@jest/expect': 28.1.3 + '@jest/types': 28.1.3 + transitivePeerDependencies: + - supports-color + dev: true + + /@jest/reporters@28.1.3: + resolution: {integrity: sha512-JuAy7wkxQZVNU/V6g9xKzCGC5LVXx9FDcABKsSXp5MiKPEE2144a/vXTEDoyzjUpZKfVwp08Wqg5A4WfTMAzjg==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + peerDependencies: + node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 + peerDependenciesMeta: + node-notifier: + optional: true + dependencies: + '@bcoe/v8-coverage': 0.2.3 + '@jest/console': 28.1.3 + '@jest/test-result': 28.1.3 + '@jest/transform': 28.1.3 + '@jest/types': 28.1.3 + '@jridgewell/trace-mapping': 0.3.17 + '@types/node': 18.6.2 + chalk: 4.1.2 + collect-v8-coverage: 1.0.1 + exit: 0.1.2 + glob: 7.2.3 + graceful-fs: 4.2.10 + istanbul-lib-coverage: 3.2.0 + istanbul-lib-instrument: 5.2.1 + istanbul-lib-report: 3.0.0 + istanbul-lib-source-maps: 4.0.1 + istanbul-reports: 3.1.5 + jest-message-util: 28.1.3 + jest-util: 28.1.3 + jest-worker: 28.1.3 + slash: 3.0.0 + string-length: 4.0.2 + strip-ansi: 6.0.1 + terminal-link: 2.1.1 + v8-to-istanbul: 9.0.1 + transitivePeerDependencies: + - supports-color + dev: true + + /@jest/schemas@28.1.3: + resolution: {integrity: sha512-/l/VWsdt/aBXgjshLWOFyFt3IVdYypu5y2Wn2rOO1un6nkqIn8SLXzgIMYXFyYsRWDyF5EthmKJMIdJvk08grg==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + dependencies: + '@sinclair/typebox': 0.24.51 + dev: true + + /@jest/source-map@28.1.2: + resolution: {integrity: sha512-cV8Lx3BeStJb8ipPHnqVw/IM2VCMWO3crWZzYodSIkxXnRcXJipCdx1JCK0K5MsJJouZQTH73mzf4vgxRaH9ww==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + dependencies: + '@jridgewell/trace-mapping': 0.3.17 + callsites: 3.1.0 + graceful-fs: 4.2.10 + dev: 
true + + /@jest/test-result@28.1.3: + resolution: {integrity: sha512-kZAkxnSE+FqE8YjW8gNuoVkkC9I7S1qmenl8sGcDOLropASP+BkcGKwhXoyqQuGOGeYY0y/ixjrd/iERpEXHNg==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + dependencies: + '@jest/console': 28.1.3 + '@jest/types': 28.1.3 + '@types/istanbul-lib-coverage': 2.0.4 + collect-v8-coverage: 1.0.1 + dev: true + + /@jest/test-sequencer@28.1.3: + resolution: {integrity: sha512-NIMPEqqa59MWnDi1kvXXpYbqsfQmSJsIbnd85mdVGkiDfQ9WQQTXOLsvISUfonmnBT+w85WEgneCigEEdHDFxw==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + dependencies: + '@jest/test-result': 28.1.3 + graceful-fs: 4.2.10 + jest-haste-map: 28.1.3 + slash: 3.0.0 + dev: true + + /@jest/transform@28.1.3: + resolution: {integrity: sha512-u5dT5di+oFI6hfcLOHGTAfmUxFRrjK+vnaP0kkVow9Md/M7V/MxqQMOz/VV25UZO8pzeA9PjfTpOu6BDuwSPQA==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + dependencies: + '@babel/core': 7.19.6 + '@jest/types': 28.1.3 + '@jridgewell/trace-mapping': 0.3.17 + babel-plugin-istanbul: 6.1.1 + chalk: 4.1.2 + convert-source-map: 1.9.0 + fast-json-stable-stringify: 2.1.0 + graceful-fs: 4.2.10 + jest-haste-map: 28.1.3 + jest-regex-util: 28.0.2 + jest-util: 28.1.3 + micromatch: 4.0.5 + pirates: 4.0.5 + slash: 3.0.0 + write-file-atomic: 4.0.2 + transitivePeerDependencies: + - supports-color + dev: true + + /@jest/types@28.1.3: + resolution: {integrity: sha512-RyjiyMUZrKz/c+zlMFO1pm70DcIlST8AeWTkoUdZevew44wcNZQHsEVOiCVtgVnlFFD82FPaXycys58cf2muVQ==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + dependencies: + '@jest/schemas': 28.1.3 + '@types/istanbul-lib-coverage': 2.0.4 + '@types/istanbul-reports': 3.0.1 + '@types/node': 18.6.2 + '@types/yargs': 17.0.13 + chalk: 4.1.2 + dev: true + + /@jridgewell/gen-mapping@0.1.1: + resolution: {integrity: sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w==} + engines: {node: '>=6.0.0'} + dependencies: + '@jridgewell/set-array': 1.1.2 + '@jridgewell/sourcemap-codec': 1.4.14 + dev: true + + /@jridgewell/gen-mapping@0.3.2: + resolution: {integrity: sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A==} + engines: {node: '>=6.0.0'} + dependencies: + '@jridgewell/set-array': 1.1.2 + '@jridgewell/sourcemap-codec': 1.4.14 + '@jridgewell/trace-mapping': 0.3.17 + dev: true + + /@jridgewell/resolve-uri@3.1.0: + resolution: {integrity: sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==} + engines: {node: '>=6.0.0'} + dev: true + + /@jridgewell/set-array@1.1.2: + resolution: {integrity: sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==} + engines: {node: '>=6.0.0'} + dev: true + + /@jridgewell/source-map@0.3.3: + resolution: {integrity: sha512-b+fsZXeLYi9fEULmfBrhxn4IrPlINf8fiNarzTof004v3lFdntdwa9PF7vFJqm3mg7s+ScJMxXaE3Acp1irZcg==} + dependencies: + '@jridgewell/gen-mapping': 0.3.2 + '@jridgewell/trace-mapping': 0.3.17 + dev: true + + /@jridgewell/sourcemap-codec@1.4.14: + resolution: {integrity: sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==} + dev: true + + /@jridgewell/trace-mapping@0.3.17: + resolution: {integrity: sha512-MCNzAp77qzKca9+W/+I0+sEpaUnZoeasnghNeVc41VZCEKaCH73Vq3BZZ/SzWIgrqE4H4ceI+p+b6C0mHf9T4g==} + dependencies: + '@jridgewell/resolve-uri': 3.1.0 + '@jridgewell/sourcemap-codec': 1.4.14 + dev: true + + /@jridgewell/trace-mapping@0.3.9: + resolution: 
{integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==} + dependencies: + '@jridgewell/resolve-uri': 3.1.0 + '@jridgewell/sourcemap-codec': 1.4.14 + dev: true + + /@jsdevtools/ono@7.1.3: + resolution: {integrity: sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg==} + dev: true + + /@noble/hashes@1.1.3: + resolution: {integrity: sha512-CE0FCR57H2acVI5UOzIGSSIYxZ6v/HOhDR0Ro9VLyhnzLwx0o8W1mmgaqlEUx4049qJDlIBRztv5k+MM8vbO3A==} + dev: false + + /@nodelib/fs.scandir@2.1.5: + resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} + engines: {node: '>= 8'} + dependencies: + '@nodelib/fs.stat': 2.0.5 + run-parallel: 1.2.0 + dev: true + + /@nodelib/fs.stat@2.0.5: + resolution: {integrity: sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==} + engines: {node: '>= 8'} + dev: true + + /@nodelib/fs.walk@1.2.8: + resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==} + engines: {node: '>= 8'} + dependencies: + '@nodelib/fs.scandir': 2.1.5 + fastq: 1.13.0 + dev: true + + /@peculiar/asn1-schema@2.3.3: + resolution: {integrity: sha512-6GptMYDMyWBHTUKndHaDsRZUO/XMSgIns2krxcm2L7SEExRHwawFvSwNBhqNPR9HJwv3MruAiF1bhN0we6j6GQ==} + dependencies: + asn1js: 3.0.5 + pvtsutils: 1.3.2 + tslib: 2.5.0 + dev: true + + /@peculiar/json-schema@1.1.12: + resolution: {integrity: sha512-coUfuoMeIB7B8/NMekxaDzLhaYmp0HZNPEjYRm9goRou8UZIC3z21s0sL9AWoCw4EG876QyO3kYrc61WNF9B/w==} + engines: {node: '>=8.0.0'} + dependencies: + tslib: 2.5.0 + dev: true + + /@peculiar/webcrypto@1.4.1: + resolution: {integrity: sha512-eK4C6WTNYxoI7JOabMoZICiyqRRtJB220bh0Mbj5RwRycleZf9BPyZoxsTvpP0FpmVS2aS13NKOuh5/tN3sIRw==} + engines: {node: '>=10.12.0'} + dependencies: + '@peculiar/asn1-schema': 2.3.3 + '@peculiar/json-schema': 1.1.12 + pvtsutils: 1.3.2 + tslib: 2.5.0 + webcrypto-core: 1.7.5 + dev: true + + /@repeaterjs/repeater@3.0.4: + resolution: {integrity: sha512-AW8PKd6iX3vAZ0vA43nOUOnbq/X5ihgU+mSXXqunMkeQADGiqw/PY0JNeYtD5sr0PAy51YPgAPbDoeapv9r8WA==} + dev: true + + /@scure/base@1.1.1: + resolution: {integrity: sha512-ZxOhsSyxYwLJj3pLZCefNitxsj093tb2vq90mp2txoYeBqbcjDjqFhyM8eUjq/uFm6zJ+mUuqxlS2FkuSY1MTA==} + dev: false + + /@scure/bip39@1.1.0: + resolution: {integrity: sha512-pwrPOS16VeTKg98dYXQyIjJEcWfz7/1YJIwxUEPFfQPtc86Ym/1sVgQ2RLoD43AazMk2l/unK4ITySSpW2+82w==} + dependencies: + '@noble/hashes': 1.1.3 + '@scure/base': 1.1.1 + dev: false + + /@sinclair/typebox@0.24.51: + resolution: {integrity: sha512-1P1OROm/rdubP5aFDSZQILU0vrLCJ4fvHt6EoqHEM+2D/G5MK3bIaymUKLit8Js9gbns5UyJnkP/TZROLw4tUA==} + dev: true + + /@sinonjs/commons@1.8.4: + resolution: {integrity: sha512-RpmQdHVo8hCEHDVpO39zToS9jOhR6nw+/lQAzRNq9ErrGV9IeHM71XCn68svVl/euFeVW6BWX4p35gkhbOcSIQ==} + dependencies: + type-detect: 4.0.8 + dev: true + + /@sinonjs/fake-timers@9.1.2: + resolution: {integrity: sha512-BPS4ynJW/o92PUR4wgriz2Ud5gpST5vz6GQfMixEDK0Z8ZCUv2M7SkBLykH56T++Xs+8ln9zTGbOvNGIe02/jw==} + dependencies: + '@sinonjs/commons': 1.8.4 + dev: true + + /@tootallnate/once@2.0.0: + resolution: {integrity: sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==} + engines: {node: '>= 10'} + dev: true + + /@tsconfig/node10@1.0.9: + resolution: {integrity: sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA==} + dev: true + + 
/@tsconfig/node12@1.0.11: + resolution: {integrity: sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==} + dev: true + + /@tsconfig/node14@1.0.3: + resolution: {integrity: sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==} + dev: true + + /@tsconfig/node16@1.0.3: + resolution: {integrity: sha512-yOlFc+7UtL/89t2ZhjPvvB/DeAr3r+Dq58IgzsFkOAvVC6NMJXmCGjbptdXdR9qsX7pKcTL+s87FtYREi2dEEQ==} + dev: true + + /@types/babel__core@7.1.19: + resolution: {integrity: sha512-WEOTgRsbYkvA/KCsDwVEGkd7WAr1e3g31VHQ8zy5gul/V1qKullU/BU5I68X5v7V3GnB9eotmom4v5a5gjxorw==} + dependencies: + '@babel/parser': 7.20.1 + '@babel/types': 7.20.0 + '@types/babel__generator': 7.6.4 + '@types/babel__template': 7.4.1 + '@types/babel__traverse': 7.18.2 + dev: true + + /@types/babel__generator@7.6.4: + resolution: {integrity: sha512-tFkciB9j2K755yrTALxD44McOrk+gfpIpvC3sxHjRawj6PfnQxrse4Clq5y/Rq+G3mrBurMax/lG8Qn2t9mSsg==} + dependencies: + '@babel/types': 7.20.0 + dev: true + + /@types/babel__template@7.4.1: + resolution: {integrity: sha512-azBFKemX6kMg5Io+/rdGT0dkGreboUVR0Cdm3fz9QJWpaQGJRQXl7C+6hOTCZcMll7KFyEQpgbYI2lHdsS4U7g==} + dependencies: + '@babel/parser': 7.20.1 + '@babel/types': 7.20.0 + dev: true + + /@types/babel__traverse@7.18.2: + resolution: {integrity: sha512-FcFaxOr2V5KZCviw1TnutEMVUVsGt4D2hP1TAfXZAMKuHYW3xQhe3jTxNPWutgCJ3/X1c5yX8ZoGVEItxKbwBg==} + dependencies: + '@babel/types': 7.20.0 + dev: true + + /@types/eslint-scope@3.7.4: + resolution: {integrity: sha512-9K4zoImiZc3HlIp6AVUDE4CWYx22a+lhSZMYNpbjW04+YF0KWj4pJXnEMjdnFTiQibFFmElcsasJXDbdI/EPhA==} + dependencies: + '@types/eslint': 8.37.0 + '@types/estree': 1.0.1 + dev: true + + /@types/eslint@8.37.0: + resolution: {integrity: sha512-Piet7dG2JBuDIfohBngQ3rCt7MgO9xCO4xIMKxBThCq5PNRB91IjlJ10eJVwfoNtvTErmxLzwBZ7rHZtbOMmFQ==} + dependencies: + '@types/estree': 1.0.1 + '@types/json-schema': 7.0.11 + dev: true + + /@types/estree@1.0.1: + resolution: {integrity: sha512-LG4opVs2ANWZ1TJoKc937iMmNstM/d0ae1vNbnBvBhqCSezgVUOzcLCqbI5elV8Vy6WKwKjaqR+zO9VKirBBCA==} + dev: true + + /@types/graceful-fs@4.1.5: + resolution: {integrity: sha512-anKkLmZZ+xm4p8JWBf4hElkM4XR+EZeA2M9BAkkTldmcyDY4mbdIJnRghDJH3Ov5ooY7/UAoENtmdMSkaAd7Cw==} + dependencies: + '@types/node': 18.6.2 + dev: true + + /@types/istanbul-lib-coverage@2.0.4: + resolution: {integrity: sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g==} + dev: true + + /@types/istanbul-lib-report@3.0.0: + resolution: {integrity: sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg==} + dependencies: + '@types/istanbul-lib-coverage': 2.0.4 + dev: true + + /@types/istanbul-reports@3.0.1: + resolution: {integrity: sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw==} + dependencies: + '@types/istanbul-lib-report': 3.0.0 + dev: true + + /@types/jest@28.1.8: + resolution: {integrity: sha512-8TJkV++s7B6XqnDrzR1m/TT0A0h948Pnl/097veySPN67VRAgQ4gZ7n2KfJo2rVq6njQjdxU3GCCyDvAeuHoiw==} + dependencies: + expect: 28.1.3 + pretty-format: 28.1.3 + dev: true + + /@types/js-yaml@4.0.5: + resolution: {integrity: sha512-FhpRzf927MNQdRZP0J5DLIdTXhjLYzeUTmLAu69mnVksLH9CJY3IuSeEgbKUki7GQZm0WqDkGzyxju2EZGD2wA==} + dev: true + + /@types/json-schema@7.0.11: + resolution: {integrity: sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ==} + dev: true + + /@types/json-stable-stringify@1.0.34: + 
resolution: {integrity: sha512-s2cfwagOQAS8o06TcwKfr9Wx11dNGbH2E9vJz1cqV+a/LOyhWNLUNd6JSRYNzvB4d29UuJX2M0Dj9vE1T8fRXw==} + dev: true + + /@types/json5@0.0.29: + resolution: {integrity: sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==} + dev: true + + /@types/jsonwebtoken@9.0.1: + resolution: {integrity: sha512-c5ltxazpWabia/4UzhIoaDcIza4KViOQhdbjRlfcIGVnsE3c3brkz9Z+F/EeJIECOQP7W7US2hNE930cWWkPiw==} + dependencies: + '@types/node': 18.6.2 + dev: true + + /@types/node@18.6.2: + resolution: {integrity: sha512-KcfkBq9H4PI6Vpu5B/KoPeuVDAbmi+2mDBqGPGUgoL7yXQtcWGu2vJWmmRkneWK3Rh0nIAX192Aa87AqKHYChQ==} + dev: true + + /@types/parse-json@4.0.0: + resolution: {integrity: sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==} + dev: true + + /@types/prettier@2.7.1: + resolution: {integrity: sha512-ri0UmynRRvZiiUJdiz38MmIblKK+oH30MztdBVR95dv/Ubw6neWSb8u1XpRb72L4qsZOhz+L+z9JD40SJmfWow==} + dev: true + + /@types/stack-utils@2.0.1: + resolution: {integrity: sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw==} + dev: true + + /@types/ws@8.5.4: + resolution: {integrity: sha512-zdQDHKUgcX/zBc4GrwsE/7dVdAD8JR4EuiAXiiUhhfyIJXXb2+PrGshFyeXWQPMmmZ2XxgaqclgpIC7eTXc1mg==} + dependencies: + '@types/node': 18.6.2 + dev: true + + /@types/yargs-parser@21.0.0: + resolution: {integrity: sha512-iO9ZQHkZxHn4mSakYV0vFHAVDyEOIJQrV2uZ06HxEPcx+mt8swXoZHIbaaJ2crJYFfErySgktuTZ3BeLz+XmFA==} + dev: true + + /@types/yargs@17.0.13: + resolution: {integrity: sha512-9sWaruZk2JGxIQU+IhI1fhPYRcQ0UuTNuKuCW9bR5fp7qi2Llf7WDzNa17Cy7TKnh3cdxDOiyTu6gaLS0eDatg==} + dependencies: + '@types/yargs-parser': 21.0.0 + dev: true + + /@typescript-eslint/eslint-plugin@5.36.2(@typescript-eslint/parser@5.36.2)(eslint@8.23.0)(typescript@4.8.2): + resolution: {integrity: sha512-OwwR8LRwSnI98tdc2z7mJYgY60gf7I9ZfGjN5EjCwwns9bdTuQfAXcsjSB2wSQ/TVNYSGKf4kzVXbNGaZvwiXw==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + peerDependencies: + '@typescript-eslint/parser': ^5.0.0 + eslint: ^6.0.0 || ^7.0.0 || ^8.0.0 + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + dependencies: + '@typescript-eslint/parser': 5.36.2(eslint@8.23.0)(typescript@4.8.2) + '@typescript-eslint/scope-manager': 5.36.2 + '@typescript-eslint/type-utils': 5.36.2(eslint@8.23.0)(typescript@4.8.2) + '@typescript-eslint/utils': 5.36.2(eslint@8.23.0)(typescript@4.8.2) + debug: 4.3.4 + eslint: 8.23.0 + functional-red-black-tree: 1.0.1 + ignore: 5.2.0 + regexpp: 3.2.0 + semver: 7.3.8 + tsutils: 3.21.0(typescript@4.8.2) + typescript: 4.8.2 + transitivePeerDependencies: + - supports-color + dev: true + + /@typescript-eslint/parser@5.36.2(eslint@8.23.0)(typescript@4.8.2): + resolution: {integrity: sha512-qS/Kb0yzy8sR0idFspI9Z6+t7mqk/oRjnAYfewG+VN73opAUvmYL3oPIMmgOX6CnQS6gmVIXGshlb5RY/R22pA==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + peerDependencies: + eslint: ^6.0.0 || ^7.0.0 || ^8.0.0 + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + dependencies: + '@typescript-eslint/scope-manager': 5.36.2 + '@typescript-eslint/types': 5.36.2 + '@typescript-eslint/typescript-estree': 5.36.2(typescript@4.8.2) + debug: 4.3.4 + eslint: 8.23.0 + typescript: 4.8.2 + transitivePeerDependencies: + - supports-color + dev: true + + /@typescript-eslint/scope-manager@5.36.2: + resolution: {integrity: sha512-cNNP51L8SkIFSfce8B1NSUBTJTu2Ts4nWeWbFrdaqjmn9yKrAaJUBHkyTZc0cL06OFHpb+JZq5AUHROS398Orw==} + engines: {node: ^12.22.0 || 
^14.17.0 || >=16.0.0} + dependencies: + '@typescript-eslint/types': 5.36.2 + '@typescript-eslint/visitor-keys': 5.36.2 + dev: true + + /@typescript-eslint/type-utils@5.36.2(eslint@8.23.0)(typescript@4.8.2): + resolution: {integrity: sha512-rPQtS5rfijUWLouhy6UmyNquKDPhQjKsaKH0WnY6hl/07lasj8gPaH2UD8xWkePn6SC+jW2i9c2DZVDnL+Dokw==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + peerDependencies: + eslint: '*' + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + dependencies: + '@typescript-eslint/typescript-estree': 5.36.2(typescript@4.8.2) + '@typescript-eslint/utils': 5.36.2(eslint@8.23.0)(typescript@4.8.2) + debug: 4.3.4 + eslint: 8.23.0 + tsutils: 3.21.0(typescript@4.8.2) + typescript: 4.8.2 + transitivePeerDependencies: + - supports-color + dev: true + + /@typescript-eslint/types@5.36.2: + resolution: {integrity: sha512-9OJSvvwuF1L5eS2EQgFUbECb99F0mwq501w0H0EkYULkhFa19Qq7WFbycdw1PexAc929asupbZcgjVIe6OK/XQ==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + dev: true + + /@typescript-eslint/typescript-estree@5.36.2(typescript@4.8.2): + resolution: {integrity: sha512-8fyH+RfbKc0mTspfuEjlfqA4YywcwQK2Amcf6TDOwaRLg7Vwdu4bZzyvBZp4bjt1RRjQ5MDnOZahxMrt2l5v9w==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + peerDependencies: + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + dependencies: + '@typescript-eslint/types': 5.36.2 + '@typescript-eslint/visitor-keys': 5.36.2 + debug: 4.3.4 + globby: 11.1.0 + is-glob: 4.0.3 + semver: 7.3.8 + tsutils: 3.21.0(typescript@4.8.2) + typescript: 4.8.2 + transitivePeerDependencies: + - supports-color + dev: true + + /@typescript-eslint/utils@5.36.2(eslint@8.23.0)(typescript@4.8.2): + resolution: {integrity: sha512-uNcopWonEITX96v9pefk9DC1bWMdkweeSsewJ6GeC7L6j2t0SJywisgkr9wUTtXk90fi2Eljj90HSHm3OGdGRg==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + peerDependencies: + eslint: ^6.0.0 || ^7.0.0 || ^8.0.0 + dependencies: + '@types/json-schema': 7.0.11 + '@typescript-eslint/scope-manager': 5.36.2 + '@typescript-eslint/types': 5.36.2 + '@typescript-eslint/typescript-estree': 5.36.2(typescript@4.8.2) + eslint: 8.23.0 + eslint-scope: 5.1.1 + eslint-utils: 3.0.0(eslint@8.23.0) + transitivePeerDependencies: + - supports-color + - typescript + dev: true + + /@typescript-eslint/visitor-keys@5.36.2: + resolution: {integrity: sha512-BtRvSR6dEdrNt7Net2/XDjbYKU5Ml6GqJgVfXT0CxTCJlnIqK7rAGreuWKMT2t8cFUT2Msv5oxw0GMRD7T5J7A==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + dependencies: + '@typescript-eslint/types': 5.36.2 + eslint-visitor-keys: 3.3.0 + dev: true + + /@webassemblyjs/ast@1.11.5: + resolution: {integrity: sha512-LHY/GSAZZRpsNQH+/oHqhRQ5FT7eoULcBqgfyTB5nQHogFnK3/7QoN7dLnwSE/JkUAF0SrRuclT7ODqMFtWxxQ==} + dependencies: + '@webassemblyjs/helper-numbers': 1.11.5 + '@webassemblyjs/helper-wasm-bytecode': 1.11.5 + dev: true + + /@webassemblyjs/floating-point-hex-parser@1.11.5: + resolution: {integrity: sha512-1j1zTIC5EZOtCplMBG/IEwLtUojtwFVwdyVMbL/hwWqbzlQoJsWCOavrdnLkemwNoC/EOwtUFch3fuo+cbcXYQ==} + dev: true + + /@webassemblyjs/helper-api-error@1.11.5: + resolution: {integrity: sha512-L65bDPmfpY0+yFrsgz8b6LhXmbbs38OnwDCf6NpnMUYqa+ENfE5Dq9E42ny0qz/PdR0LJyq/T5YijPnU8AXEpA==} + dev: true + + /@webassemblyjs/helper-buffer@1.11.5: + resolution: {integrity: sha512-fDKo1gstwFFSfacIeH5KfwzjykIE6ldh1iH9Y/8YkAZrhmu4TctqYjSh7t0K2VyDSXOZJ1MLhht/k9IvYGcIxg==} + dev: true + + /@webassemblyjs/helper-numbers@1.11.5: + resolution: {integrity: 
sha512-DhykHXM0ZABqfIGYNv93A5KKDw/+ywBFnuWybZZWcuzWHfbp21wUfRkbtz7dMGwGgT4iXjWuhRMA2Mzod6W4WA==} + dependencies: + '@webassemblyjs/floating-point-hex-parser': 1.11.5 + '@webassemblyjs/helper-api-error': 1.11.5 + '@xtuc/long': 4.2.2 + dev: true + + /@webassemblyjs/helper-wasm-bytecode@1.11.5: + resolution: {integrity: sha512-oC4Qa0bNcqnjAowFn7MPCETQgDYytpsfvz4ujZz63Zu/a/v71HeCAAmZsgZ3YVKec3zSPYytG3/PrRCqbtcAvA==} + dev: true + + /@webassemblyjs/helper-wasm-section@1.11.5: + resolution: {integrity: sha512-uEoThA1LN2NA+K3B9wDo3yKlBfVtC6rh0i4/6hvbz071E8gTNZD/pT0MsBf7MeD6KbApMSkaAK0XeKyOZC7CIA==} + dependencies: + '@webassemblyjs/ast': 1.11.5 + '@webassemblyjs/helper-buffer': 1.11.5 + '@webassemblyjs/helper-wasm-bytecode': 1.11.5 + '@webassemblyjs/wasm-gen': 1.11.5 + dev: true + + /@webassemblyjs/ieee754@1.11.5: + resolution: {integrity: sha512-37aGq6qVL8A8oPbPrSGMBcp38YZFXcHfiROflJn9jxSdSMMM5dS5P/9e2/TpaJuhE+wFrbukN2WI6Hw9MH5acg==} + dependencies: + '@xtuc/ieee754': 1.2.0 + dev: true + + /@webassemblyjs/leb128@1.11.5: + resolution: {integrity: sha512-ajqrRSXaTJoPW+xmkfYN6l8VIeNnR4vBOTQO9HzR7IygoCcKWkICbKFbVTNMjMgMREqXEr0+2M6zukzM47ZUfQ==} + dependencies: + '@xtuc/long': 4.2.2 + dev: true + + /@webassemblyjs/utf8@1.11.5: + resolution: {integrity: sha512-WiOhulHKTZU5UPlRl53gHR8OxdGsSOxqfpqWeA2FmcwBMaoEdz6b2x2si3IwC9/fSPLfe8pBMRTHVMk5nlwnFQ==} + dev: true + + /@webassemblyjs/wasm-edit@1.11.5: + resolution: {integrity: sha512-C0p9D2fAu3Twwqvygvf42iGCQ4av8MFBLiTb+08SZ4cEdwzWx9QeAHDo1E2k+9s/0w1DM40oflJOpkZ8jW4HCQ==} + dependencies: + '@webassemblyjs/ast': 1.11.5 + '@webassemblyjs/helper-buffer': 1.11.5 + '@webassemblyjs/helper-wasm-bytecode': 1.11.5 + '@webassemblyjs/helper-wasm-section': 1.11.5 + '@webassemblyjs/wasm-gen': 1.11.5 + '@webassemblyjs/wasm-opt': 1.11.5 + '@webassemblyjs/wasm-parser': 1.11.5 + '@webassemblyjs/wast-printer': 1.11.5 + dev: true + + /@webassemblyjs/wasm-gen@1.11.5: + resolution: {integrity: sha512-14vteRlRjxLK9eSyYFvw1K8Vv+iPdZU0Aebk3j6oB8TQiQYuO6hj9s4d7qf6f2HJr2khzvNldAFG13CgdkAIfA==} + dependencies: + '@webassemblyjs/ast': 1.11.5 + '@webassemblyjs/helper-wasm-bytecode': 1.11.5 + '@webassemblyjs/ieee754': 1.11.5 + '@webassemblyjs/leb128': 1.11.5 + '@webassemblyjs/utf8': 1.11.5 + dev: true + + /@webassemblyjs/wasm-opt@1.11.5: + resolution: {integrity: sha512-tcKwlIXstBQgbKy1MlbDMlXaxpucn42eb17H29rawYLxm5+MsEmgPzeCP8B1Cl69hCice8LeKgZpRUAPtqYPgw==} + dependencies: + '@webassemblyjs/ast': 1.11.5 + '@webassemblyjs/helper-buffer': 1.11.5 + '@webassemblyjs/wasm-gen': 1.11.5 + '@webassemblyjs/wasm-parser': 1.11.5 + dev: true + + /@webassemblyjs/wasm-parser@1.11.5: + resolution: {integrity: sha512-SVXUIwsLQlc8srSD7jejsfTU83g7pIGr2YYNb9oHdtldSxaOhvA5xwvIiWIfcX8PlSakgqMXsLpLfbbJ4cBYew==} + dependencies: + '@webassemblyjs/ast': 1.11.5 + '@webassemblyjs/helper-api-error': 1.11.5 + '@webassemblyjs/helper-wasm-bytecode': 1.11.5 + '@webassemblyjs/ieee754': 1.11.5 + '@webassemblyjs/leb128': 1.11.5 + '@webassemblyjs/utf8': 1.11.5 + dev: true + + /@webassemblyjs/wast-printer@1.11.5: + resolution: {integrity: sha512-f7Pq3wvg3GSPUPzR0F6bmI89Hdb+u9WXrSKc4v+N0aV0q6r42WoF92Jp2jEorBEBRoRNXgjp53nBniDXcqZYPA==} + dependencies: + '@webassemblyjs/ast': 1.11.5 + '@xtuc/long': 4.2.2 + dev: true + + /@whatwg-node/events@0.0.2: + resolution: {integrity: sha512-WKj/lI4QjnLuPrim0cfO7i+HsDSXHxNv1y0CrJhdntuO3hxWZmnXCwNDnwOvry11OjRin6cgWNF+j/9Pn8TN4w==} + dev: true + + /@whatwg-node/fetch@0.6.5(@types/node@18.6.2): + resolution: {integrity: 
sha512-3XQ78RAMX8Az0LlUqMoGM3jbT+FE0S+IKr4yiTiqzQ5S/pNxD52K/kFLcLQiEbL+3rkk/glCHqjxF1QI5155Ig==} + dependencies: + '@peculiar/webcrypto': 1.4.1 + '@whatwg-node/node-fetch': 0.0.1(@types/node@18.6.2) + busboy: 1.6.0 + urlpattern-polyfill: 6.0.2 + web-streams-polyfill: 3.2.1 + transitivePeerDependencies: + - '@types/node' + dev: true + + /@whatwg-node/fetch@0.6.6(@types/node@18.6.2): + resolution: {integrity: sha512-8kB/Pp0knQVjbm3O15h1ATKOZ7n8GXMow3z8ptVTaRmiOMnCnA9bn7gKTLWFBdD84zzWFzPp6C9pB3vsndJKlQ==} + dependencies: + '@peculiar/webcrypto': 1.4.1 + '@whatwg-node/node-fetch': 0.0.2(@types/node@18.6.2) + busboy: 1.6.0 + urlpattern-polyfill: 6.0.2 + web-streams-polyfill: 3.2.1 + transitivePeerDependencies: + - '@types/node' + dev: true + + /@whatwg-node/node-fetch@0.0.1(@types/node@18.6.2): + resolution: {integrity: sha512-dMbh604yf2jl37IzvYGA6z3heQg3dMzlqoNsiNToe46SVmKusfJXGf4KYIuiJTzh9mEEu/uVF//QakUfsLJpwA==} + peerDependencies: + '@types/node': ^18.0.6 + dependencies: + '@types/node': 18.6.2 + '@whatwg-node/events': 0.0.2 + busboy: 1.6.0 + tslib: 2.5.0 + dev: true + + /@whatwg-node/node-fetch@0.0.2(@types/node@18.6.2): + resolution: {integrity: sha512-Xs3kunumaSWTHDjKJATP9r2AhwhwPh8miQQHi3aI64MwBSrFsolBUUyCkOJe2geDuHggoNycfnU85HP528odWg==} + peerDependencies: + '@types/node': ^18.0.6 + dependencies: + '@types/node': 18.6.2 + '@whatwg-node/events': 0.0.2 + busboy: 1.6.0 + tslib: 2.5.0 + dev: true + + /@xtuc/ieee754@1.2.0: + resolution: {integrity: sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA==} + dev: true + + /@xtuc/long@4.2.2: + resolution: {integrity: sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==} + dev: true + + /acorn-import-assertions@1.8.0(acorn@8.8.1): + resolution: {integrity: sha512-m7VZ3jwz4eK6A4Vtt8Ew1/mNbP24u0FhdyfA7fSvnJR6LMdfOYnmuIrrJAgrYfYJ10F/otaHTtrtrtmHdMNzEw==} + peerDependencies: + acorn: ^8 + dependencies: + acorn: 8.8.1 + dev: true + + /acorn-jsx@5.3.2(acorn@8.8.1): + resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==} + peerDependencies: + acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 + dependencies: + acorn: 8.8.1 + dev: true + + /acorn-walk@8.2.0: + resolution: {integrity: sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==} + engines: {node: '>=0.4.0'} + dev: true + + /acorn@8.8.1: + resolution: {integrity: sha512-7zFpHzhnqYKrkYdUjF1HI1bzd0VygEGX8lFk4k5zVMqHEoES+P+7TKI+EvLO9WVMJ8eekdO0aDEK044xTXwPPA==} + engines: {node: '>=0.4.0'} + hasBin: true + dev: true + + /agent-base@6.0.2: + resolution: {integrity: sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==} + engines: {node: '>= 6.0.0'} + dependencies: + debug: 4.3.4 + transitivePeerDependencies: + - supports-color + dev: true + + /aggregate-error@3.1.0: + resolution: {integrity: sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==} + engines: {node: '>=8'} + dependencies: + clean-stack: 2.2.0 + indent-string: 4.0.0 + dev: true + + /ajv-keywords@3.5.2(ajv@6.12.6): + resolution: {integrity: sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==} + peerDependencies: + ajv: ^6.9.1 + dependencies: + ajv: 6.12.6 + dev: true + + /ajv@6.12.6: + resolution: {integrity: sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==} + dependencies: + fast-deep-equal: 
3.1.3 + fast-json-stable-stringify: 2.1.0 + json-schema-traverse: 0.4.1 + uri-js: 4.4.1 + dev: true + + /ansi-escapes@4.3.2: + resolution: {integrity: sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==} + engines: {node: '>=8'} + dependencies: + type-fest: 0.21.3 + dev: true + + /ansi-regex@5.0.1: + resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} + engines: {node: '>=8'} + dev: true + + /ansi-styles@3.2.1: + resolution: {integrity: sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==} + engines: {node: '>=4'} + dependencies: + color-convert: 1.9.3 + dev: true + + /ansi-styles@4.3.0: + resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} + engines: {node: '>=8'} + dependencies: + color-convert: 2.0.1 + dev: true + + /ansi-styles@5.2.0: + resolution: {integrity: sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==} + engines: {node: '>=10'} + dev: true + + /any-promise@1.3.0: + resolution: {integrity: sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==} + dev: true + + /anymatch@3.1.2: + resolution: {integrity: sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==} + engines: {node: '>= 8'} + dependencies: + normalize-path: 3.0.0 + picomatch: 2.3.1 + dev: true + + /arg@4.1.3: + resolution: {integrity: sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==} + dev: true + + /argparse@1.0.10: + resolution: {integrity: sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==} + dependencies: + sprintf-js: 1.0.3 + dev: true + + /argparse@2.0.1: + resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} + dev: true + + /array-includes@3.1.5: + resolution: {integrity: sha512-iSDYZMMyTPkiFasVqfuAQnWAYcvO/SeBSCGKePoEthjp4LEMTe4uLc7b025o4jAZpHhihh8xPo99TNWUWWkGDQ==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.2 + define-properties: 1.1.4 + es-abstract: 1.20.4 + get-intrinsic: 1.1.3 + is-string: 1.0.7 + dev: true + + /array-union@2.1.0: + resolution: {integrity: sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==} + engines: {node: '>=8'} + dev: true + + /array.prototype.flat@1.3.1: + resolution: {integrity: sha512-roTU0KWIOmJ4DRLmwKd19Otg0/mT3qPNt0Qb3GWW8iObuZXxrjB/pzn0R3hqpRSWg4HCwqx+0vwOnWnvlOyeIA==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.2 + define-properties: 1.1.4 + es-abstract: 1.20.4 + es-shim-unscopables: 1.0.0 + dev: true + + /asap@2.0.6: + resolution: {integrity: sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==} + dev: true + + /asn1js@3.0.5: + resolution: {integrity: sha512-FVnvrKJwpt9LP2lAMl8qZswRNm3T4q9CON+bxldk2iwk3FFpuwhx2FfinyitizWHsVYyaY+y5JzDR0rCMV5yTQ==} + engines: {node: '>=12.0.0'} + dependencies: + pvtsutils: 1.3.2 + pvutils: 1.1.3 + tslib: 2.5.0 + dev: true + + /astral-regex@2.0.0: + resolution: {integrity: sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==} + engines: {node: '>=8'} + dev: true + + /asynckit@0.4.0: + resolution: {integrity: 
sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} + + /auto-bind@4.0.0: + resolution: {integrity: sha512-Hdw8qdNiqdJ8LqT0iK0sVzkFbzg6fhnQqqfWhBDxcHZvU75+B+ayzTy8x+k5Ix0Y92XOhOUlx74ps+bA6BeYMQ==} + engines: {node: '>=8'} + dev: true + + /axios@0.27.2: + resolution: {integrity: sha512-t+yRIyySRTp/wua5xEr+z1q60QmLq8ABsS5O9Me1AsE5dfKqgnCFzwiCZZ/cGNd1lq4/7akDWMxdhVlucjmnOQ==} + dependencies: + follow-redirects: 1.15.2 + form-data: 4.0.0 + transitivePeerDependencies: + - debug + dev: false + + /babel-jest@28.1.3(@babel/core@7.19.6): + resolution: {integrity: sha512-epUaPOEWMk3cWX0M/sPvCHHCe9fMFAa/9hXEgKP8nFfNl/jlGkE9ucq9NqkZGXLDduCJYS0UvSlPUwC0S+rH6Q==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + peerDependencies: + '@babel/core': ^7.8.0 + dependencies: + '@babel/core': 7.19.6 + '@jest/transform': 28.1.3 + '@types/babel__core': 7.1.19 + babel-plugin-istanbul: 6.1.1 + babel-preset-jest: 28.1.3(@babel/core@7.19.6) + chalk: 4.1.2 + graceful-fs: 4.2.10 + slash: 3.0.0 + transitivePeerDependencies: + - supports-color + dev: true + + /babel-plugin-istanbul@6.1.1: + resolution: {integrity: sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA==} + engines: {node: '>=8'} + dependencies: + '@babel/helper-plugin-utils': 7.19.0 + '@istanbuljs/load-nyc-config': 1.1.0 + '@istanbuljs/schema': 0.1.3 + istanbul-lib-instrument: 5.2.1 + test-exclude: 6.0.0 + transitivePeerDependencies: + - supports-color + dev: true + + /babel-plugin-jest-hoist@28.1.3: + resolution: {integrity: sha512-Ys3tUKAmfnkRUpPdpa98eYrAR0nV+sSFUZZEGuQ2EbFd1y4SOLtD5QDNHAq+bb9a+bbXvYQC4b+ID/THIMcU6Q==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + dependencies: + '@babel/template': 7.18.10 + '@babel/types': 7.20.0 + '@types/babel__core': 7.1.19 + '@types/babel__traverse': 7.18.2 + dev: true + + /babel-plugin-syntax-trailing-function-commas@7.0.0-beta.0: + resolution: {integrity: sha512-Xj9XuRuz3nTSbaTXWv3itLOcxyF4oPD8douBBmj7U9BBC6nEBYfyOJYQMf/8PJAFotC62UY5dFfIGEPr7WswzQ==} + dev: true + + /babel-preset-current-node-syntax@1.0.1(@babel/core@7.19.6): + resolution: {integrity: sha512-M7LQ0bxarkxQoN+vz5aJPsLBn77n8QgTFmo8WK0/44auK2xlCXrYcUxHFxgU7qW5Yzw/CjmLRK2uJzaCd7LvqQ==} + peerDependencies: + '@babel/core': ^7.0.0 + dependencies: + '@babel/core': 7.19.6 + '@babel/plugin-syntax-async-generators': 7.8.4(@babel/core@7.19.6) + '@babel/plugin-syntax-bigint': 7.8.3(@babel/core@7.19.6) + '@babel/plugin-syntax-class-properties': 7.12.13(@babel/core@7.19.6) + '@babel/plugin-syntax-import-meta': 7.10.4(@babel/core@7.19.6) + '@babel/plugin-syntax-json-strings': 7.8.3(@babel/core@7.19.6) + '@babel/plugin-syntax-logical-assignment-operators': 7.10.4(@babel/core@7.19.6) + '@babel/plugin-syntax-nullish-coalescing-operator': 7.8.3(@babel/core@7.19.6) + '@babel/plugin-syntax-numeric-separator': 7.10.4(@babel/core@7.19.6) + '@babel/plugin-syntax-object-rest-spread': 7.8.3(@babel/core@7.19.6) + '@babel/plugin-syntax-optional-catch-binding': 7.8.3(@babel/core@7.19.6) + '@babel/plugin-syntax-optional-chaining': 7.8.3(@babel/core@7.19.6) + '@babel/plugin-syntax-top-level-await': 7.14.5(@babel/core@7.19.6) + dev: true + + /babel-preset-fbjs@3.4.0(@babel/core@7.19.6): + resolution: {integrity: sha512-9ywCsCvo1ojrw0b+XYk7aFvTH6D9064t0RIL1rtMf3nsa02Xw41MS7sZw216Im35xj/UY0PDBQsa1brUDDF1Ow==} + peerDependencies: + '@babel/core': ^7.0.0 + dependencies: + '@babel/core': 7.19.6 + '@babel/plugin-proposal-class-properties': 7.18.6(@babel/core@7.19.6) 
+ '@babel/plugin-proposal-object-rest-spread': 7.20.7(@babel/core@7.19.6) + '@babel/plugin-syntax-class-properties': 7.12.13(@babel/core@7.19.6) + '@babel/plugin-syntax-flow': 7.18.6(@babel/core@7.19.6) + '@babel/plugin-syntax-jsx': 7.18.6(@babel/core@7.19.6) + '@babel/plugin-syntax-object-rest-spread': 7.8.3(@babel/core@7.19.6) + '@babel/plugin-transform-arrow-functions': 7.20.7(@babel/core@7.19.6) + '@babel/plugin-transform-block-scoped-functions': 7.18.6(@babel/core@7.19.6) + '@babel/plugin-transform-block-scoping': 7.20.15(@babel/core@7.19.6) + '@babel/plugin-transform-classes': 7.20.7(@babel/core@7.19.6) + '@babel/plugin-transform-computed-properties': 7.20.7(@babel/core@7.19.6) + '@babel/plugin-transform-destructuring': 7.20.7(@babel/core@7.19.6) + '@babel/plugin-transform-flow-strip-types': 7.19.0(@babel/core@7.19.6) + '@babel/plugin-transform-for-of': 7.18.8(@babel/core@7.19.6) + '@babel/plugin-transform-function-name': 7.18.9(@babel/core@7.19.6) + '@babel/plugin-transform-literals': 7.18.9(@babel/core@7.19.6) + '@babel/plugin-transform-member-expression-literals': 7.18.6(@babel/core@7.19.6) + '@babel/plugin-transform-modules-commonjs': 7.20.11(@babel/core@7.19.6) + '@babel/plugin-transform-object-super': 7.18.6(@babel/core@7.19.6) + '@babel/plugin-transform-parameters': 7.20.7(@babel/core@7.19.6) + '@babel/plugin-transform-property-literals': 7.18.6(@babel/core@7.19.6) + '@babel/plugin-transform-react-display-name': 7.18.6(@babel/core@7.19.6) + '@babel/plugin-transform-react-jsx': 7.20.13(@babel/core@7.19.6) + '@babel/plugin-transform-shorthand-properties': 7.18.6(@babel/core@7.19.6) + '@babel/plugin-transform-spread': 7.20.7(@babel/core@7.19.6) + '@babel/plugin-transform-template-literals': 7.18.9(@babel/core@7.19.6) + babel-plugin-syntax-trailing-function-commas: 7.0.0-beta.0 + transitivePeerDependencies: + - supports-color + dev: true + + /babel-preset-jest@28.1.3(@babel/core@7.19.6): + resolution: {integrity: sha512-L+fupJvlWAHbQfn74coNX3zf60LXMJsezNvvx8eIh7iOR1luJ1poxYgQk1F8PYtNq/6QODDHCqsSnTFSWC491A==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + peerDependencies: + '@babel/core': ^7.0.0 + dependencies: + '@babel/core': 7.19.6 + babel-plugin-jest-hoist: 28.1.3 + babel-preset-current-node-syntax: 1.0.1(@babel/core@7.19.6) + dev: true + + /balanced-match@1.0.2: + resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} + dev: true + + /base64-js@1.5.1: + resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==} + dev: true + + /binary-extensions@2.2.0: + resolution: {integrity: sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==} + engines: {node: '>=8'} + dev: true + + /bl@4.1.0: + resolution: {integrity: sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==} + dependencies: + buffer: 5.7.1 + inherits: 2.0.4 + readable-stream: 3.6.0 + dev: true + + /brace-expansion@1.1.11: + resolution: {integrity: sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==} + dependencies: + balanced-match: 1.0.2 + concat-map: 0.0.1 + dev: true + + /brace-expansion@2.0.1: + resolution: {integrity: sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==} + dependencies: + balanced-match: 1.0.2 + dev: true + + /braces@3.0.2: + resolution: {integrity: 
sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==} + engines: {node: '>=8'} + dependencies: + fill-range: 7.0.1 + dev: true + + /browserslist@4.21.4: + resolution: {integrity: sha512-CBHJJdDmgjl3daYjN5Cp5kbTf1mUhZoS+beLklHIvkOWscs83YAhLlF3Wsh/lciQYAcbBJgTOD44VtG31ZM4Hw==} + engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} + hasBin: true + dependencies: + caniuse-lite: 1.0.30001429 + electron-to-chromium: 1.4.284 + node-releases: 2.0.6 + update-browserslist-db: 1.0.10(browserslist@4.21.4) + dev: true + + /bs-logger@0.2.6: + resolution: {integrity: sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog==} + engines: {node: '>= 6'} + dependencies: + fast-json-stable-stringify: 2.1.0 + dev: true + + /bser@2.1.1: + resolution: {integrity: sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==} + dependencies: + node-int64: 0.4.0 + dev: true + + /buffer-equal-constant-time@1.0.1: + resolution: {integrity: sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==} + dev: true + + /buffer-from@1.1.2: + resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==} + dev: true + + /buffer@5.7.1: + resolution: {integrity: sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==} + dependencies: + base64-js: 1.5.1 + ieee754: 1.2.1 + dev: true + + /bundle-require@3.1.2(esbuild@0.15.13): + resolution: {integrity: sha512-Of6l6JBAxiyQ5axFxUM6dYeP/W7X2Sozeo/4EYB9sJhL+dqL7TKjg+shwxp6jlu/6ZSERfsYtIpSJ1/x3XkAEA==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + peerDependencies: + esbuild: '>=0.13' + dependencies: + esbuild: 0.15.13 + load-tsconfig: 0.2.3 + dev: true + + /busboy@1.6.0: + resolution: {integrity: sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA==} + engines: {node: '>=10.16.0'} + dependencies: + streamsearch: 1.1.0 + dev: true + + /cac@6.7.14: + resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} + engines: {node: '>=8'} + dev: true + + /call-bind@1.0.2: + resolution: {integrity: sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==} + dependencies: + function-bind: 1.1.1 + get-intrinsic: 1.1.3 + dev: true + + /call-me-maybe@1.0.2: + resolution: {integrity: sha512-HpX65o1Hnr9HH25ojC1YGs7HCQLq0GCOibSaWER0eNpgJ/Z1MZv2mTc7+xh6WOPxbRVcmgbv4hGU+uSQ/2xFZQ==} + dev: true + + /callsites@3.1.0: + resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==} + engines: {node: '>=6'} + dev: true + + /camel-case@4.1.2: + resolution: {integrity: sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw==} + dependencies: + pascal-case: 3.1.2 + tslib: 2.5.0 + dev: true + + /camelcase@5.3.1: + resolution: {integrity: sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==} + engines: {node: '>=6'} + dev: true + + /camelcase@6.3.0: + resolution: {integrity: sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==} + engines: {node: '>=10'} + dev: true + + /caniuse-lite@1.0.30001429: + resolution: {integrity: sha512-511ThLu1hF+5RRRt0zYCf2U2yRr9GPF6m5y90SBCWsvSoYoW7yAGlv/elyPaNfvGCkp6kj/KFZWU0BMA69Prsg==} 
+ dev: true + + /capital-case@1.0.4: + resolution: {integrity: sha512-ds37W8CytHgwnhGGTi88pcPyR15qoNkOpYwmMMfnWqqWgESapLqvDx6huFjQ5vqWSn2Z06173XNA7LtMOeUh1A==} + dependencies: + no-case: 3.0.4 + tslib: 2.5.0 + upper-case-first: 2.0.2 + dev: true + + /chalk@2.4.2: + resolution: {integrity: sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==} + engines: {node: '>=4'} + dependencies: + ansi-styles: 3.2.1 + escape-string-regexp: 1.0.5 + supports-color: 5.5.0 + dev: true + + /chalk@4.1.2: + resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} + engines: {node: '>=10'} + dependencies: + ansi-styles: 4.3.0 + supports-color: 7.2.0 + dev: true + + /change-case-all@1.0.14: + resolution: {integrity: sha512-CWVm2uT7dmSHdO/z1CXT/n47mWonyypzBbuCy5tN7uMg22BsfkhwT6oHmFCAk+gL1LOOxhdbB9SZz3J1KTY3gA==} + dependencies: + change-case: 4.1.2 + is-lower-case: 2.0.2 + is-upper-case: 2.0.2 + lower-case: 2.0.2 + lower-case-first: 2.0.2 + sponge-case: 1.0.1 + swap-case: 2.0.2 + title-case: 3.0.3 + upper-case: 2.0.2 + upper-case-first: 2.0.2 + dev: true + + /change-case-all@1.0.15: + resolution: {integrity: sha512-3+GIFhk3sNuvFAJKU46o26OdzudQlPNBCu1ZQi3cMeMHhty1bhDxu2WrEilVNYaGvqUtR1VSigFcJOiS13dRhQ==} + dependencies: + change-case: 4.1.2 + is-lower-case: 2.0.2 + is-upper-case: 2.0.2 + lower-case: 2.0.2 + lower-case-first: 2.0.2 + sponge-case: 1.0.1 + swap-case: 2.0.2 + title-case: 3.0.3 + upper-case: 2.0.2 + upper-case-first: 2.0.2 + dev: true + + /change-case@4.1.2: + resolution: {integrity: sha512-bSxY2ws9OtviILG1EiY5K7NNxkqg/JnRnFxLtKQ96JaviiIxi7djMrSd0ECT9AC+lttClmYwKw53BWpOMblo7A==} + dependencies: + camel-case: 4.1.2 + capital-case: 1.0.4 + constant-case: 3.0.4 + dot-case: 3.0.4 + header-case: 2.0.4 + no-case: 3.0.4 + param-case: 3.0.4 + pascal-case: 3.1.2 + path-case: 3.0.4 + sentence-case: 3.0.4 + snake-case: 3.0.4 + tslib: 2.5.0 + dev: true + + /char-regex@1.0.2: + resolution: {integrity: sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==} + engines: {node: '>=10'} + dev: true + + /chardet@0.7.0: + resolution: {integrity: sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==} + dev: true + + /chokidar@3.5.3: + resolution: {integrity: sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==} + engines: {node: '>= 8.10.0'} + dependencies: + anymatch: 3.1.2 + braces: 3.0.2 + glob-parent: 5.1.2 + is-binary-path: 2.1.0 + is-glob: 4.0.3 + normalize-path: 3.0.0 + readdirp: 3.6.0 + optionalDependencies: + fsevents: 2.3.2 + dev: true + + /chrome-trace-event@1.0.3: + resolution: {integrity: sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg==} + engines: {node: '>=6.0'} + dev: true + + /ci-info@3.5.0: + resolution: {integrity: sha512-yH4RezKOGlOhxkmhbeNuC4eYZKAUsEaGtBuBzDDP1eFUKiccDWzBABxBfOx31IDwDIXMTxWuwAxUGModvkbuVw==} + dev: true + + /cjs-module-lexer@1.2.2: + resolution: {integrity: sha512-cOU9usZw8/dXIXKtwa8pM0OTJQuJkxMN6w30csNRUerHfeQ5R6U3kkU/FtJeIf3M202OHfY2U8ccInBG7/xogA==} + dev: true + + /clean-stack@2.2.0: + resolution: {integrity: sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==} + engines: {node: '>=6'} + dev: true + + /cli-cursor@3.1.0: + resolution: {integrity: sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==} + engines: {node: 
'>=8'} + dependencies: + restore-cursor: 3.1.0 + dev: true + + /cli-spinners@2.7.0: + resolution: {integrity: sha512-qu3pN8Y3qHNgE2AFweciB1IfMnmZ/fsNTEE+NOFjmGB2F/7rLhnhzppvpCnN4FovtP26k8lHyy9ptEbNwWFLzw==} + engines: {node: '>=6'} + dev: true + + /cli-truncate@2.1.0: + resolution: {integrity: sha512-n8fOixwDD6b/ObinzTrp1ZKFzbgvKZvuz/TvejnLn1aQfC6r52XEx85FmuC+3HI+JM7coBRXUvNqEU2PHVrHpg==} + engines: {node: '>=8'} + dependencies: + slice-ansi: 3.0.0 + string-width: 4.2.3 + dev: true + + /cli-width@3.0.0: + resolution: {integrity: sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw==} + engines: {node: '>= 10'} + dev: true + + /cliui@6.0.0: + resolution: {integrity: sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==} + dependencies: + string-width: 4.2.3 + strip-ansi: 6.0.1 + wrap-ansi: 6.2.0 + dev: true + + /cliui@8.0.1: + resolution: {integrity: sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==} + engines: {node: '>=12'} + dependencies: + string-width: 4.2.3 + strip-ansi: 6.0.1 + wrap-ansi: 7.0.0 + dev: true + + /clone@1.0.4: + resolution: {integrity: sha512-JQHZ2QMW6l3aH/j6xCqQThY/9OH4D/9ls34cgkUBiEeocRTU04tHfKPBsUK1PqZCUQM7GiA0IIXJSuXHI64Kbg==} + engines: {node: '>=0.8'} + dev: true + + /co@4.6.0: + resolution: {integrity: sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==} + engines: {iojs: '>= 1.0.0', node: '>= 0.12.0'} + dev: true + + /collect-v8-coverage@1.0.1: + resolution: {integrity: sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg==} + dev: true + + /color-convert@1.9.3: + resolution: {integrity: sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==} + dependencies: + color-name: 1.1.3 + dev: true + + /color-convert@2.0.1: + resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} + engines: {node: '>=7.0.0'} + dependencies: + color-name: 1.1.4 + dev: true + + /color-name@1.1.3: + resolution: {integrity: sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==} + dev: true + + /color-name@1.1.4: + resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} + dev: true + + /colorette@2.0.19: + resolution: {integrity: sha512-3tlv/dIP7FWvj3BsbHrGLJ6l/oKh1O3TcgBqMn+yyCagOxc23fyzDS6HypQbgxWbkpDnf52p1LuR4eWDQ/K9WQ==} + dev: true + + /combined-stream@1.0.8: + resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==} + engines: {node: '>= 0.8'} + dependencies: + delayed-stream: 1.0.0 + + /commander@2.20.3: + resolution: {integrity: sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==} + dev: true + + /commander@4.1.1: + resolution: {integrity: sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==} + engines: {node: '>= 6'} + dev: true + + /commander@9.4.1: + resolution: {integrity: sha512-5EEkTNyHNGFPD2H+c/dXXfQZYa/scCKasxWcXJaWnNJ99pnQN9Vnmqow+p+PlFPE63Q6mThaZws1T+HxfpgtPw==} + engines: {node: ^12.20.0 || >=14} + dev: true + + /common-tags@1.8.2: + resolution: {integrity: sha512-gk/Z852D2Wtb//0I+kRFNKKE9dIIVirjoqPoA1wJU+XePVXZfGeBpk45+A1rKO4Q43prqWBNY/MiIeRLbPWUaA==} + engines: {node: '>=4.0.0'} + dev: true + + 
/concat-map@0.0.1: + resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} + dev: true + + /confusing-browser-globals@1.0.11: + resolution: {integrity: sha512-JsPKdmh8ZkmnHxDk55FZ1TqVLvEQTvoByJZRN9jzI0UjxK/QgAmsphz7PGtqgPieQZ/CQcHWXCR7ATDNhGe+YA==} + dev: true + + /constant-case@3.0.4: + resolution: {integrity: sha512-I2hSBi7Vvs7BEuJDr5dDHfzb/Ruj3FyvFyh7KLilAjNQw3Be+xgqUBA2W6scVEcL0hL1dwPRtIqEPVUCKkSsyQ==} + dependencies: + no-case: 3.0.4 + tslib: 2.5.0 + upper-case: 2.0.2 + dev: true + + /convert-source-map@1.9.0: + resolution: {integrity: sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==} + dev: true + + /cosmiconfig-typescript-loader@4.3.0(@types/node@18.6.2)(cosmiconfig@7.1.0)(ts-node@10.9.1)(typescript@4.8.2): + resolution: {integrity: sha512-NTxV1MFfZDLPiBMjxbHRwSh5LaLcPMwNdCutmnHJCKoVnlvldPWlllonKwrsRJ5pYZBIBGRWWU2tfvzxgeSW5Q==} + engines: {node: '>=12', npm: '>=6'} + peerDependencies: + '@types/node': '*' + cosmiconfig: '>=7' + ts-node: '>=10' + typescript: '>=3' + dependencies: + '@types/node': 18.6.2 + cosmiconfig: 7.1.0 + ts-node: 10.9.1(@types/node@18.6.2)(typescript@4.8.2) + typescript: 4.8.2 + dev: true + + /cosmiconfig@7.1.0: + resolution: {integrity: sha512-AdmX6xUzdNASswsFtmwSt7Vj8po9IuqXm0UXz7QKPuEUmPB4XyjGfaAr2PSuELMwkRMVH1EpIkX5bTZGRB3eCA==} + engines: {node: '>=10'} + dependencies: + '@types/parse-json': 4.0.0 + import-fresh: 3.3.0 + parse-json: 5.2.0 + path-type: 4.0.0 + yaml: 1.10.2 + dev: true + + /cosmiconfig@8.0.0: + resolution: {integrity: sha512-da1EafcpH6b/TD8vDRaWV7xFINlHlF6zKsGwS1TsuVJTZRkquaS5HTMq7uq6h31619QjbsYl21gVDOm32KM1vQ==} + engines: {node: '>=14'} + dependencies: + import-fresh: 3.3.0 + js-yaml: 4.1.0 + parse-json: 5.2.0 + path-type: 4.0.0 + dev: true + + /create-require@1.1.1: + resolution: {integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==} + dev: true + + /cross-fetch@3.1.5: + resolution: {integrity: sha512-lvb1SBsI0Z7GDwmuid+mU3kWVBwTVUbe7S0H52yaaAdQOXq2YktTCZdlAcNKFzE6QtRz0snpw9bNiPeOIkkQvw==} + dependencies: + node-fetch: 2.6.7 + transitivePeerDependencies: + - encoding + dev: true + + /cross-spawn@7.0.3: + resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==} + engines: {node: '>= 8'} + dependencies: + path-key: 3.1.1 + shebang-command: 2.0.0 + which: 2.0.2 + dev: true + + /dataloader@2.1.0: + resolution: {integrity: sha512-qTcEYLen3r7ojZNgVUaRggOI+KM7jrKxXeSHhogh/TWxYMeONEMqY+hmkobiYQozsGIyg9OYVzO4ZIfoB4I0pQ==} + dev: true + + /debounce@1.2.1: + resolution: {integrity: sha512-XRRe6Glud4rd/ZGQfiV1ruXSfbvfJedlV9Y6zOlP+2K04vBYiJEte6stfFkCP03aMnY5tsipamumUjL14fofug==} + dev: true + + /debug@2.6.9: + resolution: {integrity: sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + dependencies: + ms: 2.0.0 + dev: true + + /debug@3.2.7: + resolution: {integrity: sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + dependencies: + ms: 2.1.3 + dev: true + + /debug@4.3.4: + resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==} + engines: {node: '>=6.0'} + 
peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + dependencies: + ms: 2.1.2 + dev: true + + /decamelize@1.2.0: + resolution: {integrity: sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==} + engines: {node: '>=0.10.0'} + dev: true + + /dedent@0.7.0: + resolution: {integrity: sha512-Q6fKUPqnAHAyhiUgFU7BUzLiv0kd8saH9al7tnu5Q/okj6dnupxyTgFIBjVzJATdfIAm9NAsvXNzjaKa+bxVyA==} + dev: true + + /deep-is@0.1.4: + resolution: {integrity: sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==} + dev: true + + /deepmerge@4.2.2: + resolution: {integrity: sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg==} + engines: {node: '>=0.10.0'} + dev: true + + /defaults@1.0.4: + resolution: {integrity: sha512-eFuaLoy/Rxalv2kr+lqMlUnrDWV+3j4pljOIJgLIhI058IQfWJ7vXhyEIHu+HtC738klGALYxOKDO0bQP3tg8A==} + dependencies: + clone: 1.0.4 + dev: true + + /define-properties@1.1.4: + resolution: {integrity: sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==} + engines: {node: '>= 0.4'} + dependencies: + has-property-descriptors: 1.0.0 + object-keys: 1.1.1 + dev: true + + /delayed-stream@1.0.0: + resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} + engines: {node: '>=0.4.0'} + + /dependency-graph@0.11.0: + resolution: {integrity: sha512-JeMq7fEshyepOWDfcfHK06N3MhyPhz++vtqWhMT5O9A3K42rdsEDpfdVqjaqaAhsw6a+ZqeDvQVtD0hFHQWrzg==} + engines: {node: '>= 0.6.0'} + dev: true + + /detect-indent@6.1.0: + resolution: {integrity: sha512-reYkTUJAZb9gUuZ2RvVCNhVHdg62RHnJ7WJl8ftMi4diZ6NWlciOzQN88pUhSELEwflJht4oQDv0F0BMlwaYtA==} + engines: {node: '>=8'} + dev: true + + /detect-newline@3.1.0: + resolution: {integrity: sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==} + engines: {node: '>=8'} + dev: true + + /diff-sequences@28.1.1: + resolution: {integrity: sha512-FU0iFaH/E23a+a718l8Qa/19bF9p06kgE0KipMOMadwa3SjnaElKzPaUC0vnibs6/B/9ni97s61mcejk8W1fQw==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + dev: true + + /diff@4.0.2: + resolution: {integrity: sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==} + engines: {node: '>=0.3.1'} + dev: true + + /dir-glob@3.0.1: + resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==} + engines: {node: '>=8'} + dependencies: + path-type: 4.0.0 + dev: true + + /doctrine@2.1.0: + resolution: {integrity: sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==} + engines: {node: '>=0.10.0'} + dependencies: + esutils: 2.0.3 + dev: true + + /doctrine@3.0.0: + resolution: {integrity: sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==} + engines: {node: '>=6.0.0'} + dependencies: + esutils: 2.0.3 + dev: true + + /dot-case@3.0.4: + resolution: {integrity: sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w==} + dependencies: + no-case: 3.0.4 + tslib: 2.5.0 + dev: true + + /dotenv@16.0.2: + resolution: {integrity: sha512-JvpYKUmzQhYoIFgK2MOnF3bciIZoItIIoryihy0rIA+H4Jy0FmgyKYAHCTN98P5ybGSJcIFbh6QKeJdtZd1qhA==} + engines: {node: '>=12'} + dev: true + + /dset@3.1.2: + resolution: {integrity: 
sha512-g/M9sqy3oHe477Ar4voQxWtaPIFw1jTdKZuomOjhCcBx9nHUNn0pu6NopuFFrTh/TRZIKEj+76vLWFu9BNKk+Q==} + engines: {node: '>=4'} + dev: true + + /ecdsa-sig-formatter@1.0.11: + resolution: {integrity: sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==} + dependencies: + safe-buffer: 5.2.1 + dev: true + + /electron-to-chromium@1.4.284: + resolution: {integrity: sha512-M8WEXFuKXMYMVr45fo8mq0wUrrJHheiKZf6BArTKk9ZBYCKJEOU5H8cdWgDT+qCVZf7Na4lVUaZsA+h6uA9+PA==} + dev: true + + /emittery@0.10.2: + resolution: {integrity: sha512-aITqOwnLanpHLNXZJENbOgjUBeHocD+xsSJmNrjovKBW5HbSpW3d1pEls7GFQPUWXiwG9+0P4GtHfEqC/4M0Iw==} + engines: {node: '>=12'} + dev: true + + /emoji-regex@8.0.0: + resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} + dev: true + + /enhanced-resolve@5.10.0: + resolution: {integrity: sha512-T0yTFjdpldGY8PmuXXR0PyQ1ufZpEGiHVrp7zHKB7jdR4qlmZHhONVM5AQOAWXuF/w3dnHbEQVrNptJgt7F+cQ==} + engines: {node: '>=10.13.0'} + dependencies: + graceful-fs: 4.2.10 + tapable: 2.2.1 + dev: true + + /enhanced-resolve@5.13.0: + resolution: {integrity: sha512-eyV8f0y1+bzyfh8xAwW/WTSZpLbjhqc4ne9eGSH4Zo2ejdyiNG9pU6mf9DG8a7+Auk6MFTlNOT4Y2y/9k8GKVg==} + engines: {node: '>=10.13.0'} + dependencies: + graceful-fs: 4.2.10 + tapable: 2.2.1 + dev: true + + /error-ex@1.3.2: + resolution: {integrity: sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==} + dependencies: + is-arrayish: 0.2.1 + dev: true + + /es-abstract@1.20.4: + resolution: {integrity: sha512-0UtvRN79eMe2L+UNEF1BwRe364sj/DXhQ/k5FmivgoSdpM90b8Jc0mDzKMGo7QS0BVbOP/bTwBKNnDc9rNzaPA==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.2 + es-to-primitive: 1.2.1 + function-bind: 1.1.1 + function.prototype.name: 1.1.5 + get-intrinsic: 1.1.3 + get-symbol-description: 1.0.0 + has: 1.0.3 + has-property-descriptors: 1.0.0 + has-symbols: 1.0.3 + internal-slot: 1.0.3 + is-callable: 1.2.7 + is-negative-zero: 2.0.2 + is-regex: 1.1.4 + is-shared-array-buffer: 1.0.2 + is-string: 1.0.7 + is-weakref: 1.0.2 + object-inspect: 1.12.2 + object-keys: 1.1.1 + object.assign: 4.1.4 + regexp.prototype.flags: 1.4.3 + safe-regex-test: 1.0.0 + string.prototype.trimend: 1.0.5 + string.prototype.trimstart: 1.0.5 + unbox-primitive: 1.0.2 + dev: true + + /es-module-lexer@1.2.1: + resolution: {integrity: sha512-9978wrXM50Y4rTMmW5kXIC09ZdXQZqkE4mxhwkd8VbzsGkXGPgV4zWuqQJgCEzYngdo2dYDa0l8xhX4fkSwJSg==} + dev: true + + /es-shim-unscopables@1.0.0: + resolution: {integrity: sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w==} + dependencies: + has: 1.0.3 + dev: true + + /es-to-primitive@1.2.1: + resolution: {integrity: sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==} + engines: {node: '>= 0.4'} + dependencies: + is-callable: 1.2.7 + is-date-object: 1.0.5 + is-symbol: 1.0.4 + dev: true + + /esbuild-android-64@0.15.13: + resolution: {integrity: sha512-yRorukXBlokwTip+Sy4MYskLhJsO0Kn0/Fj43s1krVblfwP+hMD37a4Wmg139GEsMLl+vh8WXp2mq/cTA9J97g==} + engines: {node: '>=12'} + cpu: [x64] + os: [android] + requiresBuild: true + dev: true + optional: true + + /esbuild-android-arm64@0.15.13: + resolution: {integrity: sha512-TKzyymLD6PiVeyYa4c5wdPw87BeAiTXNtK6amWUcXZxkV51gOk5u5qzmDaYSwiWeecSNHamFsaFjLoi32QR5/w==} + engines: {node: '>=12'} + cpu: [arm64] + os: [android] + requiresBuild: true + dev: true + optional: true + + /esbuild-darwin-64@0.15.13: + 
resolution: {integrity: sha512-WAx7c2DaOS6CrRcoYCgXgkXDliLnFv3pQLV6GeW1YcGEZq2Gnl8s9Pg7ahValZkpOa0iE/ojRVQ87sbUhF1Cbg==} + engines: {node: '>=12'} + cpu: [x64] + os: [darwin] + requiresBuild: true + dev: true + optional: true + + /esbuild-darwin-arm64@0.15.13: + resolution: {integrity: sha512-U6jFsPfSSxC3V1CLiQqwvDuj3GGrtQNB3P3nNC3+q99EKf94UGpsG9l4CQ83zBs1NHrk1rtCSYT0+KfK5LsD8A==} + engines: {node: '>=12'} + cpu: [arm64] + os: [darwin] + requiresBuild: true + dev: true + optional: true + + /esbuild-freebsd-64@0.15.13: + resolution: {integrity: sha512-whItJgDiOXaDG/idy75qqevIpZjnReZkMGCgQaBWZuKHoElDJC1rh7MpoUgupMcdfOd+PgdEwNQW9DAE6i8wyA==} + engines: {node: '>=12'} + cpu: [x64] + os: [freebsd] + requiresBuild: true + dev: true + optional: true + + /esbuild-freebsd-arm64@0.15.13: + resolution: {integrity: sha512-6pCSWt8mLUbPtygv7cufV0sZLeylaMwS5Fznj6Rsx9G2AJJsAjQ9ifA+0rQEIg7DwJmi9it+WjzNTEAzzdoM3Q==} + engines: {node: '>=12'} + cpu: [arm64] + os: [freebsd] + requiresBuild: true + dev: true + optional: true + + /esbuild-linux-32@0.15.13: + resolution: {integrity: sha512-VbZdWOEdrJiYApm2kkxoTOgsoCO1krBZ3quHdYk3g3ivWaMwNIVPIfEE0f0XQQ0u5pJtBsnk2/7OPiCFIPOe/w==} + engines: {node: '>=12'} + cpu: [ia32] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /esbuild-linux-64@0.15.13: + resolution: {integrity: sha512-rXmnArVNio6yANSqDQlIO4WiP+Cv7+9EuAHNnag7rByAqFVuRusLbGi2697A5dFPNXoO//IiogVwi3AdcfPC6A==} + engines: {node: '>=12'} + cpu: [x64] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /esbuild-linux-arm64@0.15.13: + resolution: {integrity: sha512-alEMGU4Z+d17U7KQQw2IV8tQycO6T+rOrgW8OS22Ua25x6kHxoG6Ngry6Aq6uranC+pNWNMB6aHFPh7aTQdORQ==} + engines: {node: '>=12'} + cpu: [arm64] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /esbuild-linux-arm@0.15.13: + resolution: {integrity: sha512-Ac6LpfmJO8WhCMQmO253xX2IU2B3wPDbl4IvR0hnqcPrdfCaUa2j/lLMGTjmQ4W5JsJIdHEdW12dG8lFS0MbxQ==} + engines: {node: '>=12'} + cpu: [arm] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /esbuild-linux-mips64le@0.15.13: + resolution: {integrity: sha512-47PgmyYEu+yN5rD/MbwS6DxP2FSGPo4Uxg5LwIdxTiyGC2XKwHhHyW7YYEDlSuXLQXEdTO7mYe8zQ74czP7W8A==} + engines: {node: '>=12'} + cpu: [mips64el] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /esbuild-linux-ppc64le@0.15.13: + resolution: {integrity: sha512-z6n28h2+PC1Ayle9DjKoBRcx/4cxHoOa2e689e2aDJSaKug3jXcQw7mM+GLg+9ydYoNzj8QxNL8ihOv/OnezhA==} + engines: {node: '>=12'} + cpu: [ppc64] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /esbuild-linux-riscv64@0.15.13: + resolution: {integrity: sha512-+Lu4zuuXuQhgLUGyZloWCqTslcCAjMZH1k3Xc9MSEJEpEFdpsSU0sRDXAnk18FKOfEjhu4YMGaykx9xjtpA6ow==} + engines: {node: '>=12'} + cpu: [riscv64] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /esbuild-linux-s390x@0.15.13: + resolution: {integrity: sha512-BMeXRljruf7J0TMxD5CIXS65y7puiZkAh+s4XFV9qy16SxOuMhxhVIXYLnbdfLrsYGFzx7U9mcdpFWkkvy/Uag==} + engines: {node: '>=12'} + cpu: [s390x] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /esbuild-netbsd-64@0.15.13: + resolution: {integrity: sha512-EHj9QZOTel581JPj7UO3xYbltFTYnHy+SIqJVq6yd3KkCrsHRbapiPb0Lx3EOOtybBEE9EyqbmfW1NlSDsSzvQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [netbsd] + requiresBuild: true + dev: true + optional: true + + /esbuild-openbsd-64@0.15.13: + resolution: {integrity: sha512-nkuDlIjF/sfUhfx8SKq0+U+Fgx5K9JcPq1mUodnxI0x4kBdCv46rOGWbuJ6eof2n3wdoCLccOoJAbg9ba/bT2w==} + engines: 
{node: '>=12'} + cpu: [x64] + os: [openbsd] + requiresBuild: true + dev: true + optional: true + + /esbuild-sunos-64@0.15.13: + resolution: {integrity: sha512-jVeu2GfxZQ++6lRdY43CS0Tm/r4WuQQ0Pdsrxbw+aOrHQPHV0+LNOLnvbN28M7BSUGnJnHkHm2HozGgNGyeIRw==} + engines: {node: '>=12'} + cpu: [x64] + os: [sunos] + requiresBuild: true + dev: true + optional: true + + /esbuild-windows-32@0.15.13: + resolution: {integrity: sha512-XoF2iBf0wnqo16SDq+aDGi/+QbaLFpkiRarPVssMh9KYbFNCqPLlGAWwDvxEVz+ywX6Si37J2AKm+AXq1kC0JA==} + engines: {node: '>=12'} + cpu: [ia32] + os: [win32] + requiresBuild: true + dev: true + optional: true + + /esbuild-windows-64@0.15.13: + resolution: {integrity: sha512-Et6htEfGycjDrtqb2ng6nT+baesZPYQIW+HUEHK4D1ncggNrDNk3yoboYQ5KtiVrw/JaDMNttz8rrPubV/fvPQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [win32] + requiresBuild: true + dev: true + optional: true + + /esbuild-windows-arm64@0.15.13: + resolution: {integrity: sha512-3bv7tqntThQC9SWLRouMDmZnlOukBhOCTlkzNqzGCmrkCJI7io5LLjwJBOVY6kOUlIvdxbooNZwjtBvj+7uuVg==} + engines: {node: '>=12'} + cpu: [arm64] + os: [win32] + requiresBuild: true + dev: true + optional: true + + /esbuild@0.15.13: + resolution: {integrity: sha512-Cu3SC84oyzzhrK/YyN4iEVy2jZu5t2fz66HEOShHURcjSkOSAVL8C/gfUT+lDJxkVHpg8GZ10DD0rMHRPqMFaQ==} + engines: {node: '>=12'} + hasBin: true + requiresBuild: true + optionalDependencies: + '@esbuild/android-arm': 0.15.13 + '@esbuild/linux-loong64': 0.15.13 + esbuild-android-64: 0.15.13 + esbuild-android-arm64: 0.15.13 + esbuild-darwin-64: 0.15.13 + esbuild-darwin-arm64: 0.15.13 + esbuild-freebsd-64: 0.15.13 + esbuild-freebsd-arm64: 0.15.13 + esbuild-linux-32: 0.15.13 + esbuild-linux-64: 0.15.13 + esbuild-linux-arm: 0.15.13 + esbuild-linux-arm64: 0.15.13 + esbuild-linux-mips64le: 0.15.13 + esbuild-linux-ppc64le: 0.15.13 + esbuild-linux-riscv64: 0.15.13 + esbuild-linux-s390x: 0.15.13 + esbuild-netbsd-64: 0.15.13 + esbuild-openbsd-64: 0.15.13 + esbuild-sunos-64: 0.15.13 + esbuild-windows-32: 0.15.13 + esbuild-windows-64: 0.15.13 + esbuild-windows-arm64: 0.15.13 + dev: true + + /escalade@3.1.1: + resolution: {integrity: sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==} + engines: {node: '>=6'} + dev: true + + /escape-string-regexp@1.0.5: + resolution: {integrity: sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==} + engines: {node: '>=0.8.0'} + dev: true + + /escape-string-regexp@2.0.0: + resolution: {integrity: sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==} + engines: {node: '>=8'} + dev: true + + /escape-string-regexp@4.0.0: + resolution: {integrity: sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==} + engines: {node: '>=10'} + dev: true + + /eslint-config-airbnb-base@15.0.0(eslint-plugin-import@2.26.0)(eslint@8.23.0): + resolution: {integrity: sha512-xaX3z4ZZIcFLvh2oUNvcX5oEofXda7giYmuplVxoOg5A7EXJMrUyqRgR+mhDhPK8LZ4PttFOBvCYDbX3sUoUig==} + engines: {node: ^10.12.0 || >=12.0.0} + peerDependencies: + eslint: ^7.32.0 || ^8.2.0 + eslint-plugin-import: ^2.25.2 + dependencies: + confusing-browser-globals: 1.0.11 + eslint: 8.23.0 + eslint-plugin-import: 2.26.0(@typescript-eslint/parser@5.36.2)(eslint@8.23.0) + object.assign: 4.1.4 + object.entries: 1.1.5 + semver: 6.3.0 + dev: true + + /eslint-config-airbnb-typescript@17.0.0(@typescript-eslint/eslint-plugin@5.36.2)(@typescript-eslint/parser@5.36.2)(eslint-plugin-import@2.26.0)(eslint@8.23.0): + 
resolution: {integrity: sha512-elNiuzD0kPAPTXjFWg+lE24nMdHMtuxgYoD30OyMD6yrW1AhFZPAg27VX7d3tzOErw+dgJTNWfRSDqEcXb4V0g==} + peerDependencies: + '@typescript-eslint/eslint-plugin': ^5.13.0 + '@typescript-eslint/parser': ^5.0.0 + eslint: ^7.32.0 || ^8.2.0 + eslint-plugin-import: ^2.25.3 + dependencies: + '@typescript-eslint/eslint-plugin': 5.36.2(@typescript-eslint/parser@5.36.2)(eslint@8.23.0)(typescript@4.8.2) + '@typescript-eslint/parser': 5.36.2(eslint@8.23.0)(typescript@4.8.2) + eslint: 8.23.0 + eslint-config-airbnb-base: 15.0.0(eslint-plugin-import@2.26.0)(eslint@8.23.0) + eslint-plugin-import: 2.26.0(@typescript-eslint/parser@5.36.2)(eslint@8.23.0) + dev: true + + /eslint-config-prettier@8.5.0(eslint@8.23.0): + resolution: {integrity: sha512-obmWKLUNCnhtQRKc+tmnYuQl0pFU1ibYJQ5BGhTVB08bHe9wC8qUeG7c08dj9XX+AuPj1YSGSQIHl1pnDHZR0Q==} + hasBin: true + peerDependencies: + eslint: '>=7.0.0' + dependencies: + eslint: 8.23.0 + dev: true + + /eslint-import-resolver-node@0.3.6: + resolution: {integrity: sha512-0En0w03NRVMn9Uiyn8YRPDKvWjxCWkslUEhGNTdGx15RvPJYQ+lbOlqrlNI2vEAs4pDYK4f/HN2TbDmk5TP0iw==} + dependencies: + debug: 3.2.7 + resolve: 1.22.1 + transitivePeerDependencies: + - supports-color + dev: true + + /eslint-module-utils@2.7.4(@typescript-eslint/parser@5.36.2)(eslint-import-resolver-node@0.3.6)(eslint@8.23.0): + resolution: {integrity: sha512-j4GT+rqzCoRKHwURX7pddtIPGySnX9Si/cgMI5ztrcqOPtk5dDEeZ34CQVPphnqkJytlc97Vuk05Um2mJ3gEQA==} + engines: {node: '>=4'} + peerDependencies: + '@typescript-eslint/parser': '*' + eslint: '*' + eslint-import-resolver-node: '*' + eslint-import-resolver-typescript: '*' + eslint-import-resolver-webpack: '*' + peerDependenciesMeta: + '@typescript-eslint/parser': + optional: true + eslint: + optional: true + eslint-import-resolver-node: + optional: true + eslint-import-resolver-typescript: + optional: true + eslint-import-resolver-webpack: + optional: true + dependencies: + '@typescript-eslint/parser': 5.36.2(eslint@8.23.0)(typescript@4.8.2) + debug: 3.2.7 + eslint: 8.23.0 + eslint-import-resolver-node: 0.3.6 + transitivePeerDependencies: + - supports-color + dev: true + + /eslint-plugin-import@2.26.0(@typescript-eslint/parser@5.36.2)(eslint@8.23.0): + resolution: {integrity: sha512-hYfi3FXaM8WPLf4S1cikh/r4IxnO6zrhZbEGz2b660EJRbuxgpDS5gkCuYgGWg2xxh2rBuIr4Pvhve/7c31koA==} + engines: {node: '>=4'} + peerDependencies: + '@typescript-eslint/parser': '*' + eslint: ^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8 + peerDependenciesMeta: + '@typescript-eslint/parser': + optional: true + dependencies: + '@typescript-eslint/parser': 5.36.2(eslint@8.23.0)(typescript@4.8.2) + array-includes: 3.1.5 + array.prototype.flat: 1.3.1 + debug: 2.6.9 + doctrine: 2.1.0 + eslint: 8.23.0 + eslint-import-resolver-node: 0.3.6 + eslint-module-utils: 2.7.4(@typescript-eslint/parser@5.36.2)(eslint-import-resolver-node@0.3.6)(eslint@8.23.0) + has: 1.0.3 + is-core-module: 2.11.0 + is-glob: 4.0.3 + minimatch: 3.1.2 + object.values: 1.1.5 + resolve: 1.22.1 + tsconfig-paths: 3.14.1 + transitivePeerDependencies: + - eslint-import-resolver-typescript + - eslint-import-resolver-webpack + - supports-color + dev: true + + /eslint-scope@5.1.1: + resolution: {integrity: sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==} + engines: {node: '>=8.0.0'} + dependencies: + esrecurse: 4.3.0 + estraverse: 4.3.0 + dev: true + + /eslint-scope@7.1.1: + resolution: {integrity: sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw==} + 
engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + dependencies: + esrecurse: 4.3.0 + estraverse: 5.3.0 + dev: true + + /eslint-utils@3.0.0(eslint@8.23.0): + resolution: {integrity: sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA==} + engines: {node: ^10.0.0 || ^12.0.0 || >= 14.0.0} + peerDependencies: + eslint: '>=5' + dependencies: + eslint: 8.23.0 + eslint-visitor-keys: 2.1.0 + dev: true + + /eslint-visitor-keys@2.1.0: + resolution: {integrity: sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==} + engines: {node: '>=10'} + dev: true + + /eslint-visitor-keys@3.3.0: + resolution: {integrity: sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + dev: true + + /eslint@8.23.0: + resolution: {integrity: sha512-pBG/XOn0MsJcKcTRLr27S5HpzQo4kLr+HjLQIyK4EiCsijDl/TB+h5uEuJU6bQ8Edvwz1XWOjpaP2qgnXGpTcA==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + hasBin: true + dependencies: + '@eslint/eslintrc': 1.3.3 + '@humanwhocodes/config-array': 0.10.7 + '@humanwhocodes/gitignore-to-minimatch': 1.0.2 + '@humanwhocodes/module-importer': 1.0.1 + ajv: 6.12.6 + chalk: 4.1.2 + cross-spawn: 7.0.3 + debug: 4.3.4 + doctrine: 3.0.0 + escape-string-regexp: 4.0.0 + eslint-scope: 7.1.1 + eslint-utils: 3.0.0(eslint@8.23.0) + eslint-visitor-keys: 3.3.0 + espree: 9.4.0 + esquery: 1.4.0 + esutils: 2.0.3 + fast-deep-equal: 3.1.3 + file-entry-cache: 6.0.1 + find-up: 5.0.0 + functional-red-black-tree: 1.0.1 + glob-parent: 6.0.2 + globals: 13.17.0 + globby: 11.1.0 + grapheme-splitter: 1.0.4 + ignore: 5.2.0 + import-fresh: 3.3.0 + imurmurhash: 0.1.4 + is-glob: 4.0.3 + js-yaml: 4.1.0 + json-stable-stringify-without-jsonify: 1.0.1 + levn: 0.4.1 + lodash.merge: 4.6.2 + minimatch: 3.1.2 + natural-compare: 1.4.0 + optionator: 0.9.1 + regexpp: 3.2.0 + strip-ansi: 6.0.1 + strip-json-comments: 3.1.1 + text-table: 0.2.0 + transitivePeerDependencies: + - supports-color + dev: true + + /espree@9.4.0: + resolution: {integrity: sha512-DQmnRpLj7f6TgN/NYb0MTzJXL+vJF9h3pHy4JhCIs3zwcgez8xmGg3sXHcEO97BrmO2OSvCwMdfdlyl+E9KjOw==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + dependencies: + acorn: 8.8.1 + acorn-jsx: 5.3.2(acorn@8.8.1) + eslint-visitor-keys: 3.3.0 + dev: true + + /esprima@4.0.1: + resolution: {integrity: sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==} + engines: {node: '>=4'} + hasBin: true + dev: true + + /esquery@1.4.0: + resolution: {integrity: sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==} + engines: {node: '>=0.10'} + dependencies: + estraverse: 5.3.0 + dev: true + + /esrecurse@4.3.0: + resolution: {integrity: sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==} + engines: {node: '>=4.0'} + dependencies: + estraverse: 5.3.0 + dev: true + + /estraverse@4.3.0: + resolution: {integrity: sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==} + engines: {node: '>=4.0'} + dev: true + + /estraverse@5.3.0: + resolution: {integrity: sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==} + engines: {node: '>=4.0'} + dev: true + + /esutils@2.0.3: + resolution: {integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==} + engines: {node: '>=0.10.0'} + dev: true + + 
/events@3.3.0: + resolution: {integrity: sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==} + engines: {node: '>=0.8.x'} + dev: true + + /execa@5.1.1: + resolution: {integrity: sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==} + engines: {node: '>=10'} + dependencies: + cross-spawn: 7.0.3 + get-stream: 6.0.1 + human-signals: 2.1.0 + is-stream: 2.0.1 + merge-stream: 2.0.0 + npm-run-path: 4.0.1 + onetime: 5.1.2 + signal-exit: 3.0.7 + strip-final-newline: 2.0.0 + dev: true + + /exit@0.1.2: + resolution: {integrity: sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ==} + engines: {node: '>= 0.8.0'} + dev: true + + /expect@28.1.3: + resolution: {integrity: sha512-eEh0xn8HlsuOBxFgIss+2mX85VAS4Qy3OSkjV7rlBWljtA4oWH37glVGyOZSZvErDT/yBywZdPGwCXuTvSG85g==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + dependencies: + '@jest/expect-utils': 28.1.3 + jest-get-type: 28.0.2 + jest-matcher-utils: 28.1.3 + jest-message-util: 28.1.3 + jest-util: 28.1.3 + dev: true + + /external-editor@3.1.0: + resolution: {integrity: sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==} + engines: {node: '>=4'} + dependencies: + chardet: 0.7.0 + iconv-lite: 0.4.24 + tmp: 0.0.33 + dev: true + + /extract-files@11.0.0: + resolution: {integrity: sha512-FuoE1qtbJ4bBVvv94CC7s0oTnKUGvQs+Rjf1L2SJFfS+HTVVjhPFtehPdQ0JiGPqVNfSSZvL5yzHHQq2Z4WNhQ==} + engines: {node: ^12.20 || >= 14.13} + dev: true + + /extract-files@9.0.0: + resolution: {integrity: sha512-CvdFfHkC95B4bBBk36hcEmvdR2awOdhhVUYH6S/zrVj3477zven/fJMYg7121h4T1xHZC+tetUpubpAhxwI7hQ==} + engines: {node: ^10.17.0 || ^12.0.0 || >= 13.7.0} + dev: true + + /fast-deep-equal@3.1.3: + resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} + dev: true + + /fast-glob@3.2.12: + resolution: {integrity: sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==} + engines: {node: '>=8.6.0'} + dependencies: + '@nodelib/fs.stat': 2.0.5 + '@nodelib/fs.walk': 1.2.8 + glob-parent: 5.1.2 + merge2: 1.4.1 + micromatch: 4.0.5 + dev: true + + /fast-json-stable-stringify@2.1.0: + resolution: {integrity: sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==} + dev: true + + /fast-levenshtein@2.0.6: + resolution: {integrity: sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==} + dev: true + + /fastq@1.13.0: + resolution: {integrity: sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==} + dependencies: + reusify: 1.0.4 + dev: true + + /fb-watchman@2.0.2: + resolution: {integrity: sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA==} + dependencies: + bser: 2.1.1 + dev: true + + /fbjs-css-vars@1.0.2: + resolution: {integrity: sha512-b2XGFAFdWZWg0phtAWLHCk836A1Xann+I+Dgd3Gk64MHKZO44FfoD1KxyvbSh0qZsIoXQGGlVztIY+oitJPpRQ==} + dev: true + + /fbjs@3.0.4: + resolution: {integrity: sha512-ucV0tDODnGV3JCnnkmoszb5lf4bNpzjv80K41wd4k798Etq+UYD0y0TIfalLjZoKgjive6/adkRnszwapiDgBQ==} + dependencies: + cross-fetch: 3.1.5 + fbjs-css-vars: 1.0.2 + loose-envify: 1.4.0 + object-assign: 4.1.1 + promise: 7.3.1 + setimmediate: 1.0.5 + ua-parser-js: 0.7.33 + transitivePeerDependencies: + - encoding + dev: true + + /figures@3.2.0: + resolution: {integrity: 
sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==} + engines: {node: '>=8'} + dependencies: + escape-string-regexp: 1.0.5 + dev: true + + /file-entry-cache@6.0.1: + resolution: {integrity: sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==} + engines: {node: ^10.12.0 || >=12.0.0} + dependencies: + flat-cache: 3.0.4 + dev: true + + /fill-range@7.0.1: + resolution: {integrity: sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==} + engines: {node: '>=8'} + dependencies: + to-regex-range: 5.0.1 + dev: true + + /find-up@4.1.0: + resolution: {integrity: sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==} + engines: {node: '>=8'} + dependencies: + locate-path: 5.0.0 + path-exists: 4.0.0 + dev: true + + /find-up@5.0.0: + resolution: {integrity: sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==} + engines: {node: '>=10'} + dependencies: + locate-path: 6.0.0 + path-exists: 4.0.0 + dev: true + + /flat-cache@3.0.4: + resolution: {integrity: sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==} + engines: {node: ^10.12.0 || >=12.0.0} + dependencies: + flatted: 3.2.7 + rimraf: 3.0.2 + dev: true + + /flatted@3.2.7: + resolution: {integrity: sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==} + dev: true + + /follow-redirects@1.15.2: + resolution: {integrity: sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==} + engines: {node: '>=4.0'} + peerDependencies: + debug: '*' + peerDependenciesMeta: + debug: + optional: true + dev: false + + /form-data@3.0.1: + resolution: {integrity: sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==} + engines: {node: '>= 6'} + dependencies: + asynckit: 0.4.0 + combined-stream: 1.0.8 + mime-types: 2.1.35 + dev: true + + /form-data@4.0.0: + resolution: {integrity: sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==} + engines: {node: '>= 6'} + dependencies: + asynckit: 0.4.0 + combined-stream: 1.0.8 + mime-types: 2.1.35 + dev: false + + /fs-extra@10.1.0: + resolution: {integrity: sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==} + engines: {node: '>=12'} + dependencies: + graceful-fs: 4.2.10 + jsonfile: 6.1.0 + universalify: 2.0.0 + dev: true + + /fs.realpath@1.0.0: + resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==} + dev: true + + /fsevents@2.3.2: + resolution: {integrity: sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==} + engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} + os: [darwin] + requiresBuild: true + dev: true + optional: true + + /function-bind@1.1.1: + resolution: {integrity: sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==} + dev: true + + /function.prototype.name@1.1.5: + resolution: {integrity: sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.2 + define-properties: 1.1.4 + es-abstract: 1.20.4 + functions-have-names: 1.2.3 + dev: true + + /functional-red-black-tree@1.0.1: + resolution: {integrity: 
sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g==} + dev: true + + /functions-have-names@1.2.3: + resolution: {integrity: sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==} + dev: true + + /gensync@1.0.0-beta.2: + resolution: {integrity: sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==} + engines: {node: '>=6.9.0'} + dev: true + + /get-caller-file@2.0.5: + resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==} + engines: {node: 6.* || 8.* || >= 10.*} + dev: true + + /get-intrinsic@1.1.3: + resolution: {integrity: sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==} + dependencies: + function-bind: 1.1.1 + has: 1.0.3 + has-symbols: 1.0.3 + dev: true + + /get-package-type@0.1.0: + resolution: {integrity: sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==} + engines: {node: '>=8.0.0'} + dev: true + + /get-stream@6.0.1: + resolution: {integrity: sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==} + engines: {node: '>=10'} + dev: true + + /get-symbol-description@1.0.0: + resolution: {integrity: sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.2 + get-intrinsic: 1.1.3 + dev: true + + /glob-parent@5.1.2: + resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} + engines: {node: '>= 6'} + dependencies: + is-glob: 4.0.3 + dev: true + + /glob-parent@6.0.2: + resolution: {integrity: sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==} + engines: {node: '>=10.13.0'} + dependencies: + is-glob: 4.0.3 + dev: true + + /glob-to-regexp@0.4.1: + resolution: {integrity: sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==} + dev: true + + /glob@7.1.6: + resolution: {integrity: sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==} + dependencies: + fs.realpath: 1.0.0 + inflight: 1.0.6 + inherits: 2.0.4 + minimatch: 3.1.2 + once: 1.4.0 + path-is-absolute: 1.0.1 + dev: true + + /glob@7.2.3: + resolution: {integrity: sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==} + dependencies: + fs.realpath: 1.0.0 + inflight: 1.0.6 + inherits: 2.0.4 + minimatch: 3.1.2 + once: 1.4.0 + path-is-absolute: 1.0.1 + dev: true + + /globals@11.12.0: + resolution: {integrity: sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==} + engines: {node: '>=4'} + dev: true + + /globals@13.17.0: + resolution: {integrity: sha512-1C+6nQRb1GwGMKm2dH/E7enFAMxGTmGI7/dEdhy/DNelv85w9B72t3uc5frtMNXIbzrarJJ/lTCjcaZwbLJmyw==} + engines: {node: '>=8'} + dependencies: + type-fest: 0.20.2 + dev: true + + /globby@11.1.0: + resolution: {integrity: sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==} + engines: {node: '>=10'} + dependencies: + array-union: 2.1.0 + dir-glob: 3.0.1 + fast-glob: 3.2.12 + ignore: 5.2.0 + merge2: 1.4.1 + slash: 3.0.0 + dev: true + + /graceful-fs@4.2.10: + resolution: {integrity: 
sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==} + dev: true + + /grapheme-splitter@1.0.4: + resolution: {integrity: sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ==} + dev: true + + /graphql-config@4.4.1(@types/node@18.6.2)(cosmiconfig-typescript-loader@4.3.0)(graphql@16.6.0): + resolution: {integrity: sha512-B8wlvfBHZ5WnI4IiuQZRqql6s+CKz7S+xpUeTb28Z8nRBi8tH9ChEBgT5FnTyE05PUhHlrS2jK9ICJ4YBl9OtQ==} + engines: {node: '>= 10.0.0'} + peerDependencies: + cosmiconfig-toml-loader: ^1.0.0 + cosmiconfig-typescript-loader: ^4.0.0 + graphql: ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0 + peerDependenciesMeta: + cosmiconfig-toml-loader: + optional: true + cosmiconfig-typescript-loader: + optional: true + dependencies: + '@graphql-tools/graphql-file-loader': 7.5.15(graphql@16.6.0) + '@graphql-tools/json-file-loader': 7.4.16(graphql@16.6.0) + '@graphql-tools/load': 7.8.11(graphql@16.6.0) + '@graphql-tools/merge': 8.3.17(graphql@16.6.0) + '@graphql-tools/url-loader': 7.17.9(@types/node@18.6.2)(graphql@16.6.0) + '@graphql-tools/utils': 9.2.0(graphql@16.6.0) + cosmiconfig: 8.0.0 + cosmiconfig-typescript-loader: 4.3.0(@types/node@18.6.2)(cosmiconfig@7.1.0)(ts-node@10.9.1)(typescript@4.8.2) + graphql: 16.6.0 + minimatch: 4.2.1 + string-env-interpolation: 1.0.1 + tslib: 2.5.0 + transitivePeerDependencies: + - '@types/node' + - bufferutil + - encoding + - utf-8-validate + dev: true + + /graphql-request@5.1.0(graphql@16.6.0): + resolution: {integrity: sha512-0OeRVYigVwIiXhNmqnPDt+JhMzsjinxHE7TVy3Lm6jUzav0guVcL0lfSbi6jVTRAxcbwgyr6yrZioSHxf9gHzw==} + peerDependencies: + graphql: 14 - 16 + dependencies: + '@graphql-typed-document-node/core': 3.1.1(graphql@16.6.0) + cross-fetch: 3.1.5 + extract-files: 9.0.0 + form-data: 3.0.1 + graphql: 16.6.0 + transitivePeerDependencies: + - encoding + dev: true + + /graphql-tag@2.12.6(graphql@16.6.0): + resolution: {integrity: sha512-FdSNcu2QQcWnM2VNvSCCDCVS5PpPqpzgFT8+GXzqJuoDd0CBncxCY278u4mhRO7tMgo2JjgJA5aZ+nWSQ/Z+xg==} + engines: {node: '>=10'} + peerDependencies: + graphql: ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0 + dependencies: + graphql: 16.6.0 + tslib: 2.5.0 + dev: true + + /graphql-ws@5.11.3(graphql@16.6.0): + resolution: {integrity: sha512-fU8zwSgAX2noXAsuFiCZ8BtXeXZOzXyK5u1LloCdacsVth4skdBMPO74EG51lBoWSIZ8beUocdpV8+cQHBODnQ==} + engines: {node: '>=10'} + peerDependencies: + graphql: '>=0.11 <=16' + dependencies: + graphql: 16.6.0 + dev: true + + /graphql@16.6.0: + resolution: {integrity: sha512-KPIBPDlW7NxrbT/eh4qPXz5FiFdL5UbaA0XUNz2Rp3Z3hqBSkbj0GVjwFDztsWVauZUWsbKHgMg++sk8UX0bkw==} + engines: {node: ^12.22.0 || ^14.16.0 || ^16.0.0 || >=17.0.0} + dev: true + + /handlebars@4.7.7: + resolution: {integrity: sha512-aAcXm5OAfE/8IXkcZvCepKU3VzW1/39Fb5ZuqMtgI/hT8X2YgoMvBY5dLhq/cpOvw7Lk1nK/UF71aLG/ZnVYRA==} + engines: {node: '>=0.4.7'} + hasBin: true + dependencies: + minimist: 1.2.7 + neo-async: 2.6.2 + source-map: 0.6.1 + wordwrap: 1.0.0 + optionalDependencies: + uglify-js: 3.17.4 + dev: true + + /has-bigints@1.0.2: + resolution: {integrity: sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==} + dev: true + + /has-flag@3.0.0: + resolution: {integrity: sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==} + engines: {node: '>=4'} + dev: true + + /has-flag@4.0.0: + resolution: {integrity: 
sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==} + engines: {node: '>=8'} + dev: true + + /has-property-descriptors@1.0.0: + resolution: {integrity: sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==} + dependencies: + get-intrinsic: 1.1.3 + dev: true + + /has-symbols@1.0.3: + resolution: {integrity: sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==} + engines: {node: '>= 0.4'} + dev: true + + /has-tostringtag@1.0.0: + resolution: {integrity: sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==} + engines: {node: '>= 0.4'} + dependencies: + has-symbols: 1.0.3 + dev: true + + /has@1.0.3: + resolution: {integrity: sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==} + engines: {node: '>= 0.4.0'} + dependencies: + function-bind: 1.1.1 + dev: true + + /header-case@2.0.4: + resolution: {integrity: sha512-H/vuk5TEEVZwrR0lp2zed9OCo1uAILMlx0JEMgC26rzyJJ3N1v6XkwHHXJQdR2doSjcGPM6OKPYoJgf0plJ11Q==} + dependencies: + capital-case: 1.0.4 + tslib: 2.5.0 + dev: true + + /html-escaper@2.0.2: + resolution: {integrity: sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==} + dev: true + + /http-proxy-agent@5.0.0: + resolution: {integrity: sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==} + engines: {node: '>= 6'} + dependencies: + '@tootallnate/once': 2.0.0 + agent-base: 6.0.2 + debug: 4.3.4 + transitivePeerDependencies: + - supports-color + dev: true + + /https-proxy-agent@5.0.1: + resolution: {integrity: sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==} + engines: {node: '>= 6'} + dependencies: + agent-base: 6.0.2 + debug: 4.3.4 + transitivePeerDependencies: + - supports-color + dev: true + + /human-signals@2.1.0: + resolution: {integrity: sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==} + engines: {node: '>=10.17.0'} + dev: true + + /iconv-lite@0.4.24: + resolution: {integrity: sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==} + engines: {node: '>=0.10.0'} + dependencies: + safer-buffer: 2.1.2 + dev: true + + /ieee754@1.2.1: + resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==} + dev: true + + /ignore@5.2.0: + resolution: {integrity: sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==} + engines: {node: '>= 4'} + dev: true + + /immutable@3.7.6: + resolution: {integrity: sha512-AizQPcaofEtO11RZhPPHBOJRdo/20MKQF9mBLnVkBoyHi1/zXK8fzVdnEpSV9gxqtnh6Qomfp3F0xT5qP/vThw==} + engines: {node: '>=0.8.0'} + dev: true + + /import-fresh@3.3.0: + resolution: {integrity: sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==} + engines: {node: '>=6'} + dependencies: + parent-module: 1.0.1 + resolve-from: 4.0.0 + dev: true + + /import-from@4.0.0: + resolution: {integrity: sha512-P9J71vT5nLlDeV8FHs5nNxaLbrpfAV5cF5srvbZfpwpcJoM/xZR3hiv+q+SAnuSmuGbXMWud063iIMx/V/EWZQ==} + engines: {node: '>=12.2'} + dev: true + + /import-local@3.1.0: + resolution: {integrity: sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg==} + engines: {node: '>=8'} + hasBin: true + dependencies: + pkg-dir: 
4.2.0 + resolve-cwd: 3.0.0 + dev: true + + /imurmurhash@0.1.4: + resolution: {integrity: sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==} + engines: {node: '>=0.8.19'} + dev: true + + /indent-string@4.0.0: + resolution: {integrity: sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==} + engines: {node: '>=8'} + dev: true + + /inflight@1.0.6: + resolution: {integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==} + dependencies: + once: 1.4.0 + wrappy: 1.0.2 + dev: true + + /inherits@2.0.4: + resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} + dev: true + + /inquirer@8.2.5: + resolution: {integrity: sha512-QAgPDQMEgrDssk1XiwwHoOGYF9BAbUcc1+j+FhEvaOt8/cKRqyLn0U5qA6F74fGhTMGxf92pOvPBeh29jQJDTQ==} + engines: {node: '>=12.0.0'} + dependencies: + ansi-escapes: 4.3.2 + chalk: 4.1.2 + cli-cursor: 3.1.0 + cli-width: 3.0.0 + external-editor: 3.1.0 + figures: 3.2.0 + lodash: 4.17.21 + mute-stream: 0.0.8 + ora: 5.4.1 + run-async: 2.4.1 + rxjs: 7.8.0 + string-width: 4.2.3 + strip-ansi: 6.0.1 + through: 2.3.8 + wrap-ansi: 7.0.0 + dev: true + + /internal-slot@1.0.3: + resolution: {integrity: sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA==} + engines: {node: '>= 0.4'} + dependencies: + get-intrinsic: 1.1.3 + has: 1.0.3 + side-channel: 1.0.4 + dev: true + + /invariant@2.2.4: + resolution: {integrity: sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==} + dependencies: + loose-envify: 1.4.0 + dev: true + + /is-absolute@1.0.0: + resolution: {integrity: sha512-dOWoqflvcydARa360Gvv18DZ/gRuHKi2NU/wU5X1ZFzdYfH29nkiNZsF3mp4OJ3H4yo9Mx8A/uAGNzpzPN3yBA==} + engines: {node: '>=0.10.0'} + dependencies: + is-relative: 1.0.0 + is-windows: 1.0.2 + dev: true + + /is-arrayish@0.2.1: + resolution: {integrity: sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==} + dev: true + + /is-bigint@1.0.4: + resolution: {integrity: sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==} + dependencies: + has-bigints: 1.0.2 + dev: true + + /is-binary-path@2.1.0: + resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==} + engines: {node: '>=8'} + dependencies: + binary-extensions: 2.2.0 + dev: true + + /is-boolean-object@1.1.2: + resolution: {integrity: sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.2 + has-tostringtag: 1.0.0 + dev: true + + /is-callable@1.2.7: + resolution: {integrity: sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==} + engines: {node: '>= 0.4'} + dev: true + + /is-core-module@2.11.0: + resolution: {integrity: sha512-RRjxlvLDkD1YJwDbroBHMb+cukurkDWNyHx7D3oNB5x9rb5ogcksMC5wHCadcXoo67gVr/+3GFySh3134zi6rw==} + dependencies: + has: 1.0.3 + dev: true + + /is-date-object@1.0.5: + resolution: {integrity: sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==} + engines: {node: '>= 0.4'} + dependencies: + has-tostringtag: 1.0.0 + dev: true + + /is-extglob@2.1.1: + resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} + 
engines: {node: '>=0.10.0'} + dev: true + + /is-fullwidth-code-point@3.0.0: + resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} + engines: {node: '>=8'} + dev: true + + /is-generator-fn@2.1.0: + resolution: {integrity: sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==} + engines: {node: '>=6'} + dev: true + + /is-glob@4.0.3: + resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} + engines: {node: '>=0.10.0'} + dependencies: + is-extglob: 2.1.1 + dev: true + + /is-interactive@1.0.0: + resolution: {integrity: sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w==} + engines: {node: '>=8'} + dev: true + + /is-lower-case@2.0.2: + resolution: {integrity: sha512-bVcMJy4X5Og6VZfdOZstSexlEy20Sr0k/p/b2IlQJlfdKAQuMpiv5w2Ccxb8sKdRUNAG1PnHVHjFSdRDVS6NlQ==} + dependencies: + tslib: 2.5.0 + dev: true + + /is-negative-zero@2.0.2: + resolution: {integrity: sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==} + engines: {node: '>= 0.4'} + dev: true + + /is-number-object@1.0.7: + resolution: {integrity: sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==} + engines: {node: '>= 0.4'} + dependencies: + has-tostringtag: 1.0.0 + dev: true + + /is-number@7.0.0: + resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} + engines: {node: '>=0.12.0'} + dev: true + + /is-regex@1.1.4: + resolution: {integrity: sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.2 + has-tostringtag: 1.0.0 + dev: true + + /is-relative@1.0.0: + resolution: {integrity: sha512-Kw/ReK0iqwKeu0MITLFuj0jbPAmEiOsIwyIXvvbfa6QfmN9pkD1M+8pdk7Rl/dTKbH34/XBFMbgD4iMJhLQbGA==} + engines: {node: '>=0.10.0'} + dependencies: + is-unc-path: 1.0.0 + dev: true + + /is-shared-array-buffer@1.0.2: + resolution: {integrity: sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==} + dependencies: + call-bind: 1.0.2 + dev: true + + /is-stream@2.0.1: + resolution: {integrity: sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==} + engines: {node: '>=8'} + dev: true + + /is-string@1.0.7: + resolution: {integrity: sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==} + engines: {node: '>= 0.4'} + dependencies: + has-tostringtag: 1.0.0 + dev: true + + /is-symbol@1.0.4: + resolution: {integrity: sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==} + engines: {node: '>= 0.4'} + dependencies: + has-symbols: 1.0.3 + dev: true + + /is-unc-path@1.0.0: + resolution: {integrity: sha512-mrGpVd0fs7WWLfVsStvgF6iEJnbjDFZh9/emhRDcGWTduTfNHd9CHeUwH3gYIjdbwo4On6hunkztwOaAw0yllQ==} + engines: {node: '>=0.10.0'} + dependencies: + unc-path-regex: 0.1.2 + dev: true + + /is-unicode-supported@0.1.0: + resolution: {integrity: sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==} + engines: {node: '>=10'} + dev: true + + /is-upper-case@2.0.2: + resolution: {integrity: sha512-44pxmxAvnnAOwBg4tHPnkfvgjPwbc5QIsSstNU+YcJ1ovxVzCWpSGosPJOZh/a1tdl81fbgnLc9LLv+x2ywbPQ==} + dependencies: + tslib: 2.5.0 + dev: 
true + + /is-weakref@1.0.2: + resolution: {integrity: sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==} + dependencies: + call-bind: 1.0.2 + dev: true + + /is-windows@1.0.2: + resolution: {integrity: sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==} + engines: {node: '>=0.10.0'} + dev: true + + /isexe@2.0.0: + resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} + dev: true + + /isomorphic-fetch@3.0.0: + resolution: {integrity: sha512-qvUtwJ3j6qwsF3jLxkZ72qCgjMysPzDfeV240JHiGZsANBYd+EEuu35v7dfrJ9Up0Ak07D7GGSkGhCHTqg/5wA==} + dependencies: + node-fetch: 2.6.9 + whatwg-fetch: 3.6.2 + transitivePeerDependencies: + - encoding + dev: true + + /isomorphic-ws@5.0.0(ws@8.12.0): + resolution: {integrity: sha512-muId7Zzn9ywDsyXgTIafTry2sV3nySZeUDe6YedVd1Hvuuep5AsIlqK+XefWpYTyJG5e503F2xIuT2lcU6rCSw==} + peerDependencies: + ws: '*' + dependencies: + ws: 8.12.0 + dev: true + + /istanbul-lib-coverage@3.2.0: + resolution: {integrity: sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw==} + engines: {node: '>=8'} + dev: true + + /istanbul-lib-instrument@5.2.1: + resolution: {integrity: sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg==} + engines: {node: '>=8'} + dependencies: + '@babel/core': 7.19.6 + '@babel/parser': 7.20.1 + '@istanbuljs/schema': 0.1.3 + istanbul-lib-coverage: 3.2.0 + semver: 6.3.0 + transitivePeerDependencies: + - supports-color + dev: true + + /istanbul-lib-report@3.0.0: + resolution: {integrity: sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw==} + engines: {node: '>=8'} + dependencies: + istanbul-lib-coverage: 3.2.0 + make-dir: 3.1.0 + supports-color: 7.2.0 + dev: true + + /istanbul-lib-source-maps@4.0.1: + resolution: {integrity: sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==} + engines: {node: '>=10'} + dependencies: + debug: 4.3.4 + istanbul-lib-coverage: 3.2.0 + source-map: 0.6.1 + transitivePeerDependencies: + - supports-color + dev: true + + /istanbul-reports@3.1.5: + resolution: {integrity: sha512-nUsEMa9pBt/NOHqbcbeJEgqIlY/K7rVWUX6Lql2orY5e9roQOthbR3vtY4zzf2orPELg80fnxxk9zUyPlgwD1w==} + engines: {node: '>=8'} + dependencies: + html-escaper: 2.0.2 + istanbul-lib-report: 3.0.0 + dev: true + + /jest-changed-files@28.1.3: + resolution: {integrity: sha512-esaOfUWJXk2nfZt9SPyC8gA1kNfdKLkQWyzsMlqq8msYSlNKfmZxfRgZn4Cd4MGVUF+7v6dBs0d5TOAKa7iIiA==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + dependencies: + execa: 5.1.1 + p-limit: 3.1.0 + dev: true + + /jest-circus@28.1.3: + resolution: {integrity: sha512-cZ+eS5zc79MBwt+IhQhiEp0OeBddpc1n8MBo1nMB8A7oPMKEO+Sre+wHaLJexQUj9Ya/8NOBY0RESUgYjB6fow==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + dependencies: + '@jest/environment': 28.1.3 + '@jest/expect': 28.1.3 + '@jest/test-result': 28.1.3 + '@jest/types': 28.1.3 + '@types/node': 18.6.2 + chalk: 4.1.2 + co: 4.6.0 + dedent: 0.7.0 + is-generator-fn: 2.1.0 + jest-each: 28.1.3 + jest-matcher-utils: 28.1.3 + jest-message-util: 28.1.3 + jest-runtime: 28.1.3 + jest-snapshot: 28.1.3 + jest-util: 28.1.3 + p-limit: 3.1.0 + pretty-format: 28.1.3 + slash: 3.0.0 + stack-utils: 2.0.5 + transitivePeerDependencies: + - supports-color + dev: true + + /jest-cli@28.1.3(@types/node@18.6.2)(ts-node@10.9.1): + resolution: 
{integrity: sha512-roY3kvrv57Azn1yPgdTebPAXvdR2xfezaKKYzVxZ6It/5NCxzJym6tUI5P1zkdWhfUYkxEI9uZWcQdaFLo8mJQ==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + hasBin: true + peerDependencies: + node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 + peerDependenciesMeta: + node-notifier: + optional: true + dependencies: + '@jest/core': 28.1.3(ts-node@10.9.1) + '@jest/test-result': 28.1.3 + '@jest/types': 28.1.3 + chalk: 4.1.2 + exit: 0.1.2 + graceful-fs: 4.2.10 + import-local: 3.1.0 + jest-config: 28.1.3(@types/node@18.6.2)(ts-node@10.9.1) + jest-util: 28.1.3 + jest-validate: 28.1.3 + prompts: 2.4.2 + yargs: 17.6.2 + transitivePeerDependencies: + - '@types/node' + - supports-color + - ts-node + dev: true + + /jest-config@28.1.3(@types/node@18.6.2)(ts-node@10.9.1): + resolution: {integrity: sha512-MG3INjByJ0J4AsNBm7T3hsuxKQqFIiRo/AUqb1q9LRKI5UU6Aar9JHbr9Ivn1TVwfUD9KirRoM/T6u8XlcQPHQ==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + peerDependencies: + '@types/node': '*' + ts-node: '>=9.0.0' + peerDependenciesMeta: + '@types/node': + optional: true + ts-node: + optional: true + dependencies: + '@babel/core': 7.19.6 + '@jest/test-sequencer': 28.1.3 + '@jest/types': 28.1.3 + '@types/node': 18.6.2 + babel-jest: 28.1.3(@babel/core@7.19.6) + chalk: 4.1.2 + ci-info: 3.5.0 + deepmerge: 4.2.2 + glob: 7.2.3 + graceful-fs: 4.2.10 + jest-circus: 28.1.3 + jest-environment-node: 28.1.3 + jest-get-type: 28.0.2 + jest-regex-util: 28.0.2 + jest-resolve: 28.1.3 + jest-runner: 28.1.3 + jest-util: 28.1.3 + jest-validate: 28.1.3 + micromatch: 4.0.5 + parse-json: 5.2.0 + pretty-format: 28.1.3 + slash: 3.0.0 + strip-json-comments: 3.1.1 + ts-node: 10.9.1(@types/node@18.6.2)(typescript@4.8.2) + transitivePeerDependencies: + - supports-color + dev: true + + /jest-diff@28.1.3: + resolution: {integrity: sha512-8RqP1B/OXzjjTWkqMX67iqgwBVJRgCyKD3L9nq+6ZqJMdvjE8RgHktqZ6jNrkdMT+dJuYNI3rhQpxaz7drJHfw==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + dependencies: + chalk: 4.1.2 + diff-sequences: 28.1.1 + jest-get-type: 28.0.2 + pretty-format: 28.1.3 + dev: true + + /jest-docblock@28.1.1: + resolution: {integrity: sha512-3wayBVNiOYx0cwAbl9rwm5kKFP8yHH3d/fkEaL02NPTkDojPtheGB7HZSFY4wzX+DxyrvhXz0KSCVksmCknCuA==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + dependencies: + detect-newline: 3.1.0 + dev: true + + /jest-each@28.1.3: + resolution: {integrity: sha512-arT1z4sg2yABU5uogObVPvSlSMQlDA48owx07BDPAiasW0yYpYHYOo4HHLz9q0BVzDVU4hILFjzJw0So9aCL/g==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + dependencies: + '@jest/types': 28.1.3 + chalk: 4.1.2 + jest-get-type: 28.0.2 + jest-util: 28.1.3 + pretty-format: 28.1.3 + dev: true + + /jest-environment-node@28.1.3: + resolution: {integrity: sha512-ugP6XOhEpjAEhGYvp5Xj989ns5cB1K6ZdjBYuS30umT4CQEETaxSiPcZ/E1kFktX4GkrcM4qu07IIlDYX1gp+A==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + dependencies: + '@jest/environment': 28.1.3 + '@jest/fake-timers': 28.1.3 + '@jest/types': 28.1.3 + '@types/node': 18.6.2 + jest-mock: 28.1.3 + jest-util: 28.1.3 + dev: true + + /jest-get-type@28.0.2: + resolution: {integrity: sha512-ioj2w9/DxSYHfOm5lJKCdcAmPJzQXmbM/Url3rhlghrPvT3tt+7a/+oXc9azkKmLvoiXjtV83bEWqi+vs5nlPA==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + dev: true + + /jest-haste-map@28.1.3: + resolution: {integrity: sha512-3S+RQWDXccXDKSWnkHa/dPwt+2qwA8CJzR61w3FoYCvoo3Pn8tvGcysmMF0Bj0EX5RYvAI2EIvC57OmotfdtKA==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || 
>=17.0.0} + dependencies: + '@jest/types': 28.1.3 + '@types/graceful-fs': 4.1.5 + '@types/node': 18.6.2 + anymatch: 3.1.2 + fb-watchman: 2.0.2 + graceful-fs: 4.2.10 + jest-regex-util: 28.0.2 + jest-util: 28.1.3 + jest-worker: 28.1.3 + micromatch: 4.0.5 + walker: 1.0.8 + optionalDependencies: + fsevents: 2.3.2 + dev: true + + /jest-leak-detector@28.1.3: + resolution: {integrity: sha512-WFVJhnQsiKtDEo5lG2mM0v40QWnBM+zMdHHyJs8AWZ7J0QZJS59MsyKeJHWhpBZBH32S48FOVvGyOFT1h0DlqA==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + dependencies: + jest-get-type: 28.0.2 + pretty-format: 28.1.3 + dev: true + + /jest-matcher-utils@28.1.3: + resolution: {integrity: sha512-kQeJ7qHemKfbzKoGjHHrRKH6atgxMk8Enkk2iPQ3XwO6oE/KYD8lMYOziCkeSB9G4adPM4nR1DE8Tf5JeWH6Bw==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + dependencies: + chalk: 4.1.2 + jest-diff: 28.1.3 + jest-get-type: 28.0.2 + pretty-format: 28.1.3 + dev: true + + /jest-message-util@28.1.3: + resolution: {integrity: sha512-PFdn9Iewbt575zKPf1286Ht9EPoJmYT7P0kY+RibeYZ2XtOr53pDLEFoTWXbd1h4JiGiWpTBC84fc8xMXQMb7g==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + dependencies: + '@babel/code-frame': 7.18.6 + '@jest/types': 28.1.3 + '@types/stack-utils': 2.0.1 + chalk: 4.1.2 + graceful-fs: 4.2.10 + micromatch: 4.0.5 + pretty-format: 28.1.3 + slash: 3.0.0 + stack-utils: 2.0.5 + dev: true + + /jest-mock@28.1.3: + resolution: {integrity: sha512-o3J2jr6dMMWYVH4Lh/NKmDXdosrsJgi4AviS8oXLujcjpCMBb1FMsblDnOXKZKfSiHLxYub1eS0IHuRXsio9eA==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + dependencies: + '@jest/types': 28.1.3 + '@types/node': 18.6.2 + dev: true + + /jest-pnp-resolver@1.2.2(jest-resolve@28.1.3): + resolution: {integrity: sha512-olV41bKSMm8BdnuMsewT4jqlZ8+3TCARAXjZGT9jcoSnrfUnRCqnMoF9XEeoWjbzObpqF9dRhHQj0Xb9QdF6/w==} + engines: {node: '>=6'} + peerDependencies: + jest-resolve: '*' + peerDependenciesMeta: + jest-resolve: + optional: true + dependencies: + jest-resolve: 28.1.3 + dev: true + + /jest-regex-util@28.0.2: + resolution: {integrity: sha512-4s0IgyNIy0y9FK+cjoVYoxamT7Zeo7MhzqRGx7YDYmaQn1wucY9rotiGkBzzcMXTtjrCAP/f7f+E0F7+fxPNdw==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + dev: true + + /jest-resolve-dependencies@28.1.3: + resolution: {integrity: sha512-qa0QO2Q0XzQoNPouMbCc7Bvtsem8eQgVPNkwn9LnS+R2n8DaVDPL/U1gngC0LTl1RYXJU0uJa2BMC2DbTfFrHA==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + dependencies: + jest-regex-util: 28.0.2 + jest-snapshot: 28.1.3 + transitivePeerDependencies: + - supports-color + dev: true + + /jest-resolve@28.1.3: + resolution: {integrity: sha512-Z1W3tTjE6QaNI90qo/BJpfnvpxtaFTFw5CDgwpyE/Kz8U/06N1Hjf4ia9quUhCh39qIGWF1ZuxFiBiJQwSEYKQ==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + dependencies: + chalk: 4.1.2 + graceful-fs: 4.2.10 + jest-haste-map: 28.1.3 + jest-pnp-resolver: 1.2.2(jest-resolve@28.1.3) + jest-util: 28.1.3 + jest-validate: 28.1.3 + resolve: 1.22.1 + resolve.exports: 1.1.0 + slash: 3.0.0 + dev: true + + /jest-runner@28.1.3: + resolution: {integrity: sha512-GkMw4D/0USd62OVO0oEgjn23TM+YJa2U2Wu5zz9xsQB1MxWKDOlrnykPxnMsN0tnJllfLPinHTka61u0QhaxBA==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + dependencies: + '@jest/console': 28.1.3 + '@jest/environment': 28.1.3 + '@jest/test-result': 28.1.3 + '@jest/transform': 28.1.3 + '@jest/types': 28.1.3 + '@types/node': 18.6.2 + chalk: 4.1.2 + emittery: 0.10.2 + graceful-fs: 4.2.10 + jest-docblock: 28.1.1 + 
jest-environment-node: 28.1.3 + jest-haste-map: 28.1.3 + jest-leak-detector: 28.1.3 + jest-message-util: 28.1.3 + jest-resolve: 28.1.3 + jest-runtime: 28.1.3 + jest-util: 28.1.3 + jest-watcher: 28.1.3 + jest-worker: 28.1.3 + p-limit: 3.1.0 + source-map-support: 0.5.13 + transitivePeerDependencies: + - supports-color + dev: true + + /jest-runtime@28.1.3: + resolution: {integrity: sha512-NU+881ScBQQLc1JHG5eJGU7Ui3kLKrmwCPPtYsJtBykixrM2OhVQlpMmFWJjMyDfdkGgBMNjXCGB/ebzsgNGQw==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + dependencies: + '@jest/environment': 28.1.3 + '@jest/fake-timers': 28.1.3 + '@jest/globals': 28.1.3 + '@jest/source-map': 28.1.2 + '@jest/test-result': 28.1.3 + '@jest/transform': 28.1.3 + '@jest/types': 28.1.3 + chalk: 4.1.2 + cjs-module-lexer: 1.2.2 + collect-v8-coverage: 1.0.1 + execa: 5.1.1 + glob: 7.2.3 + graceful-fs: 4.2.10 + jest-haste-map: 28.1.3 + jest-message-util: 28.1.3 + jest-mock: 28.1.3 + jest-regex-util: 28.0.2 + jest-resolve: 28.1.3 + jest-snapshot: 28.1.3 + jest-util: 28.1.3 + slash: 3.0.0 + strip-bom: 4.0.0 + transitivePeerDependencies: + - supports-color + dev: true + + /jest-snapshot@28.1.3: + resolution: {integrity: sha512-4lzMgtiNlc3DU/8lZfmqxN3AYD6GGLbl+72rdBpXvcV+whX7mDrREzkPdp2RnmfIiWBg1YbuFSkXduF2JcafJg==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + dependencies: + '@babel/core': 7.19.6 + '@babel/generator': 7.20.1 + '@babel/plugin-syntax-typescript': 7.20.0(@babel/core@7.19.6) + '@babel/traverse': 7.20.1 + '@babel/types': 7.20.0 + '@jest/expect-utils': 28.1.3 + '@jest/transform': 28.1.3 + '@jest/types': 28.1.3 + '@types/babel__traverse': 7.18.2 + '@types/prettier': 2.7.1 + babel-preset-current-node-syntax: 1.0.1(@babel/core@7.19.6) + chalk: 4.1.2 + expect: 28.1.3 + graceful-fs: 4.2.10 + jest-diff: 28.1.3 + jest-get-type: 28.0.2 + jest-haste-map: 28.1.3 + jest-matcher-utils: 28.1.3 + jest-message-util: 28.1.3 + jest-util: 28.1.3 + natural-compare: 1.4.0 + pretty-format: 28.1.3 + semver: 7.3.8 + transitivePeerDependencies: + - supports-color + dev: true + + /jest-util@28.1.3: + resolution: {integrity: sha512-XdqfpHwpcSRko/C35uLYFM2emRAltIIKZiJ9eAmhjsj0CqZMa0p1ib0R5fWIqGhn1a103DebTbpqIaP1qCQ6tQ==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + dependencies: + '@jest/types': 28.1.3 + '@types/node': 18.6.2 + chalk: 4.1.2 + ci-info: 3.5.0 + graceful-fs: 4.2.10 + picomatch: 2.3.1 + dev: true + + /jest-validate@28.1.3: + resolution: {integrity: sha512-SZbOGBWEsaTxBGCOpsRWlXlvNkvTkY0XxRfh7zYmvd8uL5Qzyg0CHAXiXKROflh801quA6+/DsT4ODDthOC/OA==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + dependencies: + '@jest/types': 28.1.3 + camelcase: 6.3.0 + chalk: 4.1.2 + jest-get-type: 28.0.2 + leven: 3.1.0 + pretty-format: 28.1.3 + dev: true + + /jest-watcher@28.1.3: + resolution: {integrity: sha512-t4qcqj9hze+jviFPUN3YAtAEeFnr/azITXQEMARf5cMwKY2SMBRnCQTXLixTl20OR6mLh9KLMrgVJgJISym+1g==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + dependencies: + '@jest/test-result': 28.1.3 + '@jest/types': 28.1.3 + '@types/node': 18.6.2 + ansi-escapes: 4.3.2 + chalk: 4.1.2 + emittery: 0.10.2 + jest-util: 28.1.3 + string-length: 4.0.2 + dev: true + + /jest-worker@27.5.1: + resolution: {integrity: sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==} + engines: {node: '>= 10.13.0'} + dependencies: + '@types/node': 18.6.2 + merge-stream: 2.0.0 + supports-color: 8.1.1 + dev: true + + /jest-worker@28.1.3: + resolution: {integrity: 
sha512-CqRA220YV/6jCo8VWvAt1KKx6eek1VIHMPeLEbpcfSfkEeWyBNppynM/o6q+Wmw+sOhos2ml34wZbSX3G13//g==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + dependencies: + '@types/node': 18.6.2 + merge-stream: 2.0.0 + supports-color: 8.1.1 + dev: true + + /jest@28.1.3(@types/node@18.6.2)(ts-node@10.9.1): + resolution: {integrity: sha512-N4GT5on8UkZgH0O5LUavMRV1EDEhNTL0KEfRmDIeZHSV7p2XgLoY9t9VDUgL6o+yfdgYHVxuz81G8oB9VG5uyA==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + hasBin: true + peerDependencies: + node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 + peerDependenciesMeta: + node-notifier: + optional: true + dependencies: + '@jest/core': 28.1.3(ts-node@10.9.1) + '@jest/types': 28.1.3 + import-local: 3.1.0 + jest-cli: 28.1.3(@types/node@18.6.2)(ts-node@10.9.1) + transitivePeerDependencies: + - '@types/node' + - supports-color + - ts-node + dev: true + + /joycon@3.1.1: + resolution: {integrity: sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw==} + engines: {node: '>=10'} + dev: true + + /js-tokens@4.0.0: + resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} + dev: true + + /js-yaml@3.14.1: + resolution: {integrity: sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==} + hasBin: true + dependencies: + argparse: 1.0.10 + esprima: 4.0.1 + dev: true + + /js-yaml@4.1.0: + resolution: {integrity: sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==} + hasBin: true + dependencies: + argparse: 2.0.1 + dev: true + + /jsesc@2.5.2: + resolution: {integrity: sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==} + engines: {node: '>=4'} + hasBin: true + dev: true + + /json-parse-even-better-errors@2.3.1: + resolution: {integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==} + dev: true + + /json-schema-ref-parser@9.0.9: + resolution: {integrity: sha512-qcP2lmGy+JUoQJ4DOQeLaZDqH9qSkeGCK3suKWxJXS82dg728Mn3j97azDMaOUmJAN4uCq91LdPx4K7E8F1a7Q==} + engines: {node: '>=10'} + deprecated: Please switch to @apidevtools/json-schema-ref-parser + dependencies: + '@apidevtools/json-schema-ref-parser': 9.0.9 + dev: true + + /json-schema-traverse@0.4.1: + resolution: {integrity: sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==} + dev: true + + /json-stable-stringify-without-jsonify@1.0.1: + resolution: {integrity: sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==} + dev: true + + /json-stable-stringify@1.0.2: + resolution: {integrity: sha512-eunSSaEnxV12z+Z73y/j5N37/In40GK4GmsSy+tEHJMxknvqnA7/djeYtAgW0GsWHUfg+847WJjKaEylk2y09g==} + dependencies: + jsonify: 0.0.1 + dev: true + + /json-to-pretty-yaml@1.2.2: + resolution: {integrity: sha512-rvm6hunfCcqegwYaG5T4yKJWxc9FXFgBVrcTZ4XfSVRwa5HA/Xs+vB/Eo9treYYHCeNM0nrSUr82V/M31Urc7A==} + engines: {node: '>= 0.2.0'} + dependencies: + remedial: 1.0.8 + remove-trailing-spaces: 1.0.8 + dev: true + + /json5@1.0.1: + resolution: {integrity: sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==} + hasBin: true + dependencies: + minimist: 1.2.7 + dev: true + + /json5@2.2.1: + resolution: {integrity: sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA==} + engines: {node: '>=6'} + hasBin: true + dev: 
true + + /jsonc-parser@3.2.0: + resolution: {integrity: sha512-gfFQZrcTc8CnKXp6Y4/CBT3fTc0OVuDofpre4aEeEpSBPV5X5v4+Vmx+8snU7RLPrNHPKSgLxGo9YuQzz20o+w==} + dev: true + + /jsonfile@6.1.0: + resolution: {integrity: sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==} + dependencies: + universalify: 2.0.0 + optionalDependencies: + graceful-fs: 4.2.10 + dev: true + + /jsonify@0.0.1: + resolution: {integrity: sha512-2/Ki0GcmuqSrgFyelQq9M05y7PS0mEwuIzrf3f1fPqkVDVRvZrPZtVSMHxdgo8Aq0sxAOb/cr2aqqA3LeWHVPg==} + dev: true + + /jsonwebtoken@9.0.0: + resolution: {integrity: sha512-tuGfYXxkQGDPnLJ7SibiQgVgeDgfbPq2k2ICcbgqW8WxWLBAxKQM/ZCu/IT8SOSwmaYl4dpTFCW5xZv7YbbWUw==} + engines: {node: '>=12', npm: '>=6'} + dependencies: + jws: 3.2.2 + lodash: 4.17.21 + ms: 2.1.3 + semver: 7.3.8 + dev: true + + /jwa@1.4.1: + resolution: {integrity: sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==} + dependencies: + buffer-equal-constant-time: 1.0.1 + ecdsa-sig-formatter: 1.0.11 + safe-buffer: 5.2.1 + dev: true + + /jws@3.2.2: + resolution: {integrity: sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==} + dependencies: + jwa: 1.4.1 + safe-buffer: 5.2.1 + dev: true + + /kleur@3.0.3: + resolution: {integrity: sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==} + engines: {node: '>=6'} + dev: true + + /leven@3.1.0: + resolution: {integrity: sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==} + engines: {node: '>=6'} + dev: true + + /levn@0.4.1: + resolution: {integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==} + engines: {node: '>= 0.8.0'} + dependencies: + prelude-ls: 1.2.1 + type-check: 0.4.0 + dev: true + + /lilconfig@2.0.6: + resolution: {integrity: sha512-9JROoBW7pobfsx+Sq2JsASvCo6Pfo6WWoUW79HuB1BCoBXD4PLWJPqDF6fNj67pqBYTbAHkE57M1kS/+L1neOg==} + engines: {node: '>=10'} + dev: true + + /lines-and-columns@1.2.4: + resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==} + dev: true + + /listr2@4.0.5: + resolution: {integrity: sha512-juGHV1doQdpNT3GSTs9IUN43QJb7KHdF9uqg7Vufs/tG9VTzpFphqF4pm/ICdAABGQxsyNn9CiYA3StkI6jpwA==} + engines: {node: '>=12'} + peerDependencies: + enquirer: '>= 2.3.0 < 3' + peerDependenciesMeta: + enquirer: + optional: true + dependencies: + cli-truncate: 2.1.0 + colorette: 2.0.19 + log-update: 4.0.0 + p-map: 4.0.0 + rfdc: 1.3.0 + rxjs: 7.8.0 + through: 2.3.8 + wrap-ansi: 7.0.0 + dev: true + + /load-tsconfig@0.2.3: + resolution: {integrity: sha512-iyT2MXws+dc2Wi6o3grCFtGXpeMvHmJqS27sMPGtV2eUu4PeFnG+33I8BlFK1t1NWMjOpcx9bridn5yxLDX2gQ==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + dev: true + + /loader-runner@4.3.0: + resolution: {integrity: sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg==} + engines: {node: '>=6.11.5'} + dev: true + + /locate-path@5.0.0: + resolution: {integrity: sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==} + engines: {node: '>=8'} + dependencies: + p-locate: 4.1.0 + dev: true + + /locate-path@6.0.0: + resolution: {integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==} + engines: {node: '>=10'} + dependencies: + p-locate: 5.0.0 + dev: true + + /lodash.memoize@4.1.2: + resolution: 
{integrity: sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==} + dev: true + + /lodash.merge@4.6.2: + resolution: {integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==} + dev: true + + /lodash.sortby@4.7.0: + resolution: {integrity: sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA==} + dev: true + + /lodash@4.17.21: + resolution: {integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==} + dev: true + + /log-symbols@4.1.0: + resolution: {integrity: sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==} + engines: {node: '>=10'} + dependencies: + chalk: 4.1.2 + is-unicode-supported: 0.1.0 + dev: true + + /log-update@4.0.0: + resolution: {integrity: sha512-9fkkDevMefjg0mmzWFBW8YkFP91OrizzkW3diF7CpG+S2EYdy4+TVfGwz1zeF8x7hCx1ovSPTOE9Ngib74qqUg==} + engines: {node: '>=10'} + dependencies: + ansi-escapes: 4.3.2 + cli-cursor: 3.1.0 + slice-ansi: 4.0.0 + wrap-ansi: 6.2.0 + dev: true + + /loose-envify@1.4.0: + resolution: {integrity: sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==} + hasBin: true + dependencies: + js-tokens: 4.0.0 + dev: true + + /lower-case-first@2.0.2: + resolution: {integrity: sha512-EVm/rR94FJTZi3zefZ82fLWab+GX14LJN4HrWBcuo6Evmsl9hEfnqxgcHCKb9q+mNf6EVdsjx/qucYFIIB84pg==} + dependencies: + tslib: 2.5.0 + dev: true + + /lower-case@2.0.2: + resolution: {integrity: sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==} + dependencies: + tslib: 2.5.0 + dev: true + + /lru-cache@5.1.1: + resolution: {integrity: sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==} + dependencies: + yallist: 3.1.1 + dev: true + + /lru-cache@6.0.0: + resolution: {integrity: sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==} + engines: {node: '>=10'} + dependencies: + yallist: 4.0.0 + dev: true + + /lunr@2.3.9: + resolution: {integrity: sha512-zTU3DaZaF3Rt9rhN3uBMGQD3dD2/vFQqnvZCDv4dl5iOzq2IZQqTxu90r4E5J+nP70J3ilqVCrbho2eWaeW8Ow==} + dev: true + + /make-dir@3.1.0: + resolution: {integrity: sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==} + engines: {node: '>=8'} + dependencies: + semver: 6.3.0 + dev: true + + /make-error@1.3.6: + resolution: {integrity: sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==} + dev: true + + /makeerror@1.0.12: + resolution: {integrity: sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==} + dependencies: + tmpl: 1.0.5 + dev: true + + /map-cache@0.2.2: + resolution: {integrity: sha512-8y/eV9QQZCiyn1SprXSrCmqJN0yNRATe+PO8ztwqrvrbdRLA3eYJF0yaR0YayLWkMbsQSKWS9N2gPcGEc4UsZg==} + engines: {node: '>=0.10.0'} + dev: true + + /marked@4.2.2: + resolution: {integrity: sha512-JjBTFTAvuTgANXx82a5vzK9JLSMoV6V3LBVn4Uhdso6t7vXrGx7g1Cd2r6NYSsxrYbQGFCMqBDhFHyK5q2UvcQ==} + engines: {node: '>= 12'} + hasBin: true + dev: true + + /merge-stream@2.0.0: + resolution: {integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==} + dev: true + + /merge2@1.4.1: + resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} + engines: {node: '>= 8'} + dev: 
true + + /meros@1.2.1(@types/node@18.6.2): + resolution: {integrity: sha512-R2f/jxYqCAGI19KhAvaxSOxALBMkaXWH2a7rOyqQw+ZmizX5bKkEYWLzdhC+U82ZVVPVp6MCXe3EkVligh+12g==} + engines: {node: '>=13'} + peerDependencies: + '@types/node': '>=13' + peerDependenciesMeta: + '@types/node': + optional: true + dependencies: + '@types/node': 18.6.2 + dev: true + + /micromatch@4.0.5: + resolution: {integrity: sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==} + engines: {node: '>=8.6'} + dependencies: + braces: 3.0.2 + picomatch: 2.3.1 + dev: true + + /mime-db@1.52.0: + resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} + engines: {node: '>= 0.6'} + + /mime-types@2.1.35: + resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} + engines: {node: '>= 0.6'} + dependencies: + mime-db: 1.52.0 + + /mimic-fn@2.1.0: + resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==} + engines: {node: '>=6'} + dev: true + + /minimatch@3.1.2: + resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==} + dependencies: + brace-expansion: 1.1.11 + dev: true + + /minimatch@4.2.1: + resolution: {integrity: sha512-9Uq1ChtSZO+Mxa/CL1eGizn2vRn3MlLgzhT0Iz8zaY8NdvxvB0d5QdPFmCKf7JKA9Lerx5vRrnwO03jsSfGG9g==} + engines: {node: '>=10'} + dependencies: + brace-expansion: 1.1.11 + dev: true + + /minimatch@5.1.0: + resolution: {integrity: sha512-9TPBGGak4nHfGZsPBohm9AWg6NoT7QTCehS3BIJABslyZbzxfV78QM2Y6+i741OPZIafFAaiiEMh5OyIrJPgtg==} + engines: {node: '>=10'} + dependencies: + brace-expansion: 2.0.1 + dev: true + + /minimist@1.2.7: + resolution: {integrity: sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g==} + dev: true + + /ms@2.0.0: + resolution: {integrity: sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==} + dev: true + + /ms@2.1.2: + resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==} + dev: true + + /ms@2.1.3: + resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} + dev: true + + /mute-stream@0.0.8: + resolution: {integrity: sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==} + dev: true + + /mz@2.7.0: + resolution: {integrity: sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==} + dependencies: + any-promise: 1.3.0 + object-assign: 4.1.1 + thenify-all: 1.6.0 + dev: true + + /natural-compare@1.4.0: + resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==} + dev: true + + /neo-async@2.6.2: + resolution: {integrity: sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==} + dev: true + + /no-case@3.0.4: + resolution: {integrity: sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==} + dependencies: + lower-case: 2.0.2 + tslib: 2.5.0 + dev: true + + /node-fetch@2.6.7: + resolution: {integrity: sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==} + engines: {node: 4.x || >=6.0.0} + peerDependencies: + encoding: ^0.1.0 + 
peerDependenciesMeta: + encoding: + optional: true + dependencies: + whatwg-url: 5.0.0 + dev: true + + /node-fetch@2.6.9: + resolution: {integrity: sha512-DJm/CJkZkRjKKj4Zi4BsKVZh3ValV5IR5s7LVZnW+6YMh0W1BfNA8XSs6DLMGYlId5F3KnA70uu2qepcR08Qqg==} + engines: {node: 4.x || >=6.0.0} + peerDependencies: + encoding: ^0.1.0 + peerDependenciesMeta: + encoding: + optional: true + dependencies: + whatwg-url: 5.0.0 + dev: true + + /node-int64@0.4.0: + resolution: {integrity: sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==} + dev: true + + /node-releases@2.0.6: + resolution: {integrity: sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg==} + dev: true + + /normalize-path@2.1.1: + resolution: {integrity: sha512-3pKJwH184Xo/lnH6oyP1q2pMd7HcypqqmRs91/6/i2CGtWwIKGCkOOMTm/zXbgTEWHw1uNpNi/igc3ePOYHb6w==} + engines: {node: '>=0.10.0'} + dependencies: + remove-trailing-separator: 1.1.0 + dev: true + + /normalize-path@3.0.0: + resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} + engines: {node: '>=0.10.0'} + dev: true + + /npm-run-path@4.0.1: + resolution: {integrity: sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==} + engines: {node: '>=8'} + dependencies: + path-key: 3.1.1 + dev: true + + /nullthrows@1.1.1: + resolution: {integrity: sha512-2vPPEi+Z7WqML2jZYddDIfy5Dqb0r2fze2zTxNNknZaFpVHU3mFB3R+DWeJWGVx0ecvttSGlJTI+WG+8Z4cDWw==} + dev: true + + /object-assign@4.1.1: + resolution: {integrity: sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==} + engines: {node: '>=0.10.0'} + dev: true + + /object-inspect@1.12.2: + resolution: {integrity: sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==} + dev: true + + /object-keys@1.1.1: + resolution: {integrity: sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==} + engines: {node: '>= 0.4'} + dev: true + + /object.assign@4.1.4: + resolution: {integrity: sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.2 + define-properties: 1.1.4 + has-symbols: 1.0.3 + object-keys: 1.1.1 + dev: true + + /object.entries@1.1.5: + resolution: {integrity: sha512-TyxmjUoZggd4OrrU1W66FMDG6CuqJxsFvymeyXI51+vQLN67zYfZseptRge703kKQdo4uccgAKebXFcRCzk4+g==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.2 + define-properties: 1.1.4 + es-abstract: 1.20.4 + dev: true + + /object.values@1.1.5: + resolution: {integrity: sha512-QUZRW0ilQ3PnPpbNtgdNV1PDbEqLIiSFB3l+EnGtBQ/8SUTLj1PZwtQHABZtLgwpJZTSZhuGLOGk57Drx2IvYg==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.2 + define-properties: 1.1.4 + es-abstract: 1.20.4 + dev: true + + /once@1.4.0: + resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} + dependencies: + wrappy: 1.0.2 + dev: true + + /onetime@5.1.2: + resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==} + engines: {node: '>=6'} + dependencies: + mimic-fn: 2.1.0 + dev: true + + /optionator@0.9.1: + resolution: {integrity: sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==} + engines: {node: '>= 0.8.0'} + dependencies: + deep-is: 0.1.4 + 
fast-levenshtein: 2.0.6 + levn: 0.4.1 + prelude-ls: 1.2.1 + type-check: 0.4.0 + word-wrap: 1.2.3 + dev: true + + /ora@5.4.1: + resolution: {integrity: sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ==} + engines: {node: '>=10'} + dependencies: + bl: 4.1.0 + chalk: 4.1.2 + cli-cursor: 3.1.0 + cli-spinners: 2.7.0 + is-interactive: 1.0.0 + is-unicode-supported: 0.1.0 + log-symbols: 4.1.0 + strip-ansi: 6.0.1 + wcwidth: 1.0.1 + dev: true + + /os-tmpdir@1.0.2: + resolution: {integrity: sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g==} + engines: {node: '>=0.10.0'} + dev: true + + /p-limit@2.3.0: + resolution: {integrity: sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==} + engines: {node: '>=6'} + dependencies: + p-try: 2.2.0 + dev: true + + /p-limit@3.1.0: + resolution: {integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==} + engines: {node: '>=10'} + dependencies: + yocto-queue: 0.1.0 + dev: true + + /p-locate@4.1.0: + resolution: {integrity: sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==} + engines: {node: '>=8'} + dependencies: + p-limit: 2.3.0 + dev: true + + /p-locate@5.0.0: + resolution: {integrity: sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==} + engines: {node: '>=10'} + dependencies: + p-limit: 3.1.0 + dev: true + + /p-map@4.0.0: + resolution: {integrity: sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==} + engines: {node: '>=10'} + dependencies: + aggregate-error: 3.1.0 + dev: true + + /p-try@2.2.0: + resolution: {integrity: sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==} + engines: {node: '>=6'} + dev: true + + /param-case@3.0.4: + resolution: {integrity: sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A==} + dependencies: + dot-case: 3.0.4 + tslib: 2.5.0 + dev: true + + /parent-module@1.0.1: + resolution: {integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==} + engines: {node: '>=6'} + dependencies: + callsites: 3.1.0 + dev: true + + /parse-filepath@1.0.2: + resolution: {integrity: sha512-FwdRXKCohSVeXqwtYonZTXtbGJKrn+HNyWDYVcp5yuJlesTwNH4rsmRZ+GrKAPJ5bLpRxESMeS+Rl0VCHRvB2Q==} + engines: {node: '>=0.8'} + dependencies: + is-absolute: 1.0.0 + map-cache: 0.2.2 + path-root: 0.1.1 + dev: true + + /parse-json@5.2.0: + resolution: {integrity: sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==} + engines: {node: '>=8'} + dependencies: + '@babel/code-frame': 7.18.6 + error-ex: 1.3.2 + json-parse-even-better-errors: 2.3.1 + lines-and-columns: 1.2.4 + dev: true + + /pascal-case@3.1.2: + resolution: {integrity: sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g==} + dependencies: + no-case: 3.0.4 + tslib: 2.5.0 + dev: true + + /path-case@3.0.4: + resolution: {integrity: sha512-qO4qCFjXqVTrcbPt/hQfhTQ+VhFsqNKOPtytgNKkKxSoEp3XPUQ8ObFuePylOIok5gjn69ry8XiULxCwot3Wfg==} + dependencies: + dot-case: 3.0.4 + tslib: 2.5.0 + dev: true + + /path-exists@4.0.0: + resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==} + engines: {node: '>=8'} + dev: true + + 
/path-is-absolute@1.0.1: + resolution: {integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==} + engines: {node: '>=0.10.0'} + dev: true + + /path-key@3.1.1: + resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} + engines: {node: '>=8'} + dev: true + + /path-parse@1.0.7: + resolution: {integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==} + dev: true + + /path-root-regex@0.1.2: + resolution: {integrity: sha512-4GlJ6rZDhQZFE0DPVKh0e9jmZ5egZfxTkp7bcRDuPlJXbAwhxcl2dINPUAsjLdejqaLsCeg8axcLjIbvBjN4pQ==} + engines: {node: '>=0.10.0'} + dev: true + + /path-root@0.1.1: + resolution: {integrity: sha512-QLcPegTHF11axjfojBIoDygmS2E3Lf+8+jI6wOVmNVenrKSo3mFdSGiIgdSHenczw3wPtlVMQaFVwGmM7BJdtg==} + engines: {node: '>=0.10.0'} + dependencies: + path-root-regex: 0.1.2 + dev: true + + /path-type@4.0.0: + resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==} + engines: {node: '>=8'} + dev: true + + /picocolors@1.0.0: + resolution: {integrity: sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==} + dev: true + + /picomatch@2.3.1: + resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} + engines: {node: '>=8.6'} + dev: true + + /pirates@4.0.5: + resolution: {integrity: sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ==} + engines: {node: '>= 6'} + dev: true + + /pkg-dir@4.2.0: + resolution: {integrity: sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==} + engines: {node: '>=8'} + dependencies: + find-up: 4.1.0 + dev: true + + /postcss-load-config@3.1.4(ts-node@10.9.1): + resolution: {integrity: sha512-6DiM4E7v4coTE4uzA8U//WhtPwyhiim3eyjEMFCnUpzbrkK9wJHgKDT2mR+HbtSrd/NubVaYTOpSpjUl8NQeRg==} + engines: {node: '>= 10'} + peerDependencies: + postcss: '>=8.0.9' + ts-node: '>=9.0.0' + peerDependenciesMeta: + postcss: + optional: true + ts-node: + optional: true + dependencies: + lilconfig: 2.0.6 + ts-node: 10.9.1(@types/node@18.6.2)(typescript@4.8.2) + yaml: 1.10.2 + dev: true + + /prelude-ls@1.2.1: + resolution: {integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==} + engines: {node: '>= 0.8.0'} + dev: true + + /prettier@2.6.2: + resolution: {integrity: sha512-PkUpF+qoXTqhOeWL9fu7As8LXsIUZ1WYaJiY/a7McAQzxjk82OF0tibkFXVCDImZtWxbvojFjerkiLb0/q8mew==} + engines: {node: '>=10.13.0'} + hasBin: true + dev: true + + /pretty-format@28.1.3: + resolution: {integrity: sha512-8gFb/To0OmxHR9+ZTb14Df2vNxdGCX8g1xWGUTqUw5TiZvcQf5sHKObd5UcPyLLyowNwDAMTF3XWOG1B6mxl1Q==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + dependencies: + '@jest/schemas': 28.1.3 + ansi-regex: 5.0.1 + ansi-styles: 5.2.0 + react-is: 18.2.0 + dev: true + + /promise@7.3.1: + resolution: {integrity: sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg==} + dependencies: + asap: 2.0.6 + dev: true + + /prompts@2.4.2: + resolution: {integrity: sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==} + engines: {node: '>= 6'} + dependencies: + kleur: 3.0.3 + sisteransi: 1.0.5 + dev: true + + /punycode@2.1.1: + resolution: {integrity: 
sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==} + engines: {node: '>=6'} + dev: true + + /pvtsutils@1.3.2: + resolution: {integrity: sha512-+Ipe2iNUyrZz+8K/2IOo+kKikdtfhRKzNpQbruF2URmqPtoqAs8g3xS7TJvFF2GcPXjh7DkqMnpVveRFq4PgEQ==} + dependencies: + tslib: 2.5.0 + dev: true + + /pvutils@1.1.3: + resolution: {integrity: sha512-pMpnA0qRdFp32b1sJl1wOJNxZLQ2cbQx+k6tjNtZ8CpvVhNqEPRgivZ2WOUev2YMajecdH7ctUPDvEe87nariQ==} + engines: {node: '>=6.0.0'} + dev: true + + /queue-microtask@1.2.3: + resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} + dev: true + + /randombytes@2.1.0: + resolution: {integrity: sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==} + dependencies: + safe-buffer: 5.2.1 + dev: true + + /react-is@18.2.0: + resolution: {integrity: sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w==} + dev: true + + /readable-stream@3.6.0: + resolution: {integrity: sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==} + engines: {node: '>= 6'} + dependencies: + inherits: 2.0.4 + string_decoder: 1.3.0 + util-deprecate: 1.0.2 + dev: true + + /readdirp@3.6.0: + resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==} + engines: {node: '>=8.10.0'} + dependencies: + picomatch: 2.3.1 + dev: true + + /regenerator-runtime@0.13.11: + resolution: {integrity: sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg==} + dev: true + + /regexp.prototype.flags@1.4.3: + resolution: {integrity: sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.2 + define-properties: 1.1.4 + functions-have-names: 1.2.3 + dev: true + + /regexpp@3.2.0: + resolution: {integrity: sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==} + engines: {node: '>=8'} + dev: true + + /relay-runtime@12.0.0: + resolution: {integrity: sha512-QU6JKr1tMsry22DXNy9Whsq5rmvwr3LSZiiWV/9+DFpuTWvp+WFhobWMc8TC4OjKFfNhEZy7mOiqUAn5atQtug==} + dependencies: + '@babel/runtime': 7.20.13 + fbjs: 3.0.4 + invariant: 2.2.4 + transitivePeerDependencies: + - encoding + dev: true + + /remedial@1.0.8: + resolution: {integrity: sha512-/62tYiOe6DzS5BqVsNpH/nkGlX45C/Sp6V+NtiN6JQNS1Viay7cWkazmRkrQrdFj2eshDe96SIQNIoMxqhzBOg==} + dev: true + + /remove-trailing-separator@1.1.0: + resolution: {integrity: sha512-/hS+Y0u3aOfIETiaiirUFwDBDzmXPvO+jAfKTitUngIPzdKc6Z0LoFjM/CK5PL4C+eKwHohlHAb6H0VFfmmUsw==} + dev: true + + /remove-trailing-spaces@1.0.8: + resolution: {integrity: sha512-O3vsMYfWighyFbTd8hk8VaSj9UAGENxAtX+//ugIst2RMk5e03h6RoIS+0ylsFxY1gvmPuAY/PO4It+gPEeySA==} + dev: true + + /require-directory@2.1.1: + resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==} + engines: {node: '>=0.10.0'} + dev: true + + /require-main-filename@2.0.0: + resolution: {integrity: sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==} + dev: true + + /resolve-cwd@3.0.0: + resolution: {integrity: sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==} + engines: {node: '>=8'} + dependencies: + resolve-from: 5.0.0 + dev: true + + /resolve-from@4.0.0: + 
resolution: {integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==} + engines: {node: '>=4'} + dev: true + + /resolve-from@5.0.0: + resolution: {integrity: sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==} + engines: {node: '>=8'} + dev: true + + /resolve.exports@1.1.0: + resolution: {integrity: sha512-J1l+Zxxp4XK3LUDZ9m60LRJF/mAe4z6a4xyabPHk7pvK5t35dACV32iIjJDFeWZFfZlO29w6SZ67knR0tHzJtQ==} + engines: {node: '>=10'} + dev: true + + /resolve@1.22.1: + resolution: {integrity: sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw==} + hasBin: true + dependencies: + is-core-module: 2.11.0 + path-parse: 1.0.7 + supports-preserve-symlinks-flag: 1.0.0 + dev: true + + /restore-cursor@3.1.0: + resolution: {integrity: sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==} + engines: {node: '>=8'} + dependencies: + onetime: 5.1.2 + signal-exit: 3.0.7 + dev: true + + /reusify@1.0.4: + resolution: {integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==} + engines: {iojs: '>=1.0.0', node: '>=0.10.0'} + dev: true + + /rfdc@1.3.0: + resolution: {integrity: sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA==} + dev: true + + /rimraf@3.0.2: + resolution: {integrity: sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==} + hasBin: true + dependencies: + glob: 7.2.3 + dev: true + + /rollup@2.79.1: + resolution: {integrity: sha512-uKxbd0IhMZOhjAiD5oAFp7BqvkA4Dv47qpOCtaNvng4HBwdbWtdOh8f5nZNuk2rp51PMGk3bzfWu5oayNEuYnw==} + engines: {node: '>=10.0.0'} + hasBin: true + optionalDependencies: + fsevents: 2.3.2 + dev: true + + /run-async@2.4.1: + resolution: {integrity: sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ==} + engines: {node: '>=0.12.0'} + dev: true + + /run-parallel@1.2.0: + resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} + dependencies: + queue-microtask: 1.2.3 + dev: true + + /rxjs@7.8.0: + resolution: {integrity: sha512-F2+gxDshqmIub1KdvZkaEfGDwLNpPvk9Fs6LD/MyQxNgMds/WH9OdDDXOmxUZpME+iSK3rQCctkL0DYyytUqMg==} + dependencies: + tslib: 2.5.0 + dev: true + + /safe-buffer@5.2.1: + resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} + dev: true + + /safe-regex-test@1.0.0: + resolution: {integrity: sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA==} + dependencies: + call-bind: 1.0.2 + get-intrinsic: 1.1.3 + is-regex: 1.1.4 + dev: true + + /safer-buffer@2.1.2: + resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==} + dev: true + + /schema-utils@3.1.2: + resolution: {integrity: sha512-pvjEHOgWc9OWA/f/DE3ohBWTD6EleVLf7iFUkoSwAxttdBhB9QUebQgxER2kWueOvRJXPHNnyrvvh9eZINB8Eg==} + engines: {node: '>= 10.13.0'} + dependencies: + '@types/json-schema': 7.0.11 + ajv: 6.12.6 + ajv-keywords: 3.5.2(ajv@6.12.6) + dev: true + + /scuid@1.1.0: + resolution: {integrity: sha512-MuCAyrGZcTLfQoH2XoBlQ8C6bzwN88XT/0slOGz0pn8+gIP85BOAfYa44ZXQUTOwRwPU0QvgU+V+OSajl/59Xg==} + dev: true + + /semver@6.3.0: + resolution: {integrity: sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==} + 
hasBin: true + dev: true + + /semver@7.3.8: + resolution: {integrity: sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==} + engines: {node: '>=10'} + hasBin: true + dependencies: + lru-cache: 6.0.0 + dev: true + + /sentence-case@3.0.4: + resolution: {integrity: sha512-8LS0JInaQMCRoQ7YUytAo/xUu5W2XnQxV2HI/6uM6U7CITS1RqPElr30V6uIqyMKM9lJGRVFy5/4CuzcixNYSg==} + dependencies: + no-case: 3.0.4 + tslib: 2.5.0 + upper-case-first: 2.0.2 + dev: true + + /serialize-javascript@6.0.1: + resolution: {integrity: sha512-owoXEFjWRllis8/M1Q+Cw5k8ZH40e3zhp/ovX+Xr/vi1qj6QesbyXXViFbpNvWvPNAD62SutwEXavefrLJWj7w==} + dependencies: + randombytes: 2.1.0 + dev: true + + /set-blocking@2.0.0: + resolution: {integrity: sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==} + dev: true + + /setimmediate@1.0.5: + resolution: {integrity: sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==} + dev: true + + /shebang-command@2.0.0: + resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} + engines: {node: '>=8'} + dependencies: + shebang-regex: 3.0.0 + dev: true + + /shebang-regex@3.0.0: + resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} + engines: {node: '>=8'} + dev: true + + /shell-quote@1.8.0: + resolution: {integrity: sha512-QHsz8GgQIGKlRi24yFc6a6lN69Idnx634w49ay6+jA5yFh7a1UY+4Rp6HPx/L/1zcEDPEij8cIsiqR6bQsE5VQ==} + dev: true + + /shiki@0.11.1: + resolution: {integrity: sha512-EugY9VASFuDqOexOgXR18ZV+TbFrQHeCpEYaXamO+SZlsnT/2LxuLBX25GGtIrwaEVFXUAbUQ601SWE2rMwWHA==} + dependencies: + jsonc-parser: 3.2.0 + vscode-oniguruma: 1.6.2 + vscode-textmate: 6.0.0 + dev: true + + /side-channel@1.0.4: + resolution: {integrity: sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==} + dependencies: + call-bind: 1.0.2 + get-intrinsic: 1.1.3 + object-inspect: 1.12.2 + dev: true + + /signal-exit@3.0.7: + resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==} + dev: true + + /signedsource@1.0.0: + resolution: {integrity: sha512-6+eerH9fEnNmi/hyM1DXcRK3pWdoMQtlkQ+ns0ntzunjKqp5i3sKCc80ym8Fib3iaYhdJUOPdhlJWj1tvge2Ww==} + dev: true + + /sisteransi@1.0.5: + resolution: {integrity: sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==} + dev: true + + /slash@3.0.0: + resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} + engines: {node: '>=8'} + dev: true + + /slice-ansi@3.0.0: + resolution: {integrity: sha512-pSyv7bSTC7ig9Dcgbw9AuRNUb5k5V6oDudjZoMBSr13qpLBG7tB+zgCkARjq7xIUgdz5P1Qe8u+rSGdouOOIyQ==} + engines: {node: '>=8'} + dependencies: + ansi-styles: 4.3.0 + astral-regex: 2.0.0 + is-fullwidth-code-point: 3.0.0 + dev: true + + /slice-ansi@4.0.0: + resolution: {integrity: sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==} + engines: {node: '>=10'} + dependencies: + ansi-styles: 4.3.0 + astral-regex: 2.0.0 + is-fullwidth-code-point: 3.0.0 + dev: true + + /snake-case@3.0.4: + resolution: {integrity: sha512-LAOh4z89bGQvl9pFfNF8V146i7o7/CqFPbqzYgP+yYzDIDeS9HaNFtXABamRW+AQzEVODcvE79ljJ+8a9YSdMg==} + dependencies: + dot-case: 3.0.4 + tslib: 2.5.0 + dev: true + + /source-map-support@0.5.13: + resolution: {integrity: 
sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w==} + dependencies: + buffer-from: 1.1.2 + source-map: 0.6.1 + dev: true + + /source-map-support@0.5.21: + resolution: {integrity: sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==} + dependencies: + buffer-from: 1.1.2 + source-map: 0.6.1 + dev: true + + /source-map@0.6.1: + resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==} + engines: {node: '>=0.10.0'} + dev: true + + /source-map@0.8.0-beta.0: + resolution: {integrity: sha512-2ymg6oRBpebeZi9UUNsgQ89bhx01TcTkmNTGnNO88imTmbSgy4nfujrgVEFKWpMTEGA11EDkTt7mqObTPdigIA==} + engines: {node: '>= 8'} + dependencies: + whatwg-url: 7.1.0 + dev: true + + /sponge-case@1.0.1: + resolution: {integrity: sha512-dblb9Et4DAtiZ5YSUZHLl4XhH4uK80GhAZrVXdN4O2P4gQ40Wa5UIOPUHlA/nFd2PLblBZWUioLMMAVrgpoYcA==} + dependencies: + tslib: 2.5.0 + dev: true + + /sprintf-js@1.0.3: + resolution: {integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==} + dev: true + + /stack-utils@2.0.5: + resolution: {integrity: sha512-xrQcmYhOsn/1kX+Vraq+7j4oE2j/6BFscZ0etmYg81xuM8Gq0022Pxb8+IqgOFUIaxHs0KaSb7T1+OegiNrNFA==} + engines: {node: '>=10'} + dependencies: + escape-string-regexp: 2.0.0 + dev: true + + /streamsearch@1.1.0: + resolution: {integrity: sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg==} + engines: {node: '>=10.0.0'} + dev: true + + /string-env-interpolation@1.0.1: + resolution: {integrity: sha512-78lwMoCcn0nNu8LszbP1UA7g55OeE4v7rCeWnM5B453rnNr4aq+5it3FEYtZrSEiMvHZOZ9Jlqb0OD0M2VInqg==} + dev: true + + /string-length@4.0.2: + resolution: {integrity: sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ==} + engines: {node: '>=10'} + dependencies: + char-regex: 1.0.2 + strip-ansi: 6.0.1 + dev: true + + /string-width@4.2.3: + resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} + engines: {node: '>=8'} + dependencies: + emoji-regex: 8.0.0 + is-fullwidth-code-point: 3.0.0 + strip-ansi: 6.0.1 + dev: true + + /string.prototype.trimend@1.0.5: + resolution: {integrity: sha512-I7RGvmjV4pJ7O3kdf+LXFpVfdNOxtCW/2C8f6jNiW4+PQchwxkCDzlk1/7p+Wl4bqFIZeF47qAHXLuHHWKAxog==} + dependencies: + call-bind: 1.0.2 + define-properties: 1.1.4 + es-abstract: 1.20.4 + dev: true + + /string.prototype.trimstart@1.0.5: + resolution: {integrity: sha512-THx16TJCGlsN0o6dl2o6ncWUsdgnLRSA23rRE5pyGBw/mLr3Ej/R2LaqCtgP8VNMGZsvMWnf9ooZPyY2bHvUFg==} + dependencies: + call-bind: 1.0.2 + define-properties: 1.1.4 + es-abstract: 1.20.4 + dev: true + + /string_decoder@1.3.0: + resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==} + dependencies: + safe-buffer: 5.2.1 + dev: true + + /strip-ansi@6.0.1: + resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} + engines: {node: '>=8'} + dependencies: + ansi-regex: 5.0.1 + dev: true + + /strip-bom@3.0.0: + resolution: {integrity: sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==} + engines: {node: '>=4'} + dev: true + + /strip-bom@4.0.0: + resolution: {integrity: sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==} + engines: {node: '>=8'} + dev: 
true + + /strip-final-newline@2.0.0: + resolution: {integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==} + engines: {node: '>=6'} + dev: true + + /strip-json-comments@3.1.1: + resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==} + engines: {node: '>=8'} + dev: true + + /sucrase@3.28.0: + resolution: {integrity: sha512-TK9600YInjuiIhVM3729rH4ZKPOsGeyXUwY+Ugu9eilNbdTFyHr6XcAGYbRVZPDgWj6tgI7bx95aaJjHnbffag==} + engines: {node: '>=8'} + hasBin: true + dependencies: + commander: 4.1.1 + glob: 7.1.6 + lines-and-columns: 1.2.4 + mz: 2.7.0 + pirates: 4.0.5 + ts-interface-checker: 0.1.13 + dev: true + + /supports-color@5.5.0: + resolution: {integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==} + engines: {node: '>=4'} + dependencies: + has-flag: 3.0.0 + dev: true + + /supports-color@7.2.0: + resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==} + engines: {node: '>=8'} + dependencies: + has-flag: 4.0.0 + dev: true + + /supports-color@8.1.1: + resolution: {integrity: sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==} + engines: {node: '>=10'} + dependencies: + has-flag: 4.0.0 + dev: true + + /supports-hyperlinks@2.3.0: + resolution: {integrity: sha512-RpsAZlpWcDwOPQA22aCH4J0t7L8JmAvsCxfOSEwm7cQs3LshN36QaTkwd70DnBOXDWGssw2eUoc8CaRWT0XunA==} + engines: {node: '>=8'} + dependencies: + has-flag: 4.0.0 + supports-color: 7.2.0 + dev: true + + /supports-preserve-symlinks-flag@1.0.0: + resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} + engines: {node: '>= 0.4'} + dev: true + + /swap-case@2.0.2: + resolution: {integrity: sha512-kc6S2YS/2yXbtkSMunBtKdah4VFETZ8Oh6ONSmSd9bRxhqTrtARUCBUiWXH3xVPpvR7tz2CSnkuXVE42EcGnMw==} + dependencies: + tslib: 2.5.0 + dev: true + + /tapable@2.2.1: + resolution: {integrity: sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==} + engines: {node: '>=6'} + dev: true + + /terminal-link@2.1.1: + resolution: {integrity: sha512-un0FmiRUQNr5PJqy9kP7c40F5BOfpGlYTrxonDChEZB7pzZxRNp/bt+ymiy9/npwXya9KH99nJ/GXFIiUkYGFQ==} + engines: {node: '>=8'} + dependencies: + ansi-escapes: 4.3.2 + supports-hyperlinks: 2.3.0 + dev: true + + /terser-webpack-plugin@5.3.7(esbuild@0.15.13)(webpack@5.80.0): + resolution: {integrity: sha512-AfKwIktyP7Cu50xNjXF/6Qb5lBNzYaWpU6YfoX3uZicTx0zTy0stDDCsvjDapKsSDvOeWo5MEq4TmdBy2cNoHw==} + engines: {node: '>= 10.13.0'} + peerDependencies: + '@swc/core': '*' + esbuild: '*' + uglify-js: '*' + webpack: ^5.1.0 + peerDependenciesMeta: + '@swc/core': + optional: true + esbuild: + optional: true + uglify-js: + optional: true + dependencies: + '@jridgewell/trace-mapping': 0.3.17 + esbuild: 0.15.13 + jest-worker: 27.5.1 + schema-utils: 3.1.2 + serialize-javascript: 6.0.1 + terser: 5.17.1 + webpack: 5.80.0(esbuild@0.15.13) + dev: true + + /terser@5.17.1: + resolution: {integrity: sha512-hVl35zClmpisy6oaoKALOpS0rDYLxRFLHhRuDlEGTKey9qHjS1w9GMORjuwIMt70Wan4lwsLYyWDVnWgF+KUEw==} + engines: {node: '>=10'} + hasBin: true + dependencies: + '@jridgewell/source-map': 0.3.3 + acorn: 8.8.1 + commander: 2.20.3 + source-map-support: 0.5.21 + dev: true + + /test-exclude@6.0.0: + resolution: {integrity: 
sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==} + engines: {node: '>=8'} + dependencies: + '@istanbuljs/schema': 0.1.3 + glob: 7.2.3 + minimatch: 3.1.2 + dev: true + + /text-table@0.2.0: + resolution: {integrity: sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==} + dev: true + + /thenify-all@1.6.0: + resolution: {integrity: sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==} + engines: {node: '>=0.8'} + dependencies: + thenify: 3.3.1 + dev: true + + /thenify@3.3.1: + resolution: {integrity: sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==} + dependencies: + any-promise: 1.3.0 + dev: true + + /through@2.3.8: + resolution: {integrity: sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==} + dev: true + + /title-case@3.0.3: + resolution: {integrity: sha512-e1zGYRvbffpcHIrnuqT0Dh+gEJtDaxDSoG4JAIpq4oDFyooziLBIiYQv0GBT4FUAnUop5uZ1hiIAj7oAF6sOCA==} + dependencies: + tslib: 2.5.0 + dev: true + + /tmp@0.0.33: + resolution: {integrity: sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==} + engines: {node: '>=0.6.0'} + dependencies: + os-tmpdir: 1.0.2 + dev: true + + /tmpl@1.0.5: + resolution: {integrity: sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==} + dev: true + + /to-fast-properties@2.0.0: + resolution: {integrity: sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==} + engines: {node: '>=4'} + dev: true + + /to-regex-range@5.0.1: + resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} + engines: {node: '>=8.0'} + dependencies: + is-number: 7.0.0 + dev: true + + /tr46@0.0.3: + resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==} + dev: true + + /tr46@1.0.1: + resolution: {integrity: sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA==} + dependencies: + punycode: 2.1.1 + dev: true + + /tree-kill@1.2.2: + resolution: {integrity: sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==} + hasBin: true + dev: true + + /ts-interface-checker@0.1.13: + resolution: {integrity: sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==} + dev: true + + /ts-jest@28.0.8(@babel/core@7.19.6)(esbuild@0.15.13)(jest@28.1.3)(typescript@4.8.2): + resolution: {integrity: sha512-5FaG0lXmRPzApix8oFG8RKjAz4ehtm8yMKOTy5HX3fY6W8kmvOrmcY0hKDElW52FJov+clhUbrKAqofnj4mXTg==} + engines: {node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0} + hasBin: true + peerDependencies: + '@babel/core': '>=7.0.0-beta.0 <8' + '@jest/types': ^28.0.0 + babel-jest: ^28.0.0 + esbuild: '*' + jest: ^28.0.0 + typescript: '>=4.3' + peerDependenciesMeta: + '@babel/core': + optional: true + '@jest/types': + optional: true + babel-jest: + optional: true + esbuild: + optional: true + dependencies: + '@babel/core': 7.19.6 + bs-logger: 0.2.6 + esbuild: 0.15.13 + fast-json-stable-stringify: 2.1.0 + jest: 28.1.3(@types/node@18.6.2)(ts-node@10.9.1) + jest-util: 28.1.3 + json5: 2.2.1 + lodash.memoize: 4.1.2 + make-error: 1.3.6 + semver: 7.3.8 + typescript: 4.8.2 + yargs-parser: 21.1.1 + dev: true + + 
/ts-loader@9.3.1(typescript@4.8.2)(webpack@5.80.0): + resolution: {integrity: sha512-OkyShkcZTsTwyS3Kt7a4rsT/t2qvEVQuKCTg4LJmpj9fhFR7ukGdZwV6Qq3tRUkqcXtfGpPR7+hFKHCG/0d3Lw==} + engines: {node: '>=12.0.0'} + peerDependencies: + typescript: '*' + webpack: ^5.0.0 + dependencies: + chalk: 4.1.2 + enhanced-resolve: 5.10.0 + micromatch: 4.0.5 + semver: 7.3.8 + typescript: 4.8.2 + webpack: 5.80.0(esbuild@0.15.13) + dev: true + + /ts-log@2.2.5: + resolution: {integrity: sha512-PGcnJoTBnVGy6yYNFxWVNkdcAuAMstvutN9MgDJIV6L0oG8fB+ZNNy1T+wJzah8RPGor1mZuPQkVfXNDpy9eHA==} + dev: true + + /ts-node@10.9.1(@types/node@18.6.2)(typescript@4.8.2): + resolution: {integrity: sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==} + hasBin: true + peerDependencies: + '@swc/core': '>=1.2.50' + '@swc/wasm': '>=1.2.50' + '@types/node': '*' + typescript: '>=2.7' + peerDependenciesMeta: + '@swc/core': + optional: true + '@swc/wasm': + optional: true + dependencies: + '@cspotcode/source-map-support': 0.8.1 + '@tsconfig/node10': 1.0.9 + '@tsconfig/node12': 1.0.11 + '@tsconfig/node14': 1.0.3 + '@tsconfig/node16': 1.0.3 + '@types/node': 18.6.2 + acorn: 8.8.1 + acorn-walk: 8.2.0 + arg: 4.1.3 + create-require: 1.1.1 + diff: 4.0.2 + make-error: 1.3.6 + typescript: 4.8.2 + v8-compile-cache-lib: 3.0.1 + yn: 3.1.1 + dev: true + + /tsconfig-paths@3.14.1: + resolution: {integrity: sha512-fxDhWnFSLt3VuTwtvJt5fpwxBHg5AdKWMsgcPOOIilyjymcYVZoCQF8fvFRezCNfblEXmi+PcM1eYHeOAgXCOQ==} + dependencies: + '@types/json5': 0.0.29 + json5: 1.0.1 + minimist: 1.2.7 + strip-bom: 3.0.0 + dev: true + + /tslib@1.14.1: + resolution: {integrity: sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==} + dev: true + + /tslib@2.4.1: + resolution: {integrity: sha512-tGyy4dAjRIEwI7BzsB0lynWgOpfqjUdq91XXAlIWD2OwKBH7oCl/GZG/HT4BOHrTlPMOASlMQ7veyTqpmRcrNA==} + dev: true + + /tslib@2.5.0: + resolution: {integrity: sha512-336iVw3rtn2BUK7ORdIAHTyxHGRIHVReokCR3XjbckJMK7ms8FysBfhLR8IXnAgy7T0PTPNBWKiH514FOW/WSg==} + dev: true + + /tsup@6.2.3(ts-node@10.9.1)(typescript@4.8.2): + resolution: {integrity: sha512-J5Pu2Dx0E1wlpIEsVFv9ryzP1pZ1OYsJ2cBHZ7GrKteytNdzaSz5hmLX7/nAxtypq+jVkVvA79d7S83ETgHQ5w==} + engines: {node: '>=14'} + hasBin: true + peerDependencies: + '@swc/core': ^1 + postcss: ^8.4.12 + typescript: ^4.1.0 + peerDependenciesMeta: + '@swc/core': + optional: true + postcss: + optional: true + typescript: + optional: true + dependencies: + bundle-require: 3.1.2(esbuild@0.15.13) + cac: 6.7.14 + chokidar: 3.5.3 + debug: 4.3.4 + esbuild: 0.15.13 + execa: 5.1.1 + globby: 11.1.0 + joycon: 3.1.1 + postcss-load-config: 3.1.4(ts-node@10.9.1) + resolve-from: 5.0.0 + rollup: 2.79.1 + source-map: 0.8.0-beta.0 + sucrase: 3.28.0 + tree-kill: 1.2.2 + typescript: 4.8.2 + transitivePeerDependencies: + - supports-color + - ts-node + dev: true + + /tsutils@3.21.0(typescript@4.8.2): + resolution: {integrity: sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==} + engines: {node: '>= 6'} + peerDependencies: + typescript: '>=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta' + dependencies: + tslib: 1.14.1 + typescript: 4.8.2 + dev: true + + /tweetnacl@1.0.3: + resolution: {integrity: sha512-6rt+RN7aOi1nGMyC4Xa5DdYiukl2UWCbcJft7YhxReBGQD7OAM8Pbxw6YMo4r2diNEA8FEmu32YOn9rhaiE5yw==} + dev: false + + /type-check@0.4.0: + resolution: {integrity: 
sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==} + engines: {node: '>= 0.8.0'} + dependencies: + prelude-ls: 1.2.1 + dev: true + + /type-detect@4.0.8: + resolution: {integrity: sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==} + engines: {node: '>=4'} + dev: true + + /type-fest@0.20.2: + resolution: {integrity: sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==} + engines: {node: '>=10'} + dev: true + + /type-fest@0.21.3: + resolution: {integrity: sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==} + engines: {node: '>=10'} + dev: true + + /typedoc@0.23.20(typescript@4.8.2): + resolution: {integrity: sha512-nfb4Mx05ZZZXux3zPcLuc7+3TVePDW3jTdEBqXdQzJUyEILxoprgPIiTChbvci9crkqNJG9YESmfCptuh9Gn3g==} + engines: {node: '>= 14.14'} + hasBin: true + peerDependencies: + typescript: 4.6.x || 4.7.x || 4.8.x + dependencies: + lunr: 2.3.9 + marked: 4.2.2 + minimatch: 5.1.0 + shiki: 0.11.1 + typescript: 4.8.2 + dev: true + + /typescript@4.8.2: + resolution: {integrity: sha512-C0I1UsrrDHo2fYI5oaCGbSejwX4ch+9Y5jTQELvovfmFkK3HHSZJB8MSJcWLmCUBzQBchCrZ9rMRV6GuNrvGtw==} + engines: {node: '>=4.2.0'} + hasBin: true + dev: true + + /ua-parser-js@0.7.33: + resolution: {integrity: sha512-s8ax/CeZdK9R/56Sui0WM6y9OFREJarMRHqLB2EwkovemBxNQ+Bqu8GAsUnVcXKgphb++ghr/B2BZx4mahujPw==} + dev: true + + /uglify-js@3.17.4: + resolution: {integrity: sha512-T9q82TJI9e/C1TAxYvfb16xO120tMVFZrGA3f9/P4424DNu6ypK103y0GPFVa17yotwSyZW5iYXgjYHkGrJW/g==} + engines: {node: '>=0.8.0'} + hasBin: true + requiresBuild: true + dev: true + optional: true + + /unbox-primitive@1.0.2: + resolution: {integrity: sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==} + dependencies: + call-bind: 1.0.2 + has-bigints: 1.0.2 + has-symbols: 1.0.3 + which-boxed-primitive: 1.0.2 + dev: true + + /unc-path-regex@0.1.2: + resolution: {integrity: sha512-eXL4nmJT7oCpkZsHZUOJo8hcX3GbsiDOa0Qu9F646fi8dT3XuSVopVqAcEiVzSKKH7UoDti23wNX3qGFxcW5Qg==} + engines: {node: '>=0.10.0'} + dev: true + + /universalify@2.0.0: + resolution: {integrity: sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==} + engines: {node: '>= 10.0.0'} + dev: true + + /unixify@1.0.0: + resolution: {integrity: sha512-6bc58dPYhCMHHuwxldQxO3RRNZ4eCogZ/st++0+fcC1nr0jiGUtAdBJ2qzmLQWSxbtz42pWt4QQMiZ9HvZf5cg==} + engines: {node: '>=0.10.0'} + dependencies: + normalize-path: 2.1.1 + dev: true + + /update-browserslist-db@1.0.10(browserslist@4.21.4): + resolution: {integrity: sha512-OztqDenkfFkbSG+tRxBeAnCVPckDBcvibKd35yDONx6OU8N7sqgwc7rCbkJ/WcYtVRZ4ba68d6byhC21GFh7sQ==} + hasBin: true + peerDependencies: + browserslist: '>= 4.21.0' + dependencies: + browserslist: 4.21.4 + escalade: 3.1.1 + picocolors: 1.0.0 + dev: true + + /upper-case-first@2.0.2: + resolution: {integrity: sha512-514ppYHBaKwfJRK/pNC6c/OxfGa0obSnAl106u97Ed0I625Nin96KAjttZF6ZL3e1XLtphxnqrOi9iWgm+u+bg==} + dependencies: + tslib: 2.5.0 + dev: true + + /upper-case@2.0.2: + resolution: {integrity: sha512-KgdgDGJt2TpuwBUIjgG6lzw2GWFRCW9Qkfkiv0DxqHHLYJHmtmdUIKcZd8rHgFSjopVTlw6ggzCm1b8MFQwikg==} + dependencies: + tslib: 2.5.0 + dev: true + + /uri-js@4.4.1: + resolution: {integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==} + dependencies: + punycode: 2.1.1 + dev: true + + /urlpattern-polyfill@6.0.2: + resolution: {integrity: 
sha512-5vZjFlH9ofROmuWmXM9yj2wljYKgWstGwe8YTyiqM7hVum/g9LyCizPZtb3UqsuppVwety9QJmfc42VggLpTgg==} + dependencies: + braces: 3.0.2 + dev: true + + /util-deprecate@1.0.2: + resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} + dev: true + + /v8-compile-cache-lib@3.0.1: + resolution: {integrity: sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==} + dev: true + + /v8-to-istanbul@9.0.1: + resolution: {integrity: sha512-74Y4LqY74kLE6IFyIjPtkSTWzUZmj8tdHT9Ii/26dvQ6K9Dl2NbEfj0XgU2sHCtKgt5VupqhlO/5aWuqS+IY1w==} + engines: {node: '>=10.12.0'} + dependencies: + '@jridgewell/trace-mapping': 0.3.17 + '@types/istanbul-lib-coverage': 2.0.4 + convert-source-map: 1.9.0 + dev: true + + /value-or-promise@1.0.12: + resolution: {integrity: sha512-Z6Uz+TYwEqE7ZN50gwn+1LCVo9ZVrpxRPOhOLnncYkY1ZzOYtrX8Fwf/rFktZ8R5mJms6EZf5TqNOMeZmnPq9Q==} + engines: {node: '>=12'} + dev: true + + /vscode-oniguruma@1.6.2: + resolution: {integrity: sha512-KH8+KKov5eS/9WhofZR8M8dMHWN2gTxjMsG4jd04YhpbPR91fUj7rYQ2/XjeHCJWbg7X++ApRIU9NUwM2vTvLA==} + dev: true + + /vscode-textmate@6.0.0: + resolution: {integrity: sha512-gu73tuZfJgu+mvCSy4UZwd2JXykjK9zAZsfmDeut5dx/1a7FeTk0XwJsSuqQn+cuMCGVbIBfl+s53X4T19DnzQ==} + dev: true + + /walker@1.0.8: + resolution: {integrity: sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==} + dependencies: + makeerror: 1.0.12 + dev: true + + /watchpack@2.4.0: + resolution: {integrity: sha512-Lcvm7MGST/4fup+ifyKi2hjyIAwcdI4HRgtvTpIUxBRhB+RFtUh8XtDOxUfctVCnhVi+QQj49i91OyvzkJl6cg==} + engines: {node: '>=10.13.0'} + dependencies: + glob-to-regexp: 0.4.1 + graceful-fs: 4.2.10 + dev: true + + /wcwidth@1.0.1: + resolution: {integrity: sha512-XHPEwS0q6TaxcvG85+8EYkbiCux2XtWG2mkc47Ng2A77BQu9+DqIOJldST4HgPkuea7dvKSj5VgX3P1d4rW8Tg==} + dependencies: + defaults: 1.0.4 + dev: true + + /web-streams-polyfill@3.2.1: + resolution: {integrity: sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q==} + engines: {node: '>= 8'} + dev: true + + /webcrypto-core@1.7.5: + resolution: {integrity: sha512-gaExY2/3EHQlRNNNVSrbG2Cg94Rutl7fAaKILS1w8ZDhGxdFOaw6EbCfHIxPy9vt/xwp5o0VQAx9aySPF6hU1A==} + dependencies: + '@peculiar/asn1-schema': 2.3.3 + '@peculiar/json-schema': 1.1.12 + asn1js: 3.0.5 + pvtsutils: 1.3.2 + tslib: 2.5.0 + dev: true + + /webidl-conversions@3.0.1: + resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==} + dev: true + + /webidl-conversions@4.0.2: + resolution: {integrity: sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==} + dev: true + + /webpack-sources@3.2.3: + resolution: {integrity: sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w==} + engines: {node: '>=10.13.0'} + dev: true + + /webpack@5.80.0(esbuild@0.15.13): + resolution: {integrity: sha512-OIMiq37XK1rWO8mH9ssfFKZsXg4n6klTEDL7S8/HqbAOBBaiy8ABvXvz0dDCXeEF9gqwxSvVk611zFPjS8hJxA==} + engines: {node: '>=10.13.0'} + hasBin: true + peerDependencies: + webpack-cli: '*' + peerDependenciesMeta: + webpack-cli: + optional: true + dependencies: + '@types/eslint-scope': 3.7.4 + '@types/estree': 1.0.1 + '@webassemblyjs/ast': 1.11.5 + '@webassemblyjs/wasm-edit': 1.11.5 + '@webassemblyjs/wasm-parser': 1.11.5 + acorn: 8.8.1 + acorn-import-assertions: 1.8.0(acorn@8.8.1) + browserslist: 4.21.4 + chrome-trace-event: 1.0.3 + 
enhanced-resolve: 5.13.0 + es-module-lexer: 1.2.1 + eslint-scope: 5.1.1 + events: 3.3.0 + glob-to-regexp: 0.4.1 + graceful-fs: 4.2.10 + json-parse-even-better-errors: 2.3.1 + loader-runner: 4.3.0 + mime-types: 2.1.35 + neo-async: 2.6.2 + schema-utils: 3.1.2 + tapable: 2.2.1 + terser-webpack-plugin: 5.3.7(esbuild@0.15.13)(webpack@5.80.0) + watchpack: 2.4.0 + webpack-sources: 3.2.3 + transitivePeerDependencies: + - '@swc/core' + - esbuild + - uglify-js + dev: true + + /whatwg-fetch@3.6.2: + resolution: {integrity: sha512-bJlen0FcuU/0EMLrdbJ7zOnW6ITZLrZMIarMUVmdKtsGvZna8vxKYaexICWPfZ8qwf9fzNq+UEIZrnSaApt6RA==} + dev: true + + /whatwg-url@5.0.0: + resolution: {integrity: sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==} + dependencies: + tr46: 0.0.3 + webidl-conversions: 3.0.1 + dev: true + + /whatwg-url@7.1.0: + resolution: {integrity: sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg==} + dependencies: + lodash.sortby: 4.7.0 + tr46: 1.0.1 + webidl-conversions: 4.0.2 + dev: true + + /which-boxed-primitive@1.0.2: + resolution: {integrity: sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==} + dependencies: + is-bigint: 1.0.4 + is-boolean-object: 1.1.2 + is-number-object: 1.0.7 + is-string: 1.0.7 + is-symbol: 1.0.4 + dev: true + + /which-module@2.0.0: + resolution: {integrity: sha512-B+enWhmw6cjfVC7kS8Pj9pCrKSc5txArRyaYGe088shv/FGWH+0Rjx/xPgtsWfsUtS27FkP697E4DDhgrgoc0Q==} + dev: true + + /which@2.0.2: + resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} + engines: {node: '>= 8'} + hasBin: true + dependencies: + isexe: 2.0.0 + dev: true + + /word-wrap@1.2.3: + resolution: {integrity: sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==} + engines: {node: '>=0.10.0'} + dev: true + + /wordwrap@1.0.0: + resolution: {integrity: sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==} + dev: true + + /wrap-ansi@6.2.0: + resolution: {integrity: sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==} + engines: {node: '>=8'} + dependencies: + ansi-styles: 4.3.0 + string-width: 4.2.3 + strip-ansi: 6.0.1 + dev: true + + /wrap-ansi@7.0.0: + resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} + engines: {node: '>=10'} + dependencies: + ansi-styles: 4.3.0 + string-width: 4.2.3 + strip-ansi: 6.0.1 + dev: true + + /wrappy@1.0.2: + resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} + dev: true + + /write-file-atomic@4.0.2: + resolution: {integrity: sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg==} + engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} + dependencies: + imurmurhash: 0.1.4 + signal-exit: 3.0.7 + dev: true + + /ws@8.12.0: + resolution: {integrity: sha512-kU62emKIdKVeEIOIKVegvqpXMSTAMLJozpHZaJNDYqBjzlSYXQGviYwN1osDLJ9av68qHd4a2oSjd7yD4pacig==} + engines: {node: '>=10.0.0'} + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: '>=5.0.2' + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: + optional: true + dev: true + + /y18n@4.0.3: + resolution: {integrity: sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==} + dev: true + + 
/y18n@5.0.8: + resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==} + engines: {node: '>=10'} + dev: true + + /yallist@3.1.1: + resolution: {integrity: sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==} + dev: true + + /yallist@4.0.0: + resolution: {integrity: sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==} + dev: true + + /yaml-ast-parser@0.0.43: + resolution: {integrity: sha512-2PTINUwsRqSd+s8XxKaJWQlUuEMHJQyEuh2edBbW8KNJz0SJPwUSD2zRWqezFEdN7IzAgeuYHFUCF7o8zRdZ0A==} + dev: true + + /yaml@1.10.2: + resolution: {integrity: sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==} + engines: {node: '>= 6'} + dev: true + + /yargs-parser@18.1.3: + resolution: {integrity: sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==} + engines: {node: '>=6'} + dependencies: + camelcase: 5.3.1 + decamelize: 1.2.0 + dev: true + + /yargs-parser@21.1.1: + resolution: {integrity: sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==} + engines: {node: '>=12'} + dev: true + + /yargs@15.4.1: + resolution: {integrity: sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==} + engines: {node: '>=8'} + dependencies: + cliui: 6.0.0 + decamelize: 1.2.0 + find-up: 4.1.0 + get-caller-file: 2.0.5 + require-directory: 2.1.1 + require-main-filename: 2.0.0 + set-blocking: 2.0.0 + string-width: 4.2.3 + which-module: 2.0.0 + y18n: 4.0.3 + yargs-parser: 18.1.3 + dev: true + + /yargs@17.6.2: + resolution: {integrity: sha512-1/9UrdHjDZc0eOU0HxOHoS78C69UD3JRMvzlJ7S79S2nTaWRA/whGCTV8o9e/N/1Va9YIV7Q4sOxD8VV4pCWOw==} + engines: {node: '>=12'} + dependencies: + cliui: 8.0.1 + escalade: 3.1.1 + get-caller-file: 2.0.5 + require-directory: 2.1.1 + string-width: 4.2.3 + y18n: 5.0.8 + yargs-parser: 21.1.1 + dev: true + + /yn@3.1.1: + resolution: {integrity: sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==} + engines: {node: '>=6'} + dev: true + + /yocto-queue@0.1.0: + resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} + engines: {node: '>=10'} + dev: true + + '@github.com/aptos-labs/openapi-typescript-codegen/releases/download/v0.24.0-p1/openapi-typescript-codegen-v0.24.0-p1.tgz': + resolution: {tarball: https://github.com/aptos-labs/openapi-typescript-codegen/releases/download/v0.24.0-p1/openapi-typescript-codegen-v0.24.0-p1.tgz} + name: openapi-typescript-codegen + version: 0.24.0 + hasBin: true + dependencies: + camelcase: 6.3.0 + commander: 9.4.1 + fs-extra: 10.1.0 + handlebars: 4.7.7 + json-schema-ref-parser: 9.0.9 + dev: true diff --git a/m1/JavaScript-client/scripts/check.sh b/m1/JavaScript-client/scripts/check.sh new file mode 100644 index 00000000..456f8302 --- /dev/null +++ b/m1/JavaScript-client/scripts/check.sh @@ -0,0 +1,40 @@ +#!/bin/sh + +# This script performs various validity checks prior to publishing a package +# to npm.js, such as checking the version and the changelog. + +set -e + +cd "$(dirname "$0")" +cd .. + +# Get the latest version of the package on npm.js +PUBLISHED_VERSION=`npm show aptos version` + +# Get the version from the local package.json file. +NEW_VERSION=`node -p -e "require('./package.json').version"` + +# Exit happily if the version is the same. 
+if [ "$NEW_VERSION" = "$PUBLISHED_VERSION" ]; then + echo "Version is the same. Exiting gracefully." + exit 0 +fi + +# Functions to help check if the version went backwards. +verlte() { + [ "$1" = "$(printf "$1\n$2" | sort -V | head -n1)" ] +} + +# Ensure the version didn't go backwards. +if verlte $NEW_VERSION $PUBLISHED_VERSION; then + echo "ERROR: The version number went backwards. Aborting." + exit 1 +fi + +# Ensure there is an entry for the new version in the changelog. +if ! grep -q "# $NEW_VERSION" CHANGELOG.md; then + echo "ERROR: The changelog does not contain an entry for the new version. Aborting." + exit 1 +fi + +echo "Version and changelog look good" diff --git a/m1/JavaScript-client/scripts/checked_publish.sh b/m1/JavaScript-client/scripts/checked_publish.sh new file mode 100644 index 00000000..62fdec1c --- /dev/null +++ b/m1/JavaScript-client/scripts/checked_publish.sh @@ -0,0 +1,16 @@ +#!/bin/sh + +# This script publishes the package to npm.js, first perfoming validity checks. +# This script can be used locally or in CI safely. +# It assumes the package has already been installed, built, and tested. + +set -e + +cd "$(dirname "$0")" +cd .. + +# Make sure everything is valid. +. scripts/check.sh + +# Finally, publish the package. We assume it has been built +pnpm publish --non-interactive diff --git a/m1/JavaScript-client/scripts/generate_ts_docs.sh b/m1/JavaScript-client/scripts/generate_ts_docs.sh new file mode 100644 index 00000000..01992018 --- /dev/null +++ b/m1/JavaScript-client/scripts/generate_ts_docs.sh @@ -0,0 +1,32 @@ +#!/bin/sh + +# This script helps you regenerate the TS docs at https://github.com/aptos-labs/ts-sdk-doc. + +DOCS_DIR=/tmp/ts-sdk-doc + +set -e + +cd "$(dirname "$0")" +cd .. + +# Generate the TS docs to a temporary directory. +rm -rf /tmp/generated-ts-docs +typedoc src/index.ts --out /tmp/generated-ts-docs + +# Clone the ts-sdk-doc repo. +rm -rf /tmp/ts-sdk-doc +git clone git@github.com:aptos-labs/ts-sdk-doc.git $DOCS_DIR + +# Copy the generated docs into the ts-sdk-doc repo. +rm -rf $DOCS_DIR/* +mv /tmp/generated-ts-docs/* $DOCS_DIR + +# Copy in a basic README +echo "# TS SDK Docs" > $DOCS_DIR/README.md +echo "" >> $DOCS_DIR/README.md +echo 'Generated from `ecosystem/typescript/sdk/` in [aptos-core](https://github.com/aptos-labs/aptos-core/tree/main/ecosystem/typescript/sdk) using `pnpm generate-ts-docs`.' >> $DOCS_DIR/README.md + +# Done! +echo +echo "Generated docs to $DOCS_DIR" +echo "From here, ensure that the changes look good. If so, copy the changes into a checkout of the ts-sdk-doc repo and make a PR!" diff --git a/m1/JavaScript-client/scripts/publish_ans_contract.ts b/m1/JavaScript-client/scripts/publish_ans_contract.ts new file mode 100644 index 00000000..f851e13e --- /dev/null +++ b/m1/JavaScript-client/scripts/publish_ans_contract.ts @@ -0,0 +1,63 @@ +const { execSync } = require("child_process"); +require("dotenv").config(); + +/** + * TS SDK supports ANS. Since ANS contract is not part of aptos-framework + * we need to get the ANS contract, publish it to local testnet and test against it. + * This script clones the aptos-names-contracts repo {@link https://github.com/aptos-labs/aptos-names-contracts}, + * uses a pre created account address and private key to fund that account and + * then publish the contract under that account. + * After the contract is published, we delete the cloned repo folder. + * + * This script runs when testing locally and on CI (as part of sdk-release.yaml) using `pnpm test`. 
+ */ + +// on local publishing we want to use `aptos` commnads and on CI we want to use `docker` +const APTOS_INVOCATION = process.env.APTOS_INVOCATION || "aptos"; +// environment we use when testing +const APTOS_NODE_URL = process.env.APTOS_NODE_URL; +const APTOS_FAUCET_URL = process.env.APTOS_FAUCET_URL; +// ans account we use to publish the contract +const ANS_REPO_LOCATION = process.env.ANS_REPO_LOCATION || "/tmp/ans"; +const ANS_TEST_ACCOUNT_PRIVATE_KEY = + process.env.ANS_TEST_ACCOUNT_PRIVATE_KEY || "0x37368b46ce665362562c6d1d4ec01a08c8644c488690df5a17e13ba163e20221"; +const ANS_TEST_ACCOUNT_ADDRESS = + process.env.ANS_TEST_ACCOUNT_ADDRESS || "585fc9f0f0c54183b039ffc770ca282ebd87307916c215a3e692f2f8e4305e82"; + +try { + deleteAnsFolder(); + // 1. Clone ANS repository into the current directory + console.log("---clone ANS repository---"); + execSync(`git clone https://github.com/aptos-labs/aptos-names-contracts.git ${ANS_REPO_LOCATION}`, { + stdio: "inherit", + }); + + // 2. fund ans account + console.log("---funding account---"); + execSync( + `${APTOS_INVOCATION} account fund-with-faucet --account ${ANS_TEST_ACCOUNT_ADDRESS} --faucet-url ${APTOS_FAUCET_URL} --url ${APTOS_NODE_URL}`, + { stdio: "inherit" }, + ); + + // 3. publish ans modules under the ans account + console.log("---publish ans modules---"); + execSync( + `${APTOS_INVOCATION} move publish --package-dir /tmp/ans/core --assume-yes --private-key=${ANS_TEST_ACCOUNT_PRIVATE_KEY} --named-addresses aptos_names=0x${ANS_TEST_ACCOUNT_ADDRESS},aptos_names_admin=0x${ANS_TEST_ACCOUNT_ADDRESS},aptos_names_funds=0x${ANS_TEST_ACCOUNT_ADDRESS} --url=${APTOS_NODE_URL}`, + { stdio: "inherit" }, + ); + + // 4. Delete aptos-names-contracts folder created by the git clone command + console.log("---module published, deleting aptos-names-contracts folder---"); + deleteAnsFolder(); +} catch (error: any) { + console.error("An error occurred:"); + console.error("Status", error?.status); + console.error("parsed stdout", error?.stdout?.toString("utf8")); + console.error("parsed stderr", error?.stderr?.toString("utf8")); + deleteAnsFolder(); + process.exit(1); +} + +function deleteAnsFolder() { + execSync("rm -rf /tmp/ans", { stdio: "inherit" }); +} diff --git a/m1/JavaScript-client/src/account/aptos_account.ts b/m1/JavaScript-client/src/account/aptos_account.ts new file mode 100644 index 00000000..f7490f05 --- /dev/null +++ b/m1/JavaScript-client/src/account/aptos_account.ts @@ -0,0 +1,191 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +import nacl from "tweetnacl"; +import * as bip39 from "@scure/bip39"; +import { bytesToHex } from "@noble/hashes/utils"; +import { sha3_256 as sha3Hash } from "@noble/hashes/sha3"; +import { derivePath } from "../utils/hd-key"; +import { HexString, MaybeHexString, Memoize } from "../utils"; +import * as Gen from "../generated/index"; +import { AccountAddress, AuthenticationKey, Ed25519PublicKey } from "../aptos_types"; +import { bcsToBytes } from "../bcs"; + +export interface AptosAccountObject { + address?: Gen.HexEncodedBytes; + publicKeyHex?: Gen.HexEncodedBytes; + privateKeyHex: Gen.HexEncodedBytes; +} + +/** + * Class for creating and managing Aptos account + */ +export class AptosAccount { + /** + * A private key and public key, associated with the given account + */ + readonly signingKey: nacl.SignKeyPair; + + /** + * Address associated with the given account + */ + private readonly accountAddress: HexString; + + static fromAptosAccountObject(obj: AptosAccountObject): 
AptosAccount { + return new AptosAccount(HexString.ensure(obj.privateKeyHex).toUint8Array(), obj.address); + } + + /** + * Test derive path + */ + static isValidPath(path: string): boolean { + return /^m\/44'\/637'\/[0-9]+'\/[0-9]+'\/[0-9]+'+$/.test(path); + } + + /** + * Creates new account with bip44 path and mnemonics, + * @param path. (e.g. m/44'/637'/0'/0'/0') + * Detailed description: {@link https://github.com/bitcoin/bips/blob/master/bip-0044.mediawiki} + * @param mnemonics. + * @returns AptosAccount + */ + static fromDerivePath(path: string, mnemonics: string): AptosAccount { + if (!AptosAccount.isValidPath(path)) { + throw new Error("Invalid derivation path"); + } + + const normalizeMnemonics = mnemonics + .trim() + .split(/\s+/) + .map((part) => part.toLowerCase()) + .join(" "); + + const { key } = derivePath(path, bytesToHex(bip39.mnemonicToSeedSync(normalizeMnemonics))); + + return new AptosAccount(key); + } + + /** + * Creates new account instance. Constructor allows passing in an address, + * to handle account key rotation, where auth_key != public_key + * @param privateKeyBytes Private key from which account key pair will be generated. + * If not specified, new key pair is going to be created. + * @param address Account address (e.g. 0xe8012714cd17606cee7188a2a365eef3fe760be598750678c8c5954eb548a591). + * If not specified, a new one will be generated from public key + */ + constructor(privateKeyBytes?: Uint8Array | undefined, address?: MaybeHexString) { + if (privateKeyBytes) { + this.signingKey = nacl.sign.keyPair.fromSeed(privateKeyBytes.slice(0, 32)); + } else { + this.signingKey = nacl.sign.keyPair(); + } + this.accountAddress = HexString.ensure(address || this.authKey().hex()); + } + + /** + * This is the key by which Aptos account is referenced. + * It is the 32-byte of the SHA-3 256 cryptographic hash + * of the public key(s) concatenated with a signature scheme identifier byte + * @returns Address associated with the given account + */ + address(): HexString { + return this.accountAddress; + } + + /** + * This key enables account owners to rotate their private key(s) + * associated with the account without changing the address that hosts their account. + * See here for more info: {@link https://aptos.dev/concepts/accounts#single-signer-authentication} + * @returns Authentication key for the associated account + */ + @Memoize() + authKey(): HexString { + const pubKey = new Ed25519PublicKey(this.signingKey.publicKey); + const authKey = AuthenticationKey.fromEd25519PublicKey(pubKey); + return authKey.derivedAddress(); + } + + /** + * Takes source address and seeds and returns the resource account address + * @param sourceAddress Address used to derive the resource account + * @param seed The seed bytes + * @returns The resource account address + */ + + static getResourceAccountAddress(sourceAddress: MaybeHexString, seed: Uint8Array): HexString { + const source = bcsToBytes(AccountAddress.fromHex(sourceAddress)); + + const bytes = new Uint8Array([...source, ...seed, AuthenticationKey.DERIVE_RESOURCE_ACCOUNT_SCHEME]); + + const hash = sha3Hash.create(); + hash.update(bytes); + + return HexString.fromUint8Array(hash.digest()); + } + + /** + * This key is generated with Ed25519 scheme. 
+ * Public key is used to check a signature of transaction, signed by given account + * @returns The public key for the associated account + */ + pubKey(): HexString { + return HexString.fromUint8Array(this.signingKey.publicKey); + } + + /** + * Signs specified `buffer` with account's private key + * @param buffer A buffer to sign + * @returns A signature HexString + */ + signBuffer(buffer: Uint8Array): HexString { + const signature = nacl.sign.detached(buffer, this.signingKey.secretKey); + return HexString.fromUint8Array(signature); + } + + /** + * Signs specified `hexString` with account's private key + * @param hexString A regular string or HexString to sign + * @returns A signature HexString + */ + signHexString(hexString: MaybeHexString): HexString { + const toSign = HexString.ensure(hexString).toUint8Array(); + return this.signBuffer(toSign); + } + + /** + * Verifies the signature of the message with the public key of the account + * @param message a signed message + * @param signature the signature of the message + */ + verifySignature(message: MaybeHexString, signature: MaybeHexString): boolean { + const rawMessage = HexString.ensure(message).toUint8Array(); + const rawSignature = HexString.ensure(signature).toUint8Array(); + return nacl.sign.detached.verify(rawMessage, rawSignature, this.signingKey.publicKey); + } + + /** + * Derives account address, public key and private key + * @returns AptosAccountObject instance. + * @example An example of the returned AptosAccountObject object + * ``` + * { + * address: "0xe8012714cd17606cee7188a2a365eef3fe760be598750678c8c5954eb548a591", + * publicKeyHex: "0xf56d8524faf79fbc0f48c13aeed3b0ce5dd376b4db93b8130a107c0a5e04ba04", + * privateKeyHex: `0x009c9f7c992a06cfafe916f125d8adb7a395fca243e264a8e56a4b3e6accf940 + * d2b11e9ece3049ce60e3c7b4a1c58aebfa9298e29a30a58a67f1998646135204` + * } + * ``` + */ + toPrivateKeyObject(): AptosAccountObject { + return { + address: this.address().hex(), + publicKeyHex: this.pubKey().hex(), + privateKeyHex: HexString.fromUint8Array(this.signingKey.secretKey.slice(0, 32)).hex(), + }; + } +} + +// Returns an account address as a HexString given either an AptosAccount or a MaybeHexString. +export function getAddressFromAccountOrAddress(accountOrAddress: AptosAccount | MaybeHexString): HexString { + return accountOrAddress instanceof AptosAccount ? accountOrAddress.address() : HexString.ensure(accountOrAddress); +} diff --git a/m1/JavaScript-client/src/account/index.ts b/m1/JavaScript-client/src/account/index.ts new file mode 100644 index 00000000..3af1a448 --- /dev/null +++ b/m1/JavaScript-client/src/account/index.ts @@ -0,0 +1 @@ +export * from "./aptos_account"; diff --git a/m1/JavaScript-client/src/aptos_types/abi.ts b/m1/JavaScript-client/src/aptos_types/abi.ts new file mode 100644 index 00000000..1dd3ad23 --- /dev/null +++ b/m1/JavaScript-client/src/aptos_types/abi.ts @@ -0,0 +1,137 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +import { Deserializer, Serializer, Bytes, Seq, deserializeVector, serializeVector } from "../bcs"; + +import { ModuleId } from "./transaction"; + +import { TypeTag } from "./type_tag"; + +export class TypeArgumentABI { + /** + * Constructs a TypeArgumentABI instance. 
+ * @param name + */ + constructor(public readonly name: string) {} + + serialize(serializer: Serializer): void { + serializer.serializeStr(this.name); + } + + static deserialize(deserializer: Deserializer): TypeArgumentABI { + const name = deserializer.deserializeStr(); + return new TypeArgumentABI(name); + } +} + +export class ArgumentABI { + /** + * Constructs an ArgumentABI instance. + * @param name + * @param type_tag + */ + constructor(public readonly name: string, public readonly type_tag: TypeTag) {} + + serialize(serializer: Serializer): void { + serializer.serializeStr(this.name); + this.type_tag.serialize(serializer); + } + + static deserialize(deserializer: Deserializer): ArgumentABI { + const name = deserializer.deserializeStr(); + const typeTag = TypeTag.deserialize(deserializer); + return new ArgumentABI(name, typeTag); + } +} + +export abstract class ScriptABI { + abstract serialize(serializer: Serializer): void; + + static deserialize(deserializer: Deserializer): ScriptABI { + const index = deserializer.deserializeUleb128AsU32(); + switch (index) { + case 0: + return TransactionScriptABI.load(deserializer); + case 1: + return EntryFunctionABI.load(deserializer); + default: + throw new Error(`Unknown variant index for TransactionPayload: ${index}`); + } + } +} + +export class TransactionScriptABI extends ScriptABI { + /** + * Constructs a TransactionScriptABI instance. + * @param name Entry function name + * @param doc + * @param code + * @param ty_args + * @param args + */ + constructor( + public readonly name: string, + public readonly doc: string, + public readonly code: Bytes, + public readonly ty_args: Seq, + public readonly args: Seq, + ) { + super(); + } + + serialize(serializer: Serializer): void { + serializer.serializeU32AsUleb128(0); + serializer.serializeStr(this.name); + serializer.serializeStr(this.doc); + serializer.serializeBytes(this.code); + serializeVector(this.ty_args, serializer); + serializeVector(this.args, serializer); + } + + static load(deserializer: Deserializer): TransactionScriptABI { + const name = deserializer.deserializeStr(); + const doc = deserializer.deserializeStr(); + const code = deserializer.deserializeBytes(); + const tyArgs = deserializeVector(deserializer, TypeArgumentABI); + const args = deserializeVector(deserializer, ArgumentABI); + return new TransactionScriptABI(name, doc, code, tyArgs, args); + } +} + +export class EntryFunctionABI extends ScriptABI { + /** + * Constructs a EntryFunctionABI instance + * @param name + * @param module_name Fully qualified module id + * @param doc + * @param ty_args + * @param args + */ + constructor( + public readonly name: string, + public readonly module_name: ModuleId, + public readonly doc: string, + public readonly ty_args: Seq, + public readonly args: Seq, + ) { + super(); + } + + serialize(serializer: Serializer): void { + serializer.serializeU32AsUleb128(1); + serializer.serializeStr(this.name); + this.module_name.serialize(serializer); + serializer.serializeStr(this.doc); + serializeVector(this.ty_args, serializer); + serializeVector(this.args, serializer); + } + + static load(deserializer: Deserializer): EntryFunctionABI { + const name = deserializer.deserializeStr(); + const moduleName = ModuleId.deserialize(deserializer); + const doc = deserializer.deserializeStr(); + const tyArgs = deserializeVector(deserializer, TypeArgumentABI); + const args = deserializeVector(deserializer, ArgumentABI); + return new EntryFunctionABI(name, moduleName, doc, tyArgs, args); + } +} diff --git 
a/m1/JavaScript-client/src/aptos_types/account_address.ts b/m1/JavaScript-client/src/aptos_types/account_address.ts new file mode 100644 index 00000000..3417f0d3 --- /dev/null +++ b/m1/JavaScript-client/src/aptos_types/account_address.ts @@ -0,0 +1,88 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +import { HexString, MaybeHexString } from "../utils"; +import { Serializer, Deserializer, Bytes } from "../bcs"; + +export class AccountAddress { + static readonly LENGTH: number = 32; + + readonly address: Bytes; + + static CORE_CODE_ADDRESS: AccountAddress = AccountAddress.fromHex("0x1"); + + constructor(address: Bytes) { + if (address.length !== AccountAddress.LENGTH) { + throw new Error("Expected address of length 32"); + } + this.address = address; + } + + /** + * Creates AccountAddress from a hex string. + * @param addr Hex string can be with a prefix or without a prefix, + * e.g. '0x1aa' or '1aa'. Hex string will be left padded with 0s if too short. + */ + static fromHex(addr: MaybeHexString): AccountAddress { + let address = HexString.ensure(addr); + + // If an address hex has an odd number of digits, pad the hex string with a 0 + // e.g. '1aa' would become '01aa'. + if (address.noPrefix().length % 2 !== 0) { + address = new HexString(`0${address.noPrefix()}`); + } + + const addressBytes = address.toUint8Array(); + + if (addressBytes.length > AccountAddress.LENGTH) { + // eslint-disable-next-line quotes + throw new Error("Hex string is too long. Address's length is 32 bytes."); + } else if (addressBytes.length === AccountAddress.LENGTH) { + return new AccountAddress(addressBytes); + } + + const res: Bytes = new Uint8Array(AccountAddress.LENGTH); + res.set(addressBytes, AccountAddress.LENGTH - addressBytes.length); + + return new AccountAddress(res); + } + + /** + * Checks if the string is a valid AccountAddress + * @param addr Hex string can be with a prefix or without a prefix, + * e.g. '0x1aa' or '1aa'. Hex string will be left padded with 0s if too short. + */ + static isValid(addr: MaybeHexString): boolean { + // At least one zero is required + if (addr === "") { + return false; + } + + let address = HexString.ensure(addr); + + // If an address hex has an odd number of digits, pad the hex string with a 0 + // e.g. '1aa' would become '01aa'. + if (address.noPrefix().length % 2 !== 0) { + address = new HexString(`0${address.noPrefix()}`); + } + + const addressBytes = address.toUint8Array(); + + return addressBytes.length <= AccountAddress.LENGTH; + } + + /** + * Returns the account address as a hex string.
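+ * + * @example An illustrative usage sketch (the input value below is arbitrary): + * ``` + * const addr = AccountAddress.fromHex("0x1"); + * addr.toHexString(); // "0x"-prefixed, zero-padded 32-byte (64 hex character) address + * ```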
+ */ + toHexString(): MaybeHexString { + return HexString.fromUint8Array(this.address).hex(); + } + + serialize(serializer: Serializer): void { + serializer.serializeFixedBytes(this.address); + } + + static deserialize(deserializer: Deserializer): AccountAddress { + return new AccountAddress(deserializer.deserializeFixedBytes(AccountAddress.LENGTH)); + } +} diff --git a/m1/JavaScript-client/src/aptos_types/authentication_key.ts b/m1/JavaScript-client/src/aptos_types/authentication_key.ts new file mode 100644 index 00000000..74f9b768 --- /dev/null +++ b/m1/JavaScript-client/src/aptos_types/authentication_key.ts @@ -0,0 +1,73 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +import { sha3_256 as sha3Hash } from "@noble/hashes/sha3"; +import { HexString } from "../utils"; +import { Bytes } from "../bcs"; +import { MultiEd25519PublicKey } from "./multi_ed25519"; +import { Ed25519PublicKey } from "./ed25519"; + +/** + * Each account stores an authentication key. Authentication key enables account owners to rotate + * their private key(s) associated with the account without changing the address that hosts their account. + * @see {@link * https://aptos.dev/concepts/accounts | Account Basics} + * + * Account addresses can be derived from AuthenticationKey + */ +export class AuthenticationKey { + static readonly LENGTH: number = 32; + + static readonly MULTI_ED25519_SCHEME: number = 1; + + static readonly ED25519_SCHEME: number = 0; + + static readonly DERIVE_RESOURCE_ACCOUNT_SCHEME: number = 255; + + readonly bytes: Bytes; + + constructor(bytes: Bytes) { + if (bytes.length !== AuthenticationKey.LENGTH) { + throw new Error("Expected a byte array of length 32"); + } + this.bytes = bytes; + } + + /** + * Converts a K-of-N MultiEd25519PublicKey to AuthenticationKey with: + * `auth_key = sha3-256(p_1 | … | p_n | K | 0x01)`. `K` represents the K-of-N required for + * authenticating the transaction. `0x01` is the 1-byte scheme for multisig. + */ + static fromMultiEd25519PublicKey(publicKey: MultiEd25519PublicKey): AuthenticationKey { + const pubKeyBytes = publicKey.toBytes(); + + const bytes = new Uint8Array(pubKeyBytes.length + 1); + bytes.set(pubKeyBytes); + bytes.set([AuthenticationKey.MULTI_ED25519_SCHEME], pubKeyBytes.length); + + const hash = sha3Hash.create(); + hash.update(bytes); + + return new AuthenticationKey(hash.digest()); + } + + static fromEd25519PublicKey(publicKey: Ed25519PublicKey): AuthenticationKey { + const pubKeyBytes = publicKey.value; + + const bytes = new Uint8Array(pubKeyBytes.length + 1); + bytes.set(pubKeyBytes); + bytes.set([AuthenticationKey.ED25519_SCHEME], pubKeyBytes.length); + + const hash = sha3Hash.create(); + hash.update(bytes); + + return new AuthenticationKey(hash.digest()); + } + + /** + * Derives an account address from AuthenticationKey. Since current AccountAddress is 32 bytes, + * AuthenticationKey bytes are directly translated to AccountAddress. 
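+ * + * @example An illustrative sketch, assuming `pubKeyBytes` is a 32-byte Ed25519 public key: + * ``` + * const authKey = AuthenticationKey.fromEd25519PublicKey(new Ed25519PublicKey(pubKeyBytes)); + * const address = authKey.derivedAddress(); // HexString wrapping the 32-byte account address + * ```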
+ */ + derivedAddress(): HexString { + return HexString.fromUint8Array(this.bytes); + } +} diff --git a/m1/JavaScript-client/src/aptos_types/authenticator.ts b/m1/JavaScript-client/src/aptos_types/authenticator.ts new file mode 100644 index 00000000..ffbe5791 --- /dev/null +++ b/m1/JavaScript-client/src/aptos_types/authenticator.ts @@ -0,0 +1,153 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +/* eslint-disable @typescript-eslint/naming-convention */ +import { AccountAddress } from "./account_address"; +import { Serializer, Deserializer, Seq, deserializeVector, serializeVector } from "../bcs"; +import { Ed25519PublicKey, Ed25519Signature } from "./ed25519"; +import { MultiEd25519PublicKey, MultiEd25519Signature } from "./multi_ed25519"; + +export abstract class TransactionAuthenticator { + abstract serialize(serializer: Serializer): void; + + static deserialize(deserializer: Deserializer): TransactionAuthenticator { + const index = deserializer.deserializeUleb128AsU32(); + switch (index) { + case 0: + return TransactionAuthenticatorEd25519.load(deserializer); + case 1: + return TransactionAuthenticatorMultiEd25519.load(deserializer); + case 2: + return TransactionAuthenticatorMultiAgent.load(deserializer); + default: + throw new Error(`Unknown variant index for TransactionAuthenticator: ${index}`); + } + } +} + +export class TransactionAuthenticatorEd25519 extends TransactionAuthenticator { + /** + * An authenticator for single signature. + * + * @param public_key Client's public key. + * @param signature Signature of a raw transaction. + * @see {@link https://aptos.dev/guides/creating-a-signed-transaction/ | Creating a Signed Transaction} + * for details about generating a signature. + */ + constructor(public readonly public_key: Ed25519PublicKey, public readonly signature: Ed25519Signature) { + super(); + } + + serialize(serializer: Serializer): void { + serializer.serializeU32AsUleb128(0); + this.public_key.serialize(serializer); + this.signature.serialize(serializer); + } + + static load(deserializer: Deserializer): TransactionAuthenticatorEd25519 { + const public_key = Ed25519PublicKey.deserialize(deserializer); + const signature = Ed25519Signature.deserialize(deserializer); + return new TransactionAuthenticatorEd25519(public_key, signature); + } +} + +export class TransactionAuthenticatorMultiEd25519 extends TransactionAuthenticator { + /** + * An authenticator for multiple signatures. 
+ * + * @param public_key + * @param signature + * + */ + constructor(public readonly public_key: MultiEd25519PublicKey, public readonly signature: MultiEd25519Signature) { + super(); + } + + serialize(serializer: Serializer): void { + serializer.serializeU32AsUleb128(1); + this.public_key.serialize(serializer); + this.signature.serialize(serializer); + } + + static load(deserializer: Deserializer): TransactionAuthenticatorMultiEd25519 { + const public_key = MultiEd25519PublicKey.deserialize(deserializer); + const signature = MultiEd25519Signature.deserialize(deserializer); + return new TransactionAuthenticatorMultiEd25519(public_key, signature); + } +} + +export class TransactionAuthenticatorMultiAgent extends TransactionAuthenticator { + constructor( + public readonly sender: AccountAuthenticator, + public readonly secondary_signer_addresses: Seq, + public readonly secondary_signers: Seq, + ) { + super(); + } + + serialize(serializer: Serializer): void { + serializer.serializeU32AsUleb128(2); + this.sender.serialize(serializer); + serializeVector(this.secondary_signer_addresses, serializer); + serializeVector(this.secondary_signers, serializer); + } + + static load(deserializer: Deserializer): TransactionAuthenticatorMultiAgent { + const sender = AccountAuthenticator.deserialize(deserializer); + const secondary_signer_addresses = deserializeVector(deserializer, AccountAddress); + const secondary_signers = deserializeVector(deserializer, AccountAuthenticator); + return new TransactionAuthenticatorMultiAgent(sender, secondary_signer_addresses, secondary_signers); + } +} + +export abstract class AccountAuthenticator { + abstract serialize(serializer: Serializer): void; + + static deserialize(deserializer: Deserializer): AccountAuthenticator { + const index = deserializer.deserializeUleb128AsU32(); + switch (index) { + case 0: + return AccountAuthenticatorEd25519.load(deserializer); + case 1: + return AccountAuthenticatorMultiEd25519.load(deserializer); + default: + throw new Error(`Unknown variant index for AccountAuthenticator: ${index}`); + } + } +} + +export class AccountAuthenticatorEd25519 extends AccountAuthenticator { + constructor(public readonly public_key: Ed25519PublicKey, public readonly signature: Ed25519Signature) { + super(); + } + + serialize(serializer: Serializer): void { + serializer.serializeU32AsUleb128(0); + this.public_key.serialize(serializer); + this.signature.serialize(serializer); + } + + static load(deserializer: Deserializer): AccountAuthenticatorEd25519 { + const public_key = Ed25519PublicKey.deserialize(deserializer); + const signature = Ed25519Signature.deserialize(deserializer); + return new AccountAuthenticatorEd25519(public_key, signature); + } +} + +export class AccountAuthenticatorMultiEd25519 extends AccountAuthenticator { + constructor(public readonly public_key: MultiEd25519PublicKey, public readonly signature: MultiEd25519Signature) { + super(); + } + + serialize(serializer: Serializer): void { + serializer.serializeU32AsUleb128(1); + this.public_key.serialize(serializer); + this.signature.serialize(serializer); + } + + static load(deserializer: Deserializer): AccountAuthenticatorMultiEd25519 { + const public_key = MultiEd25519PublicKey.deserialize(deserializer); + const signature = MultiEd25519Signature.deserialize(deserializer); + return new AccountAuthenticatorMultiEd25519(public_key, signature); + } +} diff --git a/m1/JavaScript-client/src/aptos_types/ed25519.ts b/m1/JavaScript-client/src/aptos_types/ed25519.ts new file mode 100644 index 
00000000..1385e289 --- /dev/null +++ b/m1/JavaScript-client/src/aptos_types/ed25519.ts @@ -0,0 +1,49 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +import { Bytes, Deserializer, Serializer } from "../bcs"; + +export class Ed25519PublicKey { + static readonly LENGTH: number = 32; + + readonly value: Bytes; + + constructor(value: Bytes) { + if (value.length !== Ed25519PublicKey.LENGTH) { + throw new Error(`Ed25519PublicKey length should be ${Ed25519PublicKey.LENGTH}`); + } + this.value = value; + } + + toBytes(): Bytes { + return this.value; + } + + serialize(serializer: Serializer): void { + serializer.serializeBytes(this.value); + } + + static deserialize(deserializer: Deserializer): Ed25519PublicKey { + const value = deserializer.deserializeBytes(); + return new Ed25519PublicKey(value); + } +} + +export class Ed25519Signature { + static readonly LENGTH = 64; + + constructor(public readonly value: Bytes) { + if (value.length !== Ed25519Signature.LENGTH) { + throw new Error(`Ed25519Signature length should be ${Ed25519Signature.LENGTH}`); + } + } + + serialize(serializer: Serializer): void { + serializer.serializeBytes(this.value); + } + + static deserialize(deserializer: Deserializer): Ed25519Signature { + const value = deserializer.deserializeBytes(); + return new Ed25519Signature(value); + } +} diff --git a/m1/JavaScript-client/src/aptos_types/identifier.ts b/m1/JavaScript-client/src/aptos_types/identifier.ts new file mode 100644 index 00000000..229f155f --- /dev/null +++ b/m1/JavaScript-client/src/aptos_types/identifier.ts @@ -0,0 +1,17 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +import { Deserializer, Serializer } from "../bcs"; + +export class Identifier { + constructor(public value: string) {} + + public serialize(serializer: Serializer): void { + serializer.serializeStr(this.value); + } + + static deserialize(deserializer: Deserializer): Identifier { + const value = deserializer.deserializeStr(); + return new Identifier(value); + } +} diff --git a/m1/JavaScript-client/src/aptos_types/index.ts b/m1/JavaScript-client/src/aptos_types/index.ts new file mode 100644 index 00000000..4c4ff983 --- /dev/null +++ b/m1/JavaScript-client/src/aptos_types/index.ts @@ -0,0 +1,15 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +export * from "./abi"; +export * from "./account_address"; +export * from "./authenticator"; +export * from "./transaction"; +export * from "./type_tag"; +export * from "./identifier"; +export * from "./ed25519"; +export * from "./multi_ed25519"; +export * from "./authentication_key"; +export * from "./rotation_proof_challenge"; + +export type SigningMessage = Uint8Array; diff --git a/m1/JavaScript-client/src/aptos_types/multi_ed25519.ts b/m1/JavaScript-client/src/aptos_types/multi_ed25519.ts new file mode 100644 index 00000000..ad72c7a0 --- /dev/null +++ b/m1/JavaScript-client/src/aptos_types/multi_ed25519.ts @@ -0,0 +1,158 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +/* eslint-disable no-bitwise */ +import { Bytes, Deserializer, Seq, Serializer, Uint8 } from "../bcs"; +import { Ed25519PublicKey, Ed25519Signature } from "./ed25519"; + +/** + * MultiEd25519 currently supports at most 32 signatures. + */ +const MAX_SIGNATURES_SUPPORTED = 32; + +export class MultiEd25519PublicKey { + /** + * Public key for a K-of-N multisig transaction. 
A K-of-N multisig transaction means that for such a + * transaction to be executed, at least K out of the N authorized signers have signed the transaction + * and passed the check conducted by the chain. + * + * @see {@link + * https://aptos.dev/guides/creating-a-signed-transaction#multisignature-transactions | Creating a Signed Transaction} + * + * @param public_keys A list of public keys + * @param threshold At least "threshold" signatures must be valid + */ + constructor(public readonly public_keys: Seq, public readonly threshold: Uint8) { + if (threshold > MAX_SIGNATURES_SUPPORTED) { + throw new Error(`"threshold" cannot be larger than ${MAX_SIGNATURES_SUPPORTED}`); + } + } + + /** + * Converts a MultiEd25519PublicKey into bytes with: bytes = p1_bytes | ... | pn_bytes | threshold + */ + toBytes(): Bytes { + const bytes = new Uint8Array(this.public_keys.length * Ed25519PublicKey.LENGTH + 1); + this.public_keys.forEach((k: Ed25519PublicKey, i: number) => { + bytes.set(k.value, i * Ed25519PublicKey.LENGTH); + }); + + bytes[this.public_keys.length * Ed25519PublicKey.LENGTH] = this.threshold; + + return bytes; + } + + serialize(serializer: Serializer): void { + serializer.serializeBytes(this.toBytes()); + } + + static deserialize(deserializer: Deserializer): MultiEd25519PublicKey { + const bytes = deserializer.deserializeBytes(); + const threshold = bytes[bytes.length - 1]; + + const keys: Seq = []; + + for (let i = 0; i < bytes.length - 1; i += Ed25519PublicKey.LENGTH) { + const begin = i; + keys.push(new Ed25519PublicKey(bytes.subarray(begin, begin + Ed25519PublicKey.LENGTH))); + } + return new MultiEd25519PublicKey(keys, threshold); + } +} + +export class MultiEd25519Signature { + static BITMAP_LEN: Uint8 = 4; + + /** + * Signature for a K-of-N multisig transaction. + * + * @see {@link + * https://aptos.dev/guides/creating-a-signed-transaction#multisignature-transactions | Creating a Signed Transaction} + * + * @param signatures A list of ed25519 signatures + * @param bitmap 4 bytes, at most 32 signatures are supported. If Nth bit value is `1`, the Nth + * signature should be provided in `signatures`. Bits are read from left to right + */ + constructor(public readonly signatures: Seq, public readonly bitmap: Uint8Array) { + if (bitmap.length !== MultiEd25519Signature.BITMAP_LEN) { + throw new Error(`"bitmap" length should be ${MultiEd25519Signature.BITMAP_LEN}`); + } + } + + /** + * Converts a MultiEd25519Signature into bytes with `bytes = s1_bytes | ... | sn_bytes | bitmap` + */ + toBytes(): Bytes { + const bytes = new Uint8Array(this.signatures.length * Ed25519Signature.LENGTH + MultiEd25519Signature.BITMAP_LEN); + this.signatures.forEach((k: Ed25519Signature, i: number) => { + bytes.set(k.value, i * Ed25519Signature.LENGTH); + }); + + bytes.set(this.bitmap, this.signatures.length * Ed25519Signature.LENGTH); + + return bytes; + } + + /** + * Helper method to create a bitmap out of the specified bit positions + * @param bits The bitmap positions that should be set. A position starts at index 0. + * Valid position should range between 0 and 31. + * @example + * Here's an example of valid `bits` + * ``` + * [0, 2, 31] + * ``` + * `[0, 2, 31]` means the 1st, 3rd and 32nd bits should be set in the bitmap. + * The result bitmap should be 0b1010000000000000000000000000001 + * + * @returns bitmap that is 32bit long + */ + static createBitmap(bits: Seq): Uint8Array { + // Bits are read from left to right. e.g. 0b10000000 represents the first bit is set in one byte. 
+ // The decimal value of 0b10000000 is 128. + const firstBitInByte = 128; + const bitmap = new Uint8Array([0, 0, 0, 0]); + + // Check if duplicates exist in bits + const dupCheckSet = new Set(); + + bits.forEach((bit: number) => { + if (bit >= MAX_SIGNATURES_SUPPORTED) { + throw new Error(`Invalid bit value ${bit}.`); + } + + if (dupCheckSet.has(bit)) { + throw new Error("Duplicated bits detected."); + } + + dupCheckSet.add(bit); + + const byteOffset = Math.floor(bit / 8); + + let byte = bitmap[byteOffset]; + + byte |= firstBitInByte >> bit % 8; + + bitmap[byteOffset] = byte; + }); + + return bitmap; + } + + serialize(serializer: Serializer): void { + serializer.serializeBytes(this.toBytes()); + } + + static deserialize(deserializer: Deserializer): MultiEd25519Signature { + const bytes = deserializer.deserializeBytes(); + const bitmap = bytes.subarray(bytes.length - 4); + + const sigs: Seq = []; + + for (let i = 0; i < bytes.length - bitmap.length; i += Ed25519Signature.LENGTH) { + const begin = i; + sigs.push(new Ed25519Signature(bytes.subarray(begin, begin + Ed25519Signature.LENGTH))); + } + return new MultiEd25519Signature(sigs, bitmap); + } +} diff --git a/m1/JavaScript-client/src/aptos_types/rotation_proof_challenge.ts b/m1/JavaScript-client/src/aptos_types/rotation_proof_challenge.ts new file mode 100644 index 00000000..eb3b6942 --- /dev/null +++ b/m1/JavaScript-client/src/aptos_types/rotation_proof_challenge.ts @@ -0,0 +1,27 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +import { AccountAddress } from "./account_address"; +import { Serializer } from "../bcs"; + +export class RotationProofChallenge { + constructor( + public readonly accountAddress: AccountAddress, + public readonly moduleName: string, + public readonly structName: string, + public readonly sequenceNumber: number | bigint, + public readonly originator: AccountAddress, + public readonly currentAuthKey: AccountAddress, + public readonly newPublicKey: Uint8Array, + ) {} + + serialize(serializer: Serializer): void { + this.accountAddress.serialize(serializer); + serializer.serializeStr(this.moduleName); + serializer.serializeStr(this.structName); + serializer.serializeU64(this.sequenceNumber); + this.originator.serialize(serializer); + this.currentAuthKey.serialize(serializer); + serializer.serializeBytes(this.newPublicKey); + } +} diff --git a/m1/JavaScript-client/src/aptos_types/token_types.ts b/m1/JavaScript-client/src/aptos_types/token_types.ts new file mode 100644 index 00000000..3200c2bb --- /dev/null +++ b/m1/JavaScript-client/src/aptos_types/token_types.ts @@ -0,0 +1,88 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +import { deserializePropertyMap, PropertyMap, PropertyValue } from "../utils/property_map_serde"; + +export { PropertyMap, PropertyValue }; +export class TokenData { + /** Unique name within this creator's account for this Token's collection */ + collection: string; + + /** Description of Token */ + description: string; + + /** Name of Token */ + name: string; + + /** Optional maximum number of this Token */ + maximum?: number; + + /** Total number of this type of Token */ + supply: number; + + /** URL for additional information / media */ + uri: string; + + /** default properties of token data */ + default_properties: PropertyMap; + + /** mutability config of tokendata fields */ + mutability_config: boolean[]; + + constructor( + collection: string, + description: string, + name: string, + maximum: number, + supply: number, + uri: string, 
+ default_properties: any, + mutability_config: boolean[], + ) { + this.collection = collection; + this.description = description; + this.name = name; + this.maximum = maximum; + this.supply = supply; + this.uri = uri; + this.default_properties = deserializePropertyMap(default_properties); + this.mutability_config = mutability_config; + } +} + +export interface TokenDataId { + /** Token creator address */ + creator: string; + + /** Unique name within this creator's account for this Token's collection */ + collection: string; + + /** Name of Token */ + name: string; +} + +export interface TokenId { + token_data_id: TokenDataId; + + /** version number of the property map */ + property_version: string; +} + +/** server will return string for u64 */ +type U64 = string; + +export class Token { + id: TokenId; + + /** server will return string for u64 */ + amount: U64; + + /** the property map of the token */ + token_properties: PropertyMap; + + constructor(id: TokenId, amount: U64, token_properties: any) { + this.id = id; + this.amount = amount; + this.token_properties = deserializePropertyMap(token_properties); + } +} diff --git a/m1/JavaScript-client/src/aptos_types/transaction.ts b/m1/JavaScript-client/src/aptos_types/transaction.ts new file mode 100644 index 00000000..c3d611dd --- /dev/null +++ b/m1/JavaScript-client/src/aptos_types/transaction.ts @@ -0,0 +1,700 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +/* eslint-disable @typescript-eslint/naming-convention */ +/* eslint-disable class-methods-use-this */ +/* eslint-disable @typescript-eslint/no-unused-vars */ +/* eslint-disable max-classes-per-file */ +import { sha3_256 as sha3Hash } from "@noble/hashes/sha3"; +import { HexString } from "../utils"; +import { + Deserializer, + Serializer, + Uint64, + Bytes, + Seq, + Uint8, + Uint128, + deserializeVector, + serializeVector, + bcsToBytes, + Uint16, + Uint256, +} from "../bcs"; +import { TransactionAuthenticator } from "./authenticator"; +import { Identifier } from "./identifier"; +import { TypeTag } from "./type_tag"; +import { AccountAddress } from "./account_address"; + +export class RawTransaction { + /** + * RawTransactions contain the metadata and payloads that can be submitted to Aptos chain for execution. + * RawTransactions must be signed before Aptos chain can execute them. + * + * @param sender Account address of the sender. + * @param sequence_number Sequence number of this transaction. This must match the sequence number stored in + * the sender's account at the time the transaction executes. + * @param payload Instructions for the Aptos Blockchain, including publishing a module, + * execute a entry function or execute a script payload. + * @param max_gas_amount Maximum total gas to spend for this transaction. The account must have more + * than this gas or the transaction will be discarded during validation. + * @param gas_unit_price Price to be paid per gas unit. + * @param expiration_timestamp_secs The blockchain timestamp at which the blockchain would discard this transaction. + * @param chain_id The chain ID of the blockchain that this transaction is intended to be run on. 
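+ * + * @example An illustrative construction sketch; `senderAddress`, `sequenceNumber`, `chainId`, and `entryFunctionPayload` are assumed placeholders, `TransactionPayloadEntryFunction` and `ChainId` are the types defined later in this file, and the gas/expiry values are arbitrary: + * ``` + * const rawTxn = new RawTransaction( + * AccountAddress.fromHex(senderAddress), + * BigInt(sequenceNumber), + * entryFunctionPayload, // a TransactionPayloadEntryFunction + * BigInt(2000), // max_gas_amount + * BigInt(100), // gas_unit_price + * BigInt(Math.floor(Date.now() / 1000) + 600), // expires in ~10 minutes + * new ChainId(chainId), + * ); + * ```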
+ */ + constructor( + public readonly sender: AccountAddress, + public readonly sequence_number: Uint64, + public readonly payload: TransactionPayload, + public readonly max_gas_amount: Uint64, + public readonly gas_unit_price: Uint64, + public readonly expiration_timestamp_secs: Uint64, + public readonly chain_id: ChainId, + ) {} + + serialize(serializer: Serializer): void { + this.sender.serialize(serializer); + serializer.serializeU64(this.sequence_number); + this.payload.serialize(serializer); + serializer.serializeU64(this.max_gas_amount); + serializer.serializeU64(this.gas_unit_price); + serializer.serializeU64(this.expiration_timestamp_secs); + this.chain_id.serialize(serializer); + } + + static deserialize(deserializer: Deserializer): RawTransaction { + const sender = AccountAddress.deserialize(deserializer); + const sequence_number = deserializer.deserializeU64(); + const payload = TransactionPayload.deserialize(deserializer); + const max_gas_amount = deserializer.deserializeU64(); + const gas_unit_price = deserializer.deserializeU64(); + const expiration_timestamp_secs = deserializer.deserializeU64(); + const chain_id = ChainId.deserialize(deserializer); + return new RawTransaction( + sender, + sequence_number, + payload, + max_gas_amount, + gas_unit_price, + expiration_timestamp_secs, + chain_id, + ); + } +} + +export class Script { + /** + * Scripts contain the Move bytecode payload that can be submitted to Aptos chain for execution. + * @param code Move bytecode + * @param ty_args Type arguments that bytecode requires. + * + * @example + * A coin transfer function has one type argument "CoinType". + * ``` + * public(script) fun transfer(from: &signer, to: address, amount: u64,) + * ``` + * @param args Arguments to the bytecode function. + * + * @example + * A coin transfer function has three arguments "from", "to" and "amount". + * ``` + * public(script) fun transfer(from: &signer, to: address, amount: u64,) + * ``` + */ + constructor( + public readonly code: Bytes, + public readonly ty_args: Seq<TypeTag>, + public readonly args: Seq<TransactionArgument>, + ) {} + + serialize(serializer: Serializer): void { + serializer.serializeBytes(this.code); + serializeVector(this.ty_args, serializer); + serializeVector(this.args, serializer); + } + + static deserialize(deserializer: Deserializer): Script { + const code = deserializer.deserializeBytes(); + const ty_args = deserializeVector(deserializer, TypeTag); + const args = deserializeVector(deserializer, TransactionArgument); + return new Script(code, ty_args, args); + } +} + +export class EntryFunction { + /** + * Contains the payload to run a function within a module. + * @param module_name Fully qualified module name. ModuleId consists of account address and module name. + * @param function_name The function to run. + * @param ty_args Type arguments that move function requires. + * + * @example + * A coin transfer function has one type argument "CoinType". + * ``` + * public(script) fun transfer(from: &signer, to: address, amount: u64,) + * ``` + * @param args Arguments to the move function. + * + * @example + * A coin transfer function has three arguments "from", "to" and "amount". + * ``` + * public(script) fun transfer(from: &signer, to: address, amount: u64,) + * ``` + */ + constructor( + public readonly module_name: ModuleId, + public readonly function_name: Identifier, + public readonly ty_args: Seq<TypeTag>, + public readonly args: Seq<Bytes>, + ) {} + + /** + * + * @param module Fully qualified module name in format "AccountAddress::module_name" e.g.
"0x1::coin" + * @param func Function name + * @param ty_args Type arguments that move function requires. + * + * @example + * A coin transfer function has one type argument "CoinType". + * ``` + * public(script) fun transfer(from: &signer, to: address, amount: u64,) + * ``` + * @param args Arugments to the move function. + * + * @example + * A coin transfer function has three arugments "from", "to" and "amount". + * ``` + * public(script) fun transfer(from: &signer, to: address, amount: u64,) + * ``` + * @returns + */ + static natural(module: string, func: string, ty_args: Seq, args: Seq): EntryFunction { + return new EntryFunction(ModuleId.fromStr(module), new Identifier(func), ty_args, args); + } + + /** + * `natual` is deprecated, please use `natural` + * + * @deprecated. + */ + static natual(module: string, func: string, ty_args: Seq, args: Seq): EntryFunction { + return EntryFunction.natural(module, func, ty_args, args); + } + + serialize(serializer: Serializer): void { + this.module_name.serialize(serializer); + this.function_name.serialize(serializer); + serializeVector(this.ty_args, serializer); + + serializer.serializeU32AsUleb128(this.args.length); + this.args.forEach((item: Bytes) => { + serializer.serializeBytes(item); + }); + } + + static deserialize(deserializer: Deserializer): EntryFunction { + const module_name = ModuleId.deserialize(deserializer); + const function_name = Identifier.deserialize(deserializer); + const ty_args = deserializeVector(deserializer, TypeTag); + + const length = deserializer.deserializeUleb128AsU32(); + const list: Seq = []; + for (let i = 0; i < length; i += 1) { + list.push(deserializer.deserializeBytes()); + } + + const args = list; + return new EntryFunction(module_name, function_name, ty_args, args); + } +} + +export class MultiSigTransactionPayload { + /** + * Contains the payload to run a multisig account transaction. + * @param transaction_payload The payload of the multisig transaction. This can only be EntryFunction for now but + * Script might be supported in the future. + */ + constructor(public readonly transaction_payload: EntryFunction) {} + + serialize(serializer: Serializer): void { + // We can support multiple types of inner transaction payload in the future. + // For now it's only EntryFunction but if we support more types, we need to serialize with the right enum values + // here + serializer.serializeU32AsUleb128(0); + this.transaction_payload.serialize(serializer); + } + + static deserialize(deserializer: Deserializer): MultiSigTransactionPayload { + // TODO: Support other types of payload beside EntryFunction. + // This is the enum value indicating which type of payload the multisig tx contains. + deserializer.deserializeUleb128AsU32(); + return new MultiSigTransactionPayload(EntryFunction.deserialize(deserializer)); + } +} + +export class MultiSig { + /** + * Contains the payload to run a multisig account transaction. + * @param multisig_address The multisig account address the transaction will be executed as. + * @param transaction_payload The payload of the multisig transaction. This is optional when executing a multisig + * transaction whose payload is already stored on chain. + */ + constructor( + public readonly multisig_address: AccountAddress, + public readonly transaction_payload?: MultiSigTransactionPayload, + ) {} + + serialize(serializer: Serializer): void { + this.multisig_address.serialize(serializer); + // Options are encoded with an extra u8 field before the value - 0x0 is none and 0x1 is present. 
+ // We use serializeBool below to create this prefix value. + if (this.transaction_payload === undefined) { + serializer.serializeBool(false); + } else { + serializer.serializeBool(true); + this.transaction_payload.serialize(serializer); + } + } + + static deserialize(deserializer: Deserializer): MultiSig { + const multisig_address = AccountAddress.deserialize(deserializer); + const payloadPresent = deserializer.deserializeBool(); + let transaction_payload; + if (payloadPresent) { + transaction_payload = MultiSigTransactionPayload.deserialize(deserializer); + } + return new MultiSig(multisig_address, transaction_payload); + } +} + +export class Module { + /** + * Contains the bytecode of a Move module that can be published to the Aptos chain. + * @param code Move bytecode of a module. + */ + constructor(public readonly code: Bytes) {} + + serialize(serializer: Serializer): void { + serializer.serializeBytes(this.code); + } + + static deserialize(deserializer: Deserializer): Module { + const code = deserializer.deserializeBytes(); + return new Module(code); + } +} + +export class ModuleId { + /** + * Full name of a module. + * @param address The account address. + * @param name The name of the module under the account at "address". + */ + constructor(public readonly address: AccountAddress, public readonly name: Identifier) {} + + /** + * Converts a string literal to a ModuleId + * @param moduleId String literal in format "AccountAddress::module_name", + * e.g. "0x1::coin" + * @returns + */ + static fromStr(moduleId: string): ModuleId { + const parts = moduleId.split("::"); + if (parts.length !== 2) { + throw new Error("Invalid module id."); + } + return new ModuleId(AccountAddress.fromHex(new HexString(parts[0])), new Identifier(parts[1])); + } + + serialize(serializer: Serializer): void { + this.address.serialize(serializer); + this.name.serialize(serializer); + } + + static deserialize(deserializer: Deserializer): ModuleId { + const address = AccountAddress.deserialize(deserializer); + const name = Identifier.deserialize(deserializer); + return new ModuleId(address, name); + } +} + +export class ChangeSet { + serialize(serializer: Serializer): void { + throw new Error("Not implemented."); + } + + static deserialize(deserializer: Deserializer): ChangeSet { + throw new Error("Not implemented."); + } +} + +export class WriteSet { + serialize(serializer: Serializer): void { + throw new Error("Not implmented."); + } + + static deserialize(deserializer: Deserializer): WriteSet { + throw new Error("Not implmented."); + } +} + +export class SignedTransaction { + /** + * A SignedTransaction consists of a raw transaction and an authenticator. The authenticator + * contains a client's public key and the signature of the raw transaction. + * + * @see {@link https://aptos.dev/guides/creating-a-signed-transaction/ | Creating a Signed Transaction} + * + * @param raw_txn + * @param authenticator Contains a client's public key and the signature of the raw transaction. + * Authenticator has 3 flavors: single signature, multi-signature and multi-agent. + * @see authenticator.ts for details. 
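+   *
+   * @example
+   * A minimal sketch of BCS round-tripping; `signedTxn` is assumed to have been built and
+   * signed elsewhere:
+   * ```ts
+   * const bytes = bcsToBytes(signedTxn);
+   * const decoded = SignedTransaction.deserialize(new Deserializer(bytes));
+   * ```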
+ */ + constructor(public readonly raw_txn: RawTransaction, public readonly authenticator: TransactionAuthenticator) {} + + serialize(serializer: Serializer): void { + this.raw_txn.serialize(serializer); + this.authenticator.serialize(serializer); + } + + static deserialize(deserializer: Deserializer): SignedTransaction { + const raw_txn = RawTransaction.deserialize(deserializer); + const authenticator = TransactionAuthenticator.deserialize(deserializer); + return new SignedTransaction(raw_txn, authenticator); + } +} + +export abstract class RawTransactionWithData { + abstract serialize(serializer: Serializer): void; + + static deserialize(deserializer: Deserializer): RawTransactionWithData { + const index = deserializer.deserializeUleb128AsU32(); + switch (index) { + case 0: + return MultiAgentRawTransaction.load(deserializer); + default: + throw new Error(`Unknown variant index for RawTransactionWithData: ${index}`); + } + } +} + +export class MultiAgentRawTransaction extends RawTransactionWithData { + constructor( + public readonly raw_txn: RawTransaction, + public readonly secondary_signer_addresses: Seq, + ) { + super(); + } + + serialize(serializer: Serializer): void { + // enum variant index + serializer.serializeU32AsUleb128(0); + this.raw_txn.serialize(serializer); + serializeVector(this.secondary_signer_addresses, serializer); + } + + static load(deserializer: Deserializer): MultiAgentRawTransaction { + const rawTxn = RawTransaction.deserialize(deserializer); + const secondarySignerAddresses = deserializeVector(deserializer, AccountAddress); + + return new MultiAgentRawTransaction(rawTxn, secondarySignerAddresses); + } +} + +export abstract class TransactionPayload { + abstract serialize(serializer: Serializer): void; + + static deserialize(deserializer: Deserializer): TransactionPayload { + const index = deserializer.deserializeUleb128AsU32(); + switch (index) { + case 0: + return TransactionPayloadScript.load(deserializer); + // TODO: change to 1 once ModuleBundle has been removed from rust + case 2: + return TransactionPayloadEntryFunction.load(deserializer); + case 3: + return TransactionPayloadMultisig.load(deserializer); + default: + throw new Error(`Unknown variant index for TransactionPayload: ${index}`); + } + } +} + +export class TransactionPayloadScript extends TransactionPayload { + constructor(public readonly value: Script) { + super(); + } + + serialize(serializer: Serializer): void { + serializer.serializeU32AsUleb128(0); + this.value.serialize(serializer); + } + + static load(deserializer: Deserializer): TransactionPayloadScript { + const value = Script.deserialize(deserializer); + return new TransactionPayloadScript(value); + } +} + +export class TransactionPayloadEntryFunction extends TransactionPayload { + constructor(public readonly value: EntryFunction) { + super(); + } + + serialize(serializer: Serializer): void { + serializer.serializeU32AsUleb128(2); + this.value.serialize(serializer); + } + + static load(deserializer: Deserializer): TransactionPayloadEntryFunction { + const value = EntryFunction.deserialize(deserializer); + return new TransactionPayloadEntryFunction(value); + } +} + +export class TransactionPayloadMultisig extends TransactionPayload { + constructor(public readonly value: MultiSig) { + super(); + } + + serialize(serializer: Serializer): void { + serializer.serializeU32AsUleb128(3); + this.value.serialize(serializer); + } + + static load(deserializer: Deserializer): TransactionPayloadMultisig { + const value = 
MultiSig.deserialize(deserializer); + return new TransactionPayloadMultisig(value); + } +} + +export class ChainId { + constructor(public readonly value: Uint8) {} + + serialize(serializer: Serializer): void { + serializer.serializeU8(this.value); + } + + static deserialize(deserializer: Deserializer): ChainId { + const value = deserializer.deserializeU8(); + return new ChainId(value); + } +} + +export abstract class TransactionArgument { + abstract serialize(serializer: Serializer): void; + + static deserialize(deserializer: Deserializer): TransactionArgument { + const index = deserializer.deserializeUleb128AsU32(); + switch (index) { + case 0: + return TransactionArgumentU8.load(deserializer); + case 1: + return TransactionArgumentU64.load(deserializer); + case 2: + return TransactionArgumentU128.load(deserializer); + case 3: + return TransactionArgumentAddress.load(deserializer); + case 4: + return TransactionArgumentU8Vector.load(deserializer); + case 5: + return TransactionArgumentBool.load(deserializer); + case 6: + return TransactionArgumentU16.load(deserializer); + case 7: + return TransactionArgumentU32.load(deserializer); + case 8: + return TransactionArgumentU256.load(deserializer); + default: + throw new Error(`Unknown variant index for TransactionArgument: ${index}`); + } + } +} + +export class TransactionArgumentU8 extends TransactionArgument { + constructor(public readonly value: Uint8) { + super(); + } + + serialize(serializer: Serializer): void { + serializer.serializeU32AsUleb128(0); + serializer.serializeU8(this.value); + } + + static load(deserializer: Deserializer): TransactionArgumentU8 { + const value = deserializer.deserializeU8(); + return new TransactionArgumentU8(value); + } +} + +export class TransactionArgumentU16 extends TransactionArgument { + constructor(public readonly value: Uint16) { + super(); + } + + serialize(serializer: Serializer): void { + serializer.serializeU32AsUleb128(6); + serializer.serializeU16(this.value); + } + + static load(deserializer: Deserializer): TransactionArgumentU16 { + const value = deserializer.deserializeU16(); + return new TransactionArgumentU16(value); + } +} + +export class TransactionArgumentU32 extends TransactionArgument { + constructor(public readonly value: Uint16) { + super(); + } + + serialize(serializer: Serializer): void { + serializer.serializeU32AsUleb128(7); + serializer.serializeU32(this.value); + } + + static load(deserializer: Deserializer): TransactionArgumentU32 { + const value = deserializer.deserializeU32(); + return new TransactionArgumentU32(value); + } +} + +export class TransactionArgumentU64 extends TransactionArgument { + constructor(public readonly value: Uint64) { + super(); + } + + serialize(serializer: Serializer): void { + serializer.serializeU32AsUleb128(1); + serializer.serializeU64(this.value); + } + + static load(deserializer: Deserializer): TransactionArgumentU64 { + const value = deserializer.deserializeU64(); + return new TransactionArgumentU64(value); + } +} + +export class TransactionArgumentU128 extends TransactionArgument { + constructor(public readonly value: Uint128) { + super(); + } + + serialize(serializer: Serializer): void { + serializer.serializeU32AsUleb128(2); + serializer.serializeU128(this.value); + } + + static load(deserializer: Deserializer): TransactionArgumentU128 { + const value = deserializer.deserializeU128(); + return new TransactionArgumentU128(value); + } +} + +export class TransactionArgumentU256 extends TransactionArgument { + constructor(public readonly value: 
Uint256) { + super(); + } + + serialize(serializer: Serializer): void { + serializer.serializeU32AsUleb128(8); + serializer.serializeU256(this.value); + } + + static load(deserializer: Deserializer): TransactionArgumentU256 { + const value = deserializer.deserializeU256(); + return new TransactionArgumentU256(value); + } +} + +export class TransactionArgumentAddress extends TransactionArgument { + constructor(public readonly value: AccountAddress) { + super(); + } + + serialize(serializer: Serializer): void { + serializer.serializeU32AsUleb128(3); + this.value.serialize(serializer); + } + + static load(deserializer: Deserializer): TransactionArgumentAddress { + const value = AccountAddress.deserialize(deserializer); + return new TransactionArgumentAddress(value); + } +} + +export class TransactionArgumentU8Vector extends TransactionArgument { + constructor(public readonly value: Bytes) { + super(); + } + + serialize(serializer: Serializer): void { + serializer.serializeU32AsUleb128(4); + serializer.serializeBytes(this.value); + } + + static load(deserializer: Deserializer): TransactionArgumentU8Vector { + const value = deserializer.deserializeBytes(); + return new TransactionArgumentU8Vector(value); + } +} + +export class TransactionArgumentBool extends TransactionArgument { + constructor(public readonly value: boolean) { + super(); + } + + serialize(serializer: Serializer): void { + serializer.serializeU32AsUleb128(5); + serializer.serializeBool(this.value); + } + + static load(deserializer: Deserializer): TransactionArgumentBool { + const value = deserializer.deserializeBool(); + return new TransactionArgumentBool(value); + } +} + +export abstract class Transaction { + abstract serialize(serializer: Serializer): void; + + abstract hash(): Bytes; + + getHashSalt(): Bytes { + const hash = sha3Hash.create(); + hash.update("APTOS::Transaction"); + return hash.digest(); + } + + static deserialize(deserializer: Deserializer): Transaction { + const index = deserializer.deserializeUleb128AsU32(); + switch (index) { + case 0: + return UserTransaction.load(deserializer); + default: + throw new Error(`Unknown variant index for Transaction: ${index}`); + } + } +} + +export class UserTransaction extends Transaction { + constructor(public readonly value: SignedTransaction) { + super(); + } + + hash(): Bytes { + const hash = sha3Hash.create(); + hash.update(this.getHashSalt()); + hash.update(bcsToBytes(this)); + return hash.digest(); + } + + serialize(serializer: Serializer): void { + serializer.serializeU32AsUleb128(0); + this.value.serialize(serializer); + } + + static load(deserializer: Deserializer): UserTransaction { + return new UserTransaction(SignedTransaction.deserialize(deserializer)); + } +} diff --git a/m1/JavaScript-client/src/aptos_types/type_tag.ts b/m1/JavaScript-client/src/aptos_types/type_tag.ts new file mode 100644 index 00000000..29310360 --- /dev/null +++ b/m1/JavaScript-client/src/aptos_types/type_tag.ts @@ -0,0 +1,458 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +/* eslint-disable @typescript-eslint/no-unused-vars */ +/* eslint-disable class-methods-use-this */ +/* eslint-disable max-classes-per-file */ +import { AccountAddress } from "./account_address"; +import { Deserializer, Seq, Serializer, deserializeVector, serializeVector } from "../bcs"; +import { Identifier } from "./identifier"; + +export abstract class TypeTag { + abstract serialize(serializer: Serializer): void; + + static deserialize(deserializer: Deserializer): TypeTag { + const 
index = deserializer.deserializeUleb128AsU32();
+    switch (index) {
+      case 0:
+        return TypeTagBool.load(deserializer);
+      case 1:
+        return TypeTagU8.load(deserializer);
+      case 2:
+        return TypeTagU64.load(deserializer);
+      case 3:
+        return TypeTagU128.load(deserializer);
+      case 4:
+        return TypeTagAddress.load(deserializer);
+      case 5:
+        return TypeTagSigner.load(deserializer);
+      case 6:
+        return TypeTagVector.load(deserializer);
+      case 7:
+        return TypeTagStruct.load(deserializer);
+      case 8:
+        return TypeTagU16.load(deserializer);
+      case 9:
+        return TypeTagU32.load(deserializer);
+      case 10:
+        return TypeTagU256.load(deserializer);
+      default:
+        throw new Error(`Unknown variant index for TypeTag: ${index}`);
+    }
+  }
+}
+
+export class TypeTagBool extends TypeTag {
+  serialize(serializer: Serializer): void {
+    serializer.serializeU32AsUleb128(0);
+  }
+
+  static load(deserializer: Deserializer): TypeTagBool {
+    return new TypeTagBool();
+  }
+}
+
+export class TypeTagU8 extends TypeTag {
+  serialize(serializer: Serializer): void {
+    serializer.serializeU32AsUleb128(1);
+  }
+
+  static load(_deserializer: Deserializer): TypeTagU8 {
+    return new TypeTagU8();
+  }
+}
+
+export class TypeTagU16 extends TypeTag {
+  serialize(serializer: Serializer): void {
+    // Variant index must match the "case 8" branch in TypeTag.deserialize above.
+    serializer.serializeU32AsUleb128(8);
+  }
+
+  static load(_deserializer: Deserializer): TypeTagU16 {
+    return new TypeTagU16();
+  }
+}
+
+export class TypeTagU32 extends TypeTag {
+  serialize(serializer: Serializer): void {
+    // Variant index must match the "case 9" branch in TypeTag.deserialize above.
+    serializer.serializeU32AsUleb128(9);
+  }
+
+  static load(_deserializer: Deserializer): TypeTagU32 {
+    return new TypeTagU32();
+  }
+}
+
+export class TypeTagU64 extends TypeTag {
+  serialize(serializer: Serializer): void {
+    serializer.serializeU32AsUleb128(2);
+  }
+
+  static load(_deserializer: Deserializer): TypeTagU64 {
+    return new TypeTagU64();
+  }
+}
+
+export class TypeTagU128 extends TypeTag {
+  serialize(serializer: Serializer): void {
+    serializer.serializeU32AsUleb128(3);
+  }
+
+  static load(_deserializer: Deserializer): TypeTagU128 {
+    return new TypeTagU128();
+  }
+}
+
+export class TypeTagU256 extends TypeTag {
+  serialize(serializer: Serializer): void {
+    // Variant index must match the "case 10" branch in TypeTag.deserialize above.
+    serializer.serializeU32AsUleb128(10);
+  }
+
+  static load(_deserializer: Deserializer): TypeTagU256 {
+    return new TypeTagU256();
+  }
+}
+
+export class TypeTagAddress extends TypeTag {
+  serialize(serializer: Serializer): void {
+    serializer.serializeU32AsUleb128(4);
+  }
+
+  static load(_deserializer: Deserializer): TypeTagAddress {
+    return new TypeTagAddress();
+  }
+}
+
+export class TypeTagSigner extends TypeTag {
+  serialize(serializer: Serializer): void {
+    serializer.serializeU32AsUleb128(5);
+  }
+
+  static load(_deserializer: Deserializer): TypeTagSigner {
+    return new TypeTagSigner();
+  }
+}
+
+export class TypeTagVector extends TypeTag {
+  constructor(public readonly value: TypeTag) {
+    super();
+  }
+
+  serialize(serializer: Serializer): void {
+    serializer.serializeU32AsUleb128(6);
+    this.value.serialize(serializer);
+  }
+
+  static load(deserializer: Deserializer): TypeTagVector {
+    const value = TypeTag.deserialize(deserializer);
+    return new TypeTagVector(value);
+  }
+}
+
+export class TypeTagStruct extends TypeTag {
+  constructor(public readonly value: StructTag) {
+    super();
+  }
+
+  serialize(serializer: Serializer): void {
+    serializer.serializeU32AsUleb128(7);
+    this.value.serialize(serializer);
+  }
+
+  static load(deserializer: Deserializer): TypeTagStruct {
+    const value = StructTag.deserialize(deserializer);
+    return new TypeTagStruct(value);
+  }
+
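+  /** Returns true when this struct tag refers to the standard string type, `0x1::string::String`. */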
+ isStringTypeTag(): boolean { + if ( + this.value.module_name.value === "string" && + this.value.name.value === "String" && + this.value.address.toHexString() === AccountAddress.fromHex("0x1").toHexString() + ) { + return true; + } + return false; + } +} + +export class StructTag { + constructor( + public readonly address: AccountAddress, + public readonly module_name: Identifier, + public readonly name: Identifier, + public readonly type_args: Seq, + ) {} + + /** + * Converts a string literal to a StructTag + * @param structTag String literal in format "AcountAddress::module_name::ResourceName", + * e.g. "0x1::aptos_coin::AptosCoin" + * @returns + */ + static fromString(structTag: string): StructTag { + // Use the TypeTagParser to parse the string literal into a TypeTagStruct + const typeTagStruct = new TypeTagParser(structTag).parseTypeTag() as TypeTagStruct; + + // Convert and return as a StructTag + return new StructTag( + typeTagStruct.value.address, + typeTagStruct.value.module_name, + typeTagStruct.value.name, + typeTagStruct.value.type_args, + ); + } + + serialize(serializer: Serializer): void { + this.address.serialize(serializer); + this.module_name.serialize(serializer); + this.name.serialize(serializer); + serializeVector(this.type_args, serializer); + } + + static deserialize(deserializer: Deserializer): StructTag { + const address = AccountAddress.deserialize(deserializer); + const moduleName = Identifier.deserialize(deserializer); + const name = Identifier.deserialize(deserializer); + const typeArgs = deserializeVector(deserializer, TypeTag); + return new StructTag(address, moduleName, name, typeArgs); + } +} + +export const stringStructTag = new StructTag( + AccountAddress.fromHex("0x1"), + new Identifier("string"), + new Identifier("String"), + [], +); + +function bail(message: string) { + throw new TypeTagParserError(message); +} + +function isWhiteSpace(c: string): boolean { + if (c.match(/\s/)) { + return true; + } + return false; +} + +function isValidAlphabetic(c: string): boolean { + if (c.match(/[_A-Za-z0-9]/g)) { + return true; + } + return false; +} + +// Generic format is T - for example T1, T2, T10 +function isGeneric(c: string): boolean { + if (c.match(/T\d+/g)) { + return true; + } + return false; +} + +type TokenType = string; +type TokenValue = string; +type Token = [TokenType, TokenValue]; + +// Returns Token and Token byte size +function nextToken(tagStr: string, pos: number): [Token, number] { + const c = tagStr[pos]; + if (c === ":") { + if (tagStr.slice(pos, pos + 2) === "::") { + return [["COLON", "::"], 2]; + } + bail("Unrecognized token."); + } else if (c === "<") { + return [["LT", "<"], 1]; + } else if (c === ">") { + return [["GT", ">"], 1]; + } else if (c === ",") { + return [["COMMA", ","], 1]; + } else if (isWhiteSpace(c)) { + let res = ""; + for (let i = pos; i < tagStr.length; i += 1) { + const char = tagStr[i]; + if (isWhiteSpace(char)) { + res = `${res}${char}`; + } else { + break; + } + } + return [["SPACE", res], res.length]; + } else if (isValidAlphabetic(c)) { + let res = ""; + for (let i = pos; i < tagStr.length; i += 1) { + const char = tagStr[i]; + if (isValidAlphabetic(char)) { + res = `${res}${char}`; + } else { + break; + } + } + if (isGeneric(res)) { + return [["GENERIC", res], res.length]; + } + return [["IDENT", res], res.length]; + } + throw new Error("Unrecognized token."); +} + +function tokenize(tagStr: string): Token[] { + let pos = 0; + const tokens = []; + while (pos < tagStr.length) { + const [token, size] = 
nextToken(tagStr, pos); + if (token[0] !== "SPACE") { + tokens.push(token); + } + pos += size; + } + return tokens; +} + +/** + * Parser to parse a type tag string + */ +export class TypeTagParser { + private readonly tokens: Token[]; + + private readonly typeTags: string[] = []; + + constructor(tagStr: string, typeTags?: string[]) { + this.tokens = tokenize(tagStr); + this.typeTags = typeTags || []; + } + + private consume(targetToken: string) { + const token = this.tokens.shift(); + if (!token || token[1] !== targetToken) { + bail("Invalid type tag."); + } + } + + private parseCommaList(endToken: TokenValue, allowTraillingComma: boolean): TypeTag[] { + const res: TypeTag[] = []; + if (this.tokens.length <= 0) { + bail("Invalid type tag."); + } + + while (this.tokens[0][1] !== endToken) { + res.push(this.parseTypeTag()); + + if (this.tokens.length > 0 && this.tokens[0][1] === endToken) { + break; + } + + this.consume(","); + if (this.tokens.length > 0 && this.tokens[0][1] === endToken && allowTraillingComma) { + break; + } + + if (this.tokens.length <= 0) { + bail("Invalid type tag."); + } + } + return res; + } + + parseTypeTag(): TypeTag { + if (this.tokens.length === 0) { + bail("Invalid type tag."); + } + + // Pop left most element out + const [tokenTy, tokenVal] = this.tokens.shift()!; + + if (tokenVal === "u8") { + return new TypeTagU8(); + } + if (tokenVal === "u16") { + return new TypeTagU16(); + } + if (tokenVal === "u32") { + return new TypeTagU32(); + } + if (tokenVal === "u64") { + return new TypeTagU64(); + } + if (tokenVal === "u128") { + return new TypeTagU128(); + } + if (tokenVal === "u256") { + return new TypeTagU256(); + } + if (tokenVal === "bool") { + return new TypeTagBool(); + } + if (tokenVal === "address") { + return new TypeTagAddress(); + } + if (tokenVal === "vector") { + this.consume("<"); + const res = this.parseTypeTag(); + this.consume(">"); + return new TypeTagVector(res); + } + if (tokenVal === "string") { + return new StructTag(AccountAddress.fromHex("0x1"), new Identifier("string"), new Identifier("String"), []); + } + if (tokenTy === "IDENT" && (tokenVal.startsWith("0x") || tokenVal.startsWith("0X"))) { + const address = tokenVal; + this.consume("::"); + const [moduleTokenTy, module] = this.tokens.shift()!; + if (moduleTokenTy !== "IDENT") { + bail("Invalid type tag."); + } + this.consume("::"); + const [nameTokenTy, name] = this.tokens.shift()!; + if (nameTokenTy !== "IDENT") { + bail("Invalid type tag."); + } + + // an Object `0x1::object::Object` doesn't hold a real type, it points to an address + // therefore, we parse it as an address and dont need to care/parse the `T` type + if (module === "object" && name === "Object") { + // to support a nested type tag, i.e 0x1::some_module::SomeResource<0x1::object::Object>, we want + // to remove the `` part from the tokens list so we dont parse it and can keep parse the type tag. 
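+        // splice(0, 3) drops those three tokens ("<", the type parameter token, ">") so parsing continues past them.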
+ this.tokens.splice(0, 3); + return new TypeTagAddress(); + } + + let tyTags: TypeTag[] = []; + // Check if the struct has ty args + if (this.tokens.length > 0 && this.tokens[0][1] === "<") { + this.consume("<"); + tyTags = this.parseCommaList(">", true); + this.consume(">"); + } + + const structTag = new StructTag( + AccountAddress.fromHex(address), + new Identifier(module), + new Identifier(name), + tyTags, + ); + return new TypeTagStruct(structTag); + } + if (tokenTy === "GENERIC") { + if (this.typeTags.length === 0) { + bail("Can't convert generic type since no typeTags were specified."); + } + // a generic tokenVal has the format of `T`, for example `T1`. + // The digit (i.e 1) indicates the the index of this type in the typeTags array. + // For a tokenVal == T1, should be parsed as the type in typeTags[1] + const idx = parseInt(tokenVal.substring(1), 10); + return new TypeTagParser(this.typeTags[idx]).parseTypeTag(); + } + + throw new Error("Invalid type tag."); + } +} + +export class TypeTagParserError extends Error { + constructor(message: string) { + super(message); + this.name = "TypeTagParserError"; + } +} diff --git a/m1/JavaScript-client/src/bcs/consts.ts b/m1/JavaScript-client/src/bcs/consts.ts new file mode 100644 index 00000000..85ed2a8b --- /dev/null +++ b/m1/JavaScript-client/src/bcs/consts.ts @@ -0,0 +1,12 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +import { Uint128, Uint16, Uint32, Uint64, Uint8, Uint256 } from "./types"; + +// Upper bound values for uint8, uint16, uint64 and uint128 +export const MAX_U8_NUMBER: Uint8 = 2 ** 8 - 1; +export const MAX_U16_NUMBER: Uint16 = 2 ** 16 - 1; +export const MAX_U32_NUMBER: Uint32 = 2 ** 32 - 1; +export const MAX_U64_BIG_INT: Uint64 = BigInt(2 ** 64) - BigInt(1); +export const MAX_U128_BIG_INT: Uint128 = BigInt(2 ** 128) - BigInt(1); +export const MAX_U256_BIG_INT: Uint256 = BigInt(2 ** 256) - BigInt(1); diff --git a/m1/JavaScript-client/src/bcs/deserializer.ts b/m1/JavaScript-client/src/bcs/deserializer.ts new file mode 100644 index 00000000..337011a6 --- /dev/null +++ b/m1/JavaScript-client/src/bcs/deserializer.ts @@ -0,0 +1,188 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +/* eslint-disable no-bitwise */ +import { MAX_U32_NUMBER } from "./consts"; +import { Bytes, Uint128, Uint16, Uint256, Uint32, Uint64, Uint8 } from "./types"; + +export class Deserializer { + private buffer: ArrayBuffer; + + private offset: number; + + constructor(data: Bytes) { + // copies data to prevent outside mutation of buffer. + this.buffer = new ArrayBuffer(data.length); + new Uint8Array(this.buffer).set(data, 0); + this.offset = 0; + } + + private read(length: number): ArrayBuffer { + if (this.offset + length > this.buffer.byteLength) { + throw new Error("Reached to the end of buffer"); + } + + const bytes = this.buffer.slice(this.offset, this.offset + length); + this.offset += length; + return bytes; + } + + /** + * Deserializes a string. UTF8 string is supported. Reads the string's bytes length "l" first, + * and then reads "l" bytes of content. Decodes the byte array into a string. + * + * BCS layout for "string": string_length | string_content. string_length is the bytes length of + * the string that is uleb128 encoded. string_length is a u32 integer. 
+ * + * @example + * ```ts + * const deserializer = new Deserializer(new Uint8Array([24, 0xc3, 0xa7, 0xc3, 0xa5, 0xe2, 0x88, 0x9e, + * 0xe2, 0x89, 0xa0, 0xc2, 0xa2, 0xc3, 0xb5, 0xc3, 0x9f, 0xe2, 0x88, 0x82, 0xc6, 0x92, 0xe2, 0x88, 0xab])); + * assert(deserializer.deserializeStr() === "çå∞≠¢õß∂ƒ∫"); + * ``` + */ + deserializeStr(): string { + const value = this.deserializeBytes(); + const textDecoder = new TextDecoder(); + return textDecoder.decode(value); + } + + /** + * Deserializes an array of bytes. + * + * BCS layout for "bytes": bytes_length | bytes. bytes_length is the length of the bytes array that is + * uleb128 encoded. bytes_length is a u32 integer. + */ + deserializeBytes(): Bytes { + const len = this.deserializeUleb128AsU32(); + return new Uint8Array(this.read(len)); + } + + /** + * Deserializes an array of bytes. The number of bytes to read is already known. + * + */ + deserializeFixedBytes(len: number): Bytes { + return new Uint8Array(this.read(len)); + } + + /** + * Deserializes a boolean value. + * + * BCS layout for "boolean": One byte. "0x01" for True and "0x00" for False. + */ + deserializeBool(): boolean { + const bool = new Uint8Array(this.read(1))[0]; + if (bool !== 1 && bool !== 0) { + throw new Error("Invalid boolean value"); + } + return bool === 1; + } + + /** + * Deserializes a uint8 number. + * + * BCS layout for "uint8": One byte. Binary format in little-endian representation. + */ + deserializeU8(): Uint8 { + return new DataView(this.read(1)).getUint8(0); + } + + /** + * Deserializes a uint16 number. + * + * BCS layout for "uint16": Two bytes. Binary format in little-endian representation. + * @example + * ```ts + * const deserializer = new Deserializer(new Uint8Array([0x34, 0x12])); + * assert(deserializer.deserializeU16() === 4660); + * ``` + */ + deserializeU16(): Uint16 { + return new DataView(this.read(2)).getUint16(0, true); + } + + /** + * Deserializes a uint32 number. + * + * BCS layout for "uint32": Four bytes. Binary format in little-endian representation. + * @example + * ```ts + * const deserializer = new Deserializer(new Uint8Array([0x78, 0x56, 0x34, 0x12])); + * assert(deserializer.deserializeU32() === 305419896); + * ``` + */ + deserializeU32(): Uint32 { + return new DataView(this.read(4)).getUint32(0, true); + } + + /** + * Deserializes a uint64 number. + * + * BCS layout for "uint64": Eight bytes. Binary format in little-endian representation. + * @example + * ```ts + * const deserializer = new Deserializer(new Uint8Array([0x00, 0xEF, 0xCD, 0xAB, 0x78, 0x56, 0x34, 0x12])); + * assert(deserializer.deserializeU64() === 1311768467750121216); + * ``` + */ + deserializeU64(): Uint64 { + const low = this.deserializeU32(); + const high = this.deserializeU32(); + + // combine the two 32-bit values and return (little endian) + return BigInt((BigInt(high) << BigInt(32)) | BigInt(low)); + } + + /** + * Deserializes a uint128 number. + * + * BCS layout for "uint128": Sixteen bytes. Binary format in little-endian representation. + */ + deserializeU128(): Uint128 { + const low = this.deserializeU64(); + const high = this.deserializeU64(); + + // combine the two 64-bit values and return (little endian) + return BigInt((high << BigInt(64)) | low); + } + + /** + * Deserializes a uint256 number. + * + * BCS layout for "uint256": Thirty-two bytes. Binary format in little-endian representation. 
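+   *
+   * @example
+   * An illustrative read of the value 1 from 32 little-endian bytes:
+   * ```ts
+   * const bytes = new Uint8Array(32);
+   * bytes[0] = 1;
+   * const deserializer = new Deserializer(bytes);
+   * assert(deserializer.deserializeU256() === BigInt(1));
+   * ```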
+ */ + deserializeU256(): Uint256 { + const low = this.deserializeU128(); + const high = this.deserializeU128(); + + // combine the two 128-bit values and return (little endian) + return BigInt((high << BigInt(128)) | low); + } + + /** + * Deserializes a uleb128 encoded uint32 number. + * + * BCS use uleb128 encoding in two cases: (1) lengths of variable-length sequences and (2) tags of enum values + */ + deserializeUleb128AsU32(): Uint32 { + let value: bigint = BigInt(0); + let shift = 0; + + while (value < MAX_U32_NUMBER) { + const byte = this.deserializeU8(); + value |= BigInt(byte & 0x7f) << BigInt(shift); + + if ((byte & 0x80) === 0) { + break; + } + shift += 7; + } + + if (value > MAX_U32_NUMBER) { + throw new Error("Overflow while parsing uleb128-encoded uint32 value"); + } + + return Number(value); + } +} diff --git a/m1/JavaScript-client/src/bcs/helper.ts b/m1/JavaScript-client/src/bcs/helper.ts new file mode 100644 index 00000000..21059c0a --- /dev/null +++ b/m1/JavaScript-client/src/bcs/helper.ts @@ -0,0 +1,106 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +import { Deserializer } from "./deserializer"; +import { Serializer } from "./serializer"; +import { AnyNumber, Bytes, Seq, Uint16, Uint32, Uint8 } from "./types"; + +interface Serializable { + serialize(serializer: Serializer): void; +} + +/** + * Serializes a vector values that are "Serializable". + */ +export function serializeVector(value: Seq, serializer: Serializer): void { + serializer.serializeU32AsUleb128(value.length); + value.forEach((item: T) => { + item.serialize(serializer); + }); +} + +/** + * Serializes a vector with specified item serialization function. + * Very dynamic function and bypasses static typechecking. + */ +export function serializeVectorWithFunc(value: any[], func: string): Bytes { + const serializer = new Serializer(); + serializer.serializeU32AsUleb128(value.length); + const f = (serializer as any)[func]; + value.forEach((item) => { + f.call(serializer, item); + }); + return serializer.getBytes(); +} + +/** + * Deserializes a vector of values. 
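+ * The BCS layout is a uleb128-encoded length followed by that many elements; `cls` must
+ * provide a static `deserialize(deserializer)` method, e.g. `deserializeVector(deserializer, TypeTag)`.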
+ */ +export function deserializeVector(deserializer: Deserializer, cls: any): any[] { + const length = deserializer.deserializeUleb128AsU32(); + const list: Seq = []; + for (let i = 0; i < length; i += 1) { + list.push(cls.deserialize(deserializer)); + } + return list; +} + +export function bcsToBytes(value: T): Bytes { + const serializer = new Serializer(); + value.serialize(serializer); + return serializer.getBytes(); +} + +export function bcsSerializeUint64(value: AnyNumber): Bytes { + const serializer = new Serializer(); + serializer.serializeU64(value); + return serializer.getBytes(); +} + +export function bcsSerializeU8(value: Uint8): Bytes { + const serializer = new Serializer(); + serializer.serializeU8(value); + return serializer.getBytes(); +} + +export function bcsSerializeU16(value: Uint16): Bytes { + const serializer = new Serializer(); + serializer.serializeU16(value); + return serializer.getBytes(); +} + +export function bcsSerializeU32(value: Uint32): Bytes { + const serializer = new Serializer(); + serializer.serializeU32(value); + return serializer.getBytes(); +} + +export function bcsSerializeU128(value: AnyNumber): Bytes { + const serializer = new Serializer(); + serializer.serializeU128(value); + return serializer.getBytes(); +} + +export function bcsSerializeBool(value: boolean): Bytes { + const serializer = new Serializer(); + serializer.serializeBool(value); + return serializer.getBytes(); +} + +export function bcsSerializeStr(value: string): Bytes { + const serializer = new Serializer(); + serializer.serializeStr(value); + return serializer.getBytes(); +} + +export function bcsSerializeBytes(value: Bytes): Bytes { + const serializer = new Serializer(); + serializer.serializeBytes(value); + return serializer.getBytes(); +} + +export function bcsSerializeFixedBytes(value: Bytes): Bytes { + const serializer = new Serializer(); + serializer.serializeFixedBytes(value); + return serializer.getBytes(); +} diff --git a/m1/JavaScript-client/src/bcs/index.ts b/m1/JavaScript-client/src/bcs/index.ts new file mode 100644 index 00000000..db399051 --- /dev/null +++ b/m1/JavaScript-client/src/bcs/index.ts @@ -0,0 +1,7 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +export * from "./types"; +export * from "./serializer"; +export * from "./deserializer"; +export * from "./helper"; diff --git a/m1/JavaScript-client/src/bcs/serializer.ts b/m1/JavaScript-client/src/bcs/serializer.ts new file mode 100644 index 00000000..b60bdbb6 --- /dev/null +++ b/m1/JavaScript-client/src/bcs/serializer.ts @@ -0,0 +1,240 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +/* eslint-disable no-bitwise */ +import { + MAX_U128_BIG_INT, + MAX_U16_NUMBER, + MAX_U32_NUMBER, + MAX_U64_BIG_INT, + MAX_U8_NUMBER, + MAX_U256_BIG_INT, +} from "./consts"; +import { AnyNumber, Bytes, Uint16, Uint32, Uint8 } from "./types"; + +export class Serializer { + private buffer: ArrayBuffer; + + private offset: number; + + constructor() { + this.buffer = new ArrayBuffer(64); + this.offset = 0; + } + + private ensureBufferWillHandleSize(bytes: number) { + while (this.buffer.byteLength < this.offset + bytes) { + const newBuffer = new ArrayBuffer(this.buffer.byteLength * 2); + new Uint8Array(newBuffer).set(new Uint8Array(this.buffer)); + this.buffer = newBuffer; + } + } + + protected serialize(values: Bytes) { + this.ensureBufferWillHandleSize(values.length); + new Uint8Array(this.buffer, this.offset).set(values); + this.offset += values.length; + } + + private 
serializeWithFunction( + fn: (byteOffset: number, value: number, littleEndian?: boolean) => void, + bytesLength: number, + value: number, + ) { + this.ensureBufferWillHandleSize(bytesLength); + const dv = new DataView(this.buffer, this.offset); + fn.apply(dv, [0, value, true]); + this.offset += bytesLength; + } + + /** + * Serializes a string. UTF8 string is supported. Serializes the string's bytes length "l" first, + * and then serializes "l" bytes of the string content. + * + * BCS layout for "string": string_length | string_content. string_length is the bytes length of + * the string that is uleb128 encoded. string_length is a u32 integer. + * + * @example + * ```ts + * const serializer = new Serializer(); + * serializer.serializeStr("çå∞≠¢õß∂ƒ∫"); + * assert(serializer.getBytes() === new Uint8Array([24, 0xc3, 0xa7, 0xc3, 0xa5, 0xe2, 0x88, 0x9e, + * 0xe2, 0x89, 0xa0, 0xc2, 0xa2, 0xc3, 0xb5, 0xc3, 0x9f, 0xe2, 0x88, 0x82, 0xc6, 0x92, 0xe2, 0x88, 0xab])); + * ``` + */ + serializeStr(value: string): void { + const textEncoder = new TextEncoder(); + this.serializeBytes(textEncoder.encode(value)); + } + + /** + * Serializes an array of bytes. + * + * BCS layout for "bytes": bytes_length | bytes. bytes_length is the length of the bytes array that is + * uleb128 encoded. bytes_length is a u32 integer. + */ + serializeBytes(value: Bytes): void { + this.serializeU32AsUleb128(value.length); + this.serialize(value); + } + + /** + * Serializes an array of bytes with known length. Therefore length doesn't need to be + * serialized to help deserialization. When deserializing, the number of + * bytes to deserialize needs to be passed in. + */ + serializeFixedBytes(value: Bytes): void { + this.serialize(value); + } + + /** + * Serializes a boolean value. + * + * BCS layout for "boolean": One byte. "0x01" for True and "0x00" for False. + */ + serializeBool(value: boolean): void { + if (typeof value !== "boolean") { + throw new Error("Value needs to be a boolean"); + } + const byteValue = value ? 1 : 0; + this.serialize(new Uint8Array([byteValue])); + } + + /** + * Serializes a uint8 number. + * + * BCS layout for "uint8": One byte. Binary format in little-endian representation. + */ + @checkNumberRange(0, MAX_U8_NUMBER) + serializeU8(value: Uint8): void { + this.serialize(new Uint8Array([value])); + } + + /** + * Serializes a uint16 number. + * + * BCS layout for "uint16": Two bytes. Binary format in little-endian representation. + * @example + * ```ts + * const serializer = new Serializer(); + * serializer.serializeU16(4660); + * assert(serializer.getBytes() === new Uint8Array([0x34, 0x12])); + * ``` + */ + @checkNumberRange(0, MAX_U16_NUMBER) + serializeU16(value: Uint16): void { + this.serializeWithFunction(DataView.prototype.setUint16, 2, value); + } + + /** + * Serializes a uint32 number. + * + * BCS layout for "uint32": Four bytes. Binary format in little-endian representation. + * @example + * ```ts + * const serializer = new Serializer(); + * serializer.serializeU32(305419896); + * assert(serializer.getBytes() === new Uint8Array([0x78, 0x56, 0x34, 0x12])); + * ``` + */ + @checkNumberRange(0, MAX_U32_NUMBER) + serializeU32(value: Uint32): void { + this.serializeWithFunction(DataView.prototype.setUint32, 4, value); + } + + /** + * Serializes a uint64 number. + * + * BCS layout for "uint64": Eight bytes. Binary format in little-endian representation. 
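+   * Values above Number.MAX_SAFE_INTEGER should be passed as a bigint to avoid precision loss.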
+ * @example + * ```ts + * const serializer = new Serializer(); + * serializer.serializeU64(1311768467750121216); + * assert(serializer.getBytes() === new Uint8Array([0x00, 0xEF, 0xCD, 0xAB, 0x78, 0x56, 0x34, 0x12])); + * ``` + */ + @checkNumberRange(BigInt(0), MAX_U64_BIG_INT) + serializeU64(value: AnyNumber): void { + const low = BigInt(value.toString()) & BigInt(MAX_U32_NUMBER); + const high = BigInt(value.toString()) >> BigInt(32); + + // write little endian number + this.serializeU32(Number(low)); + this.serializeU32(Number(high)); + } + + /** + * Serializes a uint128 number. + * + * BCS layout for "uint128": Sixteen bytes. Binary format in little-endian representation. + */ + @checkNumberRange(BigInt(0), MAX_U128_BIG_INT) + serializeU128(value: AnyNumber): void { + const low = BigInt(value.toString()) & MAX_U64_BIG_INT; + const high = BigInt(value.toString()) >> BigInt(64); + + // write little endian number + this.serializeU64(low); + this.serializeU64(high); + } + + /** + * Serializes a uint256 number. + * + * BCS layout for "uint256": Sixteen bytes. Binary format in little-endian representation. + */ + @checkNumberRange(BigInt(0), MAX_U256_BIG_INT) + serializeU256(value: AnyNumber): void { + const low = BigInt(value.toString()) & MAX_U128_BIG_INT; + const high = BigInt(value.toString()) >> BigInt(128); + + // write little endian number + this.serializeU128(low); + this.serializeU128(high); + } + + /** + * Serializes a uint32 number with uleb128. + * + * BCS use uleb128 encoding in two cases: (1) lengths of variable-length sequences and (2) tags of enum values + */ + @checkNumberRange(0, MAX_U32_NUMBER) + serializeU32AsUleb128(val: Uint32): void { + let value = val; + const valueArray = []; + while (value >>> 7 !== 0) { + valueArray.push((value & 0x7f) | 0x80); + value >>>= 7; + } + valueArray.push(value); + this.serialize(new Uint8Array(valueArray)); + } + + /** + * Returns the buffered bytes + */ + getBytes(): Bytes { + return new Uint8Array(this.buffer).slice(0, this.offset); + } +} + +/** + * Creates a decorator to make sure the arg value of the decorated function is within a range. 
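+ * Both bounds are inclusive; values outside the range cause the decorated method to throw an Error.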
+ * @param minValue The arg value of decorated function must >= minValue + * @param maxValue The arg value of decorated function must <= maxValue + * @param message Error message + */ +function checkNumberRange(minValue: T, maxValue: T, message?: string) { + return (target: unknown, propertyKey: string, descriptor: PropertyDescriptor) => { + const childFunction = descriptor.value; + // eslint-disable-next-line no-param-reassign + descriptor.value = function deco(value: AnyNumber) { + const valueBigInt = BigInt(value.toString()); + if (valueBigInt > BigInt(maxValue.toString()) || valueBigInt < BigInt(minValue.toString())) { + throw new Error(message || "Value is out of range"); + } + childFunction.apply(this, [value]); + }; + return descriptor; + }; +} diff --git a/m1/JavaScript-client/src/bcs/types.ts b/m1/JavaScript-client/src/bcs/types.ts new file mode 100644 index 00000000..f834aa97 --- /dev/null +++ b/m1/JavaScript-client/src/bcs/types.ts @@ -0,0 +1,13 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +export type Seq = T[]; + +export type Uint8 = number; +export type Uint16 = number; +export type Uint32 = number; +export type Uint64 = bigint; +export type Uint128 = bigint; +export type Uint256 = bigint; +export type AnyNumber = bigint | number; +export type Bytes = Uint8Array; diff --git a/m1/JavaScript-client/src/generated/AptosGeneratedClient.ts b/m1/JavaScript-client/src/generated/AptosGeneratedClient.ts new file mode 100644 index 00000000..d7d0952b --- /dev/null +++ b/m1/JavaScript-client/src/generated/AptosGeneratedClient.ts @@ -0,0 +1,52 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { BaseHttpRequest } from './core/BaseHttpRequest'; +import type { OpenAPIConfig } from './core/OpenAPI'; +import { AxiosHttpRequest } from './core/AxiosHttpRequest'; + +import { AccountsService } from './services/AccountsService'; +import { BlocksService } from './services/BlocksService'; +import { EventsService } from './services/EventsService'; +import { GeneralService } from './services/GeneralService'; +import { TablesService } from './services/TablesService'; +import { TransactionsService } from './services/TransactionsService'; +import { ViewService } from './services/ViewService'; + +type HttpRequestConstructor = new (config: OpenAPIConfig) => BaseHttpRequest; + +export class AptosGeneratedClient { + + public readonly accounts: AccountsService; + public readonly blocks: BlocksService; + public readonly events: EventsService; + public readonly general: GeneralService; + public readonly tables: TablesService; + public readonly transactions: TransactionsService; + public readonly view: ViewService; + + public readonly request: BaseHttpRequest; + + constructor(config?: Partial, HttpRequest: HttpRequestConstructor = AxiosHttpRequest) { + this.request = new HttpRequest({ + BASE: config?.BASE ?? '/v1', + VERSION: config?.VERSION ?? '1.2.0', + WITH_CREDENTIALS: config?.WITH_CREDENTIALS ?? false, + CREDENTIALS: config?.CREDENTIALS ?? 
'include', + TOKEN: config?.TOKEN, + USERNAME: config?.USERNAME, + PASSWORD: config?.PASSWORD, + HEADERS: config?.HEADERS, + ENCODE_PATH: config?.ENCODE_PATH, + }); + + this.accounts = new AccountsService(this.request); + this.blocks = new BlocksService(this.request); + this.events = new EventsService(this.request); + this.general = new GeneralService(this.request); + this.tables = new TablesService(this.request); + this.transactions = new TransactionsService(this.request); + this.view = new ViewService(this.request); + } +} + diff --git a/m1/JavaScript-client/src/generated/core/ApiError.ts b/m1/JavaScript-client/src/generated/core/ApiError.ts new file mode 100644 index 00000000..99d79299 --- /dev/null +++ b/m1/JavaScript-client/src/generated/core/ApiError.ts @@ -0,0 +1,24 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { ApiRequestOptions } from './ApiRequestOptions'; +import type { ApiResult } from './ApiResult'; + +export class ApiError extends Error { + public readonly url: string; + public readonly status: number; + public readonly statusText: string; + public readonly body: any; + public readonly request: ApiRequestOptions; + + constructor(request: ApiRequestOptions, response: ApiResult, message: string) { + super(message); + + this.name = 'ApiError'; + this.url = response.url; + this.status = response.status; + this.statusText = response.statusText; + this.body = response.body; + this.request = request; + } +} diff --git a/m1/JavaScript-client/src/generated/core/ApiRequestOptions.ts b/m1/JavaScript-client/src/generated/core/ApiRequestOptions.ts new file mode 100644 index 00000000..c7b77538 --- /dev/null +++ b/m1/JavaScript-client/src/generated/core/ApiRequestOptions.ts @@ -0,0 +1,16 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export type ApiRequestOptions = { + readonly method: 'GET' | 'PUT' | 'POST' | 'DELETE' | 'OPTIONS' | 'HEAD' | 'PATCH'; + readonly url: string; + readonly path?: Record; + readonly cookies?: Record; + readonly headers?: Record; + readonly query?: Record; + readonly formData?: Record; + readonly body?: any; + readonly mediaType?: string; + readonly responseHeader?: string; + readonly errors?: Record; +}; diff --git a/m1/JavaScript-client/src/generated/core/ApiResult.ts b/m1/JavaScript-client/src/generated/core/ApiResult.ts new file mode 100644 index 00000000..b095dc77 --- /dev/null +++ b/m1/JavaScript-client/src/generated/core/ApiResult.ts @@ -0,0 +1,10 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export type ApiResult = { + readonly url: string; + readonly ok: boolean; + readonly status: number; + readonly statusText: string; + readonly body: any; +}; diff --git a/m1/JavaScript-client/src/generated/core/AxiosHttpRequest.ts b/m1/JavaScript-client/src/generated/core/AxiosHttpRequest.ts new file mode 100644 index 00000000..9f6cb314 --- /dev/null +++ b/m1/JavaScript-client/src/generated/core/AxiosHttpRequest.ts @@ -0,0 +1,25 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { ApiRequestOptions } from './ApiRequestOptions'; +import { BaseHttpRequest } from './BaseHttpRequest'; +import type { CancelablePromise } from './CancelablePromise'; +import type { OpenAPIConfig } from './OpenAPI'; +import { request as __request } from './request'; + +export class AxiosHttpRequest extends BaseHttpRequest { + + constructor(config: OpenAPIConfig) { + super(config); + } + + /** + * Request method + * @param options The request options from the 
service + * @returns CancelablePromise + * @throws ApiError + */ + public request(options: ApiRequestOptions): CancelablePromise { + return __request(this.config, options); + } +} diff --git a/m1/JavaScript-client/src/generated/core/BaseHttpRequest.ts b/m1/JavaScript-client/src/generated/core/BaseHttpRequest.ts new file mode 100644 index 00000000..1b970047 --- /dev/null +++ b/m1/JavaScript-client/src/generated/core/BaseHttpRequest.ts @@ -0,0 +1,13 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { ApiRequestOptions } from './ApiRequestOptions'; +import type { CancelablePromise } from './CancelablePromise'; +import type { OpenAPIConfig } from './OpenAPI'; + +export abstract class BaseHttpRequest { + + constructor(public readonly config: OpenAPIConfig) {} + + public abstract request(options: ApiRequestOptions): CancelablePromise; +} diff --git a/m1/JavaScript-client/src/generated/core/CancelablePromise.ts b/m1/JavaScript-client/src/generated/core/CancelablePromise.ts new file mode 100644 index 00000000..26ad3039 --- /dev/null +++ b/m1/JavaScript-client/src/generated/core/CancelablePromise.ts @@ -0,0 +1,128 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export class CancelError extends Error { + + constructor(message: string) { + super(message); + this.name = 'CancelError'; + } + + public get isCancelled(): boolean { + return true; + } +} + +export interface OnCancel { + readonly isResolved: boolean; + readonly isRejected: boolean; + readonly isCancelled: boolean; + + (cancelHandler: () => void): void; +} + +export class CancelablePromise implements Promise { + readonly [Symbol.toStringTag]!: string; + + private _isResolved: boolean; + private _isRejected: boolean; + private _isCancelled: boolean; + private readonly _cancelHandlers: (() => void)[]; + private readonly _promise: Promise; + private _resolve?: (value: T | PromiseLike) => void; + private _reject?: (reason?: any) => void; + + constructor( + executor: ( + resolve: (value: T | PromiseLike) => void, + reject: (reason?: any) => void, + onCancel: OnCancel + ) => void + ) { + this._isResolved = false; + this._isRejected = false; + this._isCancelled = false; + this._cancelHandlers = []; + this._promise = new Promise((resolve, reject) => { + this._resolve = resolve; + this._reject = reject; + + const onResolve = (value: T | PromiseLike): void => { + if (this._isResolved || this._isRejected || this._isCancelled) { + return; + } + this._isResolved = true; + this._resolve?.(value); + }; + + const onReject = (reason?: any): void => { + if (this._isResolved || this._isRejected || this._isCancelled) { + return; + } + this._isRejected = true; + this._reject?.(reason); + }; + + const onCancel = (cancelHandler: () => void): void => { + if (this._isResolved || this._isRejected || this._isCancelled) { + return; + } + this._cancelHandlers.push(cancelHandler); + }; + + Object.defineProperty(onCancel, 'isResolved', { + get: (): boolean => this._isResolved, + }); + + Object.defineProperty(onCancel, 'isRejected', { + get: (): boolean => this._isRejected, + }); + + Object.defineProperty(onCancel, 'isCancelled', { + get: (): boolean => this._isCancelled, + }); + + return executor(onResolve, onReject, onCancel as OnCancel); + }); + } + + public then( + onFulfilled?: ((value: T) => TResult1 | PromiseLike) | null, + onRejected?: ((reason: any) => TResult2 | PromiseLike) | null + ): Promise { + return this._promise.then(onFulfilled, onRejected); + } + + public catch( + onRejected?: ((reason: 
any) => TResult | PromiseLike) | null + ): Promise { + return this._promise.catch(onRejected); + } + + public finally(onFinally?: (() => void) | null): Promise { + return this._promise.finally(onFinally); + } + + public cancel(): void { + if (this._isResolved || this._isRejected || this._isCancelled) { + return; + } + this._isCancelled = true; + if (this._cancelHandlers.length) { + try { + for (const cancelHandler of this._cancelHandlers) { + cancelHandler(); + } + } catch (error) { + console.warn('Cancellation threw an error', error); + return; + } + } + this._cancelHandlers.length = 0; + this._reject?.(new CancelError('Request aborted')); + } + + public get isCancelled(): boolean { + return this._isCancelled; + } +} diff --git a/m1/JavaScript-client/src/generated/core/OpenAPI.ts b/m1/JavaScript-client/src/generated/core/OpenAPI.ts new file mode 100644 index 00000000..33a73c18 --- /dev/null +++ b/m1/JavaScript-client/src/generated/core/OpenAPI.ts @@ -0,0 +1,31 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { ApiRequestOptions } from './ApiRequestOptions'; + +type Resolver = (options: ApiRequestOptions) => Promise; +type Headers = Record; + +export type OpenAPIConfig = { + BASE: string; + VERSION: string; + WITH_CREDENTIALS: boolean; + CREDENTIALS: 'include' | 'omit' | 'same-origin'; + TOKEN?: string | Resolver; + USERNAME?: string | Resolver; + PASSWORD?: string | Resolver; + HEADERS?: Headers | Resolver; + ENCODE_PATH?: (path: string) => string; +}; + +export const OpenAPI: OpenAPIConfig = { + BASE: '/v1', + VERSION: '1.2.0', + WITH_CREDENTIALS: false, + CREDENTIALS: 'include', + TOKEN: undefined, + USERNAME: undefined, + PASSWORD: undefined, + HEADERS: undefined, + ENCODE_PATH: undefined, +}; diff --git a/m1/JavaScript-client/src/generated/core/request.ts b/m1/JavaScript-client/src/generated/core/request.ts new file mode 100644 index 00000000..286644d1 --- /dev/null +++ b/m1/JavaScript-client/src/generated/core/request.ts @@ -0,0 +1,418 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import axios from 'axios'; +import type { AxiosError, AxiosRequestConfig, AxiosResponse } from 'axios'; +import FormData from 'form-data'; + +import { ApiError } from './ApiError'; +import type { ApiRequestOptions } from './ApiRequestOptions'; +import type { ApiResult } from './ApiResult'; +import { CancelablePromise } from './CancelablePromise'; +import type { OnCancel } from './CancelablePromise'; +import type { OpenAPIConfig } from './OpenAPI'; + +interface Cookie { + name: string; + value: string; + expires?: Date; + path?: string; + sameSite?: "Lax" | "None" | "Strict"; + secure?: boolean; +} + +class CookieJar { + constructor(private jar = new Map()) {} + + setCookie(url: URL, cookieStr: string) { + const key = url.origin.toLowerCase(); + if (!this.jar.has(key)) { + this.jar.set(key, []); + } + + const cookie = CookieJar.parse(cookieStr); + this.jar.set(key, [...(this.jar.get(key)?.filter((c) => c.name !== cookie.name) || []), cookie]); + } + + getCookies(url: URL): Cookie[] { + const key = url.origin.toLowerCase(); + if (!this.jar.get(key)) { + return []; + } + + // Filter out expired cookies + return this.jar.get(key)?.filter((cookie) => !cookie.expires || cookie.expires > new Date()) || []; + } + + static parse(str: string): Cookie { + if (typeof str !== "string") { + throw new Error("argument str must be a string"); + } + + const parts = str.split(";").map((part) => part.trim()); + + let cookie: Cookie; + + if (parts.length > 0) 
{ + const [name, value] = parts[0].split("="); + if (!name || !value) { + throw new Error("Invalid cookie"); + } + + cookie = { + name, + value, + }; + } else { + throw new Error("Invalid cookie"); + } + + parts.slice(1).forEach((part) => { + const [name, value] = part.split("="); + if (!name.trim()) { + throw new Error("Invalid cookie"); + } + + const nameLow = name.toLowerCase(); + // eslint-disable-next-line quotes + const val = value?.charAt(0) === "'" || value?.charAt(0) === '"' ? value?.slice(1, -1) : value; + if (nameLow === "expires") { + cookie.expires = new Date(val); + } + if (nameLow === "path") { + cookie.path = val; + } + if (nameLow === "samesite") { + if (val !== "Lax" && val !== "None" && val !== "Strict") { + throw new Error("Invalid cookie SameSite value"); + } + cookie.sameSite = val; + } + if (nameLow === "secure") { + cookie.secure = true; + } + }); + + return cookie; + } +} + +const jar = new CookieJar(); + +axios.interceptors.response.use((response) => { + if (Array.isArray(response.headers["set-cookie"])) { + response.headers["set-cookie"].forEach((c) => { + jar.setCookie(new URL(response.config.url!), c); + }); + } + return response; +}); + +axios.interceptors.request.use(function (config) { + const cookies = jar.getCookies(new URL(config.url!)); + + if (cookies?.length > 0 && config.headers) { + config.headers.cookie = cookies.map((cookie) => `${cookie.name}=${cookie.value}`).join("; "); + } + return config; +}); + +const isDefined = (value: T | null | undefined): value is Exclude => { + return value !== undefined && value !== null; +}; + +const isString = (value: any): value is string => { + return typeof value === 'string'; +}; + +const isStringWithValue = (value: any): value is string => { + return isString(value) && value !== ''; +}; + +const isBlob = (value: any): value is Blob => { + return ( + typeof value === 'object' && + typeof value.type === 'string' && + typeof value.stream === 'function' && + typeof value.arrayBuffer === 'function' && + typeof value.constructor === 'function' && + typeof value.constructor.name === 'string' && + /^(Blob|File)$/.test(value.constructor.name) && + /^(Blob|File)$/.test(value[Symbol.toStringTag]) + ); +}; + +const isFormData = (value: any): value is FormData => { + return value instanceof FormData; +}; + +const isSuccess = (status: number): boolean => { + return status >= 200 && status < 300; +}; + +const base64 = (str: string): string => { return btoa(str); }; + +const getQueryString = (params: Record): string => { + const qs: string[] = []; + + const append = (key: string, value: any) => { + qs.push(`${encodeURIComponent(key)}=${encodeURIComponent(String(value))}`); + }; + + const process = (key: string, value: any) => { + if (isDefined(value)) { + if (Array.isArray(value)) { + value.forEach(v => { + process(key, v); + }); + } else if (typeof value === 'object') { + Object.entries(value).forEach(([k, v]) => { + process(`${key}[${k}]`, v); + }); + } else { + append(key, value); + } + } + }; + + Object.entries(params).forEach(([key, value]) => { + process(key, value); + }); + + if (qs.length > 0) { + return `?${qs.join('&')}`; + } + + return ''; +}; + +const getUrl = (config: OpenAPIConfig, options: ApiRequestOptions): string => { + const encoder = config.ENCODE_PATH || encodeURI; + + const path = options.url + .replace('{api-version}', config.VERSION) + .replace(/{(.*?)}/g, (substring: string, group: string) => { + if (options.path?.hasOwnProperty(group)) { + return encoder(String(options.path[group])); + } + return 
substring; + }); + + const url = `${config.BASE}${path}`; + if (options.query) { + return `${url}${getQueryString(options.query)}`; + } + return url; +}; + +const getFormData = (options: ApiRequestOptions): FormData | undefined => { + if (options.formData) { + const formData = new FormData(); + + const process = (key: string, value: any) => { + if (isString(value) || isBlob(value)) { + formData.append(key, value); + } else { + formData.append(key, JSON.stringify(value)); + } + }; + + Object.entries(options.formData) + .filter(([_, value]) => isDefined(value)) + .forEach(([key, value]) => { + if (Array.isArray(value)) { + value.forEach(v => process(key, v)); + } else { + process(key, value); + } + }); + + return formData; + } + return undefined; +}; + +type Resolver = (options: ApiRequestOptions) => Promise; + +const resolve = async (options: ApiRequestOptions, resolver?: T | Resolver): Promise => { + if (typeof resolver === 'function') { + return (resolver as Resolver)(options); + } + return resolver; +}; + +const getHeaders = async (config: OpenAPIConfig, options: ApiRequestOptions, formData?: FormData): Promise> => { + const token = await resolve(options, config.TOKEN); + const username = await resolve(options, config.USERNAME); + const password = await resolve(options, config.PASSWORD); + const additionalHeaders = await resolve(options, config.HEADERS); + const formHeaders = typeof formData?.getHeaders === 'function' && formData?.getHeaders() || {} + + const headers = Object.entries({ + Accept: 'application/json', + ...additionalHeaders, + ...options.headers, + ...formHeaders, + }) + .filter(([_, value]) => isDefined(value)) + .reduce((headers, [key, value]) => ({ + ...headers, + [key]: String(value), + }), {} as Record); + + if (isStringWithValue(token)) { + headers['Authorization'] = `Bearer ${token}`; + } + + if (isStringWithValue(username) && isStringWithValue(password)) { + const credentials = base64(`${username}:${password}`); + headers['Authorization'] = `Basic ${credentials}`; + } + + if (options.body) { + if (options.mediaType) { + headers['Content-Type'] = options.mediaType; + } else if (isBlob(options.body)) { + headers['Content-Type'] = options.body.type || 'application/octet-stream'; + } else if (isString(options.body)) { + headers['Content-Type'] = 'text/plain'; + } else if (!isFormData(options.body)) { + headers['Content-Type'] = 'application/json'; + } + } + + return headers; +}; + +const getRequestBody = (options: ApiRequestOptions): any => { + if (options.body) { + return options.body; + } + return undefined; +}; + +const sendRequest = async ( + config: OpenAPIConfig, + options: ApiRequestOptions, + url: string, + body: any, + formData: FormData | undefined, + headers: Record, + onCancel: OnCancel +): Promise> => { + const source = axios.CancelToken.source(); + + const requestConfig: AxiosRequestConfig = { + url, + headers, + data: body ?? 
formData, + method: options.method, + withCredentials: config.WITH_CREDENTIALS, + cancelToken: source.token, + }; + + const isBCS = Object.keys(config.HEADERS || {}) + .filter((k) => k.toLowerCase() === "accept") + .map((k) => (config.HEADERS as Record)[k]) + .includes("application/x-bcs"); + if (isBCS) { + requestConfig.responseType = "arraybuffer"; + } + + onCancel(() => source.cancel('The user aborted a request.')); + + try { + return await axios.request(requestConfig); + } catch (error) { + const axiosError = error as AxiosError; + if (axiosError.response) { + return axiosError.response; + } + throw error; + } +}; + +const getResponseHeader = (response: AxiosResponse, responseHeader?: string): string | undefined => { + if (responseHeader) { + const content = response.headers[responseHeader]; + if (isString(content)) { + return content; + } + } + return undefined; +}; + +const getResponseBody = (response: AxiosResponse): any => { + if (response.status !== 204) { + return response.data; + } + return undefined; +}; + +const catchErrorCodes = (options: ApiRequestOptions, result: ApiResult): void => { + const errors: Record = { + 400: 'Bad Request', + 401: 'Unauthorized', + 403: 'Forbidden', + 404: 'Not Found', + 429: 'Too Many Requests', + 500: 'Internal Server Error', + 502: 'Bad Gateway', + 503: 'Service Unavailable', + ...options.errors, + } + + const error = errors[result.status]; + if (error) { + throw new ApiError(options, result, error); + } + + if (!result.ok) { + throw new ApiError(options, result, 'Generic Error'); + } +}; + +/** + * Request method + * @param config The OpenAPI configuration object + * @param options The request options from the service + * @returns CancelablePromise + * @throws ApiError + */ +export const request = (config: OpenAPIConfig, options: ApiRequestOptions): CancelablePromise => { + return new CancelablePromise(async (resolve, reject, onCancel) => { + try { + const url = getUrl(config, options); + const formData = getFormData(options); + const body = getRequestBody(options); + const headers = await getHeaders(config, options, formData); + + if (!onCancel.isCancelled) { + const response = await sendRequest(config, options, url, body, formData, headers, onCancel); + const responseBody = getResponseBody(response); + const responseHeader = getResponseHeader(response, options.responseHeader); + + const result: ApiResult = { + url, + ok: isSuccess(response.status), + status: response.status, + statusText: response.statusText, + body: responseHeader ?? responseBody, + }; + + catchErrorCodes(options, result); + + // Attach the response headers to the output. 
This is a hack to fix + // https://github.com/ferdikoomen/openapi-typescript-codegen/issues/1295 + const out = result.body; + try { + out["__headers"] = response.headers; + } catch (_) {} + + resolve(out); + } + } catch (error) { + reject(error); + } + }); +}; diff --git a/m1/JavaScript-client/src/generated/index.ts b/m1/JavaScript-client/src/generated/index.ts new file mode 100644 index 00000000..e925af3a --- /dev/null +++ b/m1/JavaScript-client/src/generated/index.ts @@ -0,0 +1,214 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export { AptosGeneratedClient } from './AptosGeneratedClient'; + +export { ApiError } from './core/ApiError'; +export { BaseHttpRequest } from './core/BaseHttpRequest'; +export { CancelablePromise, CancelError } from './core/CancelablePromise'; +export { OpenAPI } from './core/OpenAPI'; +export type { OpenAPIConfig } from './core/OpenAPI'; + +export type { AccountData } from './models/AccountData'; +export type { AccountSignature } from './models/AccountSignature'; +export type { AccountSignature_Ed25519Signature } from './models/AccountSignature_Ed25519Signature'; +export type { AccountSignature_MultiEd25519Signature } from './models/AccountSignature_MultiEd25519Signature'; +export type { Address } from './models/Address'; +export type { AptosError } from './models/AptosError'; +export { AptosErrorCode } from './models/AptosErrorCode'; +export type { Block } from './models/Block'; +export type { BlockMetadataTransaction } from './models/BlockMetadataTransaction'; +export type { DecodedTableData } from './models/DecodedTableData'; +export type { DeletedTableData } from './models/DeletedTableData'; +export type { DeleteModule } from './models/DeleteModule'; +export type { DeleteResource } from './models/DeleteResource'; +export type { DeleteTableItem } from './models/DeleteTableItem'; +export type { DirectWriteSet } from './models/DirectWriteSet'; +export type { Ed25519Signature } from './models/Ed25519Signature'; +export type { EncodeSubmissionRequest } from './models/EncodeSubmissionRequest'; +export type { EntryFunctionId } from './models/EntryFunctionId'; +export type { EntryFunctionPayload } from './models/EntryFunctionPayload'; +export type { Event } from './models/Event'; +export type { EventGuid } from './models/EventGuid'; +export type { GasEstimation } from './models/GasEstimation'; +export type { GenesisPayload } from './models/GenesisPayload'; +export type { GenesisPayload_WriteSetPayload } from './models/GenesisPayload_WriteSetPayload'; +export type { GenesisTransaction } from './models/GenesisTransaction'; +export type { HashValue } from './models/HashValue'; +export type { HealthCheckSuccess } from './models/HealthCheckSuccess'; +export type { HexEncodedBytes } from './models/HexEncodedBytes'; +export type { IdentifierWrapper } from './models/IdentifierWrapper'; +export type { IndexResponse } from './models/IndexResponse'; +export type { ModuleBundlePayload } from './models/ModuleBundlePayload'; +export type { MoveAbility } from './models/MoveAbility'; +export type { MoveFunction } from './models/MoveFunction'; +export type { MoveFunctionGenericTypeParam } from './models/MoveFunctionGenericTypeParam'; +export { MoveFunctionVisibility } from './models/MoveFunctionVisibility'; +export type { MoveModule } from './models/MoveModule'; +export type { MoveModuleBytecode } from './models/MoveModuleBytecode'; +export type { MoveModuleId } from './models/MoveModuleId'; +export type { MoveResource } from './models/MoveResource'; 
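+// Illustrative note, not part of the generated surface: of the exports above, the
+// `OpenAPI` config object and `CancelablePromise` are what callers usually touch
+// directly: for example, assigning `OpenAPI.BASE` to a node URL before issuing
+// requests, and keeping a reference to a returned promise so `.cancel()` can abort
+// it mid-flight.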
+export type { MoveScriptBytecode } from './models/MoveScriptBytecode'; +export type { MoveStruct } from './models/MoveStruct'; +export type { MoveStructField } from './models/MoveStructField'; +export type { MoveStructGenericTypeParam } from './models/MoveStructGenericTypeParam'; +export type { MoveStructTag } from './models/MoveStructTag'; +export type { MoveStructValue } from './models/MoveStructValue'; +export type { MoveType } from './models/MoveType'; +export type { MoveValue } from './models/MoveValue'; +export type { MultiAgentSignature } from './models/MultiAgentSignature'; +export type { MultiEd25519Signature } from './models/MultiEd25519Signature'; +export type { MultisigPayload } from './models/MultisigPayload'; +export type { MultisigTransactionPayload } from './models/MultisigTransactionPayload'; +export type { PendingTransaction } from './models/PendingTransaction'; +export type { RawTableItemRequest } from './models/RawTableItemRequest'; +export { RoleType } from './models/RoleType'; +export type { ScriptPayload } from './models/ScriptPayload'; +export type { ScriptWriteSet } from './models/ScriptWriteSet'; +export type { StateCheckpointTransaction } from './models/StateCheckpointTransaction'; +export type { StateKeyWrapper } from './models/StateKeyWrapper'; +export type { SubmitTransactionRequest } from './models/SubmitTransactionRequest'; +export type { TableItemRequest } from './models/TableItemRequest'; +export type { Transaction } from './models/Transaction'; +export type { Transaction_BlockMetadataTransaction } from './models/Transaction_BlockMetadataTransaction'; +export type { Transaction_GenesisTransaction } from './models/Transaction_GenesisTransaction'; +export type { Transaction_PendingTransaction } from './models/Transaction_PendingTransaction'; +export type { Transaction_StateCheckpointTransaction } from './models/Transaction_StateCheckpointTransaction'; +export type { Transaction_UserTransaction } from './models/Transaction_UserTransaction'; +export type { TransactionPayload } from './models/TransactionPayload'; +export type { TransactionPayload_EntryFunctionPayload } from './models/TransactionPayload_EntryFunctionPayload'; +export type { TransactionPayload_ModuleBundlePayload } from './models/TransactionPayload_ModuleBundlePayload'; +export type { TransactionPayload_MultisigPayload } from './models/TransactionPayload_MultisigPayload'; +export type { TransactionPayload_ScriptPayload } from './models/TransactionPayload_ScriptPayload'; +export type { TransactionsBatchSingleSubmissionFailure } from './models/TransactionsBatchSingleSubmissionFailure'; +export type { TransactionsBatchSubmissionResult } from './models/TransactionsBatchSubmissionResult'; +export type { TransactionSignature } from './models/TransactionSignature'; +export type { TransactionSignature_Ed25519Signature } from './models/TransactionSignature_Ed25519Signature'; +export type { TransactionSignature_MultiAgentSignature } from './models/TransactionSignature_MultiAgentSignature'; +export type { TransactionSignature_MultiEd25519Signature } from './models/TransactionSignature_MultiEd25519Signature'; +export type { U128 } from './models/U128'; +export type { U256 } from './models/U256'; +export type { U64 } from './models/U64'; +export type { UserTransaction } from './models/UserTransaction'; +export type { VersionedEvent } from './models/VersionedEvent'; +export type { ViewRequest } from './models/ViewRequest'; +export type { WriteModule } from './models/WriteModule'; +export type { WriteResource 
} from './models/WriteResource'; +export type { WriteSet } from './models/WriteSet'; +export type { WriteSet_DirectWriteSet } from './models/WriteSet_DirectWriteSet'; +export type { WriteSet_ScriptWriteSet } from './models/WriteSet_ScriptWriteSet'; +export type { WriteSetChange } from './models/WriteSetChange'; +export type { WriteSetChange_DeleteModule } from './models/WriteSetChange_DeleteModule'; +export type { WriteSetChange_DeleteResource } from './models/WriteSetChange_DeleteResource'; +export type { WriteSetChange_DeleteTableItem } from './models/WriteSetChange_DeleteTableItem'; +export type { WriteSetChange_WriteModule } from './models/WriteSetChange_WriteModule'; +export type { WriteSetChange_WriteResource } from './models/WriteSetChange_WriteResource'; +export type { WriteSetChange_WriteTableItem } from './models/WriteSetChange_WriteTableItem'; +export type { WriteSetPayload } from './models/WriteSetPayload'; +export type { WriteTableItem } from './models/WriteTableItem'; + +export { $AccountData } from './schemas/$AccountData'; +export { $AccountSignature } from './schemas/$AccountSignature'; +export { $AccountSignature_Ed25519Signature } from './schemas/$AccountSignature_Ed25519Signature'; +export { $AccountSignature_MultiEd25519Signature } from './schemas/$AccountSignature_MultiEd25519Signature'; +export { $Address } from './schemas/$Address'; +export { $AptosError } from './schemas/$AptosError'; +export { $AptosErrorCode } from './schemas/$AptosErrorCode'; +export { $Block } from './schemas/$Block'; +export { $BlockMetadataTransaction } from './schemas/$BlockMetadataTransaction'; +export { $DecodedTableData } from './schemas/$DecodedTableData'; +export { $DeletedTableData } from './schemas/$DeletedTableData'; +export { $DeleteModule } from './schemas/$DeleteModule'; +export { $DeleteResource } from './schemas/$DeleteResource'; +export { $DeleteTableItem } from './schemas/$DeleteTableItem'; +export { $DirectWriteSet } from './schemas/$DirectWriteSet'; +export { $Ed25519Signature } from './schemas/$Ed25519Signature'; +export { $EncodeSubmissionRequest } from './schemas/$EncodeSubmissionRequest'; +export { $EntryFunctionId } from './schemas/$EntryFunctionId'; +export { $EntryFunctionPayload } from './schemas/$EntryFunctionPayload'; +export { $Event } from './schemas/$Event'; +export { $EventGuid } from './schemas/$EventGuid'; +export { $GasEstimation } from './schemas/$GasEstimation'; +export { $GenesisPayload } from './schemas/$GenesisPayload'; +export { $GenesisPayload_WriteSetPayload } from './schemas/$GenesisPayload_WriteSetPayload'; +export { $GenesisTransaction } from './schemas/$GenesisTransaction'; +export { $HashValue } from './schemas/$HashValue'; +export { $HealthCheckSuccess } from './schemas/$HealthCheckSuccess'; +export { $HexEncodedBytes } from './schemas/$HexEncodedBytes'; +export { $IdentifierWrapper } from './schemas/$IdentifierWrapper'; +export { $IndexResponse } from './schemas/$IndexResponse'; +export { $ModuleBundlePayload } from './schemas/$ModuleBundlePayload'; +export { $MoveAbility } from './schemas/$MoveAbility'; +export { $MoveFunction } from './schemas/$MoveFunction'; +export { $MoveFunctionGenericTypeParam } from './schemas/$MoveFunctionGenericTypeParam'; +export { $MoveFunctionVisibility } from './schemas/$MoveFunctionVisibility'; +export { $MoveModule } from './schemas/$MoveModule'; +export { $MoveModuleBytecode } from './schemas/$MoveModuleBytecode'; +export { $MoveModuleId } from './schemas/$MoveModuleId'; +export { $MoveResource } from 
'./schemas/$MoveResource'; +export { $MoveScriptBytecode } from './schemas/$MoveScriptBytecode'; +export { $MoveStruct } from './schemas/$MoveStruct'; +export { $MoveStructField } from './schemas/$MoveStructField'; +export { $MoveStructGenericTypeParam } from './schemas/$MoveStructGenericTypeParam'; +export { $MoveStructTag } from './schemas/$MoveStructTag'; +export { $MoveStructValue } from './schemas/$MoveStructValue'; +export { $MoveType } from './schemas/$MoveType'; +export { $MoveValue } from './schemas/$MoveValue'; +export { $MultiAgentSignature } from './schemas/$MultiAgentSignature'; +export { $MultiEd25519Signature } from './schemas/$MultiEd25519Signature'; +export { $MultisigPayload } from './schemas/$MultisigPayload'; +export { $MultisigTransactionPayload } from './schemas/$MultisigTransactionPayload'; +export { $PendingTransaction } from './schemas/$PendingTransaction'; +export { $RawTableItemRequest } from './schemas/$RawTableItemRequest'; +export { $RoleType } from './schemas/$RoleType'; +export { $ScriptPayload } from './schemas/$ScriptPayload'; +export { $ScriptWriteSet } from './schemas/$ScriptWriteSet'; +export { $StateCheckpointTransaction } from './schemas/$StateCheckpointTransaction'; +export { $StateKeyWrapper } from './schemas/$StateKeyWrapper'; +export { $SubmitTransactionRequest } from './schemas/$SubmitTransactionRequest'; +export { $TableItemRequest } from './schemas/$TableItemRequest'; +export { $Transaction } from './schemas/$Transaction'; +export { $Transaction_BlockMetadataTransaction } from './schemas/$Transaction_BlockMetadataTransaction'; +export { $Transaction_GenesisTransaction } from './schemas/$Transaction_GenesisTransaction'; +export { $Transaction_PendingTransaction } from './schemas/$Transaction_PendingTransaction'; +export { $Transaction_StateCheckpointTransaction } from './schemas/$Transaction_StateCheckpointTransaction'; +export { $Transaction_UserTransaction } from './schemas/$Transaction_UserTransaction'; +export { $TransactionPayload } from './schemas/$TransactionPayload'; +export { $TransactionPayload_EntryFunctionPayload } from './schemas/$TransactionPayload_EntryFunctionPayload'; +export { $TransactionPayload_ModuleBundlePayload } from './schemas/$TransactionPayload_ModuleBundlePayload'; +export { $TransactionPayload_MultisigPayload } from './schemas/$TransactionPayload_MultisigPayload'; +export { $TransactionPayload_ScriptPayload } from './schemas/$TransactionPayload_ScriptPayload'; +export { $TransactionsBatchSingleSubmissionFailure } from './schemas/$TransactionsBatchSingleSubmissionFailure'; +export { $TransactionsBatchSubmissionResult } from './schemas/$TransactionsBatchSubmissionResult'; +export { $TransactionSignature } from './schemas/$TransactionSignature'; +export { $TransactionSignature_Ed25519Signature } from './schemas/$TransactionSignature_Ed25519Signature'; +export { $TransactionSignature_MultiAgentSignature } from './schemas/$TransactionSignature_MultiAgentSignature'; +export { $TransactionSignature_MultiEd25519Signature } from './schemas/$TransactionSignature_MultiEd25519Signature'; +export { $U128 } from './schemas/$U128'; +export { $U256 } from './schemas/$U256'; +export { $U64 } from './schemas/$U64'; +export { $UserTransaction } from './schemas/$UserTransaction'; +export { $VersionedEvent } from './schemas/$VersionedEvent'; +export { $ViewRequest } from './schemas/$ViewRequest'; +export { $WriteModule } from './schemas/$WriteModule'; +export { $WriteResource } from './schemas/$WriteResource'; +export { $WriteSet } from 
'./schemas/$WriteSet'; +export { $WriteSet_DirectWriteSet } from './schemas/$WriteSet_DirectWriteSet'; +export { $WriteSet_ScriptWriteSet } from './schemas/$WriteSet_ScriptWriteSet'; +export { $WriteSetChange } from './schemas/$WriteSetChange'; +export { $WriteSetChange_DeleteModule } from './schemas/$WriteSetChange_DeleteModule'; +export { $WriteSetChange_DeleteResource } from './schemas/$WriteSetChange_DeleteResource'; +export { $WriteSetChange_DeleteTableItem } from './schemas/$WriteSetChange_DeleteTableItem'; +export { $WriteSetChange_WriteModule } from './schemas/$WriteSetChange_WriteModule'; +export { $WriteSetChange_WriteResource } from './schemas/$WriteSetChange_WriteResource'; +export { $WriteSetChange_WriteTableItem } from './schemas/$WriteSetChange_WriteTableItem'; +export { $WriteSetPayload } from './schemas/$WriteSetPayload'; +export { $WriteTableItem } from './schemas/$WriteTableItem'; + +export { AccountsService } from './services/AccountsService'; +export { BlocksService } from './services/BlocksService'; +export { EventsService } from './services/EventsService'; +export { GeneralService } from './services/GeneralService'; +export { TablesService } from './services/TablesService'; +export { TransactionsService } from './services/TransactionsService'; +export { ViewService } from './services/ViewService'; diff --git a/m1/JavaScript-client/src/generated/models/AccountData.ts b/m1/JavaScript-client/src/generated/models/AccountData.ts new file mode 100644 index 00000000..8df40aa8 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/AccountData.ts @@ -0,0 +1,17 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { HexEncodedBytes } from './HexEncodedBytes'; +import type { U64 } from './U64'; + +/** + * Account data + * + * A simplified version of the onchain Account resource + */ +export type AccountData = { + sequence_number: U64; + authentication_key: HexEncodedBytes; +}; + diff --git a/m1/JavaScript-client/src/generated/models/AccountSignature.ts b/m1/JavaScript-client/src/generated/models/AccountSignature.ts new file mode 100644 index 00000000..576d1507 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/AccountSignature.ts @@ -0,0 +1,17 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { AccountSignature_Ed25519Signature } from './AccountSignature_Ed25519Signature'; +import type { AccountSignature_MultiEd25519Signature } from './AccountSignature_MultiEd25519Signature'; + +/** + * Account signature scheme + * + * The account signature scheme allows you to have two types of accounts: + * + * 1. A single Ed25519 key account, one private key + * 2. A k-of-n multi-Ed25519 key account, multiple private keys, such that k-of-n must sign a transaction. 
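+ *
+ * Illustrative shape only (the discriminator strings are assumptions, not taken from
+ * this file): a single-key variant carries a `type` tag plus the wrapped signature's
+ * fields, e.g. `{ "type": "...", "public_key": "0x...", "signature": "0x..." }`.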
+ */ +export type AccountSignature = (AccountSignature_Ed25519Signature | AccountSignature_MultiEd25519Signature); + diff --git a/m1/JavaScript-client/src/generated/models/AccountSignature_Ed25519Signature.ts b/m1/JavaScript-client/src/generated/models/AccountSignature_Ed25519Signature.ts new file mode 100644 index 00000000..3d91da51 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/AccountSignature_Ed25519Signature.ts @@ -0,0 +1,10 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { Ed25519Signature } from './Ed25519Signature'; + +export type AccountSignature_Ed25519Signature = ({ + type: string; +} & Ed25519Signature); + diff --git a/m1/JavaScript-client/src/generated/models/AccountSignature_MultiEd25519Signature.ts b/m1/JavaScript-client/src/generated/models/AccountSignature_MultiEd25519Signature.ts new file mode 100644 index 00000000..11acab2e --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/AccountSignature_MultiEd25519Signature.ts @@ -0,0 +1,10 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { MultiEd25519Signature } from './MultiEd25519Signature'; + +export type AccountSignature_MultiEd25519Signature = ({ + type: string; +} & MultiEd25519Signature); + diff --git a/m1/JavaScript-client/src/generated/models/Address.ts b/m1/JavaScript-client/src/generated/models/Address.ts new file mode 100644 index 00000000..03c32f6b --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/Address.ts @@ -0,0 +1,14 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +/** + * A hex encoded 32 byte Aptos account address. + * + * This is represented in a string as a 64 character hex string, sometimes + * shortened by stripping leading 0s, and adding a 0x. + * + * For example, address 0x0000000000000000000000000000000000000000000000000000000000000001 is represented as 0x1. + * + */ +export type Address = string; diff --git a/m1/JavaScript-client/src/generated/models/AptosError.ts b/m1/JavaScript-client/src/generated/models/AptosError.ts new file mode 100644 index 00000000..ed24d552 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/AptosError.ts @@ -0,0 +1,22 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { AptosErrorCode } from './AptosErrorCode'; + +/** + * This is the generic struct we use for all API errors, it contains a string + * message and an Aptos API specific error code. + */ +export type AptosError = { + /** + * A message describing the error + */ + message: string; + error_code: AptosErrorCode; + /** + * A code providing VM error details when submitting transactions to the VM + */ + vm_error_code?: number; +}; + diff --git a/m1/JavaScript-client/src/generated/models/AptosErrorCode.ts b/m1/JavaScript-client/src/generated/models/AptosErrorCode.ts new file mode 100644 index 00000000..317b13b1 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/AptosErrorCode.ts @@ -0,0 +1,30 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +/** + * These codes provide more granular error information beyond just the HTTP + * status code of the response. 
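+ *
+ * For example, a lookup of a non-existent account is expected to pair an HTTP 404
+ * with `error_code: account_not_found` below, narrowing down the failure beyond the
+ * status code alone (illustrative pairing, not an exhaustive mapping).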
+ */ +export enum AptosErrorCode { + ACCOUNT_NOT_FOUND = 'account_not_found', + RESOURCE_NOT_FOUND = 'resource_not_found', + MODULE_NOT_FOUND = 'module_not_found', + STRUCT_FIELD_NOT_FOUND = 'struct_field_not_found', + VERSION_NOT_FOUND = 'version_not_found', + TRANSACTION_NOT_FOUND = 'transaction_not_found', + TABLE_ITEM_NOT_FOUND = 'table_item_not_found', + BLOCK_NOT_FOUND = 'block_not_found', + VERSION_PRUNED = 'version_pruned', + BLOCK_PRUNED = 'block_pruned', + INVALID_INPUT = 'invalid_input', + INVALID_TRANSACTION_UPDATE = 'invalid_transaction_update', + SEQUENCE_NUMBER_TOO_OLD = 'sequence_number_too_old', + VM_ERROR = 'vm_error', + HEALTH_CHECK_FAILED = 'health_check_failed', + MEMPOOL_IS_FULL = 'mempool_is_full', + INTERNAL_ERROR = 'internal_error', + WEB_FRAMEWORK_ERROR = 'web_framework_error', + BCS_NOT_SUPPORTED = 'bcs_not_supported', + API_DISABLED = 'api_disabled', +} diff --git a/m1/JavaScript-client/src/generated/models/Block.ts b/m1/JavaScript-client/src/generated/models/Block.ts new file mode 100644 index 00000000..e6c2a57e --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/Block.ts @@ -0,0 +1,26 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { HashValue } from './HashValue'; +import type { Transaction } from './Transaction'; +import type { U64 } from './U64'; + +/** + * A Block with or without transactions + * + * This contains the information about a transactions along with + * associated transactions if requested + */ +export type Block = { + block_height: U64; + block_hash: HashValue; + block_timestamp: U64; + first_version: U64; + last_version: U64; + /** + * The transactions in the block in sequential order + */ + transactions?: Array; +}; + diff --git a/m1/JavaScript-client/src/generated/models/BlockMetadataTransaction.ts b/m1/JavaScript-client/src/generated/models/BlockMetadataTransaction.ts new file mode 100644 index 00000000..9ec5828d --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/BlockMetadataTransaction.ts @@ -0,0 +1,55 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { Address } from './Address'; +import type { Event } from './Event'; +import type { HashValue } from './HashValue'; +import type { U64 } from './U64'; +import type { WriteSetChange } from './WriteSetChange'; + +/** + * A block metadata transaction + * + * This signifies the beginning of a block, and contains information + * about the specific block + */ +export type BlockMetadataTransaction = { + version: U64; + hash: HashValue; + state_change_hash: HashValue; + event_root_hash: HashValue; + state_checkpoint_hash?: HashValue; + gas_used: U64; + /** + * Whether the transaction was successful + */ + success: boolean; + /** + * The VM status of the transaction, can tell useful information in a failure + */ + vm_status: string; + accumulator_root_hash: HashValue; + /** + * Final state of resources changed by the transaction + */ + changes: Array; + id: HashValue; + epoch: U64; + round: U64; + /** + * The events emitted at the block creation + */ + events: Array; + /** + * Previous block votes + */ + previous_block_votes_bitvec: Array; + proposer: Address; + /** + * The indices of the proposers who failed to propose + */ + failed_proposer_indices: Array; + timestamp: U64; +}; + diff --git a/m1/JavaScript-client/src/generated/models/DecodedTableData.ts b/m1/JavaScript-client/src/generated/models/DecodedTableData.ts new file mode 100644 index 00000000..83571f4f --- /dev/null +++ 
b/m1/JavaScript-client/src/generated/models/DecodedTableData.ts @@ -0,0 +1,26 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +/** + * Decoded table data + */ +export type DecodedTableData = { + /** + * Key of table in JSON + */ + key: any; + /** + * Type of key + */ + key_type: string; + /** + * Value of table in JSON + */ + value: any; + /** + * Type of value + */ + value_type: string; +}; + diff --git a/m1/JavaScript-client/src/generated/models/DeleteModule.ts b/m1/JavaScript-client/src/generated/models/DeleteModule.ts new file mode 100644 index 00000000..44d49ce0 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/DeleteModule.ts @@ -0,0 +1,19 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { Address } from './Address'; +import type { MoveModuleId } from './MoveModuleId'; + +/** + * Delete a module + */ +export type DeleteModule = { + address: Address; + /** + * State key hash + */ + state_key_hash: string; + module: MoveModuleId; +}; + diff --git a/m1/JavaScript-client/src/generated/models/DeleteResource.ts b/m1/JavaScript-client/src/generated/models/DeleteResource.ts new file mode 100644 index 00000000..ff863a94 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/DeleteResource.ts @@ -0,0 +1,19 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { Address } from './Address'; +import type { MoveStructTag } from './MoveStructTag'; + +/** + * Delete a resource + */ +export type DeleteResource = { + address: Address; + /** + * State key hash + */ + state_key_hash: string; + resource: MoveStructTag; +}; + diff --git a/m1/JavaScript-client/src/generated/models/DeleteTableItem.ts b/m1/JavaScript-client/src/generated/models/DeleteTableItem.ts new file mode 100644 index 00000000..c2655d4f --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/DeleteTableItem.ts @@ -0,0 +1,17 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { DeletedTableData } from './DeletedTableData'; +import type { HexEncodedBytes } from './HexEncodedBytes'; + +/** + * Delete a table item + */ +export type DeleteTableItem = { + state_key_hash: string; + handle: HexEncodedBytes; + key: HexEncodedBytes; + data?: DeletedTableData; +}; + diff --git a/m1/JavaScript-client/src/generated/models/DeletedTableData.ts b/m1/JavaScript-client/src/generated/models/DeletedTableData.ts new file mode 100644 index 00000000..ccf4fd6e --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/DeletedTableData.ts @@ -0,0 +1,18 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +/** + * Deleted table data + */ +export type DeletedTableData = { + /** + * Deleted key + */ + key: any; + /** + * Deleted key type + */ + key_type: string; +}; + diff --git a/m1/JavaScript-client/src/generated/models/DirectWriteSet.ts b/m1/JavaScript-client/src/generated/models/DirectWriteSet.ts new file mode 100644 index 00000000..1c09731f --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/DirectWriteSet.ts @@ -0,0 +1,12 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { Event } from './Event'; +import type { WriteSetChange } from './WriteSetChange'; + +export type DirectWriteSet = { + changes: Array; + events: Array; +}; + diff --git a/m1/JavaScript-client/src/generated/models/Ed25519Signature.ts b/m1/JavaScript-client/src/generated/models/Ed25519Signature.ts new file mode 100644 index 00000000..27686cec --- 
/dev/null +++ b/m1/JavaScript-client/src/generated/models/Ed25519Signature.ts @@ -0,0 +1,14 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { HexEncodedBytes } from './HexEncodedBytes'; + +/** + * A single Ed25519 signature + */ +export type Ed25519Signature = { + public_key: HexEncodedBytes; + signature: HexEncodedBytes; +}; + diff --git a/m1/JavaScript-client/src/generated/models/EncodeSubmissionRequest.ts b/m1/JavaScript-client/src/generated/models/EncodeSubmissionRequest.ts new file mode 100644 index 00000000..52e6f903 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/EncodeSubmissionRequest.ts @@ -0,0 +1,24 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { Address } from './Address'; +import type { TransactionPayload } from './TransactionPayload'; +import type { U64 } from './U64'; + +/** + * Request to encode a submission + */ +export type EncodeSubmissionRequest = { + sender: Address; + sequence_number: U64; + max_gas_amount: U64; + gas_unit_price: U64; + expiration_timestamp_secs: U64; + payload: TransactionPayload; + /** + * Secondary signer accounts of the request for Multi-agent + */ + secondary_signers?: Array
; +}; + diff --git a/m1/JavaScript-client/src/generated/models/EntryFunctionId.ts b/m1/JavaScript-client/src/generated/models/EntryFunctionId.ts new file mode 100644 index 00000000..f38310d0 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/EntryFunctionId.ts @@ -0,0 +1,13 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +/** + * Entry function id is string representation of a entry function defined on-chain. + * + * Format: `{address}::{module name}::{function name}` + * + * Both `module name` and `function name` are case-sensitive. + * + */ +export type EntryFunctionId = string; diff --git a/m1/JavaScript-client/src/generated/models/EntryFunctionPayload.ts b/m1/JavaScript-client/src/generated/models/EntryFunctionPayload.ts new file mode 100644 index 00000000..78a3ee67 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/EntryFunctionPayload.ts @@ -0,0 +1,22 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { EntryFunctionId } from './EntryFunctionId'; +import type { MoveType } from './MoveType'; + +/** + * Payload which runs a single entry function + */ +export type EntryFunctionPayload = { + function: EntryFunctionId; + /** + * Type arguments of the function + */ + type_arguments: Array; + /** + * Arguments of the function + */ + arguments: Array; +}; + diff --git a/m1/JavaScript-client/src/generated/models/Event.ts b/m1/JavaScript-client/src/generated/models/Event.ts new file mode 100644 index 00000000..a160a00c --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/Event.ts @@ -0,0 +1,21 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { EventGuid } from './EventGuid'; +import type { MoveType } from './MoveType'; +import type { U64 } from './U64'; + +/** + * An event from a transaction + */ +export type Event = { + guid: EventGuid; + sequence_number: U64; + type: MoveType; + /** + * The JSON representation of the event + */ + data: any; +}; + diff --git a/m1/JavaScript-client/src/generated/models/EventGuid.ts b/m1/JavaScript-client/src/generated/models/EventGuid.ts new file mode 100644 index 00000000..928ad847 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/EventGuid.ts @@ -0,0 +1,12 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { Address } from './Address'; +import type { U64 } from './U64'; + +export type EventGuid = { + creation_number: U64; + account_address: Address; +}; + diff --git a/m1/JavaScript-client/src/generated/models/GasEstimation.ts b/m1/JavaScript-client/src/generated/models/GasEstimation.ts new file mode 100644 index 00000000..f8ebd699 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/GasEstimation.ts @@ -0,0 +1,22 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +/** + * Struct holding the outputs of the estimate gas API + */ +export type GasEstimation = { + /** + * The deprioritized estimate for the gas unit price + */ + deprioritized_gas_estimate?: number; + /** + * The current estimate for the gas unit price + */ + gas_estimate: number; + /** + * The prioritized estimate for the gas unit price + */ + prioritized_gas_estimate?: number; +}; + diff --git a/m1/JavaScript-client/src/generated/models/GenesisPayload.ts b/m1/JavaScript-client/src/generated/models/GenesisPayload.ts new file mode 100644 index 00000000..66d653a6 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/GenesisPayload.ts @@ -0,0 +1,11 @@ +/* istanbul ignore 
file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { GenesisPayload_WriteSetPayload } from './GenesisPayload_WriteSetPayload'; + +/** + * The writeset payload of the Genesis transaction + */ +export type GenesisPayload = GenesisPayload_WriteSetPayload; + diff --git a/m1/JavaScript-client/src/generated/models/GenesisPayload_WriteSetPayload.ts b/m1/JavaScript-client/src/generated/models/GenesisPayload_WriteSetPayload.ts new file mode 100644 index 00000000..a6127506 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/GenesisPayload_WriteSetPayload.ts @@ -0,0 +1,10 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { WriteSetPayload } from './WriteSetPayload'; + +export type GenesisPayload_WriteSetPayload = ({ + type: string; +} & WriteSetPayload); + diff --git a/m1/JavaScript-client/src/generated/models/GenesisTransaction.ts b/m1/JavaScript-client/src/generated/models/GenesisTransaction.ts new file mode 100644 index 00000000..563a8be8 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/GenesisTransaction.ts @@ -0,0 +1,42 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { Event } from './Event'; +import type { GenesisPayload } from './GenesisPayload'; +import type { HashValue } from './HashValue'; +import type { U64 } from './U64'; +import type { WriteSetChange } from './WriteSetChange'; + +/** + * The genesis transaction + * + * This only occurs at the genesis transaction (version 0) + */ +export type GenesisTransaction = { + version: U64; + hash: HashValue; + state_change_hash: HashValue; + event_root_hash: HashValue; + state_checkpoint_hash?: HashValue; + gas_used: U64; + /** + * Whether the transaction was successful + */ + success: boolean; + /** + * The VM status of the transaction, can tell useful information in a failure + */ + vm_status: string; + accumulator_root_hash: HashValue; + /** + * Final state of resources changed by the transaction + */ + changes: Array; + payload: GenesisPayload; + /** + * Events emitted during genesis + */ + events: Array; +}; + diff --git a/m1/JavaScript-client/src/generated/models/HashValue.ts b/m1/JavaScript-client/src/generated/models/HashValue.ts new file mode 100644 index 00000000..a296dd84 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/HashValue.ts @@ -0,0 +1,5 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +export type HashValue = string; diff --git a/m1/JavaScript-client/src/generated/models/HealthCheckSuccess.ts b/m1/JavaScript-client/src/generated/models/HealthCheckSuccess.ts new file mode 100644 index 00000000..ff98f0ed --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/HealthCheckSuccess.ts @@ -0,0 +1,11 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +/** + * Representation of a successful healthcheck + */ +export type HealthCheckSuccess = { + message: string; +}; + diff --git a/m1/JavaScript-client/src/generated/models/HexEncodedBytes.ts b/m1/JavaScript-client/src/generated/models/HexEncodedBytes.ts new file mode 100644 index 00000000..d4a21baf --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/HexEncodedBytes.ts @@ -0,0 +1,12 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +/** + * All bytes (Vec) data is represented as hex-encoded string prefixed with `0x` and fulfilled with + * two hex digits per byte. + * + * Unlike the `Address` type, HexEncodedBytes will not trim any zeros. 
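+ * For example, a single byte with value one is encoded as `0x01` (two hex digits),
+ * never shortened to `0x1`.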
+ * + */ +export type HexEncodedBytes = string; diff --git a/m1/JavaScript-client/src/generated/models/IdentifierWrapper.ts b/m1/JavaScript-client/src/generated/models/IdentifierWrapper.ts new file mode 100644 index 00000000..8bf5d9a7 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/IdentifierWrapper.ts @@ -0,0 +1,5 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +export type IdentifierWrapper = string; diff --git a/m1/JavaScript-client/src/generated/models/IndexResponse.ts b/m1/JavaScript-client/src/generated/models/IndexResponse.ts new file mode 100644 index 00000000..1ca2519b --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/IndexResponse.ts @@ -0,0 +1,30 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { RoleType } from './RoleType'; +import type { U64 } from './U64'; + +/** + * The struct holding all data returned to the client by the + * index endpoint (i.e., GET "/"). Only for responding in JSON + */ +export type IndexResponse = { + /** + * Chain ID of the current chain + */ + chain_id: number; + epoch: U64; + ledger_version: U64; + oldest_ledger_version: U64; + ledger_timestamp: U64; + node_role: RoleType; + oldest_block_height: U64; + block_height: U64; + /** + * Git hash of the build of the API endpoint. Can be used to determine the exact + * software version used by the API endpoint. + */ + git_hash?: string; +}; + diff --git a/m1/JavaScript-client/src/generated/models/ModuleBundlePayload.ts b/m1/JavaScript-client/src/generated/models/ModuleBundlePayload.ts new file mode 100644 index 00000000..be34a1f4 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/ModuleBundlePayload.ts @@ -0,0 +1,10 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { MoveModuleBytecode } from './MoveModuleBytecode'; + +export type ModuleBundlePayload = { + modules: Array; +}; + diff --git a/m1/JavaScript-client/src/generated/models/MoveAbility.ts b/m1/JavaScript-client/src/generated/models/MoveAbility.ts new file mode 100644 index 00000000..311c5db9 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/MoveAbility.ts @@ -0,0 +1,5 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +export type MoveAbility = string; diff --git a/m1/JavaScript-client/src/generated/models/MoveFunction.ts b/m1/JavaScript-client/src/generated/models/MoveFunction.ts new file mode 100644 index 00000000..4ce3ad72 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/MoveFunction.ts @@ -0,0 +1,37 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { IdentifierWrapper } from './IdentifierWrapper'; +import type { MoveFunctionGenericTypeParam } from './MoveFunctionGenericTypeParam'; +import type { MoveFunctionVisibility } from './MoveFunctionVisibility'; +import type { MoveType } from './MoveType'; + +/** + * Move function + */ +export type MoveFunction = { + name: IdentifierWrapper; + visibility: MoveFunctionVisibility; + /** + * Whether the function can be called as an entry function directly in a transaction + */ + is_entry: boolean; + /** + * Whether the function is a view function or not + */ + is_view: boolean; + /** + * Generic type params associated with the Move function + */ + generic_type_params: Array; + /** + * Parameters associated with the move function + */ + params: Array; + /** + * Return type of the function + */ + return: Array; +}; + diff --git 
a/m1/JavaScript-client/src/generated/models/MoveFunctionGenericTypeParam.ts b/m1/JavaScript-client/src/generated/models/MoveFunctionGenericTypeParam.ts new file mode 100644 index 00000000..c0776fe5 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/MoveFunctionGenericTypeParam.ts @@ -0,0 +1,16 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { MoveAbility } from './MoveAbility'; + +/** + * Move function generic type param + */ +export type MoveFunctionGenericTypeParam = { + /** + * Move abilities tied to the generic type param and associated with the function that uses it + */ + constraints: Array; +}; + diff --git a/m1/JavaScript-client/src/generated/models/MoveFunctionVisibility.ts b/m1/JavaScript-client/src/generated/models/MoveFunctionVisibility.ts new file mode 100644 index 00000000..a1684d5c --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/MoveFunctionVisibility.ts @@ -0,0 +1,12 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +/** + * Move function visibility + */ +export enum MoveFunctionVisibility { + PRIVATE = 'private', + PUBLIC = 'public', + FRIEND = 'friend', +} diff --git a/m1/JavaScript-client/src/generated/models/MoveModule.ts b/m1/JavaScript-client/src/generated/models/MoveModule.ts new file mode 100644 index 00000000..f2d127ff --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/MoveModule.ts @@ -0,0 +1,30 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { Address } from './Address'; +import type { IdentifierWrapper } from './IdentifierWrapper'; +import type { MoveFunction } from './MoveFunction'; +import type { MoveModuleId } from './MoveModuleId'; +import type { MoveStruct } from './MoveStruct'; + +/** + * A Move module + */ +export type MoveModule = { + address: Address; + name: IdentifierWrapper; + /** + * Friends of the module + */ + friends: Array; + /** + * Public functions of the module + */ + exposed_functions: Array; + /** + * Structs of the module + */ + structs: Array; +}; + diff --git a/m1/JavaScript-client/src/generated/models/MoveModuleBytecode.ts b/m1/JavaScript-client/src/generated/models/MoveModuleBytecode.ts new file mode 100644 index 00000000..9348d105 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/MoveModuleBytecode.ts @@ -0,0 +1,15 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { HexEncodedBytes } from './HexEncodedBytes'; +import type { MoveModule } from './MoveModule'; + +/** + * Move module bytecode along with it's ABI + */ +export type MoveModuleBytecode = { + bytecode: HexEncodedBytes; + abi?: MoveModule; +}; + diff --git a/m1/JavaScript-client/src/generated/models/MoveModuleId.ts b/m1/JavaScript-client/src/generated/models/MoveModuleId.ts new file mode 100644 index 00000000..5ad74349 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/MoveModuleId.ts @@ -0,0 +1,15 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +/** + * Move module id is a string representation of Move module. + * + * Format: `{address}::{module name}` + * + * `address` should be hex-encoded 32 byte account address that is prefixed with `0x`. + * + * Module name is case-sensitive. 
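+ * For example: `0x1::aptos_coin` (shown purely as an illustration of the
+ * `{address}::{module name}` format).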
+ * + */ +export type MoveModuleId = string; diff --git a/m1/JavaScript-client/src/generated/models/MoveResource.ts b/m1/JavaScript-client/src/generated/models/MoveResource.ts new file mode 100644 index 00000000..544bfa47 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/MoveResource.ts @@ -0,0 +1,15 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { MoveStructTag } from './MoveStructTag'; +import type { MoveStructValue } from './MoveStructValue'; + +/** + * A parsed Move resource + */ +export type MoveResource = { + type: MoveStructTag; + data: MoveStructValue; +}; + diff --git a/m1/JavaScript-client/src/generated/models/MoveScriptBytecode.ts b/m1/JavaScript-client/src/generated/models/MoveScriptBytecode.ts new file mode 100644 index 00000000..109a6cf6 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/MoveScriptBytecode.ts @@ -0,0 +1,15 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { HexEncodedBytes } from './HexEncodedBytes'; +import type { MoveFunction } from './MoveFunction'; + +/** + * Move script bytecode + */ +export type MoveScriptBytecode = { + bytecode: HexEncodedBytes; + abi?: MoveFunction; +}; + diff --git a/m1/JavaScript-client/src/generated/models/MoveStruct.ts b/m1/JavaScript-client/src/generated/models/MoveStruct.ts new file mode 100644 index 00000000..74dd1d87 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/MoveStruct.ts @@ -0,0 +1,32 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { IdentifierWrapper } from './IdentifierWrapper'; +import type { MoveAbility } from './MoveAbility'; +import type { MoveStructField } from './MoveStructField'; +import type { MoveStructGenericTypeParam } from './MoveStructGenericTypeParam'; + +/** + * A move struct + */ +export type MoveStruct = { + name: IdentifierWrapper; + /** + * Whether the struct is a native struct of Move + */ + is_native: boolean; + /** + * Abilities associated with the struct + */ + abilities: Array; + /** + * Generic types associated with the struct + */ + generic_type_params: Array; + /** + * Fields associated with the struct + */ + fields: Array; +}; + diff --git a/m1/JavaScript-client/src/generated/models/MoveStructField.ts b/m1/JavaScript-client/src/generated/models/MoveStructField.ts new file mode 100644 index 00000000..3f20b0bf --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/MoveStructField.ts @@ -0,0 +1,15 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { IdentifierWrapper } from './IdentifierWrapper'; +import type { MoveType } from './MoveType'; + +/** + * Move struct field + */ +export type MoveStructField = { + name: IdentifierWrapper; + type: MoveType; +}; + diff --git a/m1/JavaScript-client/src/generated/models/MoveStructGenericTypeParam.ts b/m1/JavaScript-client/src/generated/models/MoveStructGenericTypeParam.ts new file mode 100644 index 00000000..5ff3317d --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/MoveStructGenericTypeParam.ts @@ -0,0 +1,16 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { MoveAbility } from './MoveAbility'; + +/** + * Move generic type param + */ +export type MoveStructGenericTypeParam = { + /** + * Move abilities tied to the generic type param and associated with the type that uses it + */ + constraints: Array; +}; + diff --git a/m1/JavaScript-client/src/generated/models/MoveStructTag.ts 
b/m1/JavaScript-client/src/generated/models/MoveStructTag.ts new file mode 100644 index 00000000..254235dd --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/MoveStructTag.ts @@ -0,0 +1,24 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +/** + * String representation of a MoveStructTag (on-chain Move struct type). This exists so you + * can specify MoveStructTags as path / query parameters, e.g. for get_events_by_event_handle. + * + * It is a combination of: + * 1. `move_module_address`, `module_name` and `struct_name`, all joined by `::` + * 2. `struct generic type parameters` joined by `, ` + * + * Examples: + * * `0x1::coin::CoinStore<0x1::aptos_coin::AptosCoin>` + * * `0x1::account::Account` + * + * Note: + * 1. Empty chars should be ignored when comparing 2 struct tag ids. + * 2. When used in an URL path, should be encoded by url-encoding (AKA percent-encoding). + * + * See [doc](https://aptos.dev/concepts/accounts) for more details. + * + */ +export type MoveStructTag = string; diff --git a/m1/JavaScript-client/src/generated/models/MoveStructValue.ts b/m1/JavaScript-client/src/generated/models/MoveStructValue.ts new file mode 100644 index 00000000..11d4da45 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/MoveStructValue.ts @@ -0,0 +1,49 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +/** + * This is a JSON representation of some data within an account resource. More specifically, + * it is a map of strings to arbitrary JSON values / objects, where the keys are top level + * fields within the given resource. + * + * To clarify, you might query for 0x1::account::Account and see the example data. + * + * Move `bool` type value is serialized into `boolean`. + * + * Move `u8`, `u16` and `u32` type value is serialized into `integer`. + * + * Move `u64`, `u128` and `u256` type value is serialized into `string`. + * + * Move `address` type value (32 byte Aptos account address) is serialized into a HexEncodedBytes string. + * For example: + * - `0x1` + * - `0x1668f6be25668c1a17cd8caf6b8d2f25` + * + * Move `vector` type value is serialized into `array`, except `vector` which is serialized into a + * HexEncodedBytes string with `0x` prefix. + * For example: + * - `vector{255, 255}` => `["255", "255"]` + * - `vector{255, 255}` => `0xffff` + * + * Move `struct` type value is serialized into `object` that looks like this (except some Move stdlib types, see the following section): + * ```json + * { + * field1_name: field1_value, + * field2_name: field2_value, + * ...... + * } + * ``` + * + * For example: + * `{ "created": "0xa550c18", "role_id": "0" }` + * + * **Special serialization for Move stdlib types**: + * - [0x1::string::String](https://github.com/aptos-labs/aptos-core/blob/main/language/move-stdlib/docs/ascii.md) + * is serialized into `string`. For example, struct value `0x1::string::String{bytes: b"Hello World!"}` + * is serialized as `"Hello World!"` in JSON. + * + */ + export type MoveStructValue = { + }; + diff --git a/m1/JavaScript-client/src/generated/models/MoveType.ts b/m1/JavaScript-client/src/generated/models/MoveType.ts new file mode 100644 index 00000000..22618377 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/MoveType.ts @@ -0,0 +1,34 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +/** + * String representation of an on-chain Move type tag that is exposed in transaction payload. 
+ * Values: + * - bool + * - u8 + * - u16 + * - u32 + * - u64 + * - u128 + * - u256 + * - address + * - signer + * - vector: `vector<{non-reference MoveTypeId}>` + * - struct: `{address}::{module_name}::{struct_name}::<{generic types}>` + * + * Vector type value examples: + * - `vector` + * - `vector>` + * - `vector<0x1::coin::CoinStore<0x1::aptos_coin::AptosCoin>>` + * + * Struct type value examples: + * - `0x1::coin::CoinStore<0x1::aptos_coin::AptosCoin> + * - `0x1::account::Account` + * + * Note: + * 1. Empty chars should be ignored when comparing 2 struct tag ids. + * 2. When used in an URL path, should be encoded by url-encoding (AKA percent-encoding). + * + */ +export type MoveType = string; diff --git a/m1/JavaScript-client/src/generated/models/MoveValue.ts b/m1/JavaScript-client/src/generated/models/MoveValue.ts new file mode 100644 index 00000000..899490be --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/MoveValue.ts @@ -0,0 +1,16 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { Address } from './Address'; +import type { HexEncodedBytes } from './HexEncodedBytes'; +import type { MoveStructValue } from './MoveStructValue'; +import type { U128 } from './U128'; +import type { U256 } from './U256'; +import type { U64 } from './U64'; + +/** + * An enum of the possible Move value types + */ +export type MoveValue = (number | U64 | U128 | U256 | boolean | Address | Array | HexEncodedBytes | MoveStructValue | string); + diff --git a/m1/JavaScript-client/src/generated/models/MultiAgentSignature.ts b/m1/JavaScript-client/src/generated/models/MultiAgentSignature.ts new file mode 100644 index 00000000..f07b8173 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/MultiAgentSignature.ts @@ -0,0 +1,24 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { AccountSignature } from './AccountSignature'; +import type { Address } from './Address'; + +/** + * Multi agent signature for multi agent transactions + * + * This allows you to have transactions across multiple accounts + */ +export type MultiAgentSignature = { + sender: AccountSignature; + /** + * The other involved parties' addresses + */ + secondary_signer_addresses: Array
; + /** + * The associated signatures, in the same order as the secondary addresses + */ + secondary_signers: Array; +}; + diff --git a/m1/JavaScript-client/src/generated/models/MultiEd25519Signature.ts b/m1/JavaScript-client/src/generated/models/MultiEd25519Signature.ts new file mode 100644 index 00000000..29809a29 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/MultiEd25519Signature.ts @@ -0,0 +1,27 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { HexEncodedBytes } from './HexEncodedBytes'; + +/** + * A Ed25519 multi-sig signature + * + * This allows k-of-n signing for a transaction + */ +export type MultiEd25519Signature = { + /** + * The public keys for the Ed25519 signature + */ + public_keys: Array; + /** + * Signature associated with the public keys in the same order + */ + signatures: Array; + /** + * The number of signatures required for a successful transaction + */ + threshold: number; + bitmap: HexEncodedBytes; +}; + diff --git a/m1/JavaScript-client/src/generated/models/MultisigPayload.ts b/m1/JavaScript-client/src/generated/models/MultisigPayload.ts new file mode 100644 index 00000000..9100a74b --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/MultisigPayload.ts @@ -0,0 +1,16 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { Address } from './Address'; +import type { MultisigTransactionPayload } from './MultisigTransactionPayload'; + +/** + * A multisig transaction that allows an owner of a multisig account to execute a pre-approved + * transaction as the multisig account. + */ +export type MultisigPayload = { + multisig_address: Address; + transaction_payload?: MultisigTransactionPayload; +}; + diff --git a/m1/JavaScript-client/src/generated/models/MultisigTransactionPayload.ts b/m1/JavaScript-client/src/generated/models/MultisigTransactionPayload.ts new file mode 100644 index 00000000..d3191b53 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/MultisigTransactionPayload.ts @@ -0,0 +1,8 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { EntryFunctionPayload } from './EntryFunctionPayload'; + +export type MultisigTransactionPayload = EntryFunctionPayload; + diff --git a/m1/JavaScript-client/src/generated/models/PendingTransaction.ts b/m1/JavaScript-client/src/generated/models/PendingTransaction.ts new file mode 100644 index 00000000..2b4ad87f --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/PendingTransaction.ts @@ -0,0 +1,24 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { Address } from './Address'; +import type { HashValue } from './HashValue'; +import type { TransactionPayload } from './TransactionPayload'; +import type { TransactionSignature } from './TransactionSignature'; +import type { U64 } from './U64'; + +/** + * A transaction waiting in mempool + */ +export type PendingTransaction = { + hash: HashValue; + sender: Address; + sequence_number: U64; + max_gas_amount: U64; + gas_unit_price: U64; + expiration_timestamp_secs: U64; + payload: TransactionPayload; + signature?: TransactionSignature; +}; + diff --git a/m1/JavaScript-client/src/generated/models/RawTableItemRequest.ts b/m1/JavaScript-client/src/generated/models/RawTableItemRequest.ts new file mode 100644 index 00000000..e8fc0f8e --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/RawTableItemRequest.ts @@ -0,0 +1,13 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* 
eslint-disable */ + +import type { HexEncodedBytes } from './HexEncodedBytes'; + +/** + * Table Item request for the GetTableItemRaw API + */ +export type RawTableItemRequest = { + key: HexEncodedBytes; +}; + diff --git a/m1/JavaScript-client/src/generated/models/RoleType.ts b/m1/JavaScript-client/src/generated/models/RoleType.ts new file mode 100644 index 00000000..f0b5c207 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/RoleType.ts @@ -0,0 +1,8 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +export enum RoleType { + VALIDATOR = 'validator', + FULL_NODE = 'full_node', +} diff --git a/m1/JavaScript-client/src/generated/models/ScriptPayload.ts b/m1/JavaScript-client/src/generated/models/ScriptPayload.ts new file mode 100644 index 00000000..52cb0a05 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/ScriptPayload.ts @@ -0,0 +1,22 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { MoveScriptBytecode } from './MoveScriptBytecode'; +import type { MoveType } from './MoveType'; + +/** + * Payload which runs a script that can run multiple functions + */ +export type ScriptPayload = { + code: MoveScriptBytecode; + /** + * Type arguments of the function + */ + type_arguments: Array; + /** + * Arguments of the function + */ + arguments: Array; +}; + diff --git a/m1/JavaScript-client/src/generated/models/ScriptWriteSet.ts b/m1/JavaScript-client/src/generated/models/ScriptWriteSet.ts new file mode 100644 index 00000000..469b7807 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/ScriptWriteSet.ts @@ -0,0 +1,12 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { Address } from './Address'; +import type { ScriptPayload } from './ScriptPayload'; + +export type ScriptWriteSet = { + execute_as: Address; + script: ScriptPayload; +}; + diff --git a/m1/JavaScript-client/src/generated/models/StateCheckpointTransaction.ts b/m1/JavaScript-client/src/generated/models/StateCheckpointTransaction.ts new file mode 100644 index 00000000..bfe5b5db --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/StateCheckpointTransaction.ts @@ -0,0 +1,34 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { HashValue } from './HashValue'; +import type { U64 } from './U64'; +import type { WriteSetChange } from './WriteSetChange'; + +/** + * A state checkpoint transaction + */ +export type StateCheckpointTransaction = { + version: U64; + hash: HashValue; + state_change_hash: HashValue; + event_root_hash: HashValue; + state_checkpoint_hash?: HashValue; + gas_used: U64; + /** + * Whether the transaction was successful + */ + success: boolean; + /** + * The VM status of the transaction, can tell useful information in a failure + */ + vm_status: string; + accumulator_root_hash: HashValue; + /** + * Final state of resources changed by the transaction + */ + changes: Array; + timestamp: U64; +}; + diff --git a/m1/JavaScript-client/src/generated/models/StateKeyWrapper.ts b/m1/JavaScript-client/src/generated/models/StateKeyWrapper.ts new file mode 100644 index 00000000..d3939a43 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/StateKeyWrapper.ts @@ -0,0 +1,9 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +/** + * Representation of a StateKey as a hex string. This is used for cursor based pagination. 
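Here is a short sketch of the `ScriptPayload` shape defined above, assuming the array element types are `MoveType` strings and arbitrary JSON values as in the upstream generated client; the bytecode and argument values are placeholders.

```typescript
import type { ScriptPayload } from './ScriptPayload';

// Hypothetical payload that runs a compiled Move script with one type argument
// and two arguments. The `abi` field on MoveScriptBytecode is optional, so only
// the bytecode hex is required here.
const scriptPayload: ScriptPayload = {
  code: {
    bytecode: '0xa11ceb0b...', // placeholder, not real compiled bytecode
  },
  type_arguments: ['0x1::aptos_coin::AptosCoin'],
  arguments: ['0x1', '1000'],
};
```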
+ * + */ +export type StateKeyWrapper = string; diff --git a/m1/JavaScript-client/src/generated/models/SubmitTransactionRequest.ts b/m1/JavaScript-client/src/generated/models/SubmitTransactionRequest.ts new file mode 100644 index 00000000..b850d3f5 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/SubmitTransactionRequest.ts @@ -0,0 +1,24 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { Address } from './Address'; +import type { TransactionPayload } from './TransactionPayload'; +import type { TransactionSignature } from './TransactionSignature'; +import type { U64 } from './U64'; + +/** + * A request to submit a transaction + * + * This requires a transaction and a signature of it + */ +export type SubmitTransactionRequest = { + sender: Address; + sequence_number: U64; + max_gas_amount: U64; + gas_unit_price: U64; + expiration_timestamp_secs: U64; + payload: TransactionPayload; + signature: TransactionSignature; +}; + diff --git a/m1/JavaScript-client/src/generated/models/TableItemRequest.ts b/m1/JavaScript-client/src/generated/models/TableItemRequest.ts new file mode 100644 index 00000000..18b44286 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/TableItemRequest.ts @@ -0,0 +1,18 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { MoveType } from './MoveType'; + +/** + * Table Item request for the GetTableItem API + */ +export type TableItemRequest = { + key_type: MoveType; + value_type: MoveType; + /** + * The value of the table item's key + */ + key: any; +}; + diff --git a/m1/JavaScript-client/src/generated/models/Transaction.ts b/m1/JavaScript-client/src/generated/models/Transaction.ts new file mode 100644 index 00000000..0d6bd99e --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/Transaction.ts @@ -0,0 +1,15 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { Transaction_BlockMetadataTransaction } from './Transaction_BlockMetadataTransaction'; +import type { Transaction_GenesisTransaction } from './Transaction_GenesisTransaction'; +import type { Transaction_PendingTransaction } from './Transaction_PendingTransaction'; +import type { Transaction_StateCheckpointTransaction } from './Transaction_StateCheckpointTransaction'; +import type { Transaction_UserTransaction } from './Transaction_UserTransaction'; + +/** + * Enum of the different types of transactions in Aptos + */ +export type Transaction = (Transaction_PendingTransaction | Transaction_UserTransaction | Transaction_GenesisTransaction | Transaction_BlockMetadataTransaction | Transaction_StateCheckpointTransaction); + diff --git a/m1/JavaScript-client/src/generated/models/TransactionPayload.ts b/m1/JavaScript-client/src/generated/models/TransactionPayload.ts new file mode 100644 index 00000000..0115a297 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/TransactionPayload.ts @@ -0,0 +1,14 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { TransactionPayload_EntryFunctionPayload } from './TransactionPayload_EntryFunctionPayload'; +import type { TransactionPayload_ModuleBundlePayload } from './TransactionPayload_ModuleBundlePayload'; +import type { TransactionPayload_MultisigPayload } from './TransactionPayload_MultisigPayload'; +import type { TransactionPayload_ScriptPayload } from './TransactionPayload_ScriptPayload'; + +/** + * An enum of the possible transaction payloads + */ +export type TransactionPayload = 
(TransactionPayload_EntryFunctionPayload | TransactionPayload_ScriptPayload | TransactionPayload_ModuleBundlePayload | TransactionPayload_MultisigPayload); + diff --git a/m1/JavaScript-client/src/generated/models/TransactionPayload_EntryFunctionPayload.ts b/m1/JavaScript-client/src/generated/models/TransactionPayload_EntryFunctionPayload.ts new file mode 100644 index 00000000..d44e5d89 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/TransactionPayload_EntryFunctionPayload.ts @@ -0,0 +1,10 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { EntryFunctionPayload } from './EntryFunctionPayload'; + +export type TransactionPayload_EntryFunctionPayload = ({ + type: string; +} & EntryFunctionPayload); + diff --git a/m1/JavaScript-client/src/generated/models/TransactionPayload_ModuleBundlePayload.ts b/m1/JavaScript-client/src/generated/models/TransactionPayload_ModuleBundlePayload.ts new file mode 100644 index 00000000..0193cb9d --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/TransactionPayload_ModuleBundlePayload.ts @@ -0,0 +1,10 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { ModuleBundlePayload } from './ModuleBundlePayload'; + +export type TransactionPayload_ModuleBundlePayload = ({ + type: string; +} & ModuleBundlePayload); + diff --git a/m1/JavaScript-client/src/generated/models/TransactionPayload_MultisigPayload.ts b/m1/JavaScript-client/src/generated/models/TransactionPayload_MultisigPayload.ts new file mode 100644 index 00000000..01023dd7 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/TransactionPayload_MultisigPayload.ts @@ -0,0 +1,10 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { MultisigPayload } from './MultisigPayload'; + +export type TransactionPayload_MultisigPayload = ({ + type: string; +} & MultisigPayload); + diff --git a/m1/JavaScript-client/src/generated/models/TransactionPayload_ScriptPayload.ts b/m1/JavaScript-client/src/generated/models/TransactionPayload_ScriptPayload.ts new file mode 100644 index 00000000..9e7feb97 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/TransactionPayload_ScriptPayload.ts @@ -0,0 +1,10 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { ScriptPayload } from './ScriptPayload'; + +export type TransactionPayload_ScriptPayload = ({ + type: string; +} & ScriptPayload); + diff --git a/m1/JavaScript-client/src/generated/models/TransactionSignature.ts b/m1/JavaScript-client/src/generated/models/TransactionSignature.ts new file mode 100644 index 00000000..b3861fbd --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/TransactionSignature.ts @@ -0,0 +1,13 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { TransactionSignature_Ed25519Signature } from './TransactionSignature_Ed25519Signature'; +import type { TransactionSignature_MultiAgentSignature } from './TransactionSignature_MultiAgentSignature'; +import type { TransactionSignature_MultiEd25519Signature } from './TransactionSignature_MultiEd25519Signature'; + +/** + * An enum representing the different transaction signatures available + */ +export type TransactionSignature = (TransactionSignature_Ed25519Signature | TransactionSignature_MultiEd25519Signature | TransactionSignature_MultiAgentSignature); + diff --git a/m1/JavaScript-client/src/generated/models/TransactionSignature_Ed25519Signature.ts 
b/m1/JavaScript-client/src/generated/models/TransactionSignature_Ed25519Signature.ts new file mode 100644 index 00000000..0667d1e8 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/TransactionSignature_Ed25519Signature.ts @@ -0,0 +1,10 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { Ed25519Signature } from './Ed25519Signature'; + +export type TransactionSignature_Ed25519Signature = ({ + type: string; +} & Ed25519Signature); + diff --git a/m1/JavaScript-client/src/generated/models/TransactionSignature_MultiAgentSignature.ts b/m1/JavaScript-client/src/generated/models/TransactionSignature_MultiAgentSignature.ts new file mode 100644 index 00000000..e74d911d --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/TransactionSignature_MultiAgentSignature.ts @@ -0,0 +1,10 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { MultiAgentSignature } from './MultiAgentSignature'; + +export type TransactionSignature_MultiAgentSignature = ({ + type: string; +} & MultiAgentSignature); + diff --git a/m1/JavaScript-client/src/generated/models/TransactionSignature_MultiEd25519Signature.ts b/m1/JavaScript-client/src/generated/models/TransactionSignature_MultiEd25519Signature.ts new file mode 100644 index 00000000..1f6dc58a --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/TransactionSignature_MultiEd25519Signature.ts @@ -0,0 +1,10 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { MultiEd25519Signature } from './MultiEd25519Signature'; + +export type TransactionSignature_MultiEd25519Signature = ({ + type: string; +} & MultiEd25519Signature); + diff --git a/m1/JavaScript-client/src/generated/models/Transaction_BlockMetadataTransaction.ts b/m1/JavaScript-client/src/generated/models/Transaction_BlockMetadataTransaction.ts new file mode 100644 index 00000000..82067d74 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/Transaction_BlockMetadataTransaction.ts @@ -0,0 +1,10 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { BlockMetadataTransaction } from './BlockMetadataTransaction'; + +export type Transaction_BlockMetadataTransaction = ({ + type: string; +} & BlockMetadataTransaction); + diff --git a/m1/JavaScript-client/src/generated/models/Transaction_GenesisTransaction.ts b/m1/JavaScript-client/src/generated/models/Transaction_GenesisTransaction.ts new file mode 100644 index 00000000..3bb44fd0 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/Transaction_GenesisTransaction.ts @@ -0,0 +1,10 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { GenesisTransaction } from './GenesisTransaction'; + +export type Transaction_GenesisTransaction = ({ + type: string; +} & GenesisTransaction); + diff --git a/m1/JavaScript-client/src/generated/models/Transaction_PendingTransaction.ts b/m1/JavaScript-client/src/generated/models/Transaction_PendingTransaction.ts new file mode 100644 index 00000000..c593c764 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/Transaction_PendingTransaction.ts @@ -0,0 +1,10 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { PendingTransaction } from './PendingTransaction'; + +export type Transaction_PendingTransaction = ({ + type: string; +} & PendingTransaction); + diff --git a/m1/JavaScript-client/src/generated/models/Transaction_StateCheckpointTransaction.ts 
b/m1/JavaScript-client/src/generated/models/Transaction_StateCheckpointTransaction.ts new file mode 100644 index 00000000..90eeda70 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/Transaction_StateCheckpointTransaction.ts @@ -0,0 +1,10 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { StateCheckpointTransaction } from './StateCheckpointTransaction'; + +export type Transaction_StateCheckpointTransaction = ({ + type: string; +} & StateCheckpointTransaction); + diff --git a/m1/JavaScript-client/src/generated/models/Transaction_UserTransaction.ts b/m1/JavaScript-client/src/generated/models/Transaction_UserTransaction.ts new file mode 100644 index 00000000..8feeed7c --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/Transaction_UserTransaction.ts @@ -0,0 +1,10 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { UserTransaction } from './UserTransaction'; + +export type Transaction_UserTransaction = ({ + type: string; +} & UserTransaction); + diff --git a/m1/JavaScript-client/src/generated/models/TransactionsBatchSingleSubmissionFailure.ts b/m1/JavaScript-client/src/generated/models/TransactionsBatchSingleSubmissionFailure.ts new file mode 100644 index 00000000..98f81af6 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/TransactionsBatchSingleSubmissionFailure.ts @@ -0,0 +1,17 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { AptosError } from './AptosError'; + +/** + * Information telling which batch submission transactions failed + */ +export type TransactionsBatchSingleSubmissionFailure = { + error: AptosError; + /** + * The index of which transaction failed, same as submission order + */ + transaction_index: number; +}; + diff --git a/m1/JavaScript-client/src/generated/models/TransactionsBatchSubmissionResult.ts b/m1/JavaScript-client/src/generated/models/TransactionsBatchSubmissionResult.ts new file mode 100644 index 00000000..43c5d62f --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/TransactionsBatchSubmissionResult.ts @@ -0,0 +1,18 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { TransactionsBatchSingleSubmissionFailure } from './TransactionsBatchSingleSubmissionFailure'; + +/** + * Batch transaction submission result + * + * Tells which transactions failed + */ +export type TransactionsBatchSubmissionResult = { + /** + * Summary of the failed transactions + */ + transaction_failures: Array; +}; + diff --git a/m1/JavaScript-client/src/generated/models/U128.ts b/m1/JavaScript-client/src/generated/models/U128.ts new file mode 100644 index 00000000..d391d4af --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/U128.ts @@ -0,0 +1,12 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +/** + * A string containing a 128-bit unsigned integer. + * + * We represent u128 values as a string to ensure compatibility with languages such + * as JavaScript that do not parse u128s in JSON natively. + * + */ +export type U128 = string; diff --git a/m1/JavaScript-client/src/generated/models/U256.ts b/m1/JavaScript-client/src/generated/models/U256.ts new file mode 100644 index 00000000..092d56d8 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/U256.ts @@ -0,0 +1,12 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +/** + * A string containing a 256-bit unsigned integer. 
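Because the unsigned-integer models in this patch (`U64`, `U128`, `U256`) are plain strings, a consumer has to convert them before doing arithmetic. A minimal sketch, with invented values:

```typescript
import type { U64 } from './U64';
import type { U128 } from './U128';

// u64/u128 values arrive as decimal strings, so convert to bigint before doing math;
// a plain `number` would silently lose precision above Number.MAX_SAFE_INTEGER.
const gasUsed: U64 = '2041';
const supply: U128 = '18446744073709551617'; // 2^64 + 1, not representable as a JS number

const total: bigint = BigInt(gasUsed) + BigInt(supply);
console.log(total.toString()); // "18446744073709553658"
```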
+ * + * We represent u256 values as a string to ensure compatibility with languages such + * as JavaScript that do not parse u256s in JSON natively. + * + */ +export type U256 = string; diff --git a/m1/JavaScript-client/src/generated/models/U64.ts b/m1/JavaScript-client/src/generated/models/U64.ts new file mode 100644 index 00000000..f8f542c6 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/U64.ts @@ -0,0 +1,12 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +/** + * A string containing a 64-bit unsigned integer. + * + * We represent u64 values as a string to ensure compatibility with languages such + * as JavaScript that do not parse u64s in JSON natively. + * + */ +export type U64 = string; diff --git a/m1/JavaScript-client/src/generated/models/UserTransaction.ts b/m1/JavaScript-client/src/generated/models/UserTransaction.ts new file mode 100644 index 00000000..4aa4e67a --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/UserTransaction.ts @@ -0,0 +1,49 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { Address } from './Address'; +import type { Event } from './Event'; +import type { HashValue } from './HashValue'; +import type { TransactionPayload } from './TransactionPayload'; +import type { TransactionSignature } from './TransactionSignature'; +import type { U64 } from './U64'; +import type { WriteSetChange } from './WriteSetChange'; + +/** + * A transaction submitted by a user to change the state of the blockchain + */ +export type UserTransaction = { + version: U64; + hash: HashValue; + state_change_hash: HashValue; + event_root_hash: HashValue; + state_checkpoint_hash?: HashValue; + gas_used: U64; + /** + * Whether the transaction was successful + */ + success: boolean; + /** + * The VM status of the transaction, can tell useful information in a failure + */ + vm_status: string; + accumulator_root_hash: HashValue; + /** + * Final state of resources changed by the transaction + */ + changes: Array; + sender: Address; + sequence_number: U64; + max_gas_amount: U64; + gas_unit_price: U64; + expiration_timestamp_secs: U64; + payload: TransactionPayload; + signature?: TransactionSignature; + /** + * Events generated by the transaction + */ + events: Array; + timestamp: U64; +}; + diff --git a/m1/JavaScript-client/src/generated/models/VersionedEvent.ts b/m1/JavaScript-client/src/generated/models/VersionedEvent.ts new file mode 100644 index 00000000..c9934c82 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/VersionedEvent.ts @@ -0,0 +1,22 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { EventGuid } from './EventGuid'; +import type { MoveType } from './MoveType'; +import type { U64 } from './U64'; + +/** + * An event from a transaction with a version + */ +export type VersionedEvent = { + version: U64; + guid: EventGuid; + sequence_number: U64; + type: MoveType; + /** + * The JSON representation of the event + */ + data: any; +}; + diff --git a/m1/JavaScript-client/src/generated/models/ViewRequest.ts b/m1/JavaScript-client/src/generated/models/ViewRequest.ts new file mode 100644 index 00000000..5fa124b8 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/ViewRequest.ts @@ -0,0 +1,22 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { EntryFunctionId } from './EntryFunctionId'; +import type { MoveType } from './MoveType'; + +/** + * View request for the Move View Function API + */ +export type 
ViewRequest = { + function: EntryFunctionId; + /** + * Type arguments of the function + */ + type_arguments: Array; + /** + * Arguments of the function + */ + arguments: Array; +}; + diff --git a/m1/JavaScript-client/src/generated/models/WriteModule.ts b/m1/JavaScript-client/src/generated/models/WriteModule.ts new file mode 100644 index 00000000..b032cc91 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/WriteModule.ts @@ -0,0 +1,19 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { Address } from './Address'; +import type { MoveModuleBytecode } from './MoveModuleBytecode'; + +/** + * Write a new module or update an existing one + */ +export type WriteModule = { + address: Address; + /** + * State key hash + */ + state_key_hash: string; + data: MoveModuleBytecode; +}; + diff --git a/m1/JavaScript-client/src/generated/models/WriteResource.ts b/m1/JavaScript-client/src/generated/models/WriteResource.ts new file mode 100644 index 00000000..2bccf200 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/WriteResource.ts @@ -0,0 +1,19 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { Address } from './Address'; +import type { MoveResource } from './MoveResource'; + +/** + * Write a resource or update an existing one + */ +export type WriteResource = { + address: Address; + /** + * State key hash + */ + state_key_hash: string; + data: MoveResource; +}; + diff --git a/m1/JavaScript-client/src/generated/models/WriteSet.ts b/m1/JavaScript-client/src/generated/models/WriteSet.ts new file mode 100644 index 00000000..ea06d3e6 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/WriteSet.ts @@ -0,0 +1,12 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { WriteSet_DirectWriteSet } from './WriteSet_DirectWriteSet'; +import type { WriteSet_ScriptWriteSet } from './WriteSet_ScriptWriteSet'; + +/** + * The associated writeset with a payload + */ +export type WriteSet = (WriteSet_ScriptWriteSet | WriteSet_DirectWriteSet); + diff --git a/m1/JavaScript-client/src/generated/models/WriteSetChange.ts b/m1/JavaScript-client/src/generated/models/WriteSetChange.ts new file mode 100644 index 00000000..dfcc2cb6 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/WriteSetChange.ts @@ -0,0 +1,16 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { WriteSetChange_DeleteModule } from './WriteSetChange_DeleteModule'; +import type { WriteSetChange_DeleteResource } from './WriteSetChange_DeleteResource'; +import type { WriteSetChange_DeleteTableItem } from './WriteSetChange_DeleteTableItem'; +import type { WriteSetChange_WriteModule } from './WriteSetChange_WriteModule'; +import type { WriteSetChange_WriteResource } from './WriteSetChange_WriteResource'; +import type { WriteSetChange_WriteTableItem } from './WriteSetChange_WriteTableItem'; + +/** + * A final state change of a transaction on a resource or module + */ +export type WriteSetChange = (WriteSetChange_DeleteModule | WriteSetChange_DeleteResource | WriteSetChange_DeleteTableItem | WriteSetChange_WriteModule | WriteSetChange_WriteResource | WriteSetChange_WriteTableItem); + diff --git a/m1/JavaScript-client/src/generated/models/WriteSetChange_DeleteModule.ts b/m1/JavaScript-client/src/generated/models/WriteSetChange_DeleteModule.ts new file mode 100644 index 00000000..3c425296 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/WriteSetChange_DeleteModule.ts 
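To illustrate the view-request shape above, the sketch below builds a request body for a hypothetical balance view function; the function id, type argument, and address are examples, not part of this patch.

```typescript
import type { ViewRequest } from './ViewRequest';

// `function` is an EntryFunctionId string, `type_arguments` are MoveType strings,
// and `arguments` are JSON values in the order the function declares them.
const request: ViewRequest = {
  function: '0x1::coin::balance',
  type_arguments: ['0x1::aptos_coin::AptosCoin'],
  arguments: ['0x1'],
};
```

A `u64` returned by such a call would come back as a string, per the `U64` model above.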
@@ -0,0 +1,10 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { DeleteModule } from './DeleteModule'; + +export type WriteSetChange_DeleteModule = ({ + type: string; +} & DeleteModule); + diff --git a/m1/JavaScript-client/src/generated/models/WriteSetChange_DeleteResource.ts b/m1/JavaScript-client/src/generated/models/WriteSetChange_DeleteResource.ts new file mode 100644 index 00000000..bfb9924b --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/WriteSetChange_DeleteResource.ts @@ -0,0 +1,10 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { DeleteResource } from './DeleteResource'; + +export type WriteSetChange_DeleteResource = ({ + type: string; +} & DeleteResource); + diff --git a/m1/JavaScript-client/src/generated/models/WriteSetChange_DeleteTableItem.ts b/m1/JavaScript-client/src/generated/models/WriteSetChange_DeleteTableItem.ts new file mode 100644 index 00000000..d8234022 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/WriteSetChange_DeleteTableItem.ts @@ -0,0 +1,10 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { DeleteTableItem } from './DeleteTableItem'; + +export type WriteSetChange_DeleteTableItem = ({ + type: string; +} & DeleteTableItem); + diff --git a/m1/JavaScript-client/src/generated/models/WriteSetChange_WriteModule.ts b/m1/JavaScript-client/src/generated/models/WriteSetChange_WriteModule.ts new file mode 100644 index 00000000..924d279b --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/WriteSetChange_WriteModule.ts @@ -0,0 +1,10 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { WriteModule } from './WriteModule'; + +export type WriteSetChange_WriteModule = ({ + type: string; +} & WriteModule); + diff --git a/m1/JavaScript-client/src/generated/models/WriteSetChange_WriteResource.ts b/m1/JavaScript-client/src/generated/models/WriteSetChange_WriteResource.ts new file mode 100644 index 00000000..e7d4f6be --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/WriteSetChange_WriteResource.ts @@ -0,0 +1,10 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { WriteResource } from './WriteResource'; + +export type WriteSetChange_WriteResource = ({ + type: string; +} & WriteResource); + diff --git a/m1/JavaScript-client/src/generated/models/WriteSetChange_WriteTableItem.ts b/m1/JavaScript-client/src/generated/models/WriteSetChange_WriteTableItem.ts new file mode 100644 index 00000000..087c9765 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/WriteSetChange_WriteTableItem.ts @@ -0,0 +1,10 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { WriteTableItem } from './WriteTableItem'; + +export type WriteSetChange_WriteTableItem = ({ + type: string; +} & WriteTableItem); + diff --git a/m1/JavaScript-client/src/generated/models/WriteSetPayload.ts b/m1/JavaScript-client/src/generated/models/WriteSetPayload.ts new file mode 100644 index 00000000..c15ea62f --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/WriteSetPayload.ts @@ -0,0 +1,13 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { WriteSet } from './WriteSet'; + +/** + * A writeset payload, used only for genesis + */ +export type WriteSetPayload = { + write_set: WriteSet; +}; + diff --git a/m1/JavaScript-client/src/generated/models/WriteSet_DirectWriteSet.ts 
b/m1/JavaScript-client/src/generated/models/WriteSet_DirectWriteSet.ts new file mode 100644 index 00000000..552e98d4 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/WriteSet_DirectWriteSet.ts @@ -0,0 +1,10 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { DirectWriteSet } from './DirectWriteSet'; + +export type WriteSet_DirectWriteSet = ({ + type: string; +} & DirectWriteSet); + diff --git a/m1/JavaScript-client/src/generated/models/WriteSet_ScriptWriteSet.ts b/m1/JavaScript-client/src/generated/models/WriteSet_ScriptWriteSet.ts new file mode 100644 index 00000000..e7304a8a --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/WriteSet_ScriptWriteSet.ts @@ -0,0 +1,10 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { ScriptWriteSet } from './ScriptWriteSet'; + +export type WriteSet_ScriptWriteSet = ({ + type: string; +} & ScriptWriteSet); + diff --git a/m1/JavaScript-client/src/generated/models/WriteTableItem.ts b/m1/JavaScript-client/src/generated/models/WriteTableItem.ts new file mode 100644 index 00000000..6fbaa2b0 --- /dev/null +++ b/m1/JavaScript-client/src/generated/models/WriteTableItem.ts @@ -0,0 +1,18 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ + +import type { DecodedTableData } from './DecodedTableData'; +import type { HexEncodedBytes } from './HexEncodedBytes'; + +/** + * Change set to write a table item + */ +export type WriteTableItem = { + state_key_hash: string; + handle: HexEncodedBytes; + key: HexEncodedBytes; + value: HexEncodedBytes; + data?: DecodedTableData; +}; + diff --git a/m1/JavaScript-client/src/generated/schemas/$AccountData.ts b/m1/JavaScript-client/src/generated/schemas/$AccountData.ts new file mode 100644 index 00000000..04582cd5 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$AccountData.ts @@ -0,0 +1,18 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $AccountData = { + description: `Account data + + A simplified version of the onchain Account resource`, + properties: { + sequence_number: { + type: 'U64', + isRequired: true, + }, + authentication_key: { + type: 'HexEncodedBytes', + isRequired: true, + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$AccountSignature.ts b/m1/JavaScript-client/src/generated/schemas/$AccountSignature.ts new file mode 100644 index 00000000..2a36b5d6 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$AccountSignature.ts @@ -0,0 +1,17 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $AccountSignature = { + type: 'one-of', + description: `Account signature scheme + + The account signature scheme allows you to have two types of accounts: + + 1. A single Ed25519 key account, one private key + 2. 
A k-of-n multi-Ed25519 key account, multiple private keys, such that k-of-n must sign a transaction.`, + contains: [{ + type: 'AccountSignature_Ed25519Signature', + }, { + type: 'AccountSignature_MultiEd25519Signature', + }], +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$AccountSignature_Ed25519Signature.ts b/m1/JavaScript-client/src/generated/schemas/$AccountSignature_Ed25519Signature.ts new file mode 100644 index 00000000..2a16d8b9 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$AccountSignature_Ed25519Signature.ts @@ -0,0 +1,16 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $AccountSignature_Ed25519Signature = { + type: 'all-of', + contains: [{ + properties: { + type: { + type: 'string', + isRequired: true, + }, + }, + }, { + type: 'Ed25519Signature', + }], +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$AccountSignature_MultiEd25519Signature.ts b/m1/JavaScript-client/src/generated/schemas/$AccountSignature_MultiEd25519Signature.ts new file mode 100644 index 00000000..73d0cfed --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$AccountSignature_MultiEd25519Signature.ts @@ -0,0 +1,16 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $AccountSignature_MultiEd25519Signature = { + type: 'all-of', + contains: [{ + properties: { + type: { + type: 'string', + isRequired: true, + }, + }, + }, { + type: 'MultiEd25519Signature', + }], +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$Address.ts b/m1/JavaScript-client/src/generated/schemas/$Address.ts new file mode 100644 index 00000000..0e5400c7 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$Address.ts @@ -0,0 +1,14 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Address = { + type: 'string', + description: `A hex encoded 32 byte Aptos account address. + + This is represented in a string as a 64 character hex string, sometimes + shortened by stripping leading 0s, and adding a 0x. + + For example, address 0x0000000000000000000000000000000000000000000000000000000000000001 is represented as 0x1. 
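As a concrete reading of the k-of-n account signature described above, a 2-of-3 multi-Ed25519 signature carries all three public keys, the two signatures actually produced, a threshold of 2, and a bitmap marking which keys signed. The hex values below are placeholders; the field names come from the `MultiEd25519Signature` model earlier in this patch.

```typescript
import type { MultiEd25519Signature } from './MultiEd25519Signature';

// Hypothetical 2-of-3 signature where keys 0 and 2 signed and key 1 did not.
const multiSig: MultiEd25519Signature = {
  public_keys: ['0xaa...', '0xbb...', '0xcc...'], // placeholder public keys
  signatures: ['0x11...', '0x22...'],             // signatures from keys 0 and 2
  threshold: 2,
  bitmap: '0xa0000000', // placeholder bitmap indicating which keys signed
};
```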
+ `, + format: 'hex', +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$AptosError.ts b/m1/JavaScript-client/src/generated/schemas/$AptosError.ts new file mode 100644 index 00000000..72390bbc --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$AptosError.ts @@ -0,0 +1,23 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $AptosError = { + description: `This is the generic struct we use for all API errors, it contains a string + message and an Aptos API specific error code.`, + properties: { + message: { + type: 'string', + description: `A message describing the error`, + isRequired: true, + }, + error_code: { + type: 'AptosErrorCode', + isRequired: true, + }, + vm_error_code: { + type: 'number', + description: `A code providing VM error details when submitting transactions to the VM`, + format: 'uint64', + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$AptosErrorCode.ts b/m1/JavaScript-client/src/generated/schemas/$AptosErrorCode.ts new file mode 100644 index 00000000..57dc2d10 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$AptosErrorCode.ts @@ -0,0 +1,6 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $AptosErrorCode = { + type: 'Enum', +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$Block.ts b/m1/JavaScript-client/src/generated/schemas/$Block.ts new file mode 100644 index 00000000..970ba610 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$Block.ts @@ -0,0 +1,37 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Block = { + description: `A Block with or without transactions + + This contains the information about a transactions along with + associated transactions if requested`, + properties: { + block_height: { + type: 'U64', + isRequired: true, + }, + block_hash: { + type: 'HashValue', + isRequired: true, + }, + block_timestamp: { + type: 'U64', + isRequired: true, + }, + first_version: { + type: 'U64', + isRequired: true, + }, + last_version: { + type: 'U64', + isRequired: true, + }, + transactions: { + type: 'array', + contains: { + type: 'Transaction', + }, + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$BlockMetadataTransaction.ts b/m1/JavaScript-client/src/generated/schemas/$BlockMetadataTransaction.ts new file mode 100644 index 00000000..434bf4ea --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$BlockMetadataTransaction.ts @@ -0,0 +1,98 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $BlockMetadataTransaction = { + description: `A block metadata transaction + + This signifies the beginning of a block, and contains information + about the specific block`, + properties: { + version: { + type: 'U64', + isRequired: true, + }, + hash: { + type: 'HashValue', + isRequired: true, + }, + state_change_hash: { + type: 'HashValue', + isRequired: true, + }, + event_root_hash: { + type: 'HashValue', + isRequired: true, + }, + state_checkpoint_hash: { + type: 'HashValue', + }, + gas_used: { + type: 'U64', + isRequired: true, + }, + success: { + type: 'boolean', + description: `Whether the transaction was successful`, + isRequired: true, + }, + vm_status: { + type: 'string', + description: `The VM status of the transaction, can tell useful information in a failure`, + isRequired: true, + }, + accumulator_root_hash: { + type: 'HashValue', + isRequired: true, + }, + changes: { + 
type: 'array', + contains: { + type: 'WriteSetChange', + }, + isRequired: true, + }, + id: { + type: 'HashValue', + isRequired: true, + }, + epoch: { + type: 'U64', + isRequired: true, + }, + round: { + type: 'U64', + isRequired: true, + }, + events: { + type: 'array', + contains: { + type: 'Event', + }, + isRequired: true, + }, + previous_block_votes_bitvec: { + type: 'array', + contains: { + type: 'number', + format: 'uint8', + }, + isRequired: true, + }, + proposer: { + type: 'Address', + isRequired: true, + }, + failed_proposer_indices: { + type: 'array', + contains: { + type: 'number', + format: 'uint32', + }, + isRequired: true, + }, + timestamp: { + type: 'U64', + isRequired: true, + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$DecodedTableData.ts b/m1/JavaScript-client/src/generated/schemas/$DecodedTableData.ts new file mode 100644 index 00000000..75bd13d2 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$DecodedTableData.ts @@ -0,0 +1,30 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $DecodedTableData = { + description: `Decoded table data`, + properties: { + key: { + description: `Key of table in JSON`, + properties: { + }, + isRequired: true, + }, + key_type: { + type: 'string', + description: `Type of key`, + isRequired: true, + }, + value: { + description: `Value of table in JSON`, + properties: { + }, + isRequired: true, + }, + value_type: { + type: 'string', + description: `Type of value`, + isRequired: true, + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$DeleteModule.ts b/m1/JavaScript-client/src/generated/schemas/$DeleteModule.ts new file mode 100644 index 00000000..e9c4c7fb --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$DeleteModule.ts @@ -0,0 +1,21 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $DeleteModule = { + description: `Delete a module`, + properties: { + address: { + type: 'Address', + isRequired: true, + }, + state_key_hash: { + type: 'string', + description: `State key hash`, + isRequired: true, + }, + module: { + type: 'MoveModuleId', + isRequired: true, + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$DeleteResource.ts b/m1/JavaScript-client/src/generated/schemas/$DeleteResource.ts new file mode 100644 index 00000000..61e39a70 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$DeleteResource.ts @@ -0,0 +1,21 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $DeleteResource = { + description: `Delete a resource`, + properties: { + address: { + type: 'Address', + isRequired: true, + }, + state_key_hash: { + type: 'string', + description: `State key hash`, + isRequired: true, + }, + resource: { + type: 'MoveStructTag', + isRequired: true, + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$DeleteTableItem.ts b/m1/JavaScript-client/src/generated/schemas/$DeleteTableItem.ts new file mode 100644 index 00000000..1e641263 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$DeleteTableItem.ts @@ -0,0 +1,23 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $DeleteTableItem = { + description: `Delete a table item`, + properties: { + state_key_hash: { + type: 'string', + isRequired: true, + }, + handle: { + type: 'HexEncodedBytes', + isRequired: true, + }, + key: { + type: 'HexEncodedBytes', + isRequired: true, + }, + data: { + type: 
'DeletedTableData', + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$DeletedTableData.ts b/m1/JavaScript-client/src/generated/schemas/$DeletedTableData.ts new file mode 100644 index 00000000..c617e3b9 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$DeletedTableData.ts @@ -0,0 +1,19 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $DeletedTableData = { + description: `Deleted table data`, + properties: { + key: { + description: `Deleted key`, + properties: { + }, + isRequired: true, + }, + key_type: { + type: 'string', + description: `Deleted key type`, + isRequired: true, + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$DirectWriteSet.ts b/m1/JavaScript-client/src/generated/schemas/$DirectWriteSet.ts new file mode 100644 index 00000000..fc121ffe --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$DirectWriteSet.ts @@ -0,0 +1,21 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $DirectWriteSet = { + properties: { + changes: { + type: 'array', + contains: { + type: 'WriteSetChange', + }, + isRequired: true, + }, + events: { + type: 'array', + contains: { + type: 'Event', + }, + isRequired: true, + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$Ed25519Signature.ts b/m1/JavaScript-client/src/generated/schemas/$Ed25519Signature.ts new file mode 100644 index 00000000..c1a87faa --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$Ed25519Signature.ts @@ -0,0 +1,16 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Ed25519Signature = { + description: `A single Ed25519 signature`, + properties: { + public_key: { + type: 'HexEncodedBytes', + isRequired: true, + }, + signature: { + type: 'HexEncodedBytes', + isRequired: true, + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$EncodeSubmissionRequest.ts b/m1/JavaScript-client/src/generated/schemas/$EncodeSubmissionRequest.ts new file mode 100644 index 00000000..f27f9799 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$EncodeSubmissionRequest.ts @@ -0,0 +1,38 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $EncodeSubmissionRequest = { + description: `Request to encode a submission`, + properties: { + sender: { + type: 'Address', + isRequired: true, + }, + sequence_number: { + type: 'U64', + isRequired: true, + }, + max_gas_amount: { + type: 'U64', + isRequired: true, + }, + gas_unit_price: { + type: 'U64', + isRequired: true, + }, + expiration_timestamp_secs: { + type: 'U64', + isRequired: true, + }, + payload: { + type: 'TransactionPayload', + isRequired: true, + }, + secondary_signers: { + type: 'array', + contains: { + type: 'Address', + }, + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$EntryFunctionId.ts b/m1/JavaScript-client/src/generated/schemas/$EntryFunctionId.ts new file mode 100644 index 00000000..7e1f80c0 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$EntryFunctionId.ts @@ -0,0 +1,12 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $EntryFunctionId = { + type: 'string', + description: `Entry function id is string representation of a entry function defined on-chain. + + Format: \`{address}::{module name}::{function name}\` + + Both \`module name\` and \`function name\` are case-sensitive. 
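Tying the entry-function id format above back to the `EntryFunctionPayload` schema that follows, a payload might look like this sketch; the function, type argument, and argument values are invented for illustration.

```typescript
import type { EntryFunctionPayload } from './EntryFunctionPayload';

// `function` follows `{address}::{module name}::{function name}`;
// both the module name and the function name are case-sensitive.
const transfer: EntryFunctionPayload = {
  function: '0x1::coin::transfer',
  type_arguments: ['0x1::aptos_coin::AptosCoin'],
  arguments: ['0x1668f6be25668c1a17cd8caf6b8d2f25', '1000'], // recipient address, amount as a string
};
```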
+ `, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$EntryFunctionPayload.ts b/m1/JavaScript-client/src/generated/schemas/$EntryFunctionPayload.ts new file mode 100644 index 00000000..4cdddc0a --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$EntryFunctionPayload.ts @@ -0,0 +1,27 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $EntryFunctionPayload = { + description: `Payload which runs a single entry function`, + properties: { + function: { + type: 'EntryFunctionId', + isRequired: true, + }, + type_arguments: { + type: 'array', + contains: { + type: 'MoveType', + }, + isRequired: true, + }, + arguments: { + type: 'array', + contains: { + properties: { + }, + }, + isRequired: true, + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$Event.ts b/m1/JavaScript-client/src/generated/schemas/$Event.ts new file mode 100644 index 00000000..83b84ca1 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$Event.ts @@ -0,0 +1,26 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Event = { + description: `An event from a transaction`, + properties: { + guid: { + type: 'EventGuid', + isRequired: true, + }, + sequence_number: { + type: 'U64', + isRequired: true, + }, + type: { + type: 'MoveType', + isRequired: true, + }, + data: { + description: `The JSON representation of the event`, + properties: { + }, + isRequired: true, + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$EventGuid.ts b/m1/JavaScript-client/src/generated/schemas/$EventGuid.ts new file mode 100644 index 00000000..773e5b9a --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$EventGuid.ts @@ -0,0 +1,15 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $EventGuid = { + properties: { + creation_number: { + type: 'U64', + isRequired: true, + }, + account_address: { + type: 'Address', + isRequired: true, + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$GasEstimation.ts b/m1/JavaScript-client/src/generated/schemas/$GasEstimation.ts new file mode 100644 index 00000000..22dc555d --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$GasEstimation.ts @@ -0,0 +1,24 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $GasEstimation = { + description: `Struct holding the outputs of the estimate gas API`, + properties: { + deprioritized_gas_estimate: { + type: 'number', + description: `The deprioritized estimate for the gas unit price`, + format: 'uint64', + }, + gas_estimate: { + type: 'number', + description: `The current estimate for the gas unit price`, + isRequired: true, + format: 'uint64', + }, + prioritized_gas_estimate: { + type: 'number', + description: `The prioritized estimate for the gas unit price`, + format: 'uint64', + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$GenesisPayload.ts b/m1/JavaScript-client/src/generated/schemas/$GenesisPayload.ts new file mode 100644 index 00000000..bbc3ca1b --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$GenesisPayload.ts @@ -0,0 +1,10 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $GenesisPayload = { + type: 'one-of', + description: `The writeset payload of the Genesis transaction`, + contains: [{ + type: 'GenesisPayload_WriteSetPayload', + }], +} as const; diff --git 
a/m1/JavaScript-client/src/generated/schemas/$GenesisPayload_WriteSetPayload.ts b/m1/JavaScript-client/src/generated/schemas/$GenesisPayload_WriteSetPayload.ts new file mode 100644 index 00000000..4d81069d --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$GenesisPayload_WriteSetPayload.ts @@ -0,0 +1,16 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $GenesisPayload_WriteSetPayload = { + type: 'all-of', + contains: [{ + properties: { + type: { + type: 'string', + isRequired: true, + }, + }, + }, { + type: 'WriteSetPayload', + }], +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$GenesisTransaction.ts b/m1/JavaScript-client/src/generated/schemas/$GenesisTransaction.ts new file mode 100644 index 00000000..96846f1f --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$GenesisTransaction.ts @@ -0,0 +1,65 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $GenesisTransaction = { + description: `The genesis transaction + + This only occurs at the genesis transaction (version 0)`, + properties: { + version: { + type: 'U64', + isRequired: true, + }, + hash: { + type: 'HashValue', + isRequired: true, + }, + state_change_hash: { + type: 'HashValue', + isRequired: true, + }, + event_root_hash: { + type: 'HashValue', + isRequired: true, + }, + state_checkpoint_hash: { + type: 'HashValue', + }, + gas_used: { + type: 'U64', + isRequired: true, + }, + success: { + type: 'boolean', + description: `Whether the transaction was successful`, + isRequired: true, + }, + vm_status: { + type: 'string', + description: `The VM status of the transaction, can tell useful information in a failure`, + isRequired: true, + }, + accumulator_root_hash: { + type: 'HashValue', + isRequired: true, + }, + changes: { + type: 'array', + contains: { + type: 'WriteSetChange', + }, + isRequired: true, + }, + payload: { + type: 'GenesisPayload', + isRequired: true, + }, + events: { + type: 'array', + contains: { + type: 'Event', + }, + isRequired: true, + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$HashValue.ts b/m1/JavaScript-client/src/generated/schemas/$HashValue.ts new file mode 100644 index 00000000..69bc6ed1 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$HashValue.ts @@ -0,0 +1,6 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $HashValue = { + type: 'string', +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$HealthCheckSuccess.ts b/m1/JavaScript-client/src/generated/schemas/$HealthCheckSuccess.ts new file mode 100644 index 00000000..bb42237d --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$HealthCheckSuccess.ts @@ -0,0 +1,12 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $HealthCheckSuccess = { + description: `Representation of a successful healthcheck`, + properties: { + message: { + type: 'string', + isRequired: true, + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$HexEncodedBytes.ts b/m1/JavaScript-client/src/generated/schemas/$HexEncodedBytes.ts new file mode 100644 index 00000000..69282bba --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$HexEncodedBytes.ts @@ -0,0 +1,12 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $HexEncodedBytes = { + type: 'string', + description: `All bytes (Vec) data is represented as hex-encoded string prefixed with \`0x\` 
and fulfilled with + two hex digits per byte. + + Unlike the \`Address\` type, HexEncodedBytes will not trim any zeros. + `, + format: 'hex', +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$IdentifierWrapper.ts b/m1/JavaScript-client/src/generated/schemas/$IdentifierWrapper.ts new file mode 100644 index 00000000..b139a50f --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$IdentifierWrapper.ts @@ -0,0 +1,6 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $IdentifierWrapper = { + type: 'string', +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$IndexResponse.ts b/m1/JavaScript-client/src/generated/schemas/$IndexResponse.ts new file mode 100644 index 00000000..a9e1f9cf --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$IndexResponse.ts @@ -0,0 +1,48 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $IndexResponse = { + description: `The struct holding all data returned to the client by the + index endpoint (i.e., GET "/"). Only for responding in JSON`, + properties: { + chain_id: { + type: 'number', + description: `Chain ID of the current chain`, + isRequired: true, + format: 'uint8', + }, + epoch: { + type: 'U64', + isRequired: true, + }, + ledger_version: { + type: 'U64', + isRequired: true, + }, + oldest_ledger_version: { + type: 'U64', + isRequired: true, + }, + ledger_timestamp: { + type: 'U64', + isRequired: true, + }, + node_role: { + type: 'RoleType', + isRequired: true, + }, + oldest_block_height: { + type: 'U64', + isRequired: true, + }, + block_height: { + type: 'U64', + isRequired: true, + }, + git_hash: { + type: 'string', + description: `Git hash of the build of the API endpoint. Can be used to determine the exact + software version used by the API endpoint.`, + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$ModuleBundlePayload.ts b/m1/JavaScript-client/src/generated/schemas/$ModuleBundlePayload.ts new file mode 100644 index 00000000..714ba9aa --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$ModuleBundlePayload.ts @@ -0,0 +1,14 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $ModuleBundlePayload = { + properties: { + modules: { + type: 'array', + contains: { + type: 'MoveModuleBytecode', + }, + isRequired: true, + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$MoveAbility.ts b/m1/JavaScript-client/src/generated/schemas/$MoveAbility.ts new file mode 100644 index 00000000..40c3cd3c --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$MoveAbility.ts @@ -0,0 +1,6 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $MoveAbility = { + type: 'string', +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$MoveFunction.ts b/m1/JavaScript-client/src/generated/schemas/$MoveFunction.ts new file mode 100644 index 00000000..a813733b --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$MoveFunction.ts @@ -0,0 +1,47 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $MoveFunction = { + description: `Move function`, + properties: { + name: { + type: 'IdentifierWrapper', + isRequired: true, + }, + visibility: { + type: 'MoveFunctionVisibility', + isRequired: true, + }, + is_entry: { + type: 'boolean', + description: `Whether the function can be called as an entry function directly in a transaction`, + isRequired: true, + }, + 
is_view: { + type: 'boolean', + description: `Whether the function is a view function or not`, + isRequired: true, + }, + generic_type_params: { + type: 'array', + contains: { + type: 'MoveFunctionGenericTypeParam', + }, + isRequired: true, + }, + params: { + type: 'array', + contains: { + type: 'MoveType', + }, + isRequired: true, + }, + return: { + type: 'array', + contains: { + type: 'MoveType', + }, + isRequired: true, + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$MoveFunctionGenericTypeParam.ts b/m1/JavaScript-client/src/generated/schemas/$MoveFunctionGenericTypeParam.ts new file mode 100644 index 00000000..9d739aac --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$MoveFunctionGenericTypeParam.ts @@ -0,0 +1,15 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $MoveFunctionGenericTypeParam = { + description: `Move function generic type param`, + properties: { + constraints: { + type: 'array', + contains: { + type: 'MoveAbility', + }, + isRequired: true, + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$MoveFunctionVisibility.ts b/m1/JavaScript-client/src/generated/schemas/$MoveFunctionVisibility.ts new file mode 100644 index 00000000..25ea8043 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$MoveFunctionVisibility.ts @@ -0,0 +1,6 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $MoveFunctionVisibility = { + type: 'Enum', +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$MoveModule.ts b/m1/JavaScript-client/src/generated/schemas/$MoveModule.ts new file mode 100644 index 00000000..38a42525 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$MoveModule.ts @@ -0,0 +1,37 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $MoveModule = { + description: `A Move module`, + properties: { + address: { + type: 'Address', + isRequired: true, + }, + name: { + type: 'IdentifierWrapper', + isRequired: true, + }, + friends: { + type: 'array', + contains: { + type: 'MoveModuleId', + }, + isRequired: true, + }, + exposed_functions: { + type: 'array', + contains: { + type: 'MoveFunction', + }, + isRequired: true, + }, + structs: { + type: 'array', + contains: { + type: 'MoveStruct', + }, + isRequired: true, + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$MoveModuleBytecode.ts b/m1/JavaScript-client/src/generated/schemas/$MoveModuleBytecode.ts new file mode 100644 index 00000000..801d994e --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$MoveModuleBytecode.ts @@ -0,0 +1,15 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $MoveModuleBytecode = { + description: `Move module bytecode along with it's ABI`, + properties: { + bytecode: { + type: 'HexEncodedBytes', + isRequired: true, + }, + abi: { + type: 'MoveModule', + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$MoveModuleId.ts b/m1/JavaScript-client/src/generated/schemas/$MoveModuleId.ts new file mode 100644 index 00000000..338679c7 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$MoveModuleId.ts @@ -0,0 +1,14 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $MoveModuleId = { + type: 'string', + description: `Move module id is a string representation of Move module. 
+ + Format: \`{address}::{module name}\` + + \`address\` should be hex-encoded 32 byte account address that is prefixed with \`0x\`. + + Module name is case-sensitive. + `, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$MoveResource.ts b/m1/JavaScript-client/src/generated/schemas/$MoveResource.ts new file mode 100644 index 00000000..e1abb5a0 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$MoveResource.ts @@ -0,0 +1,16 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $MoveResource = { + description: `A parsed Move resource`, + properties: { + type: { + type: 'MoveStructTag', + isRequired: true, + }, + data: { + type: 'MoveStructValue', + isRequired: true, + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$MoveScriptBytecode.ts b/m1/JavaScript-client/src/generated/schemas/$MoveScriptBytecode.ts new file mode 100644 index 00000000..3951245a --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$MoveScriptBytecode.ts @@ -0,0 +1,15 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $MoveScriptBytecode = { + description: `Move script bytecode`, + properties: { + bytecode: { + type: 'HexEncodedBytes', + isRequired: true, + }, + abi: { + type: 'MoveFunction', + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$MoveStruct.ts b/m1/JavaScript-client/src/generated/schemas/$MoveStruct.ts new file mode 100644 index 00000000..8d79ad36 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$MoveStruct.ts @@ -0,0 +1,38 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $MoveStruct = { + description: `A move struct`, + properties: { + name: { + type: 'IdentifierWrapper', + isRequired: true, + }, + is_native: { + type: 'boolean', + description: `Whether the struct is a native struct of Move`, + isRequired: true, + }, + abilities: { + type: 'array', + contains: { + type: 'MoveAbility', + }, + isRequired: true, + }, + generic_type_params: { + type: 'array', + contains: { + type: 'MoveStructGenericTypeParam', + }, + isRequired: true, + }, + fields: { + type: 'array', + contains: { + type: 'MoveStructField', + }, + isRequired: true, + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$MoveStructField.ts b/m1/JavaScript-client/src/generated/schemas/$MoveStructField.ts new file mode 100644 index 00000000..7ab94104 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$MoveStructField.ts @@ -0,0 +1,16 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $MoveStructField = { + description: `Move struct field`, + properties: { + name: { + type: 'IdentifierWrapper', + isRequired: true, + }, + type: { + type: 'MoveType', + isRequired: true, + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$MoveStructGenericTypeParam.ts b/m1/JavaScript-client/src/generated/schemas/$MoveStructGenericTypeParam.ts new file mode 100644 index 00000000..b4a60c99 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$MoveStructGenericTypeParam.ts @@ -0,0 +1,15 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $MoveStructGenericTypeParam = { + description: `Move generic type param`, + properties: { + constraints: { + type: 'array', + contains: { + type: 'MoveAbility', + }, + isRequired: true, + }, + }, +} as const; diff --git 
a/m1/JavaScript-client/src/generated/schemas/$MoveStructTag.ts b/m1/JavaScript-client/src/generated/schemas/$MoveStructTag.ts new file mode 100644 index 00000000..0bd557c6 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$MoveStructTag.ts @@ -0,0 +1,24 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $MoveStructTag = { + type: 'string', + description: `String representation of a MoveStructTag (on-chain Move struct type). This exists so you + can specify MoveStructTags as path / query parameters, e.g. for get_events_by_event_handle. + + It is a combination of: + 1. \`move_module_address\`, \`module_name\` and \`struct_name\`, all joined by \`::\` + 2. \`struct generic type parameters\` joined by \`, \` + + Examples: + * \`0x1::coin::CoinStore<0x1::aptos_coin::AptosCoin>\` + * \`0x1::account::Account\` + + Note: + 1. Empty chars should be ignored when comparing 2 struct tag ids. + 2. When used in an URL path, should be encoded by url-encoding (AKA percent-encoding). + + See [doc](https://aptos.dev/concepts/accounts) for more details. + `, + pattern: '^0x[0-9a-zA-Z:_<>]+$', +} as const;
diff --git a/m1/JavaScript-client/src/generated/schemas/$MoveStructValue.ts b/m1/JavaScript-client/src/generated/schemas/$MoveStructValue.ts new file mode 100644 index 00000000..bff113d6 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$MoveStructValue.ts @@ -0,0 +1,47 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $MoveStructValue = { + description: `This is a JSON representation of some data within an account resource. More specifically, + it is a map of strings to arbitrary JSON values / objects, where the keys are top level + fields within the given resource. + + To clarify, you might query for 0x1::account::Account and see the example data. + + Move \`bool\` type value is serialized into \`boolean\`. + + Move \`u8\`, \`u16\` and \`u32\` type value is serialized into \`integer\`. + + Move \`u64\`, \`u128\` and \`u256\` type value is serialized into \`string\`. + + Move \`address\` type value (32 byte Aptos account address) is serialized into a HexEncodedBytes string. + For example: + - \`0x1\` + - \`0x1668f6be25668c1a17cd8caf6b8d2f25\` + + Move \`vector\` type value is serialized into \`array\`, except \`vector<u8>\` which is serialized into a + HexEncodedBytes string with \`0x\` prefix. + For example: + - \`vector<u64>{255, 255}\` => \`["255", "255"]\` + - \`vector<u8>{255, 255}\` => \`0xffff\` + + Move \`struct\` type value is serialized into \`object\` that looks like this (except some Move stdlib types, see the following section): + \`\`\`json + { + field1_name: field1_value, + field2_name: field2_value, + ...... + } + \`\`\` + + For example: + \`{ "created": "0xa550c18", "role_id": "0" }\` + + **Special serialization for Move stdlib types**: + - [0x1::string::String](https://github.com/aptos-labs/aptos-core/blob/main/language/move-stdlib/docs/ascii.md) + is serialized into \`string\`. For example, struct value \`0x1::string::String{bytes: b"Hello World!"}\` + is serialized as \`"Hello World!"\` in JSON.
+ `, + properties: { + }, +} as const;
diff --git a/m1/JavaScript-client/src/generated/schemas/$MoveType.ts b/m1/JavaScript-client/src/generated/schemas/$MoveType.ts new file mode 100644 index 00000000..56f4279b --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$MoveType.ts @@ -0,0 +1,34 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $MoveType = { + type: 'string', + description: `String representation of an on-chain Move type tag that is exposed in transaction payload. + Values: + - bool + - u8 + - u16 + - u32 + - u64 + - u128 + - u256 + - address + - signer + - vector: \`vector<{non-reference MoveTypeId}>\` + - struct: \`{address}::{module_name}::{struct_name}::<{generic types}>\` + + Vector type value examples: + - \`vector<u8>\` + - \`vector<vector<u64>>\` + - \`vector<0x1::coin::CoinStore<0x1::aptos_coin::AptosCoin>>\` + + Struct type value examples: + - \`0x1::coin::CoinStore<0x1::aptos_coin::AptosCoin>\` + - \`0x1::account::Account\` + + Note: + 1. Empty chars should be ignored when comparing 2 struct tag ids. + 2. When used in an URL path, should be encoded by url-encoding (AKA percent-encoding). + `, + pattern: '^(bool|u8|u64|u128|address|signer|vector<.+>|0x[0-9a-zA-Z:_<, >]+)$', +} as const;
diff --git a/m1/JavaScript-client/src/generated/schemas/$MoveValue.ts b/m1/JavaScript-client/src/generated/schemas/$MoveValue.ts new file mode 100644 index 00000000..c4ac27bf --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$MoveValue.ts @@ -0,0 +1,38 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $MoveValue = { + type: 'any-of', + description: `An enum of the possible Move value types`, + contains: [{ + type: 'number', + format: 'uint8', + }, { + type: 'number', + format: 'uint16', + }, { + type: 'number', + format: 'uint32', + }, { + type: 'U64', + }, { + type: 'U128', + }, { + type: 'U256', + }, { + type: 'boolean', + }, { + type: 'Address', + }, { + type: 'array', + contains: { + type: 'MoveValue', + }, + }, { + type: 'HexEncodedBytes', + }, { + type: 'MoveStructValue', + }, { + type: 'string', + }], +} as const;
diff --git a/m1/JavaScript-client/src/generated/schemas/$MultiAgentSignature.ts b/m1/JavaScript-client/src/generated/schemas/$MultiAgentSignature.ts new file mode 100644 index 00000000..3c1cff18 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$MultiAgentSignature.ts @@ -0,0 +1,28 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $MultiAgentSignature = { + description: `Multi agent signature for multi agent transactions + + This allows you to have transactions across multiple accounts`, + properties: { + sender: { + type: 'AccountSignature', + isRequired: true, + }, + secondary_signer_addresses: { + type: 'array', + contains: { + type: 'Address', + }, + isRequired: true, + }, + secondary_signers: { + type: 'array', + contains: { + type: 'AccountSignature', + }, + isRequired: true, + }, + }, +} as const;
diff --git a/m1/JavaScript-client/src/generated/schemas/$MultiEd25519Signature.ts b/m1/JavaScript-client/src/generated/schemas/$MultiEd25519Signature.ts new file mode 100644 index 00000000..1af586f6 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$MultiEd25519Signature.ts @@ -0,0 +1,34 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $MultiEd25519Signature = { + description: `An Ed25519 multi-sig signature + + This allows k-of-n signing for a transaction`, + properties: { +
public_keys: { + type: 'array', + contains: { + type: 'HexEncodedBytes', + }, + isRequired: true, + }, + signatures: { + type: 'array', + contains: { + type: 'HexEncodedBytes', + }, + isRequired: true, + }, + threshold: { + type: 'number', + description: `The number of signatures required for a successful transaction`, + isRequired: true, + format: 'uint8', + }, + bitmap: { + type: 'HexEncodedBytes', + isRequired: true, + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$MultisigPayload.ts b/m1/JavaScript-client/src/generated/schemas/$MultisigPayload.ts new file mode 100644 index 00000000..3db2ec25 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$MultisigPayload.ts @@ -0,0 +1,16 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $MultisigPayload = { + description: `A multisig transaction that allows an owner of a multisig account to execute a pre-approved + transaction as the multisig account.`, + properties: { + multisig_address: { + type: 'Address', + isRequired: true, + }, + transaction_payload: { + type: 'MultisigTransactionPayload', + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$MultisigTransactionPayload.ts b/m1/JavaScript-client/src/generated/schemas/$MultisigTransactionPayload.ts new file mode 100644 index 00000000..d1f7dec5 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$MultisigTransactionPayload.ts @@ -0,0 +1,9 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $MultisigTransactionPayload = { + type: 'any-of', + contains: [{ + type: 'EntryFunctionPayload', + }], +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$PendingTransaction.ts b/m1/JavaScript-client/src/generated/schemas/$PendingTransaction.ts new file mode 100644 index 00000000..cffa0285 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$PendingTransaction.ts @@ -0,0 +1,39 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $PendingTransaction = { + description: `A transaction waiting in mempool`, + properties: { + hash: { + type: 'HashValue', + isRequired: true, + }, + sender: { + type: 'Address', + isRequired: true, + }, + sequence_number: { + type: 'U64', + isRequired: true, + }, + max_gas_amount: { + type: 'U64', + isRequired: true, + }, + gas_unit_price: { + type: 'U64', + isRequired: true, + }, + expiration_timestamp_secs: { + type: 'U64', + isRequired: true, + }, + payload: { + type: 'TransactionPayload', + isRequired: true, + }, + signature: { + type: 'TransactionSignature', + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$RawTableItemRequest.ts b/m1/JavaScript-client/src/generated/schemas/$RawTableItemRequest.ts new file mode 100644 index 00000000..b8fc9f02 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$RawTableItemRequest.ts @@ -0,0 +1,12 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $RawTableItemRequest = { + description: `Table Item request for the GetTableItemRaw API`, + properties: { + key: { + type: 'HexEncodedBytes', + isRequired: true, + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$RoleType.ts b/m1/JavaScript-client/src/generated/schemas/$RoleType.ts new file mode 100644 index 00000000..2f64bda0 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$RoleType.ts @@ -0,0 +1,6 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* 
eslint-disable */ +export const $RoleType = { + type: 'Enum', +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$ScriptPayload.ts b/m1/JavaScript-client/src/generated/schemas/$ScriptPayload.ts new file mode 100644 index 00000000..cf52931c --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$ScriptPayload.ts @@ -0,0 +1,27 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $ScriptPayload = { + description: `Payload which runs a script that can run multiple functions`, + properties: { + code: { + type: 'MoveScriptBytecode', + isRequired: true, + }, + type_arguments: { + type: 'array', + contains: { + type: 'MoveType', + }, + isRequired: true, + }, + arguments: { + type: 'array', + contains: { + properties: { + }, + }, + isRequired: true, + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$ScriptWriteSet.ts b/m1/JavaScript-client/src/generated/schemas/$ScriptWriteSet.ts new file mode 100644 index 00000000..81effeee --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$ScriptWriteSet.ts @@ -0,0 +1,15 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $ScriptWriteSet = { + properties: { + execute_as: { + type: 'Address', + isRequired: true, + }, + script: { + type: 'ScriptPayload', + isRequired: true, + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$StateCheckpointTransaction.ts b/m1/JavaScript-client/src/generated/schemas/$StateCheckpointTransaction.ts new file mode 100644 index 00000000..608c8980 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$StateCheckpointTransaction.ts @@ -0,0 +1,56 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $StateCheckpointTransaction = { + description: `A state checkpoint transaction`, + properties: { + version: { + type: 'U64', + isRequired: true, + }, + hash: { + type: 'HashValue', + isRequired: true, + }, + state_change_hash: { + type: 'HashValue', + isRequired: true, + }, + event_root_hash: { + type: 'HashValue', + isRequired: true, + }, + state_checkpoint_hash: { + type: 'HashValue', + }, + gas_used: { + type: 'U64', + isRequired: true, + }, + success: { + type: 'boolean', + description: `Whether the transaction was successful`, + isRequired: true, + }, + vm_status: { + type: 'string', + description: `The VM status of the transaction, can tell useful information in a failure`, + isRequired: true, + }, + accumulator_root_hash: { + type: 'HashValue', + isRequired: true, + }, + changes: { + type: 'array', + contains: { + type: 'WriteSetChange', + }, + isRequired: true, + }, + timestamp: { + type: 'U64', + isRequired: true, + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$StateKeyWrapper.ts b/m1/JavaScript-client/src/generated/schemas/$StateKeyWrapper.ts new file mode 100644 index 00000000..e4b0eb8e --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$StateKeyWrapper.ts @@ -0,0 +1,8 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $StateKeyWrapper = { + type: 'string', + description: `Representation of a StateKey as a hex string. This is used for cursor based pagination. 
+ `, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$SubmitTransactionRequest.ts b/m1/JavaScript-client/src/generated/schemas/$SubmitTransactionRequest.ts new file mode 100644 index 00000000..4740d4a7 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$SubmitTransactionRequest.ts @@ -0,0 +1,38 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $SubmitTransactionRequest = { + description: `A request to submit a transaction + + This requires a transaction and a signature of it`, + properties: { + sender: { + type: 'Address', + isRequired: true, + }, + sequence_number: { + type: 'U64', + isRequired: true, + }, + max_gas_amount: { + type: 'U64', + isRequired: true, + }, + gas_unit_price: { + type: 'U64', + isRequired: true, + }, + expiration_timestamp_secs: { + type: 'U64', + isRequired: true, + }, + payload: { + type: 'TransactionPayload', + isRequired: true, + }, + signature: { + type: 'TransactionSignature', + isRequired: true, + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$TableItemRequest.ts b/m1/JavaScript-client/src/generated/schemas/$TableItemRequest.ts new file mode 100644 index 00000000..6cd81ea5 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$TableItemRequest.ts @@ -0,0 +1,22 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $TableItemRequest = { + description: `Table Item request for the GetTableItem API`, + properties: { + key_type: { + type: 'MoveType', + isRequired: true, + }, + value_type: { + type: 'MoveType', + isRequired: true, + }, + key: { + description: `The value of the table item's key`, + properties: { + }, + isRequired: true, + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$Transaction.ts b/m1/JavaScript-client/src/generated/schemas/$Transaction.ts new file mode 100644 index 00000000..0aa1e257 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$Transaction.ts @@ -0,0 +1,18 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Transaction = { + type: 'one-of', + description: `Enum of the different types of transactions in Aptos`, + contains: [{ + type: 'Transaction_PendingTransaction', + }, { + type: 'Transaction_UserTransaction', + }, { + type: 'Transaction_GenesisTransaction', + }, { + type: 'Transaction_BlockMetadataTransaction', + }, { + type: 'Transaction_StateCheckpointTransaction', + }], +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$TransactionPayload.ts b/m1/JavaScript-client/src/generated/schemas/$TransactionPayload.ts new file mode 100644 index 00000000..9f8d72da --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$TransactionPayload.ts @@ -0,0 +1,16 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $TransactionPayload = { + type: 'one-of', + description: `An enum of the possible transaction payloads`, + contains: [{ + type: 'TransactionPayload_EntryFunctionPayload', + }, { + type: 'TransactionPayload_ScriptPayload', + }, { + type: 'TransactionPayload_ModuleBundlePayload', + }, { + type: 'TransactionPayload_MultisigPayload', + }], +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$TransactionPayload_EntryFunctionPayload.ts b/m1/JavaScript-client/src/generated/schemas/$TransactionPayload_EntryFunctionPayload.ts new file mode 100644 index 00000000..e7ea8d6c --- /dev/null +++ 
b/m1/JavaScript-client/src/generated/schemas/$TransactionPayload_EntryFunctionPayload.ts @@ -0,0 +1,16 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $TransactionPayload_EntryFunctionPayload = { + type: 'all-of', + contains: [{ + properties: { + type: { + type: 'string', + isRequired: true, + }, + }, + }, { + type: 'EntryFunctionPayload', + }], +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$TransactionPayload_ModuleBundlePayload.ts b/m1/JavaScript-client/src/generated/schemas/$TransactionPayload_ModuleBundlePayload.ts new file mode 100644 index 00000000..4ee6f0ad --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$TransactionPayload_ModuleBundlePayload.ts @@ -0,0 +1,16 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $TransactionPayload_ModuleBundlePayload = { + type: 'all-of', + contains: [{ + properties: { + type: { + type: 'string', + isRequired: true, + }, + }, + }, { + type: 'ModuleBundlePayload', + }], +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$TransactionPayload_MultisigPayload.ts b/m1/JavaScript-client/src/generated/schemas/$TransactionPayload_MultisigPayload.ts new file mode 100644 index 00000000..18ee4df6 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$TransactionPayload_MultisigPayload.ts @@ -0,0 +1,16 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $TransactionPayload_MultisigPayload = { + type: 'all-of', + contains: [{ + properties: { + type: { + type: 'string', + isRequired: true, + }, + }, + }, { + type: 'MultisigPayload', + }], +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$TransactionPayload_ScriptPayload.ts b/m1/JavaScript-client/src/generated/schemas/$TransactionPayload_ScriptPayload.ts new file mode 100644 index 00000000..d15b4c86 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$TransactionPayload_ScriptPayload.ts @@ -0,0 +1,16 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $TransactionPayload_ScriptPayload = { + type: 'all-of', + contains: [{ + properties: { + type: { + type: 'string', + isRequired: true, + }, + }, + }, { + type: 'ScriptPayload', + }], +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$TransactionSignature.ts b/m1/JavaScript-client/src/generated/schemas/$TransactionSignature.ts new file mode 100644 index 00000000..08df557f --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$TransactionSignature.ts @@ -0,0 +1,14 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $TransactionSignature = { + type: 'one-of', + description: `An enum representing the different transaction signatures available`, + contains: [{ + type: 'TransactionSignature_Ed25519Signature', + }, { + type: 'TransactionSignature_MultiEd25519Signature', + }, { + type: 'TransactionSignature_MultiAgentSignature', + }], +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$TransactionSignature_Ed25519Signature.ts b/m1/JavaScript-client/src/generated/schemas/$TransactionSignature_Ed25519Signature.ts new file mode 100644 index 00000000..2f754f0c --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$TransactionSignature_Ed25519Signature.ts @@ -0,0 +1,16 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $TransactionSignature_Ed25519Signature = { + type: 'all-of', + contains: [{ + properties: { 
+ type: { + type: 'string', + isRequired: true, + }, + }, + }, { + type: 'Ed25519Signature', + }], +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$TransactionSignature_MultiAgentSignature.ts b/m1/JavaScript-client/src/generated/schemas/$TransactionSignature_MultiAgentSignature.ts new file mode 100644 index 00000000..c6c6ce5f --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$TransactionSignature_MultiAgentSignature.ts @@ -0,0 +1,16 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $TransactionSignature_MultiAgentSignature = { + type: 'all-of', + contains: [{ + properties: { + type: { + type: 'string', + isRequired: true, + }, + }, + }, { + type: 'MultiAgentSignature', + }], +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$TransactionSignature_MultiEd25519Signature.ts b/m1/JavaScript-client/src/generated/schemas/$TransactionSignature_MultiEd25519Signature.ts new file mode 100644 index 00000000..e6ffaf20 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$TransactionSignature_MultiEd25519Signature.ts @@ -0,0 +1,16 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $TransactionSignature_MultiEd25519Signature = { + type: 'all-of', + contains: [{ + properties: { + type: { + type: 'string', + isRequired: true, + }, + }, + }, { + type: 'MultiEd25519Signature', + }], +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$Transaction_BlockMetadataTransaction.ts b/m1/JavaScript-client/src/generated/schemas/$Transaction_BlockMetadataTransaction.ts new file mode 100644 index 00000000..55aa7c46 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$Transaction_BlockMetadataTransaction.ts @@ -0,0 +1,16 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Transaction_BlockMetadataTransaction = { + type: 'all-of', + contains: [{ + properties: { + type: { + type: 'string', + isRequired: true, + }, + }, + }, { + type: 'BlockMetadataTransaction', + }], +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$Transaction_GenesisTransaction.ts b/m1/JavaScript-client/src/generated/schemas/$Transaction_GenesisTransaction.ts new file mode 100644 index 00000000..adb40078 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$Transaction_GenesisTransaction.ts @@ -0,0 +1,16 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Transaction_GenesisTransaction = { + type: 'all-of', + contains: [{ + properties: { + type: { + type: 'string', + isRequired: true, + }, + }, + }, { + type: 'GenesisTransaction', + }], +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$Transaction_PendingTransaction.ts b/m1/JavaScript-client/src/generated/schemas/$Transaction_PendingTransaction.ts new file mode 100644 index 00000000..8663a50b --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$Transaction_PendingTransaction.ts @@ -0,0 +1,16 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Transaction_PendingTransaction = { + type: 'all-of', + contains: [{ + properties: { + type: { + type: 'string', + isRequired: true, + }, + }, + }, { + type: 'PendingTransaction', + }], +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$Transaction_StateCheckpointTransaction.ts b/m1/JavaScript-client/src/generated/schemas/$Transaction_StateCheckpointTransaction.ts new file mode 100644 index 00000000..19435016 
--- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$Transaction_StateCheckpointTransaction.ts @@ -0,0 +1,16 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Transaction_StateCheckpointTransaction = { + type: 'all-of', + contains: [{ + properties: { + type: { + type: 'string', + isRequired: true, + }, + }, + }, { + type: 'StateCheckpointTransaction', + }], +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$Transaction_UserTransaction.ts b/m1/JavaScript-client/src/generated/schemas/$Transaction_UserTransaction.ts new file mode 100644 index 00000000..b0bbf989 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$Transaction_UserTransaction.ts @@ -0,0 +1,16 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $Transaction_UserTransaction = { + type: 'all-of', + contains: [{ + properties: { + type: { + type: 'string', + isRequired: true, + }, + }, + }, { + type: 'UserTransaction', + }], +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$TransactionsBatchSingleSubmissionFailure.ts b/m1/JavaScript-client/src/generated/schemas/$TransactionsBatchSingleSubmissionFailure.ts new file mode 100644 index 00000000..2f6465cc --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$TransactionsBatchSingleSubmissionFailure.ts @@ -0,0 +1,18 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $TransactionsBatchSingleSubmissionFailure = { + description: `Information telling which batch submission transactions failed`, + properties: { + error: { + type: 'AptosError', + isRequired: true, + }, + transaction_index: { + type: 'number', + description: `The index of which transaction failed, same as submission order`, + isRequired: true, + format: 'uint64', + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$TransactionsBatchSubmissionResult.ts b/m1/JavaScript-client/src/generated/schemas/$TransactionsBatchSubmissionResult.ts new file mode 100644 index 00000000..3e04e03c --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$TransactionsBatchSubmissionResult.ts @@ -0,0 +1,17 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $TransactionsBatchSubmissionResult = { + description: `Batch transaction submission result + + Tells which transactions failed`, + properties: { + transaction_failures: { + type: 'array', + contains: { + type: 'TransactionsBatchSingleSubmissionFailure', + }, + isRequired: true, + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$U128.ts b/m1/JavaScript-client/src/generated/schemas/$U128.ts new file mode 100644 index 00000000..64524854 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$U128.ts @@ -0,0 +1,12 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $U128 = { + type: 'string', + description: `A string containing a 128-bit unsigned integer. + + We represent u128 values as a string to ensure compatibility with languages such + as JavaScript that do not parse u128s in JSON natively. 
+ `, + format: 'uint128', +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$U256.ts b/m1/JavaScript-client/src/generated/schemas/$U256.ts new file mode 100644 index 00000000..20e7c0e1 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$U256.ts @@ -0,0 +1,12 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $U256 = { + type: 'string', + description: `A string containing a 256-bit unsigned integer. + + We represent u256 values as a string to ensure compatibility with languages such + as JavaScript that do not parse u256s in JSON natively. + `, + format: 'uint256', +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$U64.ts b/m1/JavaScript-client/src/generated/schemas/$U64.ts new file mode 100644 index 00000000..c98a054b --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$U64.ts @@ -0,0 +1,12 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $U64 = { + type: 'string', + description: `A string containing a 64-bit unsigned integer. + + We represent u64 values as a string to ensure compatibility with languages such + as JavaScript that do not parse u64s in JSON natively. + `, + format: 'uint64', +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$UserTransaction.ts b/m1/JavaScript-client/src/generated/schemas/$UserTransaction.ts new file mode 100644 index 00000000..2ef0ed7e --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$UserTransaction.ts @@ -0,0 +1,90 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $UserTransaction = { + description: `A transaction submitted by a user to change the state of the blockchain`, + properties: { + version: { + type: 'U64', + isRequired: true, + }, + hash: { + type: 'HashValue', + isRequired: true, + }, + state_change_hash: { + type: 'HashValue', + isRequired: true, + }, + event_root_hash: { + type: 'HashValue', + isRequired: true, + }, + state_checkpoint_hash: { + type: 'HashValue', + }, + gas_used: { + type: 'U64', + isRequired: true, + }, + success: { + type: 'boolean', + description: `Whether the transaction was successful`, + isRequired: true, + }, + vm_status: { + type: 'string', + description: `The VM status of the transaction, can tell useful information in a failure`, + isRequired: true, + }, + accumulator_root_hash: { + type: 'HashValue', + isRequired: true, + }, + changes: { + type: 'array', + contains: { + type: 'WriteSetChange', + }, + isRequired: true, + }, + sender: { + type: 'Address', + isRequired: true, + }, + sequence_number: { + type: 'U64', + isRequired: true, + }, + max_gas_amount: { + type: 'U64', + isRequired: true, + }, + gas_unit_price: { + type: 'U64', + isRequired: true, + }, + expiration_timestamp_secs: { + type: 'U64', + isRequired: true, + }, + payload: { + type: 'TransactionPayload', + isRequired: true, + }, + signature: { + type: 'TransactionSignature', + }, + events: { + type: 'array', + contains: { + type: 'Event', + }, + isRequired: true, + }, + timestamp: { + type: 'U64', + isRequired: true, + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$VersionedEvent.ts b/m1/JavaScript-client/src/generated/schemas/$VersionedEvent.ts new file mode 100644 index 00000000..7e935382 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$VersionedEvent.ts @@ -0,0 +1,30 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $VersionedEvent = { + description: `An event 
from a transaction with a version`, + properties: { + version: { + type: 'U64', + isRequired: true, + }, + guid: { + type: 'EventGuid', + isRequired: true, + }, + sequence_number: { + type: 'U64', + isRequired: true, + }, + type: { + type: 'MoveType', + isRequired: true, + }, + data: { + description: `The JSON representation of the event`, + properties: { + }, + isRequired: true, + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$ViewRequest.ts b/m1/JavaScript-client/src/generated/schemas/$ViewRequest.ts new file mode 100644 index 00000000..28994b8e --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$ViewRequest.ts @@ -0,0 +1,27 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $ViewRequest = { + description: `View request for the Move View Function API`, + properties: { + function: { + type: 'EntryFunctionId', + isRequired: true, + }, + type_arguments: { + type: 'array', + contains: { + type: 'MoveType', + }, + isRequired: true, + }, + arguments: { + type: 'array', + contains: { + properties: { + }, + }, + isRequired: true, + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$WriteModule.ts b/m1/JavaScript-client/src/generated/schemas/$WriteModule.ts new file mode 100644 index 00000000..cd41e48e --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$WriteModule.ts @@ -0,0 +1,21 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $WriteModule = { + description: `Write a new module or update an existing one`, + properties: { + address: { + type: 'Address', + isRequired: true, + }, + state_key_hash: { + type: 'string', + description: `State key hash`, + isRequired: true, + }, + data: { + type: 'MoveModuleBytecode', + isRequired: true, + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$WriteResource.ts b/m1/JavaScript-client/src/generated/schemas/$WriteResource.ts new file mode 100644 index 00000000..aff2055a --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$WriteResource.ts @@ -0,0 +1,21 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $WriteResource = { + description: `Write a resource or update an existing one`, + properties: { + address: { + type: 'Address', + isRequired: true, + }, + state_key_hash: { + type: 'string', + description: `State key hash`, + isRequired: true, + }, + data: { + type: 'MoveResource', + isRequired: true, + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$WriteSet.ts b/m1/JavaScript-client/src/generated/schemas/$WriteSet.ts new file mode 100644 index 00000000..a6a86944 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$WriteSet.ts @@ -0,0 +1,12 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $WriteSet = { + type: 'one-of', + description: `The associated writeset with a payload`, + contains: [{ + type: 'WriteSet_ScriptWriteSet', + }, { + type: 'WriteSet_DirectWriteSet', + }], +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$WriteSetChange.ts b/m1/JavaScript-client/src/generated/schemas/$WriteSetChange.ts new file mode 100644 index 00000000..c6c2edb0 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$WriteSetChange.ts @@ -0,0 +1,20 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $WriteSetChange = { + type: 'one-of', + description: `A final state change of a transaction on a 
resource or module`, + contains: [{ + type: 'WriteSetChange_DeleteModule', + }, { + type: 'WriteSetChange_DeleteResource', + }, { + type: 'WriteSetChange_DeleteTableItem', + }, { + type: 'WriteSetChange_WriteModule', + }, { + type: 'WriteSetChange_WriteResource', + }, { + type: 'WriteSetChange_WriteTableItem', + }], +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$WriteSetChange_DeleteModule.ts b/m1/JavaScript-client/src/generated/schemas/$WriteSetChange_DeleteModule.ts new file mode 100644 index 00000000..2ec71328 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$WriteSetChange_DeleteModule.ts @@ -0,0 +1,16 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $WriteSetChange_DeleteModule = { + type: 'all-of', + contains: [{ + properties: { + type: { + type: 'string', + isRequired: true, + }, + }, + }, { + type: 'DeleteModule', + }], +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$WriteSetChange_DeleteResource.ts b/m1/JavaScript-client/src/generated/schemas/$WriteSetChange_DeleteResource.ts new file mode 100644 index 00000000..8346da07 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$WriteSetChange_DeleteResource.ts @@ -0,0 +1,16 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $WriteSetChange_DeleteResource = { + type: 'all-of', + contains: [{ + properties: { + type: { + type: 'string', + isRequired: true, + }, + }, + }, { + type: 'DeleteResource', + }], +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$WriteSetChange_DeleteTableItem.ts b/m1/JavaScript-client/src/generated/schemas/$WriteSetChange_DeleteTableItem.ts new file mode 100644 index 00000000..ae905f57 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$WriteSetChange_DeleteTableItem.ts @@ -0,0 +1,16 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $WriteSetChange_DeleteTableItem = { + type: 'all-of', + contains: [{ + properties: { + type: { + type: 'string', + isRequired: true, + }, + }, + }, { + type: 'DeleteTableItem', + }], +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$WriteSetChange_WriteModule.ts b/m1/JavaScript-client/src/generated/schemas/$WriteSetChange_WriteModule.ts new file mode 100644 index 00000000..d0ec80fb --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$WriteSetChange_WriteModule.ts @@ -0,0 +1,16 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $WriteSetChange_WriteModule = { + type: 'all-of', + contains: [{ + properties: { + type: { + type: 'string', + isRequired: true, + }, + }, + }, { + type: 'WriteModule', + }], +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$WriteSetChange_WriteResource.ts b/m1/JavaScript-client/src/generated/schemas/$WriteSetChange_WriteResource.ts new file mode 100644 index 00000000..9febd5b8 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$WriteSetChange_WriteResource.ts @@ -0,0 +1,16 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $WriteSetChange_WriteResource = { + type: 'all-of', + contains: [{ + properties: { + type: { + type: 'string', + isRequired: true, + }, + }, + }, { + type: 'WriteResource', + }], +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$WriteSetChange_WriteTableItem.ts b/m1/JavaScript-client/src/generated/schemas/$WriteSetChange_WriteTableItem.ts new file mode 100644 index 
00000000..eb8d77de --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$WriteSetChange_WriteTableItem.ts @@ -0,0 +1,16 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $WriteSetChange_WriteTableItem = { + type: 'all-of', + contains: [{ + properties: { + type: { + type: 'string', + isRequired: true, + }, + }, + }, { + type: 'WriteTableItem', + }], +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$WriteSetPayload.ts b/m1/JavaScript-client/src/generated/schemas/$WriteSetPayload.ts new file mode 100644 index 00000000..abe858d6 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$WriteSetPayload.ts @@ -0,0 +1,12 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $WriteSetPayload = { + description: `A writeset payload, used only for genesis`, + properties: { + write_set: { + type: 'WriteSet', + isRequired: true, + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$WriteSet_DirectWriteSet.ts b/m1/JavaScript-client/src/generated/schemas/$WriteSet_DirectWriteSet.ts new file mode 100644 index 00000000..d9d89358 --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$WriteSet_DirectWriteSet.ts @@ -0,0 +1,16 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $WriteSet_DirectWriteSet = { + type: 'all-of', + contains: [{ + properties: { + type: { + type: 'string', + isRequired: true, + }, + }, + }, { + type: 'DirectWriteSet', + }], +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$WriteSet_ScriptWriteSet.ts b/m1/JavaScript-client/src/generated/schemas/$WriteSet_ScriptWriteSet.ts new file mode 100644 index 00000000..b3a3ebee --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$WriteSet_ScriptWriteSet.ts @@ -0,0 +1,16 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $WriteSet_ScriptWriteSet = { + type: 'all-of', + contains: [{ + properties: { + type: { + type: 'string', + isRequired: true, + }, + }, + }, { + type: 'ScriptWriteSet', + }], +} as const; diff --git a/m1/JavaScript-client/src/generated/schemas/$WriteTableItem.ts b/m1/JavaScript-client/src/generated/schemas/$WriteTableItem.ts new file mode 100644 index 00000000..1d1273fd --- /dev/null +++ b/m1/JavaScript-client/src/generated/schemas/$WriteTableItem.ts @@ -0,0 +1,27 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +export const $WriteTableItem = { + description: `Change set to write a table item`, + properties: { + state_key_hash: { + type: 'string', + isRequired: true, + }, + handle: { + type: 'HexEncodedBytes', + isRequired: true, + }, + key: { + type: 'HexEncodedBytes', + isRequired: true, + }, + value: { + type: 'HexEncodedBytes', + isRequired: true, + }, + data: { + type: 'DecodedTableData', + }, + }, +} as const; diff --git a/m1/JavaScript-client/src/generated/services/AccountsService.ts b/m1/JavaScript-client/src/generated/services/AccountsService.ts new file mode 100644 index 00000000..08f87cba --- /dev/null +++ b/m1/JavaScript-client/src/generated/services/AccountsService.ts @@ -0,0 +1,200 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { AccountData } from '../models/AccountData'; +import type { Address } from '../models/Address'; +import type { IdentifierWrapper } from '../models/IdentifierWrapper'; +import type { MoveModuleBytecode } from '../models/MoveModuleBytecode'; +import type { MoveResource } from 
'../models/MoveResource'; +import type { MoveStructTag } from '../models/MoveStructTag'; +import type { StateKeyWrapper } from '../models/StateKeyWrapper'; +import type { U64 } from '../models/U64'; + +import type { CancelablePromise } from '../core/CancelablePromise'; +import type { BaseHttpRequest } from '../core/BaseHttpRequest'; + +export class AccountsService { + + constructor(public readonly httpRequest: BaseHttpRequest) {} +
+ /** + * Get account + * Return the authentication key and the sequence number for an account + * address. Optionally, a ledger version can be specified. If the ledger + * version is not specified in the request, the latest ledger version is used. + * @param address Address of account with or without a `0x` prefix + * @param ledgerVersion Ledger version to get state of account + * + * If not provided, it will be the latest version + * @returns AccountData + * @throws ApiError + */ + public getAccount( + address: Address, + ledgerVersion?: U64, + ): CancelablePromise<AccountData> { + return this.httpRequest.request({ + method: 'GET', + url: '/accounts/{address}', + path: { + 'address': address, + }, + query: { + 'ledger_version': ledgerVersion, + }, + }); + } +
+ /** + * Get account resources + * Retrieves all account resources for a given account and a specific ledger version. If the + * ledger version is not specified in the request, the latest ledger version is used. + * + * The Aptos nodes prune account state history, via a configurable time window. + * If the requested ledger version has been pruned, the server responds with a 410. + * @param address Address of account with or without a `0x` prefix + * @param ledgerVersion Ledger version to get state of account + * + * If not provided, it will be the latest version + * @param start Cursor specifying where to start for pagination + * + * This cursor cannot be derived manually client-side. Instead, you must + * call this endpoint once without this query parameter specified, and + * then use the cursor returned in the X-Aptos-Cursor header in the + * response. + * @param limit Max number of account resources to retrieve + * + * If not provided, defaults to default page size. + * @returns MoveResource + * @throws ApiError + */ + public getAccountResources( + address: Address, + ledgerVersion?: U64, + start?: StateKeyWrapper, + limit?: number, + ): CancelablePromise<Array<MoveResource>> { + return this.httpRequest.request({ + method: 'GET', + url: '/accounts/{address}/resources', + path: { + 'address': address, + }, + query: { + 'ledger_version': ledgerVersion, + 'start': start, + 'limit': limit, + }, + }); + } +
+ /** + * Get account modules + * Retrieves all account modules' bytecode for a given account at a specific ledger version. + * If the ledger version is not specified in the request, the latest ledger version is used. + * + * The Aptos nodes prune account state history, via a configurable time window. + * If the requested ledger version has been pruned, the server responds with a 410. + * @param address Address of account with or without a `0x` prefix + * @param ledgerVersion Ledger version to get state of account + * + * If not provided, it will be the latest version + * @param start Cursor specifying where to start for pagination + * + * This cursor cannot be derived manually client-side. Instead, you must + * call this endpoint once without this query parameter specified, and + * then use the cursor returned in the X-Aptos-Cursor header in the + * response.
+ * @param limit Max number of account modules to retrieve + * + * If not provided, defaults to default page size. + * @returns MoveModuleBytecode + * @throws ApiError + */ + public getAccountModules( + address: Address, + ledgerVersion?: U64, + start?: StateKeyWrapper, + limit?: number, + ): CancelablePromise<Array<MoveModuleBytecode>> { + return this.httpRequest.request({ + method: 'GET', + url: '/accounts/{address}/modules', + path: { + 'address': address, + }, + query: { + 'ledger_version': ledgerVersion, + 'start': start, + 'limit': limit, + }, + }); + } +
+ /** + * Get account resource + * Retrieves an individual resource from a given account and at a specific ledger version. If the + * ledger version is not specified in the request, the latest ledger version is used. + * + * The Aptos nodes prune account state history, via a configurable time window. + * If the requested ledger version has been pruned, the server responds with a 410. + * @param address Address of account with or without a `0x` prefix + * @param resourceType Name of struct to retrieve e.g. `0x1::account::Account` + * @param ledgerVersion Ledger version to get state of account + * + * If not provided, it will be the latest version + * @returns MoveResource + * @throws ApiError + */ + public getAccountResource( + address: Address, + resourceType: MoveStructTag, + ledgerVersion?: U64, + ): CancelablePromise<MoveResource> { + return this.httpRequest.request({ + method: 'GET', + url: '/accounts/{address}/resource/{resource_type}', + path: { + 'address': address, + 'resource_type': resourceType, + }, + query: { + 'ledger_version': ledgerVersion, + }, + }); + } +
+ /** + * Get account module + * Retrieves an individual module from a given account and at a specific ledger version. If the + * ledger version is not specified in the request, the latest ledger version is used. + * + * The Aptos nodes prune account state history, via a configurable time window. + * If the requested ledger version has been pruned, the server responds with a 410. + * @param address Address of account with or without a `0x` prefix + * @param moduleName Name of module to retrieve e.g. `coin` + * @param ledgerVersion Ledger version to get state of account + * + * If not provided, it will be the latest version + * @returns MoveModuleBytecode + * @throws ApiError + */ + public getAccountModule( + address: Address, + moduleName: IdentifierWrapper, + ledgerVersion?: U64, + ): CancelablePromise<MoveModuleBytecode> { + return this.httpRequest.request({ + method: 'GET', + url: '/accounts/{address}/module/{module_name}', + path: { + 'address': address, + 'module_name': moduleName, + }, + query: { + 'ledger_version': ledgerVersion, + }, + }); + } + +}
diff --git a/m1/JavaScript-client/src/generated/services/BlocksService.ts b/m1/JavaScript-client/src/generated/services/BlocksService.ts new file mode 100644 index 00000000..b9dbe17b --- /dev/null +++ b/m1/JavaScript-client/src/generated/services/BlocksService.ts @@ -0,0 +1,79 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { Block } from '../models/Block'; + +import type { CancelablePromise } from '../core/CancelablePromise'; +import type { BaseHttpRequest } from '../core/BaseHttpRequest'; + +export class BlocksService { + + constructor(public readonly httpRequest: BaseHttpRequest) {} + + /** + * Get blocks by height + * This endpoint allows you to get the transactions in a block + * and the corresponding block information. + * + * Transactions are limited by max default transactions size.
If not all transactions + * are present, the user will need to query for the rest of the transactions via the + * get transactions API. + * + * If the block is pruned, it will return a 410 + * @param blockHeight Block height to lookup. Starts at 0 + * @param withTransactions If set to true, include all transactions in the block + * + * If not provided, no transactions will be retrieved + * @returns Block + * @throws ApiError + */ + public getBlockByHeight( + blockHeight: number, + withTransactions?: boolean, + ): CancelablePromise<Block> { + return this.httpRequest.request({ + method: 'GET', + url: '/blocks/by_height/{block_height}', + path: { + 'block_height': blockHeight, + }, + query: { + 'with_transactions': withTransactions, + }, + }); + } +
+ /** + * Get blocks by version + * This endpoint allows you to get the transactions in a block + * and the corresponding block information given a version in the block. + * + * Transactions are limited by max default transactions size. If not all transactions + * are present, the user will need to query for the rest of the transactions via the + * get transactions API. + * + * If the block has been pruned, it will return a 410 + * @param version Ledger version to lookup block information for. + * @param withTransactions If set to true, include all transactions in the block + * + * If not provided, no transactions will be retrieved + * @returns Block + * @throws ApiError + */ + public getBlockByVersion( + version: number, + withTransactions?: boolean, + ): CancelablePromise<Block> { + return this.httpRequest.request({ + method: 'GET', + url: '/blocks/by_version/{version}', + path: { + 'version': version, + }, + query: { + 'with_transactions': withTransactions, + }, + }); + } + +}
diff --git a/m1/JavaScript-client/src/generated/services/EventsService.ts b/m1/JavaScript-client/src/generated/services/EventsService.ts new file mode 100644 index 00000000..9396944b --- /dev/null +++ b/m1/JavaScript-client/src/generated/services/EventsService.ts @@ -0,0 +1,100 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { Address } from '../models/Address'; +import type { IdentifierWrapper } from '../models/IdentifierWrapper'; +import type { MoveStructTag } from '../models/MoveStructTag'; +import type { U64 } from '../models/U64'; +import type { VersionedEvent } from '../models/VersionedEvent'; + +import type { CancelablePromise } from '../core/CancelablePromise'; +import type { BaseHttpRequest } from '../core/BaseHttpRequest'; + +export class EventsService { + + constructor(public readonly httpRequest: BaseHttpRequest) {} + + /** + * Get events by creation number + * Event types are globally identifiable by an account `address` and + * monotonically increasing `creation_number`, one per event type emitted + * to the given account. This API returns events corresponding to + * that event type. + * @param address Hex-encoded 32 byte Aptos account, with or without a `0x` prefix, for + * which events are queried. This refers to the account that events were + * emitted to, not the account hosting the move module that emits that + * event type. + * @param creationNumber Creation number corresponding to the event stream originating + * from the given account. + * @param start Starting sequence number of events. + * + * If unspecified, by default will retrieve the most recent events + * @param limit Max number of events to retrieve.
+ * + * If unspecified, defaults to default page size + * @returns VersionedEvent + * @throws ApiError + */ + public getEventsByCreationNumber( + address: Address, + creationNumber: U64, + start?: U64, + limit?: number, + ): CancelablePromise> { + return this.httpRequest.request({ + method: 'GET', + url: '/accounts/{address}/events/{creation_number}', + path: { + 'address': address, + 'creation_number': creationNumber, + }, + query: { + 'start': start, + 'limit': limit, + }, + }); + } + + /** + * Get events by event handle + * This API uses the given account `address`, `eventHandle`, and `fieldName` + * to build a key that can globally identify an event types. It then uses this + * key to return events emitted to the given account matching that event type. + * @param address Hex-encoded 32 byte Aptos account, with or without a `0x` prefix, for + * which events are queried. This refers to the account that events were + * emitted to, not the account hosting the move module that emits that + * event type. + * @param eventHandle Name of struct to lookup event handle e.g. `0x1::account::Account` + * @param fieldName Name of field to lookup event handle e.g. `withdraw_events` + * @param start Starting sequence number of events. + * + * If unspecified, by default will retrieve the most recent + * @param limit Max number of events to retrieve. + * + * If unspecified, defaults to default page size + * @returns VersionedEvent + * @throws ApiError + */ + public getEventsByEventHandle( + address: Address, + eventHandle: MoveStructTag, + fieldName: IdentifierWrapper, + start?: U64, + limit?: number, + ): CancelablePromise> { + return this.httpRequest.request({ + method: 'GET', + url: '/accounts/{address}/events/{event_handle}/{field_name}', + path: { + 'address': address, + 'event_handle': eventHandle, + 'field_name': fieldName, + }, + query: { + 'start': start, + 'limit': limit, + }, + }); + } + +} diff --git a/m1/JavaScript-client/src/generated/services/GeneralService.ts b/m1/JavaScript-client/src/generated/services/GeneralService.ts new file mode 100644 index 00000000..3b06ef49 --- /dev/null +++ b/m1/JavaScript-client/src/generated/services/GeneralService.ts @@ -0,0 +1,69 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { HealthCheckSuccess } from '../models/HealthCheckSuccess'; +import type { IndexResponse } from '../models/IndexResponse'; + +import type { CancelablePromise } from '../core/CancelablePromise'; +import type { BaseHttpRequest } from '../core/BaseHttpRequest'; + +export class GeneralService { + + constructor(public readonly httpRequest: BaseHttpRequest) {} + + /** + * Show OpenAPI explorer + * Provides a UI that you can use to explore the API. You can also + * retrieve the API directly at `/spec.yaml` and `/spec.json`. + * @returns string + * @throws ApiError + */ + public spec(): CancelablePromise { + return this.httpRequest.request({ + method: 'GET', + url: '/spec', + }); + } + + /** + * Check basic node health + * By default this endpoint just checks that it can get the latest ledger + * info and then returns 200. 
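A minimal sketch of the event-handle variant defined above, reusing the `0x1::account::Account` / `withdraw_events` examples from the doc comments; the `events` instance and helper name are assumptions.

```ts
import { EventsService } from "./generated/services/EventsService";
import type { Address } from "./generated/models/Address";

// Page through an account's withdraw events. Omitting `start` returns the most
// recent events; `limit` caps the page size.
async function fetchWithdrawEvents(events: EventsService, address: Address) {
  return events.getEventsByEventHandle(
    address,
    "0x1::account::Account", // struct that owns the event handle
    "withdraw_events",       // field holding the handle
    undefined,               // start: most recent
    25,                      // limit
  );
}
```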
+ * + * If the duration_secs param is provided, this endpoint will return a + * 200 if the following condition is true: + * + * `server_latest_ledger_info_timestamp >= server_current_time_timestamp - duration_secs` + * @param durationSecs Threshold in seconds that the server can be behind to be considered healthy + * + * If not provided, the healthcheck will always succeed + * @returns HealthCheckSuccess + * @throws ApiError + */ + public healthy( + durationSecs?: number, + ): CancelablePromise { + return this.httpRequest.request({ + method: 'GET', + url: '/-/healthy', + query: { + 'duration_secs': durationSecs, + }, + }); + } + + /** + * Get ledger info + * Get the latest ledger information, including data such as chain ID, + * role type, ledger versions, epoch, etc. + * @returns IndexResponse + * @throws ApiError + */ + public getLedgerInfo(): CancelablePromise { + return this.httpRequest.request({ + method: 'GET', + url: '/', + }); + } + +} diff --git a/m1/JavaScript-client/src/generated/services/TablesService.ts b/m1/JavaScript-client/src/generated/services/TablesService.ts new file mode 100644 index 00000000..ce909f85 --- /dev/null +++ b/m1/JavaScript-client/src/generated/services/TablesService.ts @@ -0,0 +1,93 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { Address } from '../models/Address'; +import type { MoveValue } from '../models/MoveValue'; +import type { RawTableItemRequest } from '../models/RawTableItemRequest'; +import type { TableItemRequest } from '../models/TableItemRequest'; +import type { U64 } from '../models/U64'; + +import type { CancelablePromise } from '../core/CancelablePromise'; +import type { BaseHttpRequest } from '../core/BaseHttpRequest'; + +export class TablesService { + + constructor(public readonly httpRequest: BaseHttpRequest) {} + + /** + * Get table item + * Get a table item at a specific ledger version from the table identified by {table_handle} + * in the path and the "key" (TableItemRequest) provided in the request body. + * + * This is a POST endpoint because the "key" for requesting a specific + * table item (TableItemRequest) could be quite complex, as each of its + * fields could themselves be composed of other structs. This makes it + * impractical to express using query params, meaning GET isn't an option. + * + * The Aptos nodes prune account state history, via a configurable time window. + * If the requested ledger version has been pruned, the server responds with a 410. + * @param tableHandle Table handle hex encoded 32-byte string + * @param requestBody + * @param ledgerVersion Ledger version to get state of account + * + * If not provided, it will be the latest version + * @returns MoveValue + * @throws ApiError + */ + public getTableItem( + tableHandle: Address, + requestBody: TableItemRequest, + ledgerVersion?: U64, + ): CancelablePromise { + return this.httpRequest.request({ + method: 'POST', + url: '/tables/{table_handle}/item', + path: { + 'table_handle': tableHandle, + }, + query: { + 'ledger_version': ledgerVersion, + }, + body: requestBody, + mediaType: 'application/json', + }); + } + + /** + * Get raw table item + * Get a table item at a specific ledger version from the table identified by {table_handle} + * in the path and the "key" (RawTableItemRequest) provided in the request body. + * + * The `get_raw_table_item` requires only a serialized key comparing to the full move type information + * comparing to the `get_table_item` api, and can only return the query in the bcs format. 
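A sketch of the table-item lookup defined above. The `tables` instance and the example key are assumptions, and the `key_type` / `value_type` / `key` field names follow the Aptos TableItemRequest schema rather than anything declared in this file.

```ts
import { TablesService } from "./generated/services/TablesService";
import type { TableItemRequest } from "./generated/models/TableItemRequest";

// The key travels in the POST body because, as explained above, it can be an
// arbitrarily nested Move value that does not fit in query parameters.
async function fetchTableItem(tables: TablesService, tableHandle: string) {
  const request: TableItemRequest = {
    key_type: "address", // Move type of the key (illustrative)
    value_type: "u64",   // Move type of the stored value (illustrative)
    key: "0x1",          // the key itself, JSON-encoded
  };
  return tables.getTableItem(tableHandle, request);
}
```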
+ * + * The Aptos nodes prune account state history, via a configurable time window. + * If the requested ledger version has been pruned, the server responds with a 410. + * @param tableHandle Table handle hex encoded 32-byte string + * @param requestBody + * @param ledgerVersion Ledger version to get state of account + * + * If not provided, it will be the latest version + * @returns MoveValue + * @throws ApiError + */ + public getRawTableItem( + tableHandle: Address, + requestBody: RawTableItemRequest, + ledgerVersion?: U64, + ): CancelablePromise { + return this.httpRequest.request({ + method: 'POST', + url: '/tables/{table_handle}/raw_item', + path: { + 'table_handle': tableHandle, + }, + query: { + 'ledger_version': ledgerVersion, + }, + body: requestBody, + mediaType: 'application/json', + }); + } + +} diff --git a/m1/JavaScript-client/src/generated/services/TransactionsService.ts b/m1/JavaScript-client/src/generated/services/TransactionsService.ts new file mode 100644 index 00000000..4f9ea4b2 --- /dev/null +++ b/m1/JavaScript-client/src/generated/services/TransactionsService.ts @@ -0,0 +1,302 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { Address } from '../models/Address'; +import type { EncodeSubmissionRequest } from '../models/EncodeSubmissionRequest'; +import type { GasEstimation } from '../models/GasEstimation'; +import type { HashValue } from '../models/HashValue'; +import type { HexEncodedBytes } from '../models/HexEncodedBytes'; +import type { PendingTransaction } from '../models/PendingTransaction'; +import type { SubmitTransactionRequest } from '../models/SubmitTransactionRequest'; +import type { Transaction } from '../models/Transaction'; +import type { TransactionsBatchSubmissionResult } from '../models/TransactionsBatchSubmissionResult'; +import type { U64 } from '../models/U64'; +import type { UserTransaction } from '../models/UserTransaction'; + +import type { CancelablePromise } from '../core/CancelablePromise'; +import type { BaseHttpRequest } from '../core/BaseHttpRequest'; + +export class TransactionsService { + + constructor(public readonly httpRequest: BaseHttpRequest) {} + + /** + * Get transactions + * Retrieve on-chain committed transactions. The page size and start ledger version + * can be provided to get a specific sequence of transactions. + * + * If the version has been pruned, then a 410 will be returned. + * + * To retrieve a pending transaction, use /transactions/by_hash. + * @param start Ledger version to start list of transactions + * + * If not provided, defaults to showing the latest transactions + * @param limit Max number of transactions to retrieve. + * + * If not provided, defaults to default page size + * @returns Transaction + * @throws ApiError + */ + public getTransactions( + start?: U64, + limit?: number, + ): CancelablePromise> { + return this.httpRequest.request({ + method: 'GET', + url: '/transactions', + query: { + 'start': start, + 'limit': limit, + }, + }); + } + + /** + * Submit transaction + * This endpoint accepts transaction submissions in two formats. + * + * To submit a transaction as JSON, you must submit a SubmitTransactionRequest. + * To build this request, do the following: + * + * 1. Encode the transaction as BCS. If you are using a language that has + * native BCS support, make sure of that library. If not, you may take + * advantage of /transactions/encode_submission. 
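A sketch of paging committed transactions with the method defined above, assuming `txns` is a TransactionsService from a configured client; `start` is a U64 string in the generated types, so the version is stringified.

```ts
import { TransactionsService } from "./generated/services/TransactionsService";

// Fetch a window of 100 committed transactions beginning at `startVersion`.
async function fetchTransactionPage(txns: TransactionsService, startVersion: bigint) {
  return txns.getTransactions(startVersion.toString(), 100);
}
```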
When using this + * endpoint, make sure you trust the node you're talking to, as it is + * possible they could manipulate your request. + * 2. Sign the encoded transaction and use it to create a TransactionSignature. + * 3. Submit the request. Make sure to use the "application/json" Content-Type. + * + * To submit a transaction as BCS, you must submit a SignedTransaction + * encoded as BCS. See SignedTransaction in types/src/transaction/mod.rs. + * Make sure to use the `application/x.aptos.signed_transaction+bcs` Content-Type. + * @param requestBody + * @returns PendingTransaction + * @throws ApiError + */ + public submitTransaction( + requestBody: SubmitTransactionRequest, + ): CancelablePromise { + return this.httpRequest.request({ + method: 'POST', + url: '/transactions', + body: requestBody, + mediaType: 'application/json', + }); + } + + /** + * Get transaction by hash + * Look up a transaction by its hash. This is the same hash that is returned + * by the API when submitting a transaction (see PendingTransaction). + * + * When given a transaction hash, the server first looks for the transaction + * in storage (on-chain, committed). If no on-chain transaction is found, it + * looks the transaction up by hash in the mempool (pending, not yet committed). + * + * To create a transaction hash by yourself, do the following: + * 1. Hash message bytes: "RawTransaction" bytes + BCS bytes of [Transaction](https://aptos-labs.github.io/aptos-core/aptos_types/transaction/enum.Transaction.html). + * 2. Apply hash algorithm `SHA3-256` to the hash message bytes. + * 3. Hex-encode the hash bytes with `0x` prefix. + * @param txnHash Hash of transaction to retrieve + * @returns Transaction + * @throws ApiError + */ + public getTransactionByHash( + txnHash: HashValue, + ): CancelablePromise { + return this.httpRequest.request({ + method: 'GET', + url: '/transactions/by_hash/{txn_hash}', + path: { + 'txn_hash': txnHash, + }, + }); + } + + /** + * Get transaction by version + * Retrieves a transaction by a given version. If the version has been + * pruned, a 410 will be returned. + * @param txnVersion Version of transaction to retrieve + * @returns Transaction + * @throws ApiError + */ + public getTransactionByVersion( + txnVersion: U64, + ): CancelablePromise { + return this.httpRequest.request({ + method: 'GET', + url: '/transactions/by_version/{txn_version}', + path: { + 'txn_version': txnVersion, + }, + }); + } + + /** + * Get account transactions + * Retrieves on-chain committed transactions from an account. If the start + * version is too far in the past, a 410 will be returned. + * + * If no start version is given, it will start at version 0. + * + * To retrieve a pending transaction, use /transactions/by_hash. + * @param address Address of account with or without a `0x` prefix + * @param start Account sequence number to start list of transactions + * + * If not provided, defaults to showing the latest transactions + * @param limit Max number of transactions to retrieve. + * + * If not provided, defaults to default page size + * @returns Transaction + * @throws ApiError + */ + public getAccountTransactions( + address: Address, + start?: U64, + limit?: number, + ): CancelablePromise> { + return this.httpRequest.request({ + method: 'GET', + url: '/accounts/{address}/transactions', + path: { + 'address': address, + }, + query: { + 'start': start, + 'limit': limit, + }, + }); + } + + /** + * Submit batch transactions + * This allows you to submit multiple transactions. 
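A polling sketch built on the hash lookup defined above. The `txns` instance, the retry policy, and the `"pending_transaction"` type discriminator are assumptions based on the REST API's JSON shape, not definitions from this file.

```ts
import { TransactionsService } from "./generated/services/TransactionsService";
import type { Transaction } from "./generated/models/Transaction";
import type { HashValue } from "./generated/models/HashValue";

// Poll until the node stops reporting the transaction as pending, then return it.
async function waitByHash(txns: TransactionsService, hash: HashValue): Promise<Transaction> {
  for (let attempt = 0; attempt < 10; attempt += 1) {
    const txn = await txns.getTransactionByHash(hash);
    // Defensive cast: the exact union shape of Transaction is not shown here.
    if ((txn as { type?: string }).type !== "pending_transaction") {
      return txn;
    }
    await new Promise((resolve) => setTimeout(resolve, 1000)); // wait 1s between polls
  }
  throw new Error(`transaction ${hash} still pending after polling`);
}
```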
The response has three outcomes: + * + * 1. All transactions succeed, and it will return a 202 + * 2. Some transactions succeed, and it will return the failed transactions and a 206 + * 3. No transactions succeed, and it will also return the failed transactions and a 206 + * + * To submit a transaction as JSON, you must submit a SubmitTransactionRequest. + * To build this request, do the following: + * + * 1. Encode the transaction as BCS. If you are using a language that has + * native BCS support, make sure to use that library. If not, you may take + * advantage of /transactions/encode_submission. When using this + * endpoint, make sure you trust the node you're talking to, as it is + * possible they could manipulate your request. + * 2. Sign the encoded transaction and use it to create a TransactionSignature. + * 3. Submit the request. Make sure to use the "application/json" Content-Type. + * + * To submit a transaction as BCS, you must submit a SignedTransaction + * encoded as BCS. See SignedTransaction in types/src/transaction/mod.rs. + * Make sure to use the `application/x.aptos.signed_transaction+bcs` Content-Type. + * @param requestBody + * @returns TransactionsBatchSubmissionResult + * @throws ApiError + */ + public submitBatchTransactions( + requestBody: Array, + ): CancelablePromise { + return this.httpRequest.request({ + method: 'POST', + url: '/transactions/batch', + body: requestBody, + mediaType: 'application/json', + }); + } + + /** + * Simulate transaction + * The output of the transaction will have the exact transaction outputs and events that running + * an actual signed transaction would have. However, it will not have the associated state + * hashes, as they are not updated in storage. This can be used to estimate the maximum gas + * units for a submitted transaction. + * + * To use this, you must: + * - Create a SignedTransaction with a zero-padded signature. + * - Submit a SubmitTransactionRequest containing a UserTransactionRequest containing that signature. + * + * To use this endpoint with BCS, you must submit a SignedTransaction + * encoded as BCS. See SignedTransaction in types/src/transaction/mod.rs. + * @param requestBody + * @param estimateMaxGasAmount If set to true, the max gas value in the transaction will be ignored + * and the maximum possible gas will be used + * @param estimateGasUnitPrice If set to true, the gas unit price in the transaction will be ignored + * and the estimated value will be used + * @param estimatePrioritizedGasUnitPrice If set to true, the transaction will use a higher price than the original + * estimate. + * @returns UserTransaction + * @throws ApiError + */ + public simulateTransaction( + requestBody: SubmitTransactionRequest, + estimateMaxGasAmount?: boolean, + estimateGasUnitPrice?: boolean, + estimatePrioritizedGasUnitPrice?: boolean, + ): CancelablePromise> { + return this.httpRequest.request({ + method: 'POST', + url: '/transactions/simulate', + query: { + 'estimate_max_gas_amount': estimateMaxGasAmount, + 'estimate_gas_unit_price': estimateGasUnitPrice, + 'estimate_prioritized_gas_unit_price': estimatePrioritizedGasUnitPrice, + }, + body: requestBody, + mediaType: 'application/json', + }); + } + + /** + * Encode submission + * This endpoint accepts an EncodeSubmissionRequest, which internally is a + * UserTransactionRequestInner (and optionally secondary signers) encoded + * as JSON, validates the request format, and then returns that request + * encoded in BCS. 
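A sketch of using the simulation endpoint above for gas estimation. It assumes `request` already carries a zero-padded signature, as required, and that the simulated result exposes `gas_used` and `gas_unit_price` as in the Aptos UserTransaction model; those names are not declared in this file.

```ts
import { TransactionsService } from "./generated/services/TransactionsService";
import type { SubmitTransactionRequest } from "./generated/models/SubmitTransactionRequest";

// Simulate with both estimation flags on so the node fills in the max gas amount
// and the gas unit price, then report what the simulated run consumed.
async function estimateGas(txns: TransactionsService, request: SubmitTransactionRequest) {
  const [simulated] = await txns.simulateTransaction(
    request,
    /* estimateMaxGasAmount */ true,
    /* estimateGasUnitPrice */ true,
  );
  return { gasUsed: simulated.gas_used, gasUnitPrice: simulated.gas_unit_price };
}
```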
The client can then use this to create a transaction + * signature to be used in a SubmitTransactionRequest, which it then + * passes to the /transactions POST endpoint. + * + * To be clear, this endpoint makes it possible to submit transaction + * requests to the API from languages that do not have library support for + * BCS. If you are using an SDK that has BCS support, such as the official + * Rust, TypeScript, or Python SDKs, you do not need to use this endpoint. + * + * To sign a message using the response from this endpoint: + * - Decode the hex encoded string in the response to bytes. + * - Sign the bytes to create the signature. + * - Use that as the signature field in something like Ed25519Signature, which you then use to build a TransactionSignature. + * @param requestBody + * @returns HexEncodedBytes + * @throws ApiError + */ + public encodeSubmission( + requestBody: EncodeSubmissionRequest, + ): CancelablePromise { + return this.httpRequest.request({ + method: 'POST', + url: '/transactions/encode_submission', + body: requestBody, + mediaType: 'application/json', + }); + } + + /** + * Estimate gas price + * Currently, the gas estimation is handled by taking the median of the last 100,000 transactions + * If a user wants to prioritize their transaction and is willing to pay, they can pay more + * than the gas price. If they're willing to wait longer, they can pay less. Note that the + * gas price moves with the fee market, and should only increase when demand outweighs supply. + * + * If there have been no transactions in the last 100,000 transactions, the price will be 1. + * @returns GasEstimation + * @throws ApiError + */ + public estimateGasPrice(): CancelablePromise { + return this.httpRequest.request({ + method: 'GET', + url: '/estimate_gas_price', + }); + } + +} diff --git a/m1/JavaScript-client/src/generated/services/ViewService.ts b/m1/JavaScript-client/src/generated/services/ViewService.ts new file mode 100644 index 00000000..8b4d40df --- /dev/null +++ b/m1/JavaScript-client/src/generated/services/ViewService.ts @@ -0,0 +1,43 @@ +/* istanbul ignore file */ +/* tslint:disable */ +/* eslint-disable */ +import type { MoveValue } from '../models/MoveValue'; +import type { U64 } from '../models/U64'; +import type { ViewRequest } from '../models/ViewRequest'; + +import type { CancelablePromise } from '../core/CancelablePromise'; +import type { BaseHttpRequest } from '../core/BaseHttpRequest'; + +export class ViewService { + + constructor(public readonly httpRequest: BaseHttpRequest) {} + + /** + * Execute view function of a module + * Execute the Move function with the given parameters and return its execution result. + * + * The Aptos nodes prune account state history, via a configurable time window. + * If the requested ledger version has been pruned, the server responds with a 410. 
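A sketch of the encode-then-sign flow described above. `signBytes` stands in for the caller's Ed25519 signer (any Ed25519 implementation, for example tweetnacl), and Node's Buffer is assumed for hex handling; neither is part of this file.

```ts
import { TransactionsService } from "./generated/services/TransactionsService";
import type { EncodeSubmissionRequest } from "./generated/models/EncodeSubmissionRequest";

// Ask the node for the BCS signing message, sign it locally, and return the
// hex-encoded signature to place into an Ed25519Signature / TransactionSignature.
async function encodeAndSign(
  txns: TransactionsService,
  request: EncodeSubmissionRequest,
  signBytes: (message: Uint8Array) => Uint8Array,
): Promise<string> {
  const hex = await txns.encodeSubmission(request);           // "0x..." signing message
  const message = Buffer.from(hex.replace(/^0x/, ""), "hex"); // decode hex to bytes
  const signature = signBytes(message);                       // caller-provided signer
  return `0x${Buffer.from(signature).toString("hex")}`;
}
```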
+ * @param requestBody + * @param ledgerVersion Ledger version to get state of account + * + * If not provided, it will be the latest version + * @returns MoveValue + * @throws ApiError + */ + public view( + requestBody: ViewRequest, + ledgerVersion?: U64, + ): CancelablePromise> { + return this.httpRequest.request({ + method: 'POST', + url: '/view', + query: { + 'ledger_version': ledgerVersion, + }, + body: requestBody, + mediaType: 'application/json', + }); + } + +} diff --git a/m1/JavaScript-client/src/index.ts b/m1/JavaScript-client/src/index.ts new file mode 100644 index 00000000..6e1cbbf7 --- /dev/null +++ b/m1/JavaScript-client/src/index.ts @@ -0,0 +1,18 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +export * from "./account"; +export * from "./providers"; +export * as BCS from "./bcs"; +export * from "./utils/hex_string"; +export * from "./plugins"; +export * from "./transaction_builder"; +export * as TokenTypes from "./aptos_types/token_types"; +export * as Types from "./generated/index"; +export { derivePath } from "./utils/hd-key"; +export { + deserializePropertyMap, + deserializeValueBasedOnTypeTag, + getPropertyValueRaw, +} from "./utils/property_map_serde"; +export { Network, CustomEndpoints } from "./utils/api-endpoints"; diff --git a/m1/JavaScript-client/src/indexer/codegen.yml b/m1/JavaScript-client/src/indexer/codegen.yml new file mode 100644 index 00000000..a9e72503 --- /dev/null +++ b/m1/JavaScript-client/src/indexer/codegen.yml @@ -0,0 +1,22 @@ +overwrite: true +documents: src/indexer/queries/**/*.graphql +schema: https://indexer.mainnet.aptoslabs.com/v1/graphql +generates: + src/indexer/generated/types.ts: + plugins: + - typescript + src/indexer/generated/operations.ts: + preset: import-types-preset + presetConfig: + typesPath: ./types + plugins: + - typescript-operations + src/indexer/generated/queries.ts: + preset: import-types-preset + presetConfig: + typesPath: ./operations + plugins: + - typescript-graphql-request + config: + documentMode: string + documentVariableSuffix: "" diff --git a/m1/JavaScript-client/src/indexer/generated/operations.ts b/m1/JavaScript-client/src/indexer/generated/operations.ts new file mode 100644 index 00000000..361f2a66 --- /dev/null +++ b/m1/JavaScript-client/src/indexer/generated/operations.ts @@ -0,0 +1,113 @@ +import * as Types from './types'; + +export type GetAccountCoinsDataQueryVariables = Types.Exact<{ + owner_address?: Types.InputMaybe; + offset?: Types.InputMaybe; + limit?: Types.InputMaybe; +}>; + + +export type GetAccountCoinsDataQuery = { __typename?: 'query_root', current_coin_balances: Array<{ __typename?: 'current_coin_balances', amount: any, coin_type: string, coin_info?: { __typename?: 'coin_infos', name: string, decimals: number, symbol: string } | null }> }; + +export type GetAccountCurrentTokensQueryVariables = Types.Exact<{ + address: Types.Scalars['String']; + offset?: Types.InputMaybe; + limit?: Types.InputMaybe; +}>; + + +export type GetAccountCurrentTokensQuery = { __typename?: 'query_root', current_token_ownerships: Array<{ __typename?: 'current_token_ownerships', amount: any, last_transaction_version: any, property_version: any, current_token_data?: { __typename?: 'current_token_datas', creator_address: string, collection_name: string, description: string, metadata_uri: string, name: string, token_data_id_hash: string, collection_data_id_hash: string } | null, current_collection_data?: { __typename?: 'current_collection_datas', metadata_uri: string, supply: any, description: 
string, collection_name: string, collection_data_id_hash: string, table_handle: string, creator_address: string } | null }> }; + +export type TokenDataFieldsFragment = { __typename?: 'current_token_datas', creator_address: string, collection_name: string, description: string, metadata_uri: string, name: string, token_data_id_hash: string, collection_data_id_hash: string }; + +export type CollectionDataFieldsFragment = { __typename?: 'current_collection_datas', metadata_uri: string, supply: any, description: string, collection_name: string, collection_data_id_hash: string, table_handle: string, creator_address: string }; + +export type GetAccountTokensCountQueryVariables = Types.Exact<{ + owner_address?: Types.InputMaybe; +}>; + + +export type GetAccountTokensCountQuery = { __typename?: 'query_root', current_token_ownerships_aggregate: { __typename?: 'current_token_ownerships_aggregate', aggregate?: { __typename?: 'current_token_ownerships_aggregate_fields', count: number } | null } }; + +export type GetAccountTransactionsCountQueryVariables = Types.Exact<{ + address?: Types.InputMaybe; +}>; + + +export type GetAccountTransactionsCountQuery = { __typename?: 'query_root', move_resources_aggregate: { __typename?: 'move_resources_aggregate', aggregate?: { __typename?: 'move_resources_aggregate_fields', count: number } | null } }; + +export type GetAccountTransactionsDataQueryVariables = Types.Exact<{ + address?: Types.InputMaybe; + limit?: Types.InputMaybe; + offset?: Types.InputMaybe; +}>; + + +export type GetAccountTransactionsDataQuery = { __typename?: 'query_root', move_resources: Array<{ __typename?: 'move_resources', transaction_version: any }> }; + +export type GetCurrentDelegatorBalancesCountQueryVariables = Types.Exact<{ + poolAddress?: Types.InputMaybe; +}>; + + +export type GetCurrentDelegatorBalancesCountQuery = { __typename?: 'query_root', current_delegator_balances_aggregate: { __typename?: 'current_delegator_balances_aggregate', aggregate?: { __typename?: 'current_delegator_balances_aggregate_fields', count: number } | null } }; + +export type GetDelegatedStakingActivitiesQueryVariables = Types.Exact<{ + delegatorAddress?: Types.InputMaybe; + poolAddress?: Types.InputMaybe; +}>; + + +export type GetDelegatedStakingActivitiesQuery = { __typename?: 'query_root', delegated_staking_activities: Array<{ __typename?: 'delegated_staking_activities', amount: any, delegator_address: string, event_index: any, event_type: string, pool_address: string, transaction_version: any }> }; + +export type GetIndexerLedgerInfoQueryVariables = Types.Exact<{ [key: string]: never; }>; + + +export type GetIndexerLedgerInfoQuery = { __typename?: 'query_root', ledger_infos: Array<{ __typename?: 'ledger_infos', chain_id: any }> }; + +export type GetTokenActivitiesQueryVariables = Types.Exact<{ + idHash: Types.Scalars['String']; + offset?: Types.InputMaybe; + limit?: Types.InputMaybe; +}>; + + +export type GetTokenActivitiesQuery = { __typename?: 'query_root', token_activities: Array<{ __typename?: 'token_activities', creator_address: string, collection_name: string, name: string, token_data_id_hash: string, collection_data_id_hash: string, from_address?: string | null, to_address?: string | null, transaction_version: any, transaction_timestamp: any, property_version: any, transfer_type: string, event_sequence_number: any, token_amount: any }> }; + +export type GetTokenActivitiesCountQueryVariables = Types.Exact<{ + token_id?: Types.InputMaybe; +}>; + + +export type GetTokenActivitiesCountQuery = { 
__typename?: 'query_root', token_activities_aggregate: { __typename?: 'token_activities_aggregate', aggregate?: { __typename?: 'token_activities_aggregate_fields', count: number } | null } }; + +export type GetTokenDataQueryVariables = Types.Exact<{ + token_id?: Types.InputMaybe; +}>; + + +export type GetTokenDataQuery = { __typename?: 'query_root', current_token_datas: Array<{ __typename?: 'current_token_datas', token_data_id_hash: string, name: string, collection_name: string, creator_address: string, default_properties: any, largest_property_version: any, maximum: any, metadata_uri: string, payee_address: string, royalty_points_denominator: any, royalty_points_numerator: any, supply: any }> }; + +export type GetTokenOwnersDataQueryVariables = Types.Exact<{ + token_id?: Types.InputMaybe; + property_version?: Types.InputMaybe; +}>; + + +export type GetTokenOwnersDataQuery = { __typename?: 'query_root', current_token_ownerships: Array<{ __typename?: 'current_token_ownerships', owner_address: string }> }; + +export type GetTopUserTransactionsQueryVariables = Types.Exact<{ + limit?: Types.InputMaybe; +}>; + + +export type GetTopUserTransactionsQuery = { __typename?: 'query_root', user_transactions: Array<{ __typename?: 'user_transactions', version: any }> }; + +export type GetUserTransactionsQueryVariables = Types.Exact<{ + limit?: Types.InputMaybe; + start_version?: Types.InputMaybe; + offset?: Types.InputMaybe; +}>; + + +export type GetUserTransactionsQuery = { __typename?: 'query_root', user_transactions: Array<{ __typename?: 'user_transactions', version: any }> }; diff --git a/m1/JavaScript-client/src/indexer/generated/queries.ts b/m1/JavaScript-client/src/indexer/generated/queries.ts new file mode 100644 index 00000000..9ef4ccac --- /dev/null +++ b/m1/JavaScript-client/src/indexer/generated/queries.ts @@ -0,0 +1,265 @@ +import * as Types from './operations'; + +import { GraphQLClient } from 'graphql-request'; +import * as Dom from 'graphql-request/dist/types.dom'; +export const TokenDataFieldsFragmentDoc = ` + fragment TokenDataFields on current_token_datas { + creator_address + collection_name + description + metadata_uri + name + token_data_id_hash + collection_data_id_hash +} + `; +export const CollectionDataFieldsFragmentDoc = ` + fragment CollectionDataFields on current_collection_datas { + metadata_uri + supply + description + collection_name + collection_data_id_hash + table_handle + creator_address +} + `; +export const GetAccountCoinsData = ` + query getAccountCoinsData($owner_address: String, $offset: Int, $limit: Int) { + current_coin_balances( + where: {owner_address: {_eq: $owner_address}} + offset: $offset + limit: $limit + ) { + amount + coin_type + coin_info { + name + decimals + symbol + } + } +} + `; +export const GetAccountCurrentTokens = ` + query getAccountCurrentTokens($address: String!, $offset: Int, $limit: Int) { + current_token_ownerships( + where: {owner_address: {_eq: $address}, amount: {_gt: 0}} + order_by: [{last_transaction_version: desc}, {creator_address: asc}, {collection_name: asc}, {name: asc}] + offset: $offset + limit: $limit + ) { + amount + current_token_data { + ...TokenDataFields + } + current_collection_data { + ...CollectionDataFields + } + last_transaction_version + property_version + } +} + ${TokenDataFieldsFragmentDoc} +${CollectionDataFieldsFragmentDoc}`; +export const GetAccountTokensCount = ` + query getAccountTokensCount($owner_address: String) { + current_token_ownerships_aggregate( + where: {owner_address: {_eq: $owner_address}, 
amount: {_gt: "0"}} + ) { + aggregate { + count + } + } +} + `; +export const GetAccountTransactionsCount = ` + query getAccountTransactionsCount($address: String) { + move_resources_aggregate( + where: {address: {_eq: $address}} + distinct_on: transaction_version + ) { + aggregate { + count + } + } +} + `; +export const GetAccountTransactionsData = ` + query getAccountTransactionsData($address: String, $limit: Int, $offset: Int) { + move_resources( + where: {address: {_eq: $address}} + order_by: {transaction_version: desc} + distinct_on: transaction_version + limit: $limit + offset: $offset + ) { + transaction_version + } +} + `; +export const GetCurrentDelegatorBalancesCount = ` + query getCurrentDelegatorBalancesCount($poolAddress: String) { + current_delegator_balances_aggregate( + where: {pool_type: {_eq: "active_shares"}, pool_address: {_eq: $poolAddress}, amount: {_gt: "0"}} + distinct_on: delegator_address + ) { + aggregate { + count + } + } +} + `; +export const GetDelegatedStakingActivities = ` + query getDelegatedStakingActivities($delegatorAddress: String, $poolAddress: String) { + delegated_staking_activities( + where: {delegator_address: {_eq: $delegatorAddress}, pool_address: {_eq: $poolAddress}} + ) { + amount + delegator_address + event_index + event_type + pool_address + transaction_version + } +} + `; +export const GetIndexerLedgerInfo = ` + query getIndexerLedgerInfo { + ledger_infos { + chain_id + } +} + `; +export const GetTokenActivities = ` + query getTokenActivities($idHash: String!, $offset: Int, $limit: Int) { + token_activities( + where: {token_data_id_hash: {_eq: $idHash}} + order_by: {transaction_version: desc} + offset: $offset + limit: $limit + ) { + creator_address + collection_name + name + token_data_id_hash + collection_data_id_hash + from_address + to_address + transaction_version + transaction_timestamp + property_version + transfer_type + event_sequence_number + token_amount + } +} + `; +export const GetTokenActivitiesCount = ` + query getTokenActivitiesCount($token_id: String) { + token_activities_aggregate(where: {token_data_id_hash: {_eq: $token_id}}) { + aggregate { + count + } + } +} + `; +export const GetTokenData = ` + query getTokenData($token_id: String) { + current_token_datas(where: {token_data_id_hash: {_eq: $token_id}}) { + token_data_id_hash + name + collection_name + creator_address + default_properties + largest_property_version + maximum + metadata_uri + payee_address + royalty_points_denominator + royalty_points_numerator + supply + } +} + `; +export const GetTokenOwnersData = ` + query getTokenOwnersData($token_id: String, $property_version: numeric) { + current_token_ownerships( + where: {token_data_id_hash: {_eq: $token_id}, property_version: {_eq: $property_version}} + ) { + owner_address + } +} + `; +export const GetTopUserTransactions = ` + query getTopUserTransactions($limit: Int) { + user_transactions(limit: $limit, order_by: {version: desc}) { + version + } +} + `; +export const GetUserTransactions = ` + query getUserTransactions($limit: Int, $start_version: bigint, $offset: Int) { + user_transactions( + limit: $limit + order_by: {version: desc} + where: {version: {_lte: $start_version}} + offset: $offset + ) { + version + } +} + `; + +export type SdkFunctionWrapper = (action: (requestHeaders?:Record) => Promise, operationName: string, operationType?: string) => Promise; + + +const defaultWrapper: SdkFunctionWrapper = (action, _operationName, _operationType) => action(); + +export function getSdk(client: GraphQLClient, 
withWrapper: SdkFunctionWrapper = defaultWrapper) { + return { + getAccountCoinsData(variables?: Types.GetAccountCoinsDataQueryVariables, requestHeaders?: Dom.RequestInit["headers"]): Promise { + return withWrapper((wrappedRequestHeaders) => client.request(GetAccountCoinsData, variables, {...requestHeaders, ...wrappedRequestHeaders}), 'getAccountCoinsData', 'query'); + }, + getAccountCurrentTokens(variables: Types.GetAccountCurrentTokensQueryVariables, requestHeaders?: Dom.RequestInit["headers"]): Promise { + return withWrapper((wrappedRequestHeaders) => client.request(GetAccountCurrentTokens, variables, {...requestHeaders, ...wrappedRequestHeaders}), 'getAccountCurrentTokens', 'query'); + }, + getAccountTokensCount(variables?: Types.GetAccountTokensCountQueryVariables, requestHeaders?: Dom.RequestInit["headers"]): Promise { + return withWrapper((wrappedRequestHeaders) => client.request(GetAccountTokensCount, variables, {...requestHeaders, ...wrappedRequestHeaders}), 'getAccountTokensCount', 'query'); + }, + getAccountTransactionsCount(variables?: Types.GetAccountTransactionsCountQueryVariables, requestHeaders?: Dom.RequestInit["headers"]): Promise { + return withWrapper((wrappedRequestHeaders) => client.request(GetAccountTransactionsCount, variables, {...requestHeaders, ...wrappedRequestHeaders}), 'getAccountTransactionsCount', 'query'); + }, + getAccountTransactionsData(variables?: Types.GetAccountTransactionsDataQueryVariables, requestHeaders?: Dom.RequestInit["headers"]): Promise { + return withWrapper((wrappedRequestHeaders) => client.request(GetAccountTransactionsData, variables, {...requestHeaders, ...wrappedRequestHeaders}), 'getAccountTransactionsData', 'query'); + }, + getCurrentDelegatorBalancesCount(variables?: Types.GetCurrentDelegatorBalancesCountQueryVariables, requestHeaders?: Dom.RequestInit["headers"]): Promise { + return withWrapper((wrappedRequestHeaders) => client.request(GetCurrentDelegatorBalancesCount, variables, {...requestHeaders, ...wrappedRequestHeaders}), 'getCurrentDelegatorBalancesCount', 'query'); + }, + getDelegatedStakingActivities(variables?: Types.GetDelegatedStakingActivitiesQueryVariables, requestHeaders?: Dom.RequestInit["headers"]): Promise { + return withWrapper((wrappedRequestHeaders) => client.request(GetDelegatedStakingActivities, variables, {...requestHeaders, ...wrappedRequestHeaders}), 'getDelegatedStakingActivities', 'query'); + }, + getIndexerLedgerInfo(variables?: Types.GetIndexerLedgerInfoQueryVariables, requestHeaders?: Dom.RequestInit["headers"]): Promise { + return withWrapper((wrappedRequestHeaders) => client.request(GetIndexerLedgerInfo, variables, {...requestHeaders, ...wrappedRequestHeaders}), 'getIndexerLedgerInfo', 'query'); + }, + getTokenActivities(variables: Types.GetTokenActivitiesQueryVariables, requestHeaders?: Dom.RequestInit["headers"]): Promise { + return withWrapper((wrappedRequestHeaders) => client.request(GetTokenActivities, variables, {...requestHeaders, ...wrappedRequestHeaders}), 'getTokenActivities', 'query'); + }, + getTokenActivitiesCount(variables?: Types.GetTokenActivitiesCountQueryVariables, requestHeaders?: Dom.RequestInit["headers"]): Promise { + return withWrapper((wrappedRequestHeaders) => client.request(GetTokenActivitiesCount, variables, {...requestHeaders, ...wrappedRequestHeaders}), 'getTokenActivitiesCount', 'query'); + }, + getTokenData(variables?: Types.GetTokenDataQueryVariables, requestHeaders?: Dom.RequestInit["headers"]): Promise { + return withWrapper((wrappedRequestHeaders) => 
client.request(GetTokenData, variables, {...requestHeaders, ...wrappedRequestHeaders}), 'getTokenData', 'query'); + }, + getTokenOwnersData(variables?: Types.GetTokenOwnersDataQueryVariables, requestHeaders?: Dom.RequestInit["headers"]): Promise { + return withWrapper((wrappedRequestHeaders) => client.request(GetTokenOwnersData, variables, {...requestHeaders, ...wrappedRequestHeaders}), 'getTokenOwnersData', 'query'); + }, + getTopUserTransactions(variables?: Types.GetTopUserTransactionsQueryVariables, requestHeaders?: Dom.RequestInit["headers"]): Promise { + return withWrapper((wrappedRequestHeaders) => client.request(GetTopUserTransactions, variables, {...requestHeaders, ...wrappedRequestHeaders}), 'getTopUserTransactions', 'query'); + }, + getUserTransactions(variables?: Types.GetUserTransactionsQueryVariables, requestHeaders?: Dom.RequestInit["headers"]): Promise { + return withWrapper((wrappedRequestHeaders) => client.request(GetUserTransactions, variables, {...requestHeaders, ...wrappedRequestHeaders}), 'getUserTransactions', 'query'); + } + }; +} +export type Sdk = ReturnType; \ No newline at end of file diff --git a/m1/JavaScript-client/src/indexer/generated/types.ts b/m1/JavaScript-client/src/indexer/generated/types.ts new file mode 100644 index 00000000..52466067 --- /dev/null +++ b/m1/JavaScript-client/src/indexer/generated/types.ts @@ -0,0 +1,4975 @@ +export type Maybe = T | null; +export type InputMaybe = Maybe; +export type Exact = { [K in keyof T]: T[K] }; +export type MakeOptional = Omit & { [SubKey in K]?: Maybe }; +export type MakeMaybe = Omit & { [SubKey in K]: Maybe }; +/** All built-in and custom scalars, mapped to their actual values */ +export type Scalars = { + ID: string; + String: string; + Boolean: boolean; + Int: number; + Float: number; + bigint: any; + jsonb: any; + numeric: any; + timestamp: any; +}; + +/** Boolean expression to compare columns of type "Boolean". All fields are combined with logical 'AND'. */ +export type Boolean_Comparison_Exp = { + _eq?: InputMaybe; + _gt?: InputMaybe; + _gte?: InputMaybe; + _in?: InputMaybe>; + _is_null?: InputMaybe; + _lt?: InputMaybe; + _lte?: InputMaybe; + _neq?: InputMaybe; + _nin?: InputMaybe>; +}; + +/** Boolean expression to compare columns of type "Int". All fields are combined with logical 'AND'. */ +export type Int_Comparison_Exp = { + _eq?: InputMaybe; + _gt?: InputMaybe; + _gte?: InputMaybe; + _in?: InputMaybe>; + _is_null?: InputMaybe; + _lt?: InputMaybe; + _lte?: InputMaybe; + _neq?: InputMaybe; + _nin?: InputMaybe>; +}; + +/** Boolean expression to compare columns of type "String". All fields are combined with logical 'AND'. 
*/ +export type String_Comparison_Exp = { + _eq?: InputMaybe; + _gt?: InputMaybe; + _gte?: InputMaybe; + /** does the column match the given case-insensitive pattern */ + _ilike?: InputMaybe; + _in?: InputMaybe>; + /** does the column match the given POSIX regular expression, case insensitive */ + _iregex?: InputMaybe; + _is_null?: InputMaybe; + /** does the column match the given pattern */ + _like?: InputMaybe; + _lt?: InputMaybe; + _lte?: InputMaybe; + _neq?: InputMaybe; + /** does the column NOT match the given case-insensitive pattern */ + _nilike?: InputMaybe; + _nin?: InputMaybe>; + /** does the column NOT match the given POSIX regular expression, case insensitive */ + _niregex?: InputMaybe; + /** does the column NOT match the given pattern */ + _nlike?: InputMaybe; + /** does the column NOT match the given POSIX regular expression, case sensitive */ + _nregex?: InputMaybe; + /** does the column NOT match the given SQL regular expression */ + _nsimilar?: InputMaybe; + /** does the column match the given POSIX regular expression, case sensitive */ + _regex?: InputMaybe; + /** does the column match the given SQL regular expression */ + _similar?: InputMaybe; +}; + +/** columns and relationships of "address_version_from_events" */ +export type Address_Version_From_Events = { + __typename?: 'address_version_from_events'; + account_address?: Maybe; + coin_activities: Array; + token_activities: Array; + token_activities_aggregate: Token_Activities_Aggregate; + transaction_version?: Maybe; +}; + + +/** columns and relationships of "address_version_from_events" */ +export type Address_Version_From_EventsCoin_ActivitiesArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +/** columns and relationships of "address_version_from_events" */ +export type Address_Version_From_EventsToken_ActivitiesArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +/** columns and relationships of "address_version_from_events" */ +export type Address_Version_From_EventsToken_Activities_AggregateArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + +/** Boolean expression to filter rows from the table "address_version_from_events". All fields are combined with a logical 'AND'. */ +export type Address_Version_From_Events_Bool_Exp = { + _and?: InputMaybe>; + _not?: InputMaybe; + _or?: InputMaybe>; + account_address?: InputMaybe; + transaction_version?: InputMaybe; +}; + +/** Ordering options when selecting data from "address_version_from_events". 
*/ +export type Address_Version_From_Events_Order_By = { + account_address?: InputMaybe; + transaction_version?: InputMaybe; +}; + +/** select columns of table "address_version_from_events" */ +export enum Address_Version_From_Events_Select_Column { + /** column name */ + AccountAddress = 'account_address', + /** column name */ + TransactionVersion = 'transaction_version' +} + +/** Streaming cursor of the table "address_version_from_events" */ +export type Address_Version_From_Events_Stream_Cursor_Input = { + /** Stream column input with initial value */ + initial_value: Address_Version_From_Events_Stream_Cursor_Value_Input; + /** cursor ordering */ + ordering?: InputMaybe; +}; + +/** Initial value of the column from where the streaming should start */ +export type Address_Version_From_Events_Stream_Cursor_Value_Input = { + account_address?: InputMaybe; + transaction_version?: InputMaybe; +}; + +/** Boolean expression to compare columns of type "bigint". All fields are combined with logical 'AND'. */ +export type Bigint_Comparison_Exp = { + _eq?: InputMaybe; + _gt?: InputMaybe; + _gte?: InputMaybe; + _in?: InputMaybe>; + _is_null?: InputMaybe; + _lt?: InputMaybe; + _lte?: InputMaybe; + _neq?: InputMaybe; + _nin?: InputMaybe>; +}; + +/** columns and relationships of "coin_activities" */ +export type Coin_Activities = { + __typename?: 'coin_activities'; + activity_type: Scalars['String']; + amount: Scalars['numeric']; + block_height: Scalars['bigint']; + coin_type: Scalars['String']; + entry_function_id_str?: Maybe; + event_account_address: Scalars['String']; + event_creation_number: Scalars['bigint']; + event_index?: Maybe; + event_sequence_number: Scalars['bigint']; + is_gas_fee: Scalars['Boolean']; + is_transaction_success: Scalars['Boolean']; + owner_address: Scalars['String']; + transaction_timestamp: Scalars['timestamp']; + transaction_version: Scalars['bigint']; +}; + +/** Boolean expression to filter rows from the table "coin_activities". All fields are combined with a logical 'AND'. */ +export type Coin_Activities_Bool_Exp = { + _and?: InputMaybe>; + _not?: InputMaybe; + _or?: InputMaybe>; + activity_type?: InputMaybe; + amount?: InputMaybe; + block_height?: InputMaybe; + coin_type?: InputMaybe; + entry_function_id_str?: InputMaybe; + event_account_address?: InputMaybe; + event_creation_number?: InputMaybe; + event_index?: InputMaybe; + event_sequence_number?: InputMaybe; + is_gas_fee?: InputMaybe; + is_transaction_success?: InputMaybe; + owner_address?: InputMaybe; + transaction_timestamp?: InputMaybe; + transaction_version?: InputMaybe; +}; + +/** Ordering options when selecting data from "coin_activities". 
*/ +export type Coin_Activities_Order_By = { + activity_type?: InputMaybe; + amount?: InputMaybe; + block_height?: InputMaybe; + coin_type?: InputMaybe; + entry_function_id_str?: InputMaybe; + event_account_address?: InputMaybe; + event_creation_number?: InputMaybe; + event_index?: InputMaybe; + event_sequence_number?: InputMaybe; + is_gas_fee?: InputMaybe; + is_transaction_success?: InputMaybe; + owner_address?: InputMaybe; + transaction_timestamp?: InputMaybe; + transaction_version?: InputMaybe; +}; + +/** select columns of table "coin_activities" */ +export enum Coin_Activities_Select_Column { + /** column name */ + ActivityType = 'activity_type', + /** column name */ + Amount = 'amount', + /** column name */ + BlockHeight = 'block_height', + /** column name */ + CoinType = 'coin_type', + /** column name */ + EntryFunctionIdStr = 'entry_function_id_str', + /** column name */ + EventAccountAddress = 'event_account_address', + /** column name */ + EventCreationNumber = 'event_creation_number', + /** column name */ + EventIndex = 'event_index', + /** column name */ + EventSequenceNumber = 'event_sequence_number', + /** column name */ + IsGasFee = 'is_gas_fee', + /** column name */ + IsTransactionSuccess = 'is_transaction_success', + /** column name */ + OwnerAddress = 'owner_address', + /** column name */ + TransactionTimestamp = 'transaction_timestamp', + /** column name */ + TransactionVersion = 'transaction_version' +} + +/** Streaming cursor of the table "coin_activities" */ +export type Coin_Activities_Stream_Cursor_Input = { + /** Stream column input with initial value */ + initial_value: Coin_Activities_Stream_Cursor_Value_Input; + /** cursor ordering */ + ordering?: InputMaybe; +}; + +/** Initial value of the column from where the streaming should start */ +export type Coin_Activities_Stream_Cursor_Value_Input = { + activity_type?: InputMaybe; + amount?: InputMaybe; + block_height?: InputMaybe; + coin_type?: InputMaybe; + entry_function_id_str?: InputMaybe; + event_account_address?: InputMaybe; + event_creation_number?: InputMaybe; + event_index?: InputMaybe; + event_sequence_number?: InputMaybe; + is_gas_fee?: InputMaybe; + is_transaction_success?: InputMaybe; + owner_address?: InputMaybe; + transaction_timestamp?: InputMaybe; + transaction_version?: InputMaybe; +}; + +/** columns and relationships of "coin_balances" */ +export type Coin_Balances = { + __typename?: 'coin_balances'; + amount: Scalars['numeric']; + coin_type: Scalars['String']; + coin_type_hash: Scalars['String']; + owner_address: Scalars['String']; + transaction_timestamp: Scalars['timestamp']; + transaction_version: Scalars['bigint']; +}; + +/** Boolean expression to filter rows from the table "coin_balances". All fields are combined with a logical 'AND'. */ +export type Coin_Balances_Bool_Exp = { + _and?: InputMaybe>; + _not?: InputMaybe; + _or?: InputMaybe>; + amount?: InputMaybe; + coin_type?: InputMaybe; + coin_type_hash?: InputMaybe; + owner_address?: InputMaybe; + transaction_timestamp?: InputMaybe; + transaction_version?: InputMaybe; +}; + +/** Ordering options when selecting data from "coin_balances". 
*/ +export type Coin_Balances_Order_By = { + amount?: InputMaybe; + coin_type?: InputMaybe; + coin_type_hash?: InputMaybe; + owner_address?: InputMaybe; + transaction_timestamp?: InputMaybe; + transaction_version?: InputMaybe; +}; + +/** select columns of table "coin_balances" */ +export enum Coin_Balances_Select_Column { + /** column name */ + Amount = 'amount', + /** column name */ + CoinType = 'coin_type', + /** column name */ + CoinTypeHash = 'coin_type_hash', + /** column name */ + OwnerAddress = 'owner_address', + /** column name */ + TransactionTimestamp = 'transaction_timestamp', + /** column name */ + TransactionVersion = 'transaction_version' +} + +/** Streaming cursor of the table "coin_balances" */ +export type Coin_Balances_Stream_Cursor_Input = { + /** Stream column input with initial value */ + initial_value: Coin_Balances_Stream_Cursor_Value_Input; + /** cursor ordering */ + ordering?: InputMaybe; +}; + +/** Initial value of the column from where the streaming should start */ +export type Coin_Balances_Stream_Cursor_Value_Input = { + amount?: InputMaybe; + coin_type?: InputMaybe; + coin_type_hash?: InputMaybe; + owner_address?: InputMaybe; + transaction_timestamp?: InputMaybe; + transaction_version?: InputMaybe; +}; + +/** columns and relationships of "coin_infos" */ +export type Coin_Infos = { + __typename?: 'coin_infos'; + coin_type: Scalars['String']; + coin_type_hash: Scalars['String']; + creator_address: Scalars['String']; + decimals: Scalars['Int']; + name: Scalars['String']; + supply_aggregator_table_handle?: Maybe; + supply_aggregator_table_key?: Maybe; + symbol: Scalars['String']; + transaction_created_timestamp: Scalars['timestamp']; + transaction_version_created: Scalars['bigint']; +}; + +/** Boolean expression to filter rows from the table "coin_infos". All fields are combined with a logical 'AND'. */ +export type Coin_Infos_Bool_Exp = { + _and?: InputMaybe>; + _not?: InputMaybe; + _or?: InputMaybe>; + coin_type?: InputMaybe; + coin_type_hash?: InputMaybe; + creator_address?: InputMaybe; + decimals?: InputMaybe; + name?: InputMaybe; + supply_aggregator_table_handle?: InputMaybe; + supply_aggregator_table_key?: InputMaybe; + symbol?: InputMaybe; + transaction_created_timestamp?: InputMaybe; + transaction_version_created?: InputMaybe; +}; + +/** Ordering options when selecting data from "coin_infos". 
*/ +export type Coin_Infos_Order_By = { + coin_type?: InputMaybe; + coin_type_hash?: InputMaybe; + creator_address?: InputMaybe; + decimals?: InputMaybe; + name?: InputMaybe; + supply_aggregator_table_handle?: InputMaybe; + supply_aggregator_table_key?: InputMaybe; + symbol?: InputMaybe; + transaction_created_timestamp?: InputMaybe; + transaction_version_created?: InputMaybe; +}; + +/** select columns of table "coin_infos" */ +export enum Coin_Infos_Select_Column { + /** column name */ + CoinType = 'coin_type', + /** column name */ + CoinTypeHash = 'coin_type_hash', + /** column name */ + CreatorAddress = 'creator_address', + /** column name */ + Decimals = 'decimals', + /** column name */ + Name = 'name', + /** column name */ + SupplyAggregatorTableHandle = 'supply_aggregator_table_handle', + /** column name */ + SupplyAggregatorTableKey = 'supply_aggregator_table_key', + /** column name */ + Symbol = 'symbol', + /** column name */ + TransactionCreatedTimestamp = 'transaction_created_timestamp', + /** column name */ + TransactionVersionCreated = 'transaction_version_created' +} + +/** Streaming cursor of the table "coin_infos" */ +export type Coin_Infos_Stream_Cursor_Input = { + /** Stream column input with initial value */ + initial_value: Coin_Infos_Stream_Cursor_Value_Input; + /** cursor ordering */ + ordering?: InputMaybe; +}; + +/** Initial value of the column from where the streaming should start */ +export type Coin_Infos_Stream_Cursor_Value_Input = { + coin_type?: InputMaybe; + coin_type_hash?: InputMaybe; + creator_address?: InputMaybe; + decimals?: InputMaybe; + name?: InputMaybe; + supply_aggregator_table_handle?: InputMaybe; + supply_aggregator_table_key?: InputMaybe; + symbol?: InputMaybe; + transaction_created_timestamp?: InputMaybe; + transaction_version_created?: InputMaybe; +}; + +/** columns and relationships of "coin_supply" */ +export type Coin_Supply = { + __typename?: 'coin_supply'; + coin_type: Scalars['String']; + coin_type_hash: Scalars['String']; + supply: Scalars['numeric']; + transaction_epoch: Scalars['bigint']; + transaction_timestamp: Scalars['timestamp']; + transaction_version: Scalars['bigint']; +}; + +/** Boolean expression to filter rows from the table "coin_supply". All fields are combined with a logical 'AND'. */ +export type Coin_Supply_Bool_Exp = { + _and?: InputMaybe>; + _not?: InputMaybe; + _or?: InputMaybe>; + coin_type?: InputMaybe; + coin_type_hash?: InputMaybe; + supply?: InputMaybe; + transaction_epoch?: InputMaybe; + transaction_timestamp?: InputMaybe; + transaction_version?: InputMaybe; +}; + +/** Ordering options when selecting data from "coin_supply". 
*/ +export type Coin_Supply_Order_By = { + coin_type?: InputMaybe; + coin_type_hash?: InputMaybe; + supply?: InputMaybe; + transaction_epoch?: InputMaybe; + transaction_timestamp?: InputMaybe; + transaction_version?: InputMaybe; +}; + +/** select columns of table "coin_supply" */ +export enum Coin_Supply_Select_Column { + /** column name */ + CoinType = 'coin_type', + /** column name */ + CoinTypeHash = 'coin_type_hash', + /** column name */ + Supply = 'supply', + /** column name */ + TransactionEpoch = 'transaction_epoch', + /** column name */ + TransactionTimestamp = 'transaction_timestamp', + /** column name */ + TransactionVersion = 'transaction_version' +} + +/** Streaming cursor of the table "coin_supply" */ +export type Coin_Supply_Stream_Cursor_Input = { + /** Stream column input with initial value */ + initial_value: Coin_Supply_Stream_Cursor_Value_Input; + /** cursor ordering */ + ordering?: InputMaybe; +}; + +/** Initial value of the column from where the streaming should start */ +export type Coin_Supply_Stream_Cursor_Value_Input = { + coin_type?: InputMaybe; + coin_type_hash?: InputMaybe; + supply?: InputMaybe; + transaction_epoch?: InputMaybe; + transaction_timestamp?: InputMaybe; + transaction_version?: InputMaybe; +}; + +/** columns and relationships of "collection_datas" */ +export type Collection_Datas = { + __typename?: 'collection_datas'; + collection_data_id_hash: Scalars['String']; + collection_name: Scalars['String']; + creator_address: Scalars['String']; + description: Scalars['String']; + description_mutable: Scalars['Boolean']; + maximum: Scalars['numeric']; + maximum_mutable: Scalars['Boolean']; + metadata_uri: Scalars['String']; + supply: Scalars['numeric']; + table_handle: Scalars['String']; + transaction_timestamp: Scalars['timestamp']; + transaction_version: Scalars['bigint']; + uri_mutable: Scalars['Boolean']; +}; + +/** Boolean expression to filter rows from the table "collection_datas". All fields are combined with a logical 'AND'. */ +export type Collection_Datas_Bool_Exp = { + _and?: InputMaybe>; + _not?: InputMaybe; + _or?: InputMaybe>; + collection_data_id_hash?: InputMaybe; + collection_name?: InputMaybe; + creator_address?: InputMaybe; + description?: InputMaybe; + description_mutable?: InputMaybe; + maximum?: InputMaybe; + maximum_mutable?: InputMaybe; + metadata_uri?: InputMaybe; + supply?: InputMaybe; + table_handle?: InputMaybe; + transaction_timestamp?: InputMaybe; + transaction_version?: InputMaybe; + uri_mutable?: InputMaybe; +}; + +/** Ordering options when selecting data from "collection_datas". 
*/ +export type Collection_Datas_Order_By = { + collection_data_id_hash?: InputMaybe; + collection_name?: InputMaybe; + creator_address?: InputMaybe; + description?: InputMaybe; + description_mutable?: InputMaybe; + maximum?: InputMaybe; + maximum_mutable?: InputMaybe; + metadata_uri?: InputMaybe; + supply?: InputMaybe; + table_handle?: InputMaybe; + transaction_timestamp?: InputMaybe; + transaction_version?: InputMaybe; + uri_mutable?: InputMaybe; +}; + +/** select columns of table "collection_datas" */ +export enum Collection_Datas_Select_Column { + /** column name */ + CollectionDataIdHash = 'collection_data_id_hash', + /** column name */ + CollectionName = 'collection_name', + /** column name */ + CreatorAddress = 'creator_address', + /** column name */ + Description = 'description', + /** column name */ + DescriptionMutable = 'description_mutable', + /** column name */ + Maximum = 'maximum', + /** column name */ + MaximumMutable = 'maximum_mutable', + /** column name */ + MetadataUri = 'metadata_uri', + /** column name */ + Supply = 'supply', + /** column name */ + TableHandle = 'table_handle', + /** column name */ + TransactionTimestamp = 'transaction_timestamp', + /** column name */ + TransactionVersion = 'transaction_version', + /** column name */ + UriMutable = 'uri_mutable' +} + +/** Streaming cursor of the table "collection_datas" */ +export type Collection_Datas_Stream_Cursor_Input = { + /** Stream column input with initial value */ + initial_value: Collection_Datas_Stream_Cursor_Value_Input; + /** cursor ordering */ + ordering?: InputMaybe; +}; + +/** Initial value of the column from where the streaming should start */ +export type Collection_Datas_Stream_Cursor_Value_Input = { + collection_data_id_hash?: InputMaybe; + collection_name?: InputMaybe; + creator_address?: InputMaybe; + description?: InputMaybe; + description_mutable?: InputMaybe; + maximum?: InputMaybe; + maximum_mutable?: InputMaybe; + metadata_uri?: InputMaybe; + supply?: InputMaybe; + table_handle?: InputMaybe; + transaction_timestamp?: InputMaybe; + transaction_version?: InputMaybe; + uri_mutable?: InputMaybe; +}; + +/** columns and relationships of "current_ans_lookup" */ +export type Current_Ans_Lookup = { + __typename?: 'current_ans_lookup'; + /** An array relationship */ + all_token_ownerships: Array; + /** An aggregate relationship */ + all_token_ownerships_aggregate: Current_Token_Ownerships_Aggregate; + domain: Scalars['String']; + expiration_timestamp: Scalars['timestamp']; + last_transaction_version: Scalars['bigint']; + registered_address?: Maybe; + subdomain: Scalars['String']; +}; + + +/** columns and relationships of "current_ans_lookup" */ +export type Current_Ans_LookupAll_Token_OwnershipsArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +/** columns and relationships of "current_ans_lookup" */ +export type Current_Ans_LookupAll_Token_Ownerships_AggregateArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + +/** Boolean expression to filter rows from the table "current_ans_lookup". All fields are combined with a logical 'AND'. 
*/ +export type Current_Ans_Lookup_Bool_Exp = { + _and?: InputMaybe>; + _not?: InputMaybe; + _or?: InputMaybe>; + all_token_ownerships?: InputMaybe; + domain?: InputMaybe; + expiration_timestamp?: InputMaybe; + last_transaction_version?: InputMaybe; + registered_address?: InputMaybe; + subdomain?: InputMaybe; +}; + +/** Ordering options when selecting data from "current_ans_lookup". */ +export type Current_Ans_Lookup_Order_By = { + all_token_ownerships_aggregate?: InputMaybe; + domain?: InputMaybe; + expiration_timestamp?: InputMaybe; + last_transaction_version?: InputMaybe; + registered_address?: InputMaybe; + subdomain?: InputMaybe; +}; + +/** select columns of table "current_ans_lookup" */ +export enum Current_Ans_Lookup_Select_Column { + /** column name */ + Domain = 'domain', + /** column name */ + ExpirationTimestamp = 'expiration_timestamp', + /** column name */ + LastTransactionVersion = 'last_transaction_version', + /** column name */ + RegisteredAddress = 'registered_address', + /** column name */ + Subdomain = 'subdomain' +} + +/** Streaming cursor of the table "current_ans_lookup" */ +export type Current_Ans_Lookup_Stream_Cursor_Input = { + /** Stream column input with initial value */ + initial_value: Current_Ans_Lookup_Stream_Cursor_Value_Input; + /** cursor ordering */ + ordering?: InputMaybe; +}; + +/** Initial value of the column from where the streaming should start */ +export type Current_Ans_Lookup_Stream_Cursor_Value_Input = { + domain?: InputMaybe; + expiration_timestamp?: InputMaybe; + last_transaction_version?: InputMaybe; + registered_address?: InputMaybe; + subdomain?: InputMaybe; +}; + +/** columns and relationships of "current_coin_balances" */ +export type Current_Coin_Balances = { + __typename?: 'current_coin_balances'; + amount: Scalars['numeric']; + /** An object relationship */ + coin_info?: Maybe; + coin_type: Scalars['String']; + coin_type_hash: Scalars['String']; + last_transaction_timestamp: Scalars['timestamp']; + last_transaction_version: Scalars['bigint']; + owner_address: Scalars['String']; +}; + +/** Boolean expression to filter rows from the table "current_coin_balances". All fields are combined with a logical 'AND'. */ +export type Current_Coin_Balances_Bool_Exp = { + _and?: InputMaybe>; + _not?: InputMaybe; + _or?: InputMaybe>; + amount?: InputMaybe; + coin_info?: InputMaybe; + coin_type?: InputMaybe; + coin_type_hash?: InputMaybe; + last_transaction_timestamp?: InputMaybe; + last_transaction_version?: InputMaybe; + owner_address?: InputMaybe; +}; + +/** Ordering options when selecting data from "current_coin_balances". 
*/ +export type Current_Coin_Balances_Order_By = { + amount?: InputMaybe; + coin_info?: InputMaybe; + coin_type?: InputMaybe; + coin_type_hash?: InputMaybe; + last_transaction_timestamp?: InputMaybe; + last_transaction_version?: InputMaybe; + owner_address?: InputMaybe; +}; + +/** select columns of table "current_coin_balances" */ +export enum Current_Coin_Balances_Select_Column { + /** column name */ + Amount = 'amount', + /** column name */ + CoinType = 'coin_type', + /** column name */ + CoinTypeHash = 'coin_type_hash', + /** column name */ + LastTransactionTimestamp = 'last_transaction_timestamp', + /** column name */ + LastTransactionVersion = 'last_transaction_version', + /** column name */ + OwnerAddress = 'owner_address' +} + +/** Streaming cursor of the table "current_coin_balances" */ +export type Current_Coin_Balances_Stream_Cursor_Input = { + /** Stream column input with initial value */ + initial_value: Current_Coin_Balances_Stream_Cursor_Value_Input; + /** cursor ordering */ + ordering?: InputMaybe; +}; + +/** Initial value of the column from where the streaming should start */ +export type Current_Coin_Balances_Stream_Cursor_Value_Input = { + amount?: InputMaybe; + coin_type?: InputMaybe; + coin_type_hash?: InputMaybe; + last_transaction_timestamp?: InputMaybe; + last_transaction_version?: InputMaybe; + owner_address?: InputMaybe; +}; + +/** columns and relationships of "current_collection_datas" */ +export type Current_Collection_Datas = { + __typename?: 'current_collection_datas'; + collection_data_id_hash: Scalars['String']; + collection_name: Scalars['String']; + creator_address: Scalars['String']; + description: Scalars['String']; + description_mutable: Scalars['Boolean']; + last_transaction_timestamp: Scalars['timestamp']; + last_transaction_version: Scalars['bigint']; + maximum: Scalars['numeric']; + maximum_mutable: Scalars['Boolean']; + metadata_uri: Scalars['String']; + supply: Scalars['numeric']; + table_handle: Scalars['String']; + uri_mutable: Scalars['Boolean']; +}; + +/** Boolean expression to filter rows from the table "current_collection_datas". All fields are combined with a logical 'AND'. */ +export type Current_Collection_Datas_Bool_Exp = { + _and?: InputMaybe>; + _not?: InputMaybe; + _or?: InputMaybe>; + collection_data_id_hash?: InputMaybe; + collection_name?: InputMaybe; + creator_address?: InputMaybe; + description?: InputMaybe; + description_mutable?: InputMaybe; + last_transaction_timestamp?: InputMaybe; + last_transaction_version?: InputMaybe; + maximum?: InputMaybe; + maximum_mutable?: InputMaybe; + metadata_uri?: InputMaybe; + supply?: InputMaybe; + table_handle?: InputMaybe; + uri_mutable?: InputMaybe; +}; + +/** Ordering options when selecting data from "current_collection_datas". 
*/ +export type Current_Collection_Datas_Order_By = { + collection_data_id_hash?: InputMaybe; + collection_name?: InputMaybe; + creator_address?: InputMaybe; + description?: InputMaybe; + description_mutable?: InputMaybe; + last_transaction_timestamp?: InputMaybe; + last_transaction_version?: InputMaybe; + maximum?: InputMaybe; + maximum_mutable?: InputMaybe; + metadata_uri?: InputMaybe; + supply?: InputMaybe; + table_handle?: InputMaybe; + uri_mutable?: InputMaybe; +}; + +/** select columns of table "current_collection_datas" */ +export enum Current_Collection_Datas_Select_Column { + /** column name */ + CollectionDataIdHash = 'collection_data_id_hash', + /** column name */ + CollectionName = 'collection_name', + /** column name */ + CreatorAddress = 'creator_address', + /** column name */ + Description = 'description', + /** column name */ + DescriptionMutable = 'description_mutable', + /** column name */ + LastTransactionTimestamp = 'last_transaction_timestamp', + /** column name */ + LastTransactionVersion = 'last_transaction_version', + /** column name */ + Maximum = 'maximum', + /** column name */ + MaximumMutable = 'maximum_mutable', + /** column name */ + MetadataUri = 'metadata_uri', + /** column name */ + Supply = 'supply', + /** column name */ + TableHandle = 'table_handle', + /** column name */ + UriMutable = 'uri_mutable' +} + +/** Streaming cursor of the table "current_collection_datas" */ +export type Current_Collection_Datas_Stream_Cursor_Input = { + /** Stream column input with initial value */ + initial_value: Current_Collection_Datas_Stream_Cursor_Value_Input; + /** cursor ordering */ + ordering?: InputMaybe; +}; + +/** Initial value of the column from where the streaming should start */ +export type Current_Collection_Datas_Stream_Cursor_Value_Input = { + collection_data_id_hash?: InputMaybe; + collection_name?: InputMaybe; + creator_address?: InputMaybe; + description?: InputMaybe; + description_mutable?: InputMaybe; + last_transaction_timestamp?: InputMaybe; + last_transaction_version?: InputMaybe; + maximum?: InputMaybe; + maximum_mutable?: InputMaybe; + metadata_uri?: InputMaybe; + supply?: InputMaybe; + table_handle?: InputMaybe; + uri_mutable?: InputMaybe; +}; + +/** columns and relationships of "current_collection_ownership_view" */ +export type Current_Collection_Ownership_View = { + __typename?: 'current_collection_ownership_view'; + collection_data_id_hash?: Maybe; + collection_name?: Maybe; + creator_address?: Maybe; + distinct_tokens?: Maybe; + last_transaction_version?: Maybe; + owner_address?: Maybe; +}; + +/** Boolean expression to filter rows from the table "current_collection_ownership_view". All fields are combined with a logical 'AND'. */ +export type Current_Collection_Ownership_View_Bool_Exp = { + _and?: InputMaybe>; + _not?: InputMaybe; + _or?: InputMaybe>; + collection_data_id_hash?: InputMaybe; + collection_name?: InputMaybe; + creator_address?: InputMaybe; + distinct_tokens?: InputMaybe; + last_transaction_version?: InputMaybe; + owner_address?: InputMaybe; +}; + +/** Ordering options when selecting data from "current_collection_ownership_view". 
*/ +export type Current_Collection_Ownership_View_Order_By = { + collection_data_id_hash?: InputMaybe; + collection_name?: InputMaybe; + creator_address?: InputMaybe; + distinct_tokens?: InputMaybe; + last_transaction_version?: InputMaybe; + owner_address?: InputMaybe; +}; + +/** select columns of table "current_collection_ownership_view" */ +export enum Current_Collection_Ownership_View_Select_Column { + /** column name */ + CollectionDataIdHash = 'collection_data_id_hash', + /** column name */ + CollectionName = 'collection_name', + /** column name */ + CreatorAddress = 'creator_address', + /** column name */ + DistinctTokens = 'distinct_tokens', + /** column name */ + LastTransactionVersion = 'last_transaction_version', + /** column name */ + OwnerAddress = 'owner_address' +} + +/** Streaming cursor of the table "current_collection_ownership_view" */ +export type Current_Collection_Ownership_View_Stream_Cursor_Input = { + /** Stream column input with initial value */ + initial_value: Current_Collection_Ownership_View_Stream_Cursor_Value_Input; + /** cursor ordering */ + ordering?: InputMaybe; +}; + +/** Initial value of the column from where the streaming should start */ +export type Current_Collection_Ownership_View_Stream_Cursor_Value_Input = { + collection_data_id_hash?: InputMaybe; + collection_name?: InputMaybe; + creator_address?: InputMaybe; + distinct_tokens?: InputMaybe; + last_transaction_version?: InputMaybe; + owner_address?: InputMaybe; +}; + +/** columns and relationships of "current_delegator_balances" */ +export type Current_Delegator_Balances = { + __typename?: 'current_delegator_balances'; + amount: Scalars['numeric']; + delegator_address: Scalars['String']; + last_transaction_version: Scalars['bigint']; + pool_address: Scalars['String']; + pool_type: Scalars['String']; + table_handle: Scalars['String']; +}; + +/** aggregated selection of "current_delegator_balances" */ +export type Current_Delegator_Balances_Aggregate = { + __typename?: 'current_delegator_balances_aggregate'; + aggregate?: Maybe; + nodes: Array; +}; + +/** aggregate fields of "current_delegator_balances" */ +export type Current_Delegator_Balances_Aggregate_Fields = { + __typename?: 'current_delegator_balances_aggregate_fields'; + avg?: Maybe; + count: Scalars['Int']; + max?: Maybe; + min?: Maybe; + stddev?: Maybe; + stddev_pop?: Maybe; + stddev_samp?: Maybe; + sum?: Maybe; + var_pop?: Maybe; + var_samp?: Maybe; + variance?: Maybe; +}; + + +/** aggregate fields of "current_delegator_balances" */ +export type Current_Delegator_Balances_Aggregate_FieldsCountArgs = { + columns?: InputMaybe>; + distinct?: InputMaybe; +}; + +/** aggregate avg on columns */ +export type Current_Delegator_Balances_Avg_Fields = { + __typename?: 'current_delegator_balances_avg_fields'; + amount?: Maybe; + last_transaction_version?: Maybe; +}; + +/** Boolean expression to filter rows from the table "current_delegator_balances". All fields are combined with a logical 'AND'. 
*/ +export type Current_Delegator_Balances_Bool_Exp = { + _and?: InputMaybe>; + _not?: InputMaybe; + _or?: InputMaybe>; + amount?: InputMaybe; + delegator_address?: InputMaybe; + last_transaction_version?: InputMaybe; + pool_address?: InputMaybe; + pool_type?: InputMaybe; + table_handle?: InputMaybe; +}; + +/** aggregate max on columns */ +export type Current_Delegator_Balances_Max_Fields = { + __typename?: 'current_delegator_balances_max_fields'; + amount?: Maybe; + delegator_address?: Maybe; + last_transaction_version?: Maybe; + pool_address?: Maybe; + pool_type?: Maybe; + table_handle?: Maybe; +}; + +/** aggregate min on columns */ +export type Current_Delegator_Balances_Min_Fields = { + __typename?: 'current_delegator_balances_min_fields'; + amount?: Maybe; + delegator_address?: Maybe; + last_transaction_version?: Maybe; + pool_address?: Maybe; + pool_type?: Maybe; + table_handle?: Maybe; +}; + +/** Ordering options when selecting data from "current_delegator_balances". */ +export type Current_Delegator_Balances_Order_By = { + amount?: InputMaybe; + delegator_address?: InputMaybe; + last_transaction_version?: InputMaybe; + pool_address?: InputMaybe; + pool_type?: InputMaybe; + table_handle?: InputMaybe; +}; + +/** select columns of table "current_delegator_balances" */ +export enum Current_Delegator_Balances_Select_Column { + /** column name */ + Amount = 'amount', + /** column name */ + DelegatorAddress = 'delegator_address', + /** column name */ + LastTransactionVersion = 'last_transaction_version', + /** column name */ + PoolAddress = 'pool_address', + /** column name */ + PoolType = 'pool_type', + /** column name */ + TableHandle = 'table_handle' +} + +/** aggregate stddev on columns */ +export type Current_Delegator_Balances_Stddev_Fields = { + __typename?: 'current_delegator_balances_stddev_fields'; + amount?: Maybe; + last_transaction_version?: Maybe; +}; + +/** aggregate stddev_pop on columns */ +export type Current_Delegator_Balances_Stddev_Pop_Fields = { + __typename?: 'current_delegator_balances_stddev_pop_fields'; + amount?: Maybe; + last_transaction_version?: Maybe; +}; + +/** aggregate stddev_samp on columns */ +export type Current_Delegator_Balances_Stddev_Samp_Fields = { + __typename?: 'current_delegator_balances_stddev_samp_fields'; + amount?: Maybe; + last_transaction_version?: Maybe; +}; + +/** Streaming cursor of the table "current_delegator_balances" */ +export type Current_Delegator_Balances_Stream_Cursor_Input = { + /** Stream column input with initial value */ + initial_value: Current_Delegator_Balances_Stream_Cursor_Value_Input; + /** cursor ordering */ + ordering?: InputMaybe; +}; + +/** Initial value of the column from where the streaming should start */ +export type Current_Delegator_Balances_Stream_Cursor_Value_Input = { + amount?: InputMaybe; + delegator_address?: InputMaybe; + last_transaction_version?: InputMaybe; + pool_address?: InputMaybe; + pool_type?: InputMaybe; + table_handle?: InputMaybe; +}; + +/** aggregate sum on columns */ +export type Current_Delegator_Balances_Sum_Fields = { + __typename?: 'current_delegator_balances_sum_fields'; + amount?: Maybe; + last_transaction_version?: Maybe; +}; + +/** aggregate var_pop on columns */ +export type Current_Delegator_Balances_Var_Pop_Fields = { + __typename?: 'current_delegator_balances_var_pop_fields'; + amount?: Maybe; + last_transaction_version?: Maybe; +}; + +/** aggregate var_samp on columns */ +export type Current_Delegator_Balances_Var_Samp_Fields = { + __typename?: 
'current_delegator_balances_var_samp_fields'; + amount?: Maybe; + last_transaction_version?: Maybe; +}; + +/** aggregate variance on columns */ +export type Current_Delegator_Balances_Variance_Fields = { + __typename?: 'current_delegator_balances_variance_fields'; + amount?: Maybe; + last_transaction_version?: Maybe; +}; + +/** columns and relationships of "current_staking_pool_voter" */ +export type Current_Staking_Pool_Voter = { + __typename?: 'current_staking_pool_voter'; + last_transaction_version: Scalars['bigint']; + staking_pool_address: Scalars['String']; + voter_address: Scalars['String']; +}; + +/** Boolean expression to filter rows from the table "current_staking_pool_voter". All fields are combined with a logical 'AND'. */ +export type Current_Staking_Pool_Voter_Bool_Exp = { + _and?: InputMaybe>; + _not?: InputMaybe; + _or?: InputMaybe>; + last_transaction_version?: InputMaybe; + staking_pool_address?: InputMaybe; + voter_address?: InputMaybe; +}; + +/** Ordering options when selecting data from "current_staking_pool_voter". */ +export type Current_Staking_Pool_Voter_Order_By = { + last_transaction_version?: InputMaybe; + staking_pool_address?: InputMaybe; + voter_address?: InputMaybe; +}; + +/** select columns of table "current_staking_pool_voter" */ +export enum Current_Staking_Pool_Voter_Select_Column { + /** column name */ + LastTransactionVersion = 'last_transaction_version', + /** column name */ + StakingPoolAddress = 'staking_pool_address', + /** column name */ + VoterAddress = 'voter_address' +} + +/** Streaming cursor of the table "current_staking_pool_voter" */ +export type Current_Staking_Pool_Voter_Stream_Cursor_Input = { + /** Stream column input with initial value */ + initial_value: Current_Staking_Pool_Voter_Stream_Cursor_Value_Input; + /** cursor ordering */ + ordering?: InputMaybe; +}; + +/** Initial value of the column from where the streaming should start */ +export type Current_Staking_Pool_Voter_Stream_Cursor_Value_Input = { + last_transaction_version?: InputMaybe; + staking_pool_address?: InputMaybe; + voter_address?: InputMaybe; +}; + +/** columns and relationships of "current_table_items" */ +export type Current_Table_Items = { + __typename?: 'current_table_items'; + decoded_key: Scalars['jsonb']; + decoded_value?: Maybe; + is_deleted: Scalars['Boolean']; + key: Scalars['String']; + key_hash: Scalars['String']; + last_transaction_version: Scalars['bigint']; + table_handle: Scalars['String']; +}; + + +/** columns and relationships of "current_table_items" */ +export type Current_Table_ItemsDecoded_KeyArgs = { + path?: InputMaybe; +}; + + +/** columns and relationships of "current_table_items" */ +export type Current_Table_ItemsDecoded_ValueArgs = { + path?: InputMaybe; +}; + +/** Boolean expression to filter rows from the table "current_table_items". All fields are combined with a logical 'AND'. */ +export type Current_Table_Items_Bool_Exp = { + _and?: InputMaybe>; + _not?: InputMaybe; + _or?: InputMaybe>; + decoded_key?: InputMaybe; + decoded_value?: InputMaybe; + is_deleted?: InputMaybe; + key?: InputMaybe; + key_hash?: InputMaybe; + last_transaction_version?: InputMaybe; + table_handle?: InputMaybe; +}; + +/** Ordering options when selecting data from "current_table_items". 
*/ +export type Current_Table_Items_Order_By = { + decoded_key?: InputMaybe; + decoded_value?: InputMaybe; + is_deleted?: InputMaybe; + key?: InputMaybe; + key_hash?: InputMaybe; + last_transaction_version?: InputMaybe; + table_handle?: InputMaybe; +}; + +/** select columns of table "current_table_items" */ +export enum Current_Table_Items_Select_Column { + /** column name */ + DecodedKey = 'decoded_key', + /** column name */ + DecodedValue = 'decoded_value', + /** column name */ + IsDeleted = 'is_deleted', + /** column name */ + Key = 'key', + /** column name */ + KeyHash = 'key_hash', + /** column name */ + LastTransactionVersion = 'last_transaction_version', + /** column name */ + TableHandle = 'table_handle' +} + +/** Streaming cursor of the table "current_table_items" */ +export type Current_Table_Items_Stream_Cursor_Input = { + /** Stream column input with initial value */ + initial_value: Current_Table_Items_Stream_Cursor_Value_Input; + /** cursor ordering */ + ordering?: InputMaybe; +}; + +/** Initial value of the column from where the streaming should start */ +export type Current_Table_Items_Stream_Cursor_Value_Input = { + decoded_key?: InputMaybe; + decoded_value?: InputMaybe; + is_deleted?: InputMaybe; + key?: InputMaybe; + key_hash?: InputMaybe; + last_transaction_version?: InputMaybe; + table_handle?: InputMaybe; +}; + +/** columns and relationships of "current_token_datas" */ +export type Current_Token_Datas = { + __typename?: 'current_token_datas'; + collection_data_id_hash: Scalars['String']; + collection_name: Scalars['String']; + creator_address: Scalars['String']; + /** An object relationship */ + current_collection_data?: Maybe; + default_properties: Scalars['jsonb']; + description: Scalars['String']; + description_mutable: Scalars['Boolean']; + largest_property_version: Scalars['numeric']; + last_transaction_timestamp: Scalars['timestamp']; + last_transaction_version: Scalars['bigint']; + maximum: Scalars['numeric']; + maximum_mutable: Scalars['Boolean']; + metadata_uri: Scalars['String']; + name: Scalars['String']; + payee_address: Scalars['String']; + properties_mutable: Scalars['Boolean']; + royalty_mutable: Scalars['Boolean']; + royalty_points_denominator: Scalars['numeric']; + royalty_points_numerator: Scalars['numeric']; + supply: Scalars['numeric']; + token_data_id_hash: Scalars['String']; + uri_mutable: Scalars['Boolean']; +}; + + +/** columns and relationships of "current_token_datas" */ +export type Current_Token_DatasDefault_PropertiesArgs = { + path?: InputMaybe; +}; + +/** Boolean expression to filter rows from the table "current_token_datas". All fields are combined with a logical 'AND'. 
*/ +export type Current_Token_Datas_Bool_Exp = { + _and?: InputMaybe>; + _not?: InputMaybe; + _or?: InputMaybe>; + collection_data_id_hash?: InputMaybe; + collection_name?: InputMaybe; + creator_address?: InputMaybe; + current_collection_data?: InputMaybe; + default_properties?: InputMaybe; + description?: InputMaybe; + description_mutable?: InputMaybe; + largest_property_version?: InputMaybe; + last_transaction_timestamp?: InputMaybe; + last_transaction_version?: InputMaybe; + maximum?: InputMaybe; + maximum_mutable?: InputMaybe; + metadata_uri?: InputMaybe; + name?: InputMaybe; + payee_address?: InputMaybe; + properties_mutable?: InputMaybe; + royalty_mutable?: InputMaybe; + royalty_points_denominator?: InputMaybe; + royalty_points_numerator?: InputMaybe; + supply?: InputMaybe; + token_data_id_hash?: InputMaybe; + uri_mutable?: InputMaybe; +}; + +/** Ordering options when selecting data from "current_token_datas". */ +export type Current_Token_Datas_Order_By = { + collection_data_id_hash?: InputMaybe; + collection_name?: InputMaybe; + creator_address?: InputMaybe; + current_collection_data?: InputMaybe; + default_properties?: InputMaybe; + description?: InputMaybe; + description_mutable?: InputMaybe; + largest_property_version?: InputMaybe; + last_transaction_timestamp?: InputMaybe; + last_transaction_version?: InputMaybe; + maximum?: InputMaybe; + maximum_mutable?: InputMaybe; + metadata_uri?: InputMaybe; + name?: InputMaybe; + payee_address?: InputMaybe; + properties_mutable?: InputMaybe; + royalty_mutable?: InputMaybe; + royalty_points_denominator?: InputMaybe; + royalty_points_numerator?: InputMaybe; + supply?: InputMaybe; + token_data_id_hash?: InputMaybe; + uri_mutable?: InputMaybe; +}; + +/** select columns of table "current_token_datas" */ +export enum Current_Token_Datas_Select_Column { + /** column name */ + CollectionDataIdHash = 'collection_data_id_hash', + /** column name */ + CollectionName = 'collection_name', + /** column name */ + CreatorAddress = 'creator_address', + /** column name */ + DefaultProperties = 'default_properties', + /** column name */ + Description = 'description', + /** column name */ + DescriptionMutable = 'description_mutable', + /** column name */ + LargestPropertyVersion = 'largest_property_version', + /** column name */ + LastTransactionTimestamp = 'last_transaction_timestamp', + /** column name */ + LastTransactionVersion = 'last_transaction_version', + /** column name */ + Maximum = 'maximum', + /** column name */ + MaximumMutable = 'maximum_mutable', + /** column name */ + MetadataUri = 'metadata_uri', + /** column name */ + Name = 'name', + /** column name */ + PayeeAddress = 'payee_address', + /** column name */ + PropertiesMutable = 'properties_mutable', + /** column name */ + RoyaltyMutable = 'royalty_mutable', + /** column name */ + RoyaltyPointsDenominator = 'royalty_points_denominator', + /** column name */ + RoyaltyPointsNumerator = 'royalty_points_numerator', + /** column name */ + Supply = 'supply', + /** column name */ + TokenDataIdHash = 'token_data_id_hash', + /** column name */ + UriMutable = 'uri_mutable' +} + +/** Streaming cursor of the table "current_token_datas" */ +export type Current_Token_Datas_Stream_Cursor_Input = { + /** Stream column input with initial value */ + initial_value: Current_Token_Datas_Stream_Cursor_Value_Input; + /** cursor ordering */ + ordering?: InputMaybe; +}; + +/** Initial value of the column from where the streaming should start */ +export type Current_Token_Datas_Stream_Cursor_Value_Input = { + 
collection_data_id_hash?: InputMaybe; + collection_name?: InputMaybe; + creator_address?: InputMaybe; + default_properties?: InputMaybe; + description?: InputMaybe; + description_mutable?: InputMaybe; + largest_property_version?: InputMaybe; + last_transaction_timestamp?: InputMaybe; + last_transaction_version?: InputMaybe; + maximum?: InputMaybe; + maximum_mutable?: InputMaybe; + metadata_uri?: InputMaybe; + name?: InputMaybe; + payee_address?: InputMaybe; + properties_mutable?: InputMaybe; + royalty_mutable?: InputMaybe; + royalty_points_denominator?: InputMaybe; + royalty_points_numerator?: InputMaybe; + supply?: InputMaybe; + token_data_id_hash?: InputMaybe; + uri_mutable?: InputMaybe; +}; + +/** columns and relationships of "current_token_ownerships" */ +export type Current_Token_Ownerships = { + __typename?: 'current_token_ownerships'; + amount: Scalars['numeric']; + /** An object relationship */ + aptos_name?: Maybe; + collection_data_id_hash: Scalars['String']; + collection_name: Scalars['String']; + creator_address: Scalars['String']; + /** An object relationship */ + current_collection_data?: Maybe; + /** An object relationship */ + current_token_data?: Maybe; + last_transaction_timestamp: Scalars['timestamp']; + last_transaction_version: Scalars['bigint']; + name: Scalars['String']; + owner_address: Scalars['String']; + property_version: Scalars['numeric']; + table_type: Scalars['String']; + token_data_id_hash: Scalars['String']; + token_properties: Scalars['jsonb']; +}; + + +/** columns and relationships of "current_token_ownerships" */ +export type Current_Token_OwnershipsToken_PropertiesArgs = { + path?: InputMaybe; +}; + +/** aggregated selection of "current_token_ownerships" */ +export type Current_Token_Ownerships_Aggregate = { + __typename?: 'current_token_ownerships_aggregate'; + aggregate?: Maybe; + nodes: Array; +}; + +/** aggregate fields of "current_token_ownerships" */ +export type Current_Token_Ownerships_Aggregate_Fields = { + __typename?: 'current_token_ownerships_aggregate_fields'; + avg?: Maybe; + count: Scalars['Int']; + max?: Maybe; + min?: Maybe; + stddev?: Maybe; + stddev_pop?: Maybe; + stddev_samp?: Maybe; + sum?: Maybe; + var_pop?: Maybe; + var_samp?: Maybe; + variance?: Maybe; +}; + + +/** aggregate fields of "current_token_ownerships" */ +export type Current_Token_Ownerships_Aggregate_FieldsCountArgs = { + columns?: InputMaybe>; + distinct?: InputMaybe; +}; + +/** order by aggregate values of table "current_token_ownerships" */ +export type Current_Token_Ownerships_Aggregate_Order_By = { + avg?: InputMaybe; + count?: InputMaybe; + max?: InputMaybe; + min?: InputMaybe; + stddev?: InputMaybe; + stddev_pop?: InputMaybe; + stddev_samp?: InputMaybe; + sum?: InputMaybe; + var_pop?: InputMaybe; + var_samp?: InputMaybe; + variance?: InputMaybe; +}; + +/** aggregate avg on columns */ +export type Current_Token_Ownerships_Avg_Fields = { + __typename?: 'current_token_ownerships_avg_fields'; + amount?: Maybe; + last_transaction_version?: Maybe; + property_version?: Maybe; +}; + +/** order by avg() on columns of table "current_token_ownerships" */ +export type Current_Token_Ownerships_Avg_Order_By = { + amount?: InputMaybe; + last_transaction_version?: InputMaybe; + property_version?: InputMaybe; +}; + +/** Boolean expression to filter rows from the table "current_token_ownerships". All fields are combined with a logical 'AND'. 
*/ +export type Current_Token_Ownerships_Bool_Exp = { + _and?: InputMaybe>; + _not?: InputMaybe; + _or?: InputMaybe>; + amount?: InputMaybe; + aptos_name?: InputMaybe; + collection_data_id_hash?: InputMaybe; + collection_name?: InputMaybe; + creator_address?: InputMaybe; + current_collection_data?: InputMaybe; + current_token_data?: InputMaybe; + last_transaction_timestamp?: InputMaybe; + last_transaction_version?: InputMaybe; + name?: InputMaybe; + owner_address?: InputMaybe; + property_version?: InputMaybe; + table_type?: InputMaybe; + token_data_id_hash?: InputMaybe; + token_properties?: InputMaybe; +}; + +/** aggregate max on columns */ +export type Current_Token_Ownerships_Max_Fields = { + __typename?: 'current_token_ownerships_max_fields'; + amount?: Maybe; + collection_data_id_hash?: Maybe; + collection_name?: Maybe; + creator_address?: Maybe; + last_transaction_timestamp?: Maybe; + last_transaction_version?: Maybe; + name?: Maybe; + owner_address?: Maybe; + property_version?: Maybe; + table_type?: Maybe; + token_data_id_hash?: Maybe; +}; + +/** order by max() on columns of table "current_token_ownerships" */ +export type Current_Token_Ownerships_Max_Order_By = { + amount?: InputMaybe; + collection_data_id_hash?: InputMaybe; + collection_name?: InputMaybe; + creator_address?: InputMaybe; + last_transaction_timestamp?: InputMaybe; + last_transaction_version?: InputMaybe; + name?: InputMaybe; + owner_address?: InputMaybe; + property_version?: InputMaybe; + table_type?: InputMaybe; + token_data_id_hash?: InputMaybe; +}; + +/** aggregate min on columns */ +export type Current_Token_Ownerships_Min_Fields = { + __typename?: 'current_token_ownerships_min_fields'; + amount?: Maybe; + collection_data_id_hash?: Maybe; + collection_name?: Maybe; + creator_address?: Maybe; + last_transaction_timestamp?: Maybe; + last_transaction_version?: Maybe; + name?: Maybe; + owner_address?: Maybe; + property_version?: Maybe; + table_type?: Maybe; + token_data_id_hash?: Maybe; +}; + +/** order by min() on columns of table "current_token_ownerships" */ +export type Current_Token_Ownerships_Min_Order_By = { + amount?: InputMaybe; + collection_data_id_hash?: InputMaybe; + collection_name?: InputMaybe; + creator_address?: InputMaybe; + last_transaction_timestamp?: InputMaybe; + last_transaction_version?: InputMaybe; + name?: InputMaybe; + owner_address?: InputMaybe; + property_version?: InputMaybe; + table_type?: InputMaybe; + token_data_id_hash?: InputMaybe; +}; + +/** Ordering options when selecting data from "current_token_ownerships". 
*/ +export type Current_Token_Ownerships_Order_By = { + amount?: InputMaybe; + aptos_name?: InputMaybe; + collection_data_id_hash?: InputMaybe; + collection_name?: InputMaybe; + creator_address?: InputMaybe; + current_collection_data?: InputMaybe; + current_token_data?: InputMaybe; + last_transaction_timestamp?: InputMaybe; + last_transaction_version?: InputMaybe; + name?: InputMaybe; + owner_address?: InputMaybe; + property_version?: InputMaybe; + table_type?: InputMaybe; + token_data_id_hash?: InputMaybe; + token_properties?: InputMaybe; +}; + +/** select columns of table "current_token_ownerships" */ +export enum Current_Token_Ownerships_Select_Column { + /** column name */ + Amount = 'amount', + /** column name */ + CollectionDataIdHash = 'collection_data_id_hash', + /** column name */ + CollectionName = 'collection_name', + /** column name */ + CreatorAddress = 'creator_address', + /** column name */ + LastTransactionTimestamp = 'last_transaction_timestamp', + /** column name */ + LastTransactionVersion = 'last_transaction_version', + /** column name */ + Name = 'name', + /** column name */ + OwnerAddress = 'owner_address', + /** column name */ + PropertyVersion = 'property_version', + /** column name */ + TableType = 'table_type', + /** column name */ + TokenDataIdHash = 'token_data_id_hash', + /** column name */ + TokenProperties = 'token_properties' +} + +/** aggregate stddev on columns */ +export type Current_Token_Ownerships_Stddev_Fields = { + __typename?: 'current_token_ownerships_stddev_fields'; + amount?: Maybe; + last_transaction_version?: Maybe; + property_version?: Maybe; +}; + +/** order by stddev() on columns of table "current_token_ownerships" */ +export type Current_Token_Ownerships_Stddev_Order_By = { + amount?: InputMaybe; + last_transaction_version?: InputMaybe; + property_version?: InputMaybe; +}; + +/** aggregate stddev_pop on columns */ +export type Current_Token_Ownerships_Stddev_Pop_Fields = { + __typename?: 'current_token_ownerships_stddev_pop_fields'; + amount?: Maybe; + last_transaction_version?: Maybe; + property_version?: Maybe; +}; + +/** order by stddev_pop() on columns of table "current_token_ownerships" */ +export type Current_Token_Ownerships_Stddev_Pop_Order_By = { + amount?: InputMaybe; + last_transaction_version?: InputMaybe; + property_version?: InputMaybe; +}; + +/** aggregate stddev_samp on columns */ +export type Current_Token_Ownerships_Stddev_Samp_Fields = { + __typename?: 'current_token_ownerships_stddev_samp_fields'; + amount?: Maybe; + last_transaction_version?: Maybe; + property_version?: Maybe; +}; + +/** order by stddev_samp() on columns of table "current_token_ownerships" */ +export type Current_Token_Ownerships_Stddev_Samp_Order_By = { + amount?: InputMaybe; + last_transaction_version?: InputMaybe; + property_version?: InputMaybe; +}; + +/** Streaming cursor of the table "current_token_ownerships" */ +export type Current_Token_Ownerships_Stream_Cursor_Input = { + /** Stream column input with initial value */ + initial_value: Current_Token_Ownerships_Stream_Cursor_Value_Input; + /** cursor ordering */ + ordering?: InputMaybe; +}; + +/** Initial value of the column from where the streaming should start */ +export type Current_Token_Ownerships_Stream_Cursor_Value_Input = { + amount?: InputMaybe; + collection_data_id_hash?: InputMaybe; + collection_name?: InputMaybe; + creator_address?: InputMaybe; + last_transaction_timestamp?: InputMaybe; + last_transaction_version?: InputMaybe; + name?: InputMaybe; + owner_address?: InputMaybe; + 
property_version?: InputMaybe; + table_type?: InputMaybe; + token_data_id_hash?: InputMaybe; + token_properties?: InputMaybe; +}; + +/** aggregate sum on columns */ +export type Current_Token_Ownerships_Sum_Fields = { + __typename?: 'current_token_ownerships_sum_fields'; + amount?: Maybe; + last_transaction_version?: Maybe; + property_version?: Maybe; +}; + +/** order by sum() on columns of table "current_token_ownerships" */ +export type Current_Token_Ownerships_Sum_Order_By = { + amount?: InputMaybe; + last_transaction_version?: InputMaybe; + property_version?: InputMaybe; +}; + +/** aggregate var_pop on columns */ +export type Current_Token_Ownerships_Var_Pop_Fields = { + __typename?: 'current_token_ownerships_var_pop_fields'; + amount?: Maybe; + last_transaction_version?: Maybe; + property_version?: Maybe; +}; + +/** order by var_pop() on columns of table "current_token_ownerships" */ +export type Current_Token_Ownerships_Var_Pop_Order_By = { + amount?: InputMaybe; + last_transaction_version?: InputMaybe; + property_version?: InputMaybe; +}; + +/** aggregate var_samp on columns */ +export type Current_Token_Ownerships_Var_Samp_Fields = { + __typename?: 'current_token_ownerships_var_samp_fields'; + amount?: Maybe; + last_transaction_version?: Maybe; + property_version?: Maybe; +}; + +/** order by var_samp() on columns of table "current_token_ownerships" */ +export type Current_Token_Ownerships_Var_Samp_Order_By = { + amount?: InputMaybe; + last_transaction_version?: InputMaybe; + property_version?: InputMaybe; +}; + +/** aggregate variance on columns */ +export type Current_Token_Ownerships_Variance_Fields = { + __typename?: 'current_token_ownerships_variance_fields'; + amount?: Maybe; + last_transaction_version?: Maybe; + property_version?: Maybe; +}; + +/** order by variance() on columns of table "current_token_ownerships" */ +export type Current_Token_Ownerships_Variance_Order_By = { + amount?: InputMaybe; + last_transaction_version?: InputMaybe; + property_version?: InputMaybe; +}; + +/** columns and relationships of "current_token_pending_claims" */ +export type Current_Token_Pending_Claims = { + __typename?: 'current_token_pending_claims'; + amount: Scalars['numeric']; + collection_data_id_hash: Scalars['String']; + collection_name: Scalars['String']; + creator_address: Scalars['String']; + /** An object relationship */ + current_collection_data?: Maybe; + /** An object relationship */ + current_token_data?: Maybe; + from_address: Scalars['String']; + last_transaction_timestamp: Scalars['timestamp']; + last_transaction_version: Scalars['bigint']; + name: Scalars['String']; + property_version: Scalars['numeric']; + table_handle: Scalars['String']; + to_address: Scalars['String']; + /** An object relationship */ + token?: Maybe; + token_data_id_hash: Scalars['String']; +}; + +/** Boolean expression to filter rows from the table "current_token_pending_claims". All fields are combined with a logical 'AND'. 
*/ +export type Current_Token_Pending_Claims_Bool_Exp = { + _and?: InputMaybe>; + _not?: InputMaybe; + _or?: InputMaybe>; + amount?: InputMaybe; + collection_data_id_hash?: InputMaybe; + collection_name?: InputMaybe; + creator_address?: InputMaybe; + current_collection_data?: InputMaybe; + current_token_data?: InputMaybe; + from_address?: InputMaybe; + last_transaction_timestamp?: InputMaybe; + last_transaction_version?: InputMaybe; + name?: InputMaybe; + property_version?: InputMaybe; + table_handle?: InputMaybe; + to_address?: InputMaybe; + token?: InputMaybe; + token_data_id_hash?: InputMaybe; +}; + +/** Ordering options when selecting data from "current_token_pending_claims". */ +export type Current_Token_Pending_Claims_Order_By = { + amount?: InputMaybe; + collection_data_id_hash?: InputMaybe; + collection_name?: InputMaybe; + creator_address?: InputMaybe; + current_collection_data?: InputMaybe; + current_token_data?: InputMaybe; + from_address?: InputMaybe; + last_transaction_timestamp?: InputMaybe; + last_transaction_version?: InputMaybe; + name?: InputMaybe; + property_version?: InputMaybe; + table_handle?: InputMaybe; + to_address?: InputMaybe; + token?: InputMaybe; + token_data_id_hash?: InputMaybe; +}; + +/** select columns of table "current_token_pending_claims" */ +export enum Current_Token_Pending_Claims_Select_Column { + /** column name */ + Amount = 'amount', + /** column name */ + CollectionDataIdHash = 'collection_data_id_hash', + /** column name */ + CollectionName = 'collection_name', + /** column name */ + CreatorAddress = 'creator_address', + /** column name */ + FromAddress = 'from_address', + /** column name */ + LastTransactionTimestamp = 'last_transaction_timestamp', + /** column name */ + LastTransactionVersion = 'last_transaction_version', + /** column name */ + Name = 'name', + /** column name */ + PropertyVersion = 'property_version', + /** column name */ + TableHandle = 'table_handle', + /** column name */ + ToAddress = 'to_address', + /** column name */ + TokenDataIdHash = 'token_data_id_hash' +} + +/** Streaming cursor of the table "current_token_pending_claims" */ +export type Current_Token_Pending_Claims_Stream_Cursor_Input = { + /** Stream column input with initial value */ + initial_value: Current_Token_Pending_Claims_Stream_Cursor_Value_Input; + /** cursor ordering */ + ordering?: InputMaybe; +}; + +/** Initial value of the column from where the streaming should start */ +export type Current_Token_Pending_Claims_Stream_Cursor_Value_Input = { + amount?: InputMaybe; + collection_data_id_hash?: InputMaybe; + collection_name?: InputMaybe; + creator_address?: InputMaybe; + from_address?: InputMaybe; + last_transaction_timestamp?: InputMaybe; + last_transaction_version?: InputMaybe; + name?: InputMaybe; + property_version?: InputMaybe; + table_handle?: InputMaybe; + to_address?: InputMaybe; + token_data_id_hash?: InputMaybe; +}; + +/** ordering argument of a cursor */ +export enum Cursor_Ordering { + /** ascending ordering of the cursor */ + Asc = 'ASC', + /** descending ordering of the cursor */ + Desc = 'DESC' +} + +/** columns and relationships of "delegated_staking_activities" */ +export type Delegated_Staking_Activities = { + __typename?: 'delegated_staking_activities'; + amount: Scalars['numeric']; + delegator_address: Scalars['String']; + event_index: Scalars['bigint']; + event_type: Scalars['String']; + pool_address: Scalars['String']; + transaction_version: Scalars['bigint']; +}; + +/** Boolean expression to filter rows from the table 
"delegated_staking_activities". All fields are combined with a logical 'AND'. */ +export type Delegated_Staking_Activities_Bool_Exp = { + _and?: InputMaybe>; + _not?: InputMaybe; + _or?: InputMaybe>; + amount?: InputMaybe; + delegator_address?: InputMaybe; + event_index?: InputMaybe; + event_type?: InputMaybe; + pool_address?: InputMaybe; + transaction_version?: InputMaybe; +}; + +/** Ordering options when selecting data from "delegated_staking_activities". */ +export type Delegated_Staking_Activities_Order_By = { + amount?: InputMaybe; + delegator_address?: InputMaybe; + event_index?: InputMaybe; + event_type?: InputMaybe; + pool_address?: InputMaybe; + transaction_version?: InputMaybe; +}; + +/** select columns of table "delegated_staking_activities" */ +export enum Delegated_Staking_Activities_Select_Column { + /** column name */ + Amount = 'amount', + /** column name */ + DelegatorAddress = 'delegator_address', + /** column name */ + EventIndex = 'event_index', + /** column name */ + EventType = 'event_type', + /** column name */ + PoolAddress = 'pool_address', + /** column name */ + TransactionVersion = 'transaction_version' +} + +/** Streaming cursor of the table "delegated_staking_activities" */ +export type Delegated_Staking_Activities_Stream_Cursor_Input = { + /** Stream column input with initial value */ + initial_value: Delegated_Staking_Activities_Stream_Cursor_Value_Input; + /** cursor ordering */ + ordering?: InputMaybe; +}; + +/** Initial value of the column from where the streaming should start */ +export type Delegated_Staking_Activities_Stream_Cursor_Value_Input = { + amount?: InputMaybe; + delegator_address?: InputMaybe; + event_index?: InputMaybe; + event_type?: InputMaybe; + pool_address?: InputMaybe; + transaction_version?: InputMaybe; +}; + +/** columns and relationships of "events" */ +export type Events = { + __typename?: 'events'; + account_address: Scalars['String']; + creation_number: Scalars['bigint']; + data: Scalars['jsonb']; + event_index?: Maybe; + sequence_number: Scalars['bigint']; + transaction_block_height: Scalars['bigint']; + transaction_version: Scalars['bigint']; + type: Scalars['String']; +}; + + +/** columns and relationships of "events" */ +export type EventsDataArgs = { + path?: InputMaybe; +}; + +/** Boolean expression to filter rows from the table "events". All fields are combined with a logical 'AND'. */ +export type Events_Bool_Exp = { + _and?: InputMaybe>; + _not?: InputMaybe; + _or?: InputMaybe>; + account_address?: InputMaybe; + creation_number?: InputMaybe; + data?: InputMaybe; + event_index?: InputMaybe; + sequence_number?: InputMaybe; + transaction_block_height?: InputMaybe; + transaction_version?: InputMaybe; + type?: InputMaybe; +}; + +/** Ordering options when selecting data from "events". 
*/
+export type Events_Order_By = {
+  account_address?: InputMaybe<Order_By>;
+  creation_number?: InputMaybe<Order_By>;
+  data?: InputMaybe<Order_By>;
+  event_index?: InputMaybe<Order_By>;
+  sequence_number?: InputMaybe<Order_By>;
+  transaction_block_height?: InputMaybe<Order_By>;
+  transaction_version?: InputMaybe<Order_By>;
+  type?: InputMaybe<Order_By>;
+};
+
+/** select columns of table "events" */
+export enum Events_Select_Column {
+  /** column name */
+  AccountAddress = 'account_address',
+  /** column name */
+  CreationNumber = 'creation_number',
+  /** column name */
+  Data = 'data',
+  /** column name */
+  EventIndex = 'event_index',
+  /** column name */
+  SequenceNumber = 'sequence_number',
+  /** column name */
+  TransactionBlockHeight = 'transaction_block_height',
+  /** column name */
+  TransactionVersion = 'transaction_version',
+  /** column name */
+  Type = 'type'
+}
+
+/** Streaming cursor of the table "events" */
+export type Events_Stream_Cursor_Input = {
+  /** Stream column input with initial value */
+  initial_value: Events_Stream_Cursor_Value_Input;
+  /** cursor ordering */
+  ordering?: InputMaybe<Cursor_Ordering>;
+};
+
+/** Initial value of the column from where the streaming should start */
+export type Events_Stream_Cursor_Value_Input = {
+  account_address?: InputMaybe<Scalars['String']>;
+  creation_number?: InputMaybe<Scalars['bigint']>;
+  data?: InputMaybe<Scalars['jsonb']>;
+  event_index?: InputMaybe<Scalars['bigint']>;
+  sequence_number?: InputMaybe<Scalars['bigint']>;
+  transaction_block_height?: InputMaybe<Scalars['bigint']>;
+  transaction_version?: InputMaybe<Scalars['bigint']>;
+  type?: InputMaybe<Scalars['String']>;
+};
+
+/** columns and relationships of "indexer_status" */
+export type Indexer_Status = {
+  __typename?: 'indexer_status';
+  db: Scalars['String'];
+  is_indexer_up: Scalars['Boolean'];
+};
+
+/** Boolean expression to filter rows from the table "indexer_status". All fields are combined with a logical 'AND'. */
+export type Indexer_Status_Bool_Exp = {
+  _and?: InputMaybe<Array<Indexer_Status_Bool_Exp>>;
+  _not?: InputMaybe<Indexer_Status_Bool_Exp>;
+  _or?: InputMaybe<Array<Indexer_Status_Bool_Exp>>;
+  db?: InputMaybe<String_Comparison_Exp>;
+  is_indexer_up?: InputMaybe<Boolean_Comparison_Exp>;
+};
+
+/** Ordering options when selecting data from "indexer_status". */
+export type Indexer_Status_Order_By = {
+  db?: InputMaybe<Order_By>;
+  is_indexer_up?: InputMaybe<Order_By>;
+};
+
+/** select columns of table "indexer_status" */
+export enum Indexer_Status_Select_Column {
+  /** column name */
+  Db = 'db',
+  /** column name */
+  IsIndexerUp = 'is_indexer_up'
+}
+
+/** Streaming cursor of the table "indexer_status" */
+export type Indexer_Status_Stream_Cursor_Input = {
+  /** Stream column input with initial value */
+  initial_value: Indexer_Status_Stream_Cursor_Value_Input;
+  /** cursor ordering */
+  ordering?: InputMaybe<Cursor_Ordering>;
+};
+
+/** Initial value of the column from where the streaming should start */
+export type Indexer_Status_Stream_Cursor_Value_Input = {
+  db?: InputMaybe<Scalars['String']>;
+  is_indexer_up?: InputMaybe<Scalars['Boolean']>;
+};
+
+export type Jsonb_Cast_Exp = {
+  String?: InputMaybe<String_Comparison_Exp>;
+};
+
+/** Boolean expression to compare columns of type "jsonb". All fields are combined with logical 'AND'. */
+export type Jsonb_Comparison_Exp = {
+  _cast?: InputMaybe<Jsonb_Cast_Exp>;
+  /** is the column contained in the given json value */
+  _contained_in?: InputMaybe<Scalars['jsonb']>;
+  /** does the column contain the given json value at the top level */
+  _contains?: InputMaybe<Scalars['jsonb']>;
+  _eq?: InputMaybe<Scalars['jsonb']>;
+  _gt?: InputMaybe<Scalars['jsonb']>;
+  _gte?: InputMaybe<Scalars['jsonb']>;
+  /** does the string exist as a top-level key in the column */
+  _has_key?: InputMaybe<Scalars['String']>;
+  /** do all of these strings exist as top-level keys in the column */
+  _has_keys_all?: InputMaybe<Array<Scalars['String']>>;
+  /** do any of these strings exist as top-level keys in the column */
+  _has_keys_any?: InputMaybe<Array<Scalars['String']>>;
+  _in?: InputMaybe<Array<Scalars['jsonb']>>;
+  _is_null?: InputMaybe<Scalars['Boolean']>;
+  _lt?: InputMaybe<Scalars['jsonb']>;
+  _lte?: InputMaybe<Scalars['jsonb']>;
+  _neq?: InputMaybe<Scalars['jsonb']>;
+  _nin?: InputMaybe<Array<Scalars['jsonb']>>;
+};
+
+/** columns and relationships of "ledger_infos" */
+export type Ledger_Infos = {
+  __typename?: 'ledger_infos';
+  chain_id: Scalars['bigint'];
+};
+
+/** Boolean expression to filter rows from the table "ledger_infos". All fields are combined with a logical 'AND'. */
+export type Ledger_Infos_Bool_Exp = {
+  _and?: InputMaybe<Array<Ledger_Infos_Bool_Exp>>;
+  _not?: InputMaybe<Ledger_Infos_Bool_Exp>;
+  _or?: InputMaybe<Array<Ledger_Infos_Bool_Exp>>;
+  chain_id?: InputMaybe<Bigint_Comparison_Exp>;
+};
+
+/** Ordering options when selecting data from "ledger_infos". */
+export type Ledger_Infos_Order_By = {
+  chain_id?: InputMaybe<Order_By>;
+};
+
+/** select columns of table "ledger_infos" */
+export enum Ledger_Infos_Select_Column {
+  /** column name */
+  ChainId = 'chain_id'
+}
+
+/** Streaming cursor of the table "ledger_infos" */
+export type Ledger_Infos_Stream_Cursor_Input = {
+  /** Stream column input with initial value */
+  initial_value: Ledger_Infos_Stream_Cursor_Value_Input;
+  /** cursor ordering */
+  ordering?: InputMaybe<Cursor_Ordering>;
+};
+
+/** Initial value of the column from where the streaming should start */
+export type Ledger_Infos_Stream_Cursor_Value_Input = {
+  chain_id?: InputMaybe<Scalars['bigint']>;
+};
+
+/** columns and relationships of "move_resources" */
+export type Move_Resources = {
+  __typename?: 'move_resources';
+  address: Scalars['String'];
+  transaction_version: Scalars['bigint'];
+};
+
+/** aggregated selection of "move_resources" */
+export type Move_Resources_Aggregate = {
+  __typename?: 'move_resources_aggregate';
+  aggregate?: Maybe<Move_Resources_Aggregate_Fields>;
+  nodes: Array<Move_Resources>;
+};
+
+/** aggregate fields of "move_resources" */
+export type Move_Resources_Aggregate_Fields = {
+  __typename?: 'move_resources_aggregate_fields';
+  avg?: Maybe<Move_Resources_Avg_Fields>;
+  count: Scalars['Int'];
+  max?: Maybe<Move_Resources_Max_Fields>;
+  min?: Maybe<Move_Resources_Min_Fields>;
+  stddev?: Maybe<Move_Resources_Stddev_Fields>;
+  stddev_pop?: Maybe<Move_Resources_Stddev_Pop_Fields>;
+  stddev_samp?: Maybe<Move_Resources_Stddev_Samp_Fields>;
+  sum?: Maybe<Move_Resources_Sum_Fields>;
+  var_pop?: Maybe<Move_Resources_Var_Pop_Fields>;
+  var_samp?: Maybe<Move_Resources_Var_Samp_Fields>;
+  variance?: Maybe<Move_Resources_Variance_Fields>;
+};
+
+
+/** aggregate fields of "move_resources" */
+export type Move_Resources_Aggregate_FieldsCountArgs = {
+  columns?: InputMaybe<Array<Move_Resources_Select_Column>>;
+  distinct?: InputMaybe<Scalars['Boolean']>;
+};
+
+/** aggregate avg on columns */
+export type Move_Resources_Avg_Fields = {
+  __typename?: 'move_resources_avg_fields';
+  transaction_version?: Maybe<Scalars['Float']>;
+};
+
+/** Boolean expression to filter rows from the table "move_resources". All fields are combined with a logical 'AND'.
*/ +export type Move_Resources_Bool_Exp = { + _and?: InputMaybe>; + _not?: InputMaybe; + _or?: InputMaybe>; + address?: InputMaybe; + transaction_version?: InputMaybe; +}; + +/** aggregate max on columns */ +export type Move_Resources_Max_Fields = { + __typename?: 'move_resources_max_fields'; + address?: Maybe; + transaction_version?: Maybe; +}; + +/** aggregate min on columns */ +export type Move_Resources_Min_Fields = { + __typename?: 'move_resources_min_fields'; + address?: Maybe; + transaction_version?: Maybe; +}; + +/** Ordering options when selecting data from "move_resources". */ +export type Move_Resources_Order_By = { + address?: InputMaybe; + transaction_version?: InputMaybe; +}; + +/** select columns of table "move_resources" */ +export enum Move_Resources_Select_Column { + /** column name */ + Address = 'address', + /** column name */ + TransactionVersion = 'transaction_version' +} + +/** aggregate stddev on columns */ +export type Move_Resources_Stddev_Fields = { + __typename?: 'move_resources_stddev_fields'; + transaction_version?: Maybe; +}; + +/** aggregate stddev_pop on columns */ +export type Move_Resources_Stddev_Pop_Fields = { + __typename?: 'move_resources_stddev_pop_fields'; + transaction_version?: Maybe; +}; + +/** aggregate stddev_samp on columns */ +export type Move_Resources_Stddev_Samp_Fields = { + __typename?: 'move_resources_stddev_samp_fields'; + transaction_version?: Maybe; +}; + +/** Streaming cursor of the table "move_resources" */ +export type Move_Resources_Stream_Cursor_Input = { + /** Stream column input with initial value */ + initial_value: Move_Resources_Stream_Cursor_Value_Input; + /** cursor ordering */ + ordering?: InputMaybe; +}; + +/** Initial value of the column from where the streaming should start */ +export type Move_Resources_Stream_Cursor_Value_Input = { + address?: InputMaybe; + transaction_version?: InputMaybe; +}; + +/** aggregate sum on columns */ +export type Move_Resources_Sum_Fields = { + __typename?: 'move_resources_sum_fields'; + transaction_version?: Maybe; +}; + +/** aggregate var_pop on columns */ +export type Move_Resources_Var_Pop_Fields = { + __typename?: 'move_resources_var_pop_fields'; + transaction_version?: Maybe; +}; + +/** aggregate var_samp on columns */ +export type Move_Resources_Var_Samp_Fields = { + __typename?: 'move_resources_var_samp_fields'; + transaction_version?: Maybe; +}; + +/** aggregate variance on columns */ +export type Move_Resources_Variance_Fields = { + __typename?: 'move_resources_variance_fields'; + transaction_version?: Maybe; +}; + +/** Boolean expression to compare columns of type "numeric". All fields are combined with logical 'AND'. 
*/
+export type Numeric_Comparison_Exp = {
+  _eq?: InputMaybe<Scalars['numeric']>;
+  _gt?: InputMaybe<Scalars['numeric']>;
+  _gte?: InputMaybe<Scalars['numeric']>;
+  _in?: InputMaybe<Array<Scalars['numeric']>>;
+  _is_null?: InputMaybe<Scalars['Boolean']>;
+  _lt?: InputMaybe<Scalars['numeric']>;
+  _lte?: InputMaybe<Scalars['numeric']>;
+  _neq?: InputMaybe<Scalars['numeric']>;
+  _nin?: InputMaybe<Array<Scalars['numeric']>>;
+};
+
+/** column ordering options */
+export enum Order_By {
+  /** in ascending order, nulls last */
+  Asc = 'asc',
+  /** in ascending order, nulls first */
+  AscNullsFirst = 'asc_nulls_first',
+  /** in ascending order, nulls last */
+  AscNullsLast = 'asc_nulls_last',
+  /** in descending order, nulls first */
+  Desc = 'desc',
+  /** in descending order, nulls first */
+  DescNullsFirst = 'desc_nulls_first',
+  /** in descending order, nulls last */
+  DescNullsLast = 'desc_nulls_last'
+}
+
+/** columns and relationships of "processor_status" */
+export type Processor_Status = {
+  __typename?: 'processor_status';
+  last_success_version: Scalars['bigint'];
+  processor: Scalars['String'];
+};
+
+/** Boolean expression to filter rows from the table "processor_status". All fields are combined with a logical 'AND'. */
+export type Processor_Status_Bool_Exp = {
+  _and?: InputMaybe<Array<Processor_Status_Bool_Exp>>;
+  _not?: InputMaybe<Processor_Status_Bool_Exp>;
+  _or?: InputMaybe<Array<Processor_Status_Bool_Exp>>;
+  last_success_version?: InputMaybe<Bigint_Comparison_Exp>;
+  processor?: InputMaybe<String_Comparison_Exp>;
+};
+
+/** Ordering options when selecting data from "processor_status". */
+export type Processor_Status_Order_By = {
+  last_success_version?: InputMaybe<Order_By>;
+  processor?: InputMaybe<Order_By>;
+};
+
+/** select columns of table "processor_status" */
+export enum Processor_Status_Select_Column {
+  /** column name */
+  LastSuccessVersion = 'last_success_version',
+  /** column name */
+  Processor = 'processor'
+}
+
+/** Streaming cursor of the table "processor_status" */
+export type Processor_Status_Stream_Cursor_Input = {
+  /** Stream column input with initial value */
+  initial_value: Processor_Status_Stream_Cursor_Value_Input;
+  /** cursor ordering */
+  ordering?: InputMaybe<Cursor_Ordering>;
+};
+
+/** Initial value of the column from where the streaming should start */
+export type Processor_Status_Stream_Cursor_Value_Input = {
+  last_success_version?: InputMaybe<Scalars['bigint']>;
+  processor?: InputMaybe<Scalars['String']>;
+};
+
+/** columns and relationships of "proposal_votes" */
+export type Proposal_Votes = {
+  __typename?: 'proposal_votes';
+  num_votes: Scalars['numeric'];
+  proposal_id: Scalars['bigint'];
+  should_pass: Scalars['Boolean'];
+  staking_pool_address: Scalars['String'];
+  transaction_timestamp: Scalars['timestamp'];
+  transaction_version: Scalars['bigint'];
+  voter_address: Scalars['String'];
+};
+
+/** aggregated selection of "proposal_votes" */
+export type Proposal_Votes_Aggregate = {
+  __typename?: 'proposal_votes_aggregate';
+  aggregate?: Maybe<Proposal_Votes_Aggregate_Fields>;
+  nodes: Array<Proposal_Votes>;
+};
+
+/** aggregate fields of "proposal_votes" */
+export type Proposal_Votes_Aggregate_Fields = {
+  __typename?: 'proposal_votes_aggregate_fields';
+  avg?: Maybe<Proposal_Votes_Avg_Fields>;
+  count: Scalars['Int'];
+  max?: Maybe<Proposal_Votes_Max_Fields>;
+  min?: Maybe<Proposal_Votes_Min_Fields>;
+  stddev?: Maybe<Proposal_Votes_Stddev_Fields>;
+  stddev_pop?: Maybe<Proposal_Votes_Stddev_Pop_Fields>;
+  stddev_samp?: Maybe<Proposal_Votes_Stddev_Samp_Fields>;
+  sum?: Maybe<Proposal_Votes_Sum_Fields>;
+  var_pop?: Maybe<Proposal_Votes_Var_Pop_Fields>;
+  var_samp?: Maybe<Proposal_Votes_Var_Samp_Fields>;
+  variance?: Maybe<Proposal_Votes_Variance_Fields>;
+};
+
+
+/** aggregate fields of "proposal_votes" */
+export type Proposal_Votes_Aggregate_FieldsCountArgs = {
+  columns?: InputMaybe<Array<Proposal_Votes_Select_Column>>;
+  distinct?: InputMaybe<Scalars['Boolean']>;
+};
+
+/** aggregate avg on columns */
+export type Proposal_Votes_Avg_Fields = {
+  __typename?: 'proposal_votes_avg_fields';
+  num_votes?: Maybe<Scalars['Float']>;
+  proposal_id?: Maybe<Scalars['Float']>;
+  transaction_version?: Maybe<Scalars['Float']>;
+};
+
+/** Boolean expression to filter rows from the table "proposal_votes". All fields are combined with a logical 'AND'.
*/ +export type Proposal_Votes_Bool_Exp = { + _and?: InputMaybe>; + _not?: InputMaybe; + _or?: InputMaybe>; + num_votes?: InputMaybe; + proposal_id?: InputMaybe; + should_pass?: InputMaybe; + staking_pool_address?: InputMaybe; + transaction_timestamp?: InputMaybe; + transaction_version?: InputMaybe; + voter_address?: InputMaybe; +}; + +/** aggregate max on columns */ +export type Proposal_Votes_Max_Fields = { + __typename?: 'proposal_votes_max_fields'; + num_votes?: Maybe; + proposal_id?: Maybe; + staking_pool_address?: Maybe; + transaction_timestamp?: Maybe; + transaction_version?: Maybe; + voter_address?: Maybe; +}; + +/** aggregate min on columns */ +export type Proposal_Votes_Min_Fields = { + __typename?: 'proposal_votes_min_fields'; + num_votes?: Maybe; + proposal_id?: Maybe; + staking_pool_address?: Maybe; + transaction_timestamp?: Maybe; + transaction_version?: Maybe; + voter_address?: Maybe; +}; + +/** Ordering options when selecting data from "proposal_votes". */ +export type Proposal_Votes_Order_By = { + num_votes?: InputMaybe; + proposal_id?: InputMaybe; + should_pass?: InputMaybe; + staking_pool_address?: InputMaybe; + transaction_timestamp?: InputMaybe; + transaction_version?: InputMaybe; + voter_address?: InputMaybe; +}; + +/** select columns of table "proposal_votes" */ +export enum Proposal_Votes_Select_Column { + /** column name */ + NumVotes = 'num_votes', + /** column name */ + ProposalId = 'proposal_id', + /** column name */ + ShouldPass = 'should_pass', + /** column name */ + StakingPoolAddress = 'staking_pool_address', + /** column name */ + TransactionTimestamp = 'transaction_timestamp', + /** column name */ + TransactionVersion = 'transaction_version', + /** column name */ + VoterAddress = 'voter_address' +} + +/** aggregate stddev on columns */ +export type Proposal_Votes_Stddev_Fields = { + __typename?: 'proposal_votes_stddev_fields'; + num_votes?: Maybe; + proposal_id?: Maybe; + transaction_version?: Maybe; +}; + +/** aggregate stddev_pop on columns */ +export type Proposal_Votes_Stddev_Pop_Fields = { + __typename?: 'proposal_votes_stddev_pop_fields'; + num_votes?: Maybe; + proposal_id?: Maybe; + transaction_version?: Maybe; +}; + +/** aggregate stddev_samp on columns */ +export type Proposal_Votes_Stddev_Samp_Fields = { + __typename?: 'proposal_votes_stddev_samp_fields'; + num_votes?: Maybe; + proposal_id?: Maybe; + transaction_version?: Maybe; +}; + +/** Streaming cursor of the table "proposal_votes" */ +export type Proposal_Votes_Stream_Cursor_Input = { + /** Stream column input with initial value */ + initial_value: Proposal_Votes_Stream_Cursor_Value_Input; + /** cursor ordering */ + ordering?: InputMaybe; +}; + +/** Initial value of the column from where the streaming should start */ +export type Proposal_Votes_Stream_Cursor_Value_Input = { + num_votes?: InputMaybe; + proposal_id?: InputMaybe; + should_pass?: InputMaybe; + staking_pool_address?: InputMaybe; + transaction_timestamp?: InputMaybe; + transaction_version?: InputMaybe; + voter_address?: InputMaybe; +}; + +/** aggregate sum on columns */ +export type Proposal_Votes_Sum_Fields = { + __typename?: 'proposal_votes_sum_fields'; + num_votes?: Maybe; + proposal_id?: Maybe; + transaction_version?: Maybe; +}; + +/** aggregate var_pop on columns */ +export type Proposal_Votes_Var_Pop_Fields = { + __typename?: 'proposal_votes_var_pop_fields'; + num_votes?: Maybe; + proposal_id?: Maybe; + transaction_version?: Maybe; +}; + +/** aggregate var_samp on columns */ +export type Proposal_Votes_Var_Samp_Fields = { + 
__typename?: 'proposal_votes_var_samp_fields'; + num_votes?: Maybe; + proposal_id?: Maybe; + transaction_version?: Maybe; +}; + +/** aggregate variance on columns */ +export type Proposal_Votes_Variance_Fields = { + __typename?: 'proposal_votes_variance_fields'; + num_votes?: Maybe; + proposal_id?: Maybe; + transaction_version?: Maybe; +}; + +export type Query_Root = { + __typename?: 'query_root'; + /** fetch data from the table: "address_version_from_events" */ + address_version_from_events: Array; + coin_activities: Array; + /** fetch data from the table: "coin_activities" using primary key columns */ + coin_activities_by_pk?: Maybe; + /** fetch data from the table: "coin_balances" */ + coin_balances: Array; + /** fetch data from the table: "coin_balances" using primary key columns */ + coin_balances_by_pk?: Maybe; + /** fetch data from the table: "coin_infos" */ + coin_infos: Array; + /** fetch data from the table: "coin_infos" using primary key columns */ + coin_infos_by_pk?: Maybe; + /** fetch data from the table: "coin_supply" */ + coin_supply: Array; + /** fetch data from the table: "coin_supply" using primary key columns */ + coin_supply_by_pk?: Maybe; + /** fetch data from the table: "collection_datas" */ + collection_datas: Array; + /** fetch data from the table: "collection_datas" using primary key columns */ + collection_datas_by_pk?: Maybe; + /** fetch data from the table: "current_ans_lookup" */ + current_ans_lookup: Array; + /** fetch data from the table: "current_ans_lookup" using primary key columns */ + current_ans_lookup_by_pk?: Maybe; + /** fetch data from the table: "current_coin_balances" */ + current_coin_balances: Array; + /** fetch data from the table: "current_coin_balances" using primary key columns */ + current_coin_balances_by_pk?: Maybe; + /** fetch data from the table: "current_collection_datas" */ + current_collection_datas: Array; + /** fetch data from the table: "current_collection_datas" using primary key columns */ + current_collection_datas_by_pk?: Maybe; + /** fetch data from the table: "current_collection_ownership_view" */ + current_collection_ownership_view: Array; + /** fetch data from the table: "current_delegator_balances" */ + current_delegator_balances: Array; + /** fetch aggregated fields from the table: "current_delegator_balances" */ + current_delegator_balances_aggregate: Current_Delegator_Balances_Aggregate; + /** fetch data from the table: "current_delegator_balances" using primary key columns */ + current_delegator_balances_by_pk?: Maybe; + /** fetch data from the table: "current_staking_pool_voter" */ + current_staking_pool_voter: Array; + /** fetch data from the table: "current_staking_pool_voter" using primary key columns */ + current_staking_pool_voter_by_pk?: Maybe; + /** fetch data from the table: "current_table_items" */ + current_table_items: Array; + /** fetch data from the table: "current_table_items" using primary key columns */ + current_table_items_by_pk?: Maybe; + /** fetch data from the table: "current_token_datas" */ + current_token_datas: Array; + /** fetch data from the table: "current_token_datas" using primary key columns */ + current_token_datas_by_pk?: Maybe; + /** fetch data from the table: "current_token_ownerships" */ + current_token_ownerships: Array; + /** fetch aggregated fields from the table: "current_token_ownerships" */ + current_token_ownerships_aggregate: Current_Token_Ownerships_Aggregate; + /** fetch data from the table: "current_token_ownerships" using primary key columns */ + 
current_token_ownerships_by_pk?: Maybe; + /** fetch data from the table: "current_token_pending_claims" */ + current_token_pending_claims: Array; + /** fetch data from the table: "current_token_pending_claims" using primary key columns */ + current_token_pending_claims_by_pk?: Maybe; + /** fetch data from the table: "delegated_staking_activities" */ + delegated_staking_activities: Array; + /** fetch data from the table: "delegated_staking_activities" using primary key columns */ + delegated_staking_activities_by_pk?: Maybe; + /** fetch data from the table: "events" */ + events: Array; + /** fetch data from the table: "events" using primary key columns */ + events_by_pk?: Maybe; + /** fetch data from the table: "indexer_status" */ + indexer_status: Array; + /** fetch data from the table: "indexer_status" using primary key columns */ + indexer_status_by_pk?: Maybe; + /** fetch data from the table: "ledger_infos" */ + ledger_infos: Array; + /** fetch data from the table: "ledger_infos" using primary key columns */ + ledger_infos_by_pk?: Maybe; + /** fetch data from the table: "move_resources" */ + move_resources: Array; + /** fetch aggregated fields from the table: "move_resources" */ + move_resources_aggregate: Move_Resources_Aggregate; + /** fetch data from the table: "processor_status" */ + processor_status: Array; + /** fetch data from the table: "processor_status" using primary key columns */ + processor_status_by_pk?: Maybe; + /** fetch data from the table: "proposal_votes" */ + proposal_votes: Array; + /** fetch aggregated fields from the table: "proposal_votes" */ + proposal_votes_aggregate: Proposal_Votes_Aggregate; + /** fetch data from the table: "proposal_votes" using primary key columns */ + proposal_votes_by_pk?: Maybe; + /** fetch data from the table: "table_items" */ + table_items: Array; + /** fetch data from the table: "table_items" using primary key columns */ + table_items_by_pk?: Maybe; + /** fetch data from the table: "table_metadatas" */ + table_metadatas: Array; + /** fetch data from the table: "table_metadatas" using primary key columns */ + table_metadatas_by_pk?: Maybe; + token_activities: Array; + token_activities_aggregate: Token_Activities_Aggregate; + /** fetch data from the table: "token_activities" using primary key columns */ + token_activities_by_pk?: Maybe; + /** fetch data from the table: "token_datas" */ + token_datas: Array; + /** fetch data from the table: "token_datas" using primary key columns */ + token_datas_by_pk?: Maybe; + /** fetch data from the table: "token_ownerships" */ + token_ownerships: Array; + /** fetch data from the table: "token_ownerships" using primary key columns */ + token_ownerships_by_pk?: Maybe; + /** fetch data from the table: "tokens" */ + tokens: Array; + /** fetch data from the table: "tokens" using primary key columns */ + tokens_by_pk?: Maybe; + /** fetch data from the table: "user_transactions" */ + user_transactions: Array; + /** fetch data from the table: "user_transactions" using primary key columns */ + user_transactions_by_pk?: Maybe; +}; + + +export type Query_RootAddress_Version_From_EventsArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Query_RootCoin_ActivitiesArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Query_RootCoin_Activities_By_PkArgs = { + event_account_address: Scalars['String']; + 
event_creation_number: Scalars['bigint']; + event_sequence_number: Scalars['bigint']; + transaction_version: Scalars['bigint']; +}; + + +export type Query_RootCoin_BalancesArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Query_RootCoin_Balances_By_PkArgs = { + coin_type_hash: Scalars['String']; + owner_address: Scalars['String']; + transaction_version: Scalars['bigint']; +}; + + +export type Query_RootCoin_InfosArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Query_RootCoin_Infos_By_PkArgs = { + coin_type_hash: Scalars['String']; +}; + + +export type Query_RootCoin_SupplyArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Query_RootCoin_Supply_By_PkArgs = { + coin_type_hash: Scalars['String']; + transaction_version: Scalars['bigint']; +}; + + +export type Query_RootCollection_DatasArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Query_RootCollection_Datas_By_PkArgs = { + collection_data_id_hash: Scalars['String']; + transaction_version: Scalars['bigint']; +}; + + +export type Query_RootCurrent_Ans_LookupArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Query_RootCurrent_Ans_Lookup_By_PkArgs = { + domain: Scalars['String']; + subdomain: Scalars['String']; +}; + + +export type Query_RootCurrent_Coin_BalancesArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Query_RootCurrent_Coin_Balances_By_PkArgs = { + coin_type_hash: Scalars['String']; + owner_address: Scalars['String']; +}; + + +export type Query_RootCurrent_Collection_DatasArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Query_RootCurrent_Collection_Datas_By_PkArgs = { + collection_data_id_hash: Scalars['String']; +}; + + +export type Query_RootCurrent_Collection_Ownership_ViewArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Query_RootCurrent_Delegator_BalancesArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Query_RootCurrent_Delegator_Balances_AggregateArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Query_RootCurrent_Delegator_Balances_By_PkArgs = { + delegator_address: Scalars['String']; + pool_address: Scalars['String']; + pool_type: Scalars['String']; +}; + + +export type Query_RootCurrent_Staking_Pool_VoterArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Query_RootCurrent_Staking_Pool_Voter_By_PkArgs = { + staking_pool_address: Scalars['String']; +}; + + +export type Query_RootCurrent_Table_ItemsArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; 
+}; + + +export type Query_RootCurrent_Table_Items_By_PkArgs = { + key_hash: Scalars['String']; + table_handle: Scalars['String']; +}; + + +export type Query_RootCurrent_Token_DatasArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Query_RootCurrent_Token_Datas_By_PkArgs = { + token_data_id_hash: Scalars['String']; +}; + + +export type Query_RootCurrent_Token_OwnershipsArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Query_RootCurrent_Token_Ownerships_AggregateArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Query_RootCurrent_Token_Ownerships_By_PkArgs = { + owner_address: Scalars['String']; + property_version: Scalars['numeric']; + token_data_id_hash: Scalars['String']; +}; + + +export type Query_RootCurrent_Token_Pending_ClaimsArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Query_RootCurrent_Token_Pending_Claims_By_PkArgs = { + from_address: Scalars['String']; + property_version: Scalars['numeric']; + to_address: Scalars['String']; + token_data_id_hash: Scalars['String']; +}; + + +export type Query_RootDelegated_Staking_ActivitiesArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Query_RootDelegated_Staking_Activities_By_PkArgs = { + event_index: Scalars['bigint']; + transaction_version: Scalars['bigint']; +}; + + +export type Query_RootEventsArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Query_RootEvents_By_PkArgs = { + account_address: Scalars['String']; + creation_number: Scalars['bigint']; + sequence_number: Scalars['bigint']; +}; + + +export type Query_RootIndexer_StatusArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Query_RootIndexer_Status_By_PkArgs = { + db: Scalars['String']; +}; + + +export type Query_RootLedger_InfosArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Query_RootLedger_Infos_By_PkArgs = { + chain_id: Scalars['bigint']; +}; + + +export type Query_RootMove_ResourcesArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Query_RootMove_Resources_AggregateArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Query_RootProcessor_StatusArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Query_RootProcessor_Status_By_PkArgs = { + processor: Scalars['String']; +}; + + +export type Query_RootProposal_VotesArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Query_RootProposal_Votes_AggregateArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + 
order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Query_RootProposal_Votes_By_PkArgs = { + proposal_id: Scalars['bigint']; + transaction_version: Scalars['bigint']; + voter_address: Scalars['String']; +}; + + +export type Query_RootTable_ItemsArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Query_RootTable_Items_By_PkArgs = { + transaction_version: Scalars['bigint']; + write_set_change_index: Scalars['bigint']; +}; + + +export type Query_RootTable_MetadatasArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Query_RootTable_Metadatas_By_PkArgs = { + handle: Scalars['String']; +}; + + +export type Query_RootToken_ActivitiesArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Query_RootToken_Activities_AggregateArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Query_RootToken_Activities_By_PkArgs = { + event_account_address: Scalars['String']; + event_creation_number: Scalars['bigint']; + event_sequence_number: Scalars['bigint']; + transaction_version: Scalars['bigint']; +}; + + +export type Query_RootToken_DatasArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Query_RootToken_Datas_By_PkArgs = { + token_data_id_hash: Scalars['String']; + transaction_version: Scalars['bigint']; +}; + + +export type Query_RootToken_OwnershipsArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Query_RootToken_Ownerships_By_PkArgs = { + property_version: Scalars['numeric']; + table_handle: Scalars['String']; + token_data_id_hash: Scalars['String']; + transaction_version: Scalars['bigint']; +}; + + +export type Query_RootTokensArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Query_RootTokens_By_PkArgs = { + property_version: Scalars['numeric']; + token_data_id_hash: Scalars['String']; + transaction_version: Scalars['bigint']; +}; + + +export type Query_RootUser_TransactionsArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Query_RootUser_Transactions_By_PkArgs = { + version: Scalars['bigint']; +}; + +export type Subscription_Root = { + __typename?: 'subscription_root'; + /** fetch data from the table: "address_version_from_events" */ + address_version_from_events: Array; + /** fetch data from the table in a streaming manner : "address_version_from_events" */ + address_version_from_events_stream: Array; + coin_activities: Array; + /** fetch data from the table: "coin_activities" using primary key columns */ + coin_activities_by_pk?: Maybe; + /** fetch data from the table in a streaming manner : "coin_activities" */ + coin_activities_stream: Array; + /** fetch data from the table: "coin_balances" */ + coin_balances: Array; + /** fetch data from the table: "coin_balances" using primary key columns */ + coin_balances_by_pk?: Maybe; + /** fetch data from the table in a streaming manner : "coin_balances" */ 
+ coin_balances_stream: Array; + /** fetch data from the table: "coin_infos" */ + coin_infos: Array; + /** fetch data from the table: "coin_infos" using primary key columns */ + coin_infos_by_pk?: Maybe; + /** fetch data from the table in a streaming manner : "coin_infos" */ + coin_infos_stream: Array; + /** fetch data from the table: "coin_supply" */ + coin_supply: Array; + /** fetch data from the table: "coin_supply" using primary key columns */ + coin_supply_by_pk?: Maybe; + /** fetch data from the table in a streaming manner : "coin_supply" */ + coin_supply_stream: Array; + /** fetch data from the table: "collection_datas" */ + collection_datas: Array; + /** fetch data from the table: "collection_datas" using primary key columns */ + collection_datas_by_pk?: Maybe; + /** fetch data from the table in a streaming manner : "collection_datas" */ + collection_datas_stream: Array; + /** fetch data from the table: "current_ans_lookup" */ + current_ans_lookup: Array; + /** fetch data from the table: "current_ans_lookup" using primary key columns */ + current_ans_lookup_by_pk?: Maybe; + /** fetch data from the table in a streaming manner : "current_ans_lookup" */ + current_ans_lookup_stream: Array; + /** fetch data from the table: "current_coin_balances" */ + current_coin_balances: Array; + /** fetch data from the table: "current_coin_balances" using primary key columns */ + current_coin_balances_by_pk?: Maybe; + /** fetch data from the table in a streaming manner : "current_coin_balances" */ + current_coin_balances_stream: Array; + /** fetch data from the table: "current_collection_datas" */ + current_collection_datas: Array; + /** fetch data from the table: "current_collection_datas" using primary key columns */ + current_collection_datas_by_pk?: Maybe; + /** fetch data from the table in a streaming manner : "current_collection_datas" */ + current_collection_datas_stream: Array; + /** fetch data from the table: "current_collection_ownership_view" */ + current_collection_ownership_view: Array; + /** fetch data from the table in a streaming manner : "current_collection_ownership_view" */ + current_collection_ownership_view_stream: Array; + /** fetch data from the table: "current_delegator_balances" */ + current_delegator_balances: Array; + /** fetch aggregated fields from the table: "current_delegator_balances" */ + current_delegator_balances_aggregate: Current_Delegator_Balances_Aggregate; + /** fetch data from the table: "current_delegator_balances" using primary key columns */ + current_delegator_balances_by_pk?: Maybe; + /** fetch data from the table in a streaming manner : "current_delegator_balances" */ + current_delegator_balances_stream: Array; + /** fetch data from the table: "current_staking_pool_voter" */ + current_staking_pool_voter: Array; + /** fetch data from the table: "current_staking_pool_voter" using primary key columns */ + current_staking_pool_voter_by_pk?: Maybe; + /** fetch data from the table in a streaming manner : "current_staking_pool_voter" */ + current_staking_pool_voter_stream: Array; + /** fetch data from the table: "current_table_items" */ + current_table_items: Array; + /** fetch data from the table: "current_table_items" using primary key columns */ + current_table_items_by_pk?: Maybe; + /** fetch data from the table in a streaming manner : "current_table_items" */ + current_table_items_stream: Array; + /** fetch data from the table: "current_token_datas" */ + current_token_datas: Array; + /** fetch data from the table: "current_token_datas" using primary key 
columns */ + current_token_datas_by_pk?: Maybe; + /** fetch data from the table in a streaming manner : "current_token_datas" */ + current_token_datas_stream: Array; + /** fetch data from the table: "current_token_ownerships" */ + current_token_ownerships: Array; + /** fetch aggregated fields from the table: "current_token_ownerships" */ + current_token_ownerships_aggregate: Current_Token_Ownerships_Aggregate; + /** fetch data from the table: "current_token_ownerships" using primary key columns */ + current_token_ownerships_by_pk?: Maybe; + /** fetch data from the table in a streaming manner : "current_token_ownerships" */ + current_token_ownerships_stream: Array; + /** fetch data from the table: "current_token_pending_claims" */ + current_token_pending_claims: Array; + /** fetch data from the table: "current_token_pending_claims" using primary key columns */ + current_token_pending_claims_by_pk?: Maybe; + /** fetch data from the table in a streaming manner : "current_token_pending_claims" */ + current_token_pending_claims_stream: Array; + /** fetch data from the table: "delegated_staking_activities" */ + delegated_staking_activities: Array; + /** fetch data from the table: "delegated_staking_activities" using primary key columns */ + delegated_staking_activities_by_pk?: Maybe; + /** fetch data from the table in a streaming manner : "delegated_staking_activities" */ + delegated_staking_activities_stream: Array; + /** fetch data from the table: "events" */ + events: Array; + /** fetch data from the table: "events" using primary key columns */ + events_by_pk?: Maybe; + /** fetch data from the table in a streaming manner : "events" */ + events_stream: Array; + /** fetch data from the table: "indexer_status" */ + indexer_status: Array; + /** fetch data from the table: "indexer_status" using primary key columns */ + indexer_status_by_pk?: Maybe; + /** fetch data from the table in a streaming manner : "indexer_status" */ + indexer_status_stream: Array; + /** fetch data from the table: "ledger_infos" */ + ledger_infos: Array; + /** fetch data from the table: "ledger_infos" using primary key columns */ + ledger_infos_by_pk?: Maybe; + /** fetch data from the table in a streaming manner : "ledger_infos" */ + ledger_infos_stream: Array; + /** fetch data from the table: "move_resources" */ + move_resources: Array; + /** fetch aggregated fields from the table: "move_resources" */ + move_resources_aggregate: Move_Resources_Aggregate; + /** fetch data from the table in a streaming manner : "move_resources" */ + move_resources_stream: Array; + /** fetch data from the table: "processor_status" */ + processor_status: Array; + /** fetch data from the table: "processor_status" using primary key columns */ + processor_status_by_pk?: Maybe; + /** fetch data from the table in a streaming manner : "processor_status" */ + processor_status_stream: Array; + /** fetch data from the table: "proposal_votes" */ + proposal_votes: Array; + /** fetch aggregated fields from the table: "proposal_votes" */ + proposal_votes_aggregate: Proposal_Votes_Aggregate; + /** fetch data from the table: "proposal_votes" using primary key columns */ + proposal_votes_by_pk?: Maybe; + /** fetch data from the table in a streaming manner : "proposal_votes" */ + proposal_votes_stream: Array; + /** fetch data from the table: "table_items" */ + table_items: Array; + /** fetch data from the table: "table_items" using primary key columns */ + table_items_by_pk?: Maybe; + /** fetch data from the table in a streaming manner : "table_items" */ + 
table_items_stream: Array; + /** fetch data from the table: "table_metadatas" */ + table_metadatas: Array; + /** fetch data from the table: "table_metadatas" using primary key columns */ + table_metadatas_by_pk?: Maybe; + /** fetch data from the table in a streaming manner : "table_metadatas" */ + table_metadatas_stream: Array; + token_activities: Array; + token_activities_aggregate: Token_Activities_Aggregate; + /** fetch data from the table: "token_activities" using primary key columns */ + token_activities_by_pk?: Maybe; + /** fetch data from the table in a streaming manner : "token_activities" */ + token_activities_stream: Array; + /** fetch data from the table: "token_datas" */ + token_datas: Array; + /** fetch data from the table: "token_datas" using primary key columns */ + token_datas_by_pk?: Maybe; + /** fetch data from the table in a streaming manner : "token_datas" */ + token_datas_stream: Array; + /** fetch data from the table: "token_ownerships" */ + token_ownerships: Array; + /** fetch data from the table: "token_ownerships" using primary key columns */ + token_ownerships_by_pk?: Maybe; + /** fetch data from the table in a streaming manner : "token_ownerships" */ + token_ownerships_stream: Array; + /** fetch data from the table: "tokens" */ + tokens: Array; + /** fetch data from the table: "tokens" using primary key columns */ + tokens_by_pk?: Maybe; + /** fetch data from the table in a streaming manner : "tokens" */ + tokens_stream: Array; + /** fetch data from the table: "user_transactions" */ + user_transactions: Array; + /** fetch data from the table: "user_transactions" using primary key columns */ + user_transactions_by_pk?: Maybe; + /** fetch data from the table in a streaming manner : "user_transactions" */ + user_transactions_stream: Array; +}; + + +export type Subscription_RootAddress_Version_From_EventsArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Subscription_RootAddress_Version_From_Events_StreamArgs = { + batch_size: Scalars['Int']; + cursor: Array>; + where?: InputMaybe; +}; + + +export type Subscription_RootCoin_ActivitiesArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Subscription_RootCoin_Activities_By_PkArgs = { + event_account_address: Scalars['String']; + event_creation_number: Scalars['bigint']; + event_sequence_number: Scalars['bigint']; + transaction_version: Scalars['bigint']; +}; + + +export type Subscription_RootCoin_Activities_StreamArgs = { + batch_size: Scalars['Int']; + cursor: Array>; + where?: InputMaybe; +}; + + +export type Subscription_RootCoin_BalancesArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Subscription_RootCoin_Balances_By_PkArgs = { + coin_type_hash: Scalars['String']; + owner_address: Scalars['String']; + transaction_version: Scalars['bigint']; +}; + + +export type Subscription_RootCoin_Balances_StreamArgs = { + batch_size: Scalars['Int']; + cursor: Array>; + where?: InputMaybe; +}; + + +export type Subscription_RootCoin_InfosArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Subscription_RootCoin_Infos_By_PkArgs = { + coin_type_hash: Scalars['String']; +}; + + +export type Subscription_RootCoin_Infos_StreamArgs = { + 
batch_size: Scalars['Int']; + cursor: Array>; + where?: InputMaybe; +}; + + +export type Subscription_RootCoin_SupplyArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Subscription_RootCoin_Supply_By_PkArgs = { + coin_type_hash: Scalars['String']; + transaction_version: Scalars['bigint']; +}; + + +export type Subscription_RootCoin_Supply_StreamArgs = { + batch_size: Scalars['Int']; + cursor: Array>; + where?: InputMaybe; +}; + + +export type Subscription_RootCollection_DatasArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Subscription_RootCollection_Datas_By_PkArgs = { + collection_data_id_hash: Scalars['String']; + transaction_version: Scalars['bigint']; +}; + + +export type Subscription_RootCollection_Datas_StreamArgs = { + batch_size: Scalars['Int']; + cursor: Array>; + where?: InputMaybe; +}; + + +export type Subscription_RootCurrent_Ans_LookupArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Subscription_RootCurrent_Ans_Lookup_By_PkArgs = { + domain: Scalars['String']; + subdomain: Scalars['String']; +}; + + +export type Subscription_RootCurrent_Ans_Lookup_StreamArgs = { + batch_size: Scalars['Int']; + cursor: Array>; + where?: InputMaybe; +}; + + +export type Subscription_RootCurrent_Coin_BalancesArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Subscription_RootCurrent_Coin_Balances_By_PkArgs = { + coin_type_hash: Scalars['String']; + owner_address: Scalars['String']; +}; + + +export type Subscription_RootCurrent_Coin_Balances_StreamArgs = { + batch_size: Scalars['Int']; + cursor: Array>; + where?: InputMaybe; +}; + + +export type Subscription_RootCurrent_Collection_DatasArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Subscription_RootCurrent_Collection_Datas_By_PkArgs = { + collection_data_id_hash: Scalars['String']; +}; + + +export type Subscription_RootCurrent_Collection_Datas_StreamArgs = { + batch_size: Scalars['Int']; + cursor: Array>; + where?: InputMaybe; +}; + + +export type Subscription_RootCurrent_Collection_Ownership_ViewArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Subscription_RootCurrent_Collection_Ownership_View_StreamArgs = { + batch_size: Scalars['Int']; + cursor: Array>; + where?: InputMaybe; +}; + + +export type Subscription_RootCurrent_Delegator_BalancesArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Subscription_RootCurrent_Delegator_Balances_AggregateArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Subscription_RootCurrent_Delegator_Balances_By_PkArgs = { + delegator_address: Scalars['String']; + pool_address: Scalars['String']; + pool_type: Scalars['String']; +}; + + +export type Subscription_RootCurrent_Delegator_Balances_StreamArgs = { + batch_size: Scalars['Int']; + cursor: Array>; + where?: InputMaybe; +}; + + +export type 
Subscription_RootCurrent_Staking_Pool_VoterArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Subscription_RootCurrent_Staking_Pool_Voter_By_PkArgs = { + staking_pool_address: Scalars['String']; +}; + + +export type Subscription_RootCurrent_Staking_Pool_Voter_StreamArgs = { + batch_size: Scalars['Int']; + cursor: Array>; + where?: InputMaybe; +}; + + +export type Subscription_RootCurrent_Table_ItemsArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Subscription_RootCurrent_Table_Items_By_PkArgs = { + key_hash: Scalars['String']; + table_handle: Scalars['String']; +}; + + +export type Subscription_RootCurrent_Table_Items_StreamArgs = { + batch_size: Scalars['Int']; + cursor: Array>; + where?: InputMaybe; +}; + + +export type Subscription_RootCurrent_Token_DatasArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Subscription_RootCurrent_Token_Datas_By_PkArgs = { + token_data_id_hash: Scalars['String']; +}; + + +export type Subscription_RootCurrent_Token_Datas_StreamArgs = { + batch_size: Scalars['Int']; + cursor: Array>; + where?: InputMaybe; +}; + + +export type Subscription_RootCurrent_Token_OwnershipsArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Subscription_RootCurrent_Token_Ownerships_AggregateArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Subscription_RootCurrent_Token_Ownerships_By_PkArgs = { + owner_address: Scalars['String']; + property_version: Scalars['numeric']; + token_data_id_hash: Scalars['String']; +}; + + +export type Subscription_RootCurrent_Token_Ownerships_StreamArgs = { + batch_size: Scalars['Int']; + cursor: Array>; + where?: InputMaybe; +}; + + +export type Subscription_RootCurrent_Token_Pending_ClaimsArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Subscription_RootCurrent_Token_Pending_Claims_By_PkArgs = { + from_address: Scalars['String']; + property_version: Scalars['numeric']; + to_address: Scalars['String']; + token_data_id_hash: Scalars['String']; +}; + + +export type Subscription_RootCurrent_Token_Pending_Claims_StreamArgs = { + batch_size: Scalars['Int']; + cursor: Array>; + where?: InputMaybe; +}; + + +export type Subscription_RootDelegated_Staking_ActivitiesArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Subscription_RootDelegated_Staking_Activities_By_PkArgs = { + event_index: Scalars['bigint']; + transaction_version: Scalars['bigint']; +}; + + +export type Subscription_RootDelegated_Staking_Activities_StreamArgs = { + batch_size: Scalars['Int']; + cursor: Array>; + where?: InputMaybe; +}; + + +export type Subscription_RootEventsArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Subscription_RootEvents_By_PkArgs = { + account_address: Scalars['String']; + creation_number: Scalars['bigint']; + sequence_number: Scalars['bigint']; +}; + + +export type 
Subscription_RootEvents_StreamArgs = { + batch_size: Scalars['Int']; + cursor: Array>; + where?: InputMaybe; +}; + + +export type Subscription_RootIndexer_StatusArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Subscription_RootIndexer_Status_By_PkArgs = { + db: Scalars['String']; +}; + + +export type Subscription_RootIndexer_Status_StreamArgs = { + batch_size: Scalars['Int']; + cursor: Array>; + where?: InputMaybe; +}; + + +export type Subscription_RootLedger_InfosArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Subscription_RootLedger_Infos_By_PkArgs = { + chain_id: Scalars['bigint']; +}; + + +export type Subscription_RootLedger_Infos_StreamArgs = { + batch_size: Scalars['Int']; + cursor: Array>; + where?: InputMaybe; +}; + + +export type Subscription_RootMove_ResourcesArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Subscription_RootMove_Resources_AggregateArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Subscription_RootMove_Resources_StreamArgs = { + batch_size: Scalars['Int']; + cursor: Array>; + where?: InputMaybe; +}; + + +export type Subscription_RootProcessor_StatusArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Subscription_RootProcessor_Status_By_PkArgs = { + processor: Scalars['String']; +}; + + +export type Subscription_RootProcessor_Status_StreamArgs = { + batch_size: Scalars['Int']; + cursor: Array>; + where?: InputMaybe; +}; + + +export type Subscription_RootProposal_VotesArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Subscription_RootProposal_Votes_AggregateArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Subscription_RootProposal_Votes_By_PkArgs = { + proposal_id: Scalars['bigint']; + transaction_version: Scalars['bigint']; + voter_address: Scalars['String']; +}; + + +export type Subscription_RootProposal_Votes_StreamArgs = { + batch_size: Scalars['Int']; + cursor: Array>; + where?: InputMaybe; +}; + + +export type Subscription_RootTable_ItemsArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Subscription_RootTable_Items_By_PkArgs = { + transaction_version: Scalars['bigint']; + write_set_change_index: Scalars['bigint']; +}; + + +export type Subscription_RootTable_Items_StreamArgs = { + batch_size: Scalars['Int']; + cursor: Array>; + where?: InputMaybe; +}; + + +export type Subscription_RootTable_MetadatasArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Subscription_RootTable_Metadatas_By_PkArgs = { + handle: Scalars['String']; +}; + + +export type Subscription_RootTable_Metadatas_StreamArgs = { + batch_size: Scalars['Int']; + cursor: Array>; + where?: InputMaybe; +}; + + +export type Subscription_RootToken_ActivitiesArgs = { + distinct_on?: InputMaybe>; + 
limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Subscription_RootToken_Activities_AggregateArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Subscription_RootToken_Activities_By_PkArgs = { + event_account_address: Scalars['String']; + event_creation_number: Scalars['bigint']; + event_sequence_number: Scalars['bigint']; + transaction_version: Scalars['bigint']; +}; + + +export type Subscription_RootToken_Activities_StreamArgs = { + batch_size: Scalars['Int']; + cursor: Array>; + where?: InputMaybe; +}; + + +export type Subscription_RootToken_DatasArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Subscription_RootToken_Datas_By_PkArgs = { + token_data_id_hash: Scalars['String']; + transaction_version: Scalars['bigint']; +}; + + +export type Subscription_RootToken_Datas_StreamArgs = { + batch_size: Scalars['Int']; + cursor: Array>; + where?: InputMaybe; +}; + + +export type Subscription_RootToken_OwnershipsArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Subscription_RootToken_Ownerships_By_PkArgs = { + property_version: Scalars['numeric']; + table_handle: Scalars['String']; + token_data_id_hash: Scalars['String']; + transaction_version: Scalars['bigint']; +}; + + +export type Subscription_RootToken_Ownerships_StreamArgs = { + batch_size: Scalars['Int']; + cursor: Array>; + where?: InputMaybe; +}; + + +export type Subscription_RootTokensArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Subscription_RootTokens_By_PkArgs = { + property_version: Scalars['numeric']; + token_data_id_hash: Scalars['String']; + transaction_version: Scalars['bigint']; +}; + + +export type Subscription_RootTokens_StreamArgs = { + batch_size: Scalars['Int']; + cursor: Array>; + where?: InputMaybe; +}; + + +export type Subscription_RootUser_TransactionsArgs = { + distinct_on?: InputMaybe>; + limit?: InputMaybe; + offset?: InputMaybe; + order_by?: InputMaybe>; + where?: InputMaybe; +}; + + +export type Subscription_RootUser_Transactions_By_PkArgs = { + version: Scalars['bigint']; +}; + + +export type Subscription_RootUser_Transactions_StreamArgs = { + batch_size: Scalars['Int']; + cursor: Array>; + where?: InputMaybe; +}; + +/** columns and relationships of "table_items" */ +export type Table_Items = { + __typename?: 'table_items'; + decoded_key: Scalars['jsonb']; + decoded_value?: Maybe; + key: Scalars['String']; + table_handle: Scalars['String']; + transaction_version: Scalars['bigint']; + write_set_change_index: Scalars['bigint']; +}; + + +/** columns and relationships of "table_items" */ +export type Table_ItemsDecoded_KeyArgs = { + path?: InputMaybe; +}; + + +/** columns and relationships of "table_items" */ +export type Table_ItemsDecoded_ValueArgs = { + path?: InputMaybe; +}; + +/** Boolean expression to filter rows from the table "table_items". All fields are combined with a logical 'AND'. 
*/
+export type Table_Items_Bool_Exp = {
+  _and?: InputMaybe<Array<Table_Items_Bool_Exp>>;
+  _not?: InputMaybe<Table_Items_Bool_Exp>;
+  _or?: InputMaybe<Array<Table_Items_Bool_Exp>>;
+  decoded_key?: InputMaybe<Jsonb_Comparison_Exp>;
+  decoded_value?: InputMaybe<Jsonb_Comparison_Exp>;
+  key?: InputMaybe<String_Comparison_Exp>;
+  table_handle?: InputMaybe<String_Comparison_Exp>;
+  transaction_version?: InputMaybe<Bigint_Comparison_Exp>;
+  write_set_change_index?: InputMaybe<Bigint_Comparison_Exp>;
+};
+
+/** Ordering options when selecting data from "table_items". */
+export type Table_Items_Order_By = {
+  decoded_key?: InputMaybe<Order_By>;
+  decoded_value?: InputMaybe<Order_By>;
+  key?: InputMaybe<Order_By>;
+  table_handle?: InputMaybe<Order_By>;
+  transaction_version?: InputMaybe<Order_By>;
+  write_set_change_index?: InputMaybe<Order_By>;
+};
+
+/** select columns of table "table_items" */
+export enum Table_Items_Select_Column {
+  /** column name */
+  DecodedKey = 'decoded_key',
+  /** column name */
+  DecodedValue = 'decoded_value',
+  /** column name */
+  Key = 'key',
+  /** column name */
+  TableHandle = 'table_handle',
+  /** column name */
+  TransactionVersion = 'transaction_version',
+  /** column name */
+  WriteSetChangeIndex = 'write_set_change_index'
+}
+
+/** Streaming cursor of the table "table_items" */
+export type Table_Items_Stream_Cursor_Input = {
+  /** Stream column input with initial value */
+  initial_value: Table_Items_Stream_Cursor_Value_Input;
+  /** cursor ordering */
+  ordering?: InputMaybe<Cursor_Ordering>;
+};
+
+/** Initial value of the column from where the streaming should start */
+export type Table_Items_Stream_Cursor_Value_Input = {
+  decoded_key?: InputMaybe<Scalars['jsonb']>;
+  decoded_value?: InputMaybe<Scalars['jsonb']>;
+  key?: InputMaybe<Scalars['String']>;
+  table_handle?: InputMaybe<Scalars['String']>;
+  transaction_version?: InputMaybe<Scalars['bigint']>;
+  write_set_change_index?: InputMaybe<Scalars['bigint']>;
+};
+
+/** columns and relationships of "table_metadatas" */
+export type Table_Metadatas = {
+  __typename?: 'table_metadatas';
+  handle: Scalars['String'];
+  key_type: Scalars['String'];
+  value_type: Scalars['String'];
+};
+
+/** Boolean expression to filter rows from the table "table_metadatas". All fields are combined with a logical 'AND'. */
+export type Table_Metadatas_Bool_Exp = {
+  _and?: InputMaybe<Array<Table_Metadatas_Bool_Exp>>;
+  _not?: InputMaybe<Table_Metadatas_Bool_Exp>;
+  _or?: InputMaybe<Array<Table_Metadatas_Bool_Exp>>;
+  handle?: InputMaybe<String_Comparison_Exp>;
+  key_type?: InputMaybe<String_Comparison_Exp>;
+  value_type?: InputMaybe<String_Comparison_Exp>;
+};
+
+/** Ordering options when selecting data from "table_metadatas". */
+export type Table_Metadatas_Order_By = {
+  handle?: InputMaybe<Order_By>;
+  key_type?: InputMaybe<Order_By>;
+  value_type?: InputMaybe<Order_By>;
+};
+
+/** select columns of table "table_metadatas" */
+export enum Table_Metadatas_Select_Column {
+  /** column name */
+  Handle = 'handle',
+  /** column name */
+  KeyType = 'key_type',
+  /** column name */
+  ValueType = 'value_type'
+}
+
+/** Streaming cursor of the table "table_metadatas" */
+export type Table_Metadatas_Stream_Cursor_Input = {
+  /** Stream column input with initial value */
+  initial_value: Table_Metadatas_Stream_Cursor_Value_Input;
+  /** cursor ordering */
+  ordering?: InputMaybe<Cursor_Ordering>;
+};
+
+/** Initial value of the column from where the streaming should start */
+export type Table_Metadatas_Stream_Cursor_Value_Input = {
+  handle?: InputMaybe<Scalars['String']>;
+  key_type?: InputMaybe<Scalars['String']>;
+  value_type?: InputMaybe<Scalars['String']>;
+};
+
+/** Boolean expression to compare columns of type "timestamp". All fields are combined with logical 'AND'.
*/
+export type Timestamp_Comparison_Exp = {
+  _eq?: InputMaybe<Scalars['timestamp']>;
+  _gt?: InputMaybe<Scalars['timestamp']>;
+  _gte?: InputMaybe<Scalars['timestamp']>;
+  _in?: InputMaybe<Array<Scalars['timestamp']>>;
+  _is_null?: InputMaybe<Scalars['Boolean']>;
+  _lt?: InputMaybe<Scalars['timestamp']>;
+  _lte?: InputMaybe<Scalars['timestamp']>;
+  _neq?: InputMaybe<Scalars['timestamp']>;
+  _nin?: InputMaybe<Array<Scalars['timestamp']>>;
+};
+
+/** columns and relationships of "token_activities" */
+export type Token_Activities = {
+  __typename?: 'token_activities';
+  coin_amount?: Maybe<Scalars['numeric']>;
+  coin_type?: Maybe<Scalars['String']>;
+  collection_data_id_hash: Scalars['String'];
+  collection_name: Scalars['String'];
+  creator_address: Scalars['String'];
+  /** An object relationship */
+  current_token_data?: Maybe<Current_Token_Datas>;
+  event_account_address: Scalars['String'];
+  event_creation_number: Scalars['bigint'];
+  event_index?: Maybe<Scalars['bigint']>;
+  event_sequence_number: Scalars['bigint'];
+  from_address?: Maybe<Scalars['String']>;
+  name: Scalars['String'];
+  property_version: Scalars['numeric'];
+  to_address?: Maybe<Scalars['String']>;
+  token_amount: Scalars['numeric'];
+  token_data_id_hash: Scalars['String'];
+  transaction_timestamp: Scalars['timestamp'];
+  transaction_version: Scalars['bigint'];
+  transfer_type: Scalars['String'];
+};
+
+/** aggregated selection of "token_activities" */
+export type Token_Activities_Aggregate = {
+  __typename?: 'token_activities_aggregate';
+  aggregate?: Maybe<Token_Activities_Aggregate_Fields>;
+  nodes: Array<Token_Activities>;
+};
+
+/** aggregate fields of "token_activities" */
+export type Token_Activities_Aggregate_Fields = {
+  __typename?: 'token_activities_aggregate_fields';
+  avg?: Maybe<Token_Activities_Avg_Fields>;
+  count: Scalars['Int'];
+  max?: Maybe<Token_Activities_Max_Fields>;
+  min?: Maybe<Token_Activities_Min_Fields>;
+  stddev?: Maybe<Token_Activities_Stddev_Fields>;
+  stddev_pop?: Maybe<Token_Activities_Stddev_Pop_Fields>;
+  stddev_samp?: Maybe<Token_Activities_Stddev_Samp_Fields>;
+  sum?: Maybe<Token_Activities_Sum_Fields>;
+  var_pop?: Maybe<Token_Activities_Var_Pop_Fields>;
+  var_samp?: Maybe<Token_Activities_Var_Samp_Fields>;
+  variance?: Maybe<Token_Activities_Variance_Fields>;
+};
+
+
+/** aggregate fields of "token_activities" */
+export type Token_Activities_Aggregate_FieldsCountArgs = {
+  columns?: InputMaybe<Array<Token_Activities_Select_Column>>;
+  distinct?: InputMaybe<Scalars['Boolean']>;
+};
+
+/** aggregate avg on columns */
+export type Token_Activities_Avg_Fields = {
+  __typename?: 'token_activities_avg_fields';
+  coin_amount?: Maybe<Scalars['Float']>;
+  event_creation_number?: Maybe<Scalars['Float']>;
+  event_index?: Maybe<Scalars['Float']>;
+  event_sequence_number?: Maybe<Scalars['Float']>;
+  property_version?: Maybe<Scalars['Float']>;
+  token_amount?: Maybe<Scalars['Float']>;
+  transaction_version?: Maybe<Scalars['Float']>;
+};
+
+/** Boolean expression to filter rows from the table "token_activities". All fields are combined with a logical 'AND'.
*/ +export type Token_Activities_Bool_Exp = { + _and?: InputMaybe>; + _not?: InputMaybe; + _or?: InputMaybe>; + coin_amount?: InputMaybe; + coin_type?: InputMaybe; + collection_data_id_hash?: InputMaybe; + collection_name?: InputMaybe; + creator_address?: InputMaybe; + current_token_data?: InputMaybe; + event_account_address?: InputMaybe; + event_creation_number?: InputMaybe; + event_index?: InputMaybe; + event_sequence_number?: InputMaybe; + from_address?: InputMaybe; + name?: InputMaybe; + property_version?: InputMaybe; + to_address?: InputMaybe; + token_amount?: InputMaybe; + token_data_id_hash?: InputMaybe; + transaction_timestamp?: InputMaybe; + transaction_version?: InputMaybe; + transfer_type?: InputMaybe; +}; + +/** aggregate max on columns */ +export type Token_Activities_Max_Fields = { + __typename?: 'token_activities_max_fields'; + coin_amount?: Maybe; + coin_type?: Maybe; + collection_data_id_hash?: Maybe; + collection_name?: Maybe; + creator_address?: Maybe; + event_account_address?: Maybe; + event_creation_number?: Maybe; + event_index?: Maybe; + event_sequence_number?: Maybe; + from_address?: Maybe; + name?: Maybe; + property_version?: Maybe; + to_address?: Maybe; + token_amount?: Maybe; + token_data_id_hash?: Maybe; + transaction_timestamp?: Maybe; + transaction_version?: Maybe; + transfer_type?: Maybe; +}; + +/** aggregate min on columns */ +export type Token_Activities_Min_Fields = { + __typename?: 'token_activities_min_fields'; + coin_amount?: Maybe; + coin_type?: Maybe; + collection_data_id_hash?: Maybe; + collection_name?: Maybe; + creator_address?: Maybe; + event_account_address?: Maybe; + event_creation_number?: Maybe; + event_index?: Maybe; + event_sequence_number?: Maybe; + from_address?: Maybe; + name?: Maybe; + property_version?: Maybe; + to_address?: Maybe; + token_amount?: Maybe; + token_data_id_hash?: Maybe; + transaction_timestamp?: Maybe; + transaction_version?: Maybe; + transfer_type?: Maybe; +}; + +/** Ordering options when selecting data from "token_activities". 
*/ +export type Token_Activities_Order_By = { + coin_amount?: InputMaybe; + coin_type?: InputMaybe; + collection_data_id_hash?: InputMaybe; + collection_name?: InputMaybe; + creator_address?: InputMaybe; + current_token_data?: InputMaybe; + event_account_address?: InputMaybe; + event_creation_number?: InputMaybe; + event_index?: InputMaybe; + event_sequence_number?: InputMaybe; + from_address?: InputMaybe; + name?: InputMaybe; + property_version?: InputMaybe; + to_address?: InputMaybe; + token_amount?: InputMaybe; + token_data_id_hash?: InputMaybe; + transaction_timestamp?: InputMaybe; + transaction_version?: InputMaybe; + transfer_type?: InputMaybe; +}; + +/** select columns of table "token_activities" */ +export enum Token_Activities_Select_Column { + /** column name */ + CoinAmount = 'coin_amount', + /** column name */ + CoinType = 'coin_type', + /** column name */ + CollectionDataIdHash = 'collection_data_id_hash', + /** column name */ + CollectionName = 'collection_name', + /** column name */ + CreatorAddress = 'creator_address', + /** column name */ + EventAccountAddress = 'event_account_address', + /** column name */ + EventCreationNumber = 'event_creation_number', + /** column name */ + EventIndex = 'event_index', + /** column name */ + EventSequenceNumber = 'event_sequence_number', + /** column name */ + FromAddress = 'from_address', + /** column name */ + Name = 'name', + /** column name */ + PropertyVersion = 'property_version', + /** column name */ + ToAddress = 'to_address', + /** column name */ + TokenAmount = 'token_amount', + /** column name */ + TokenDataIdHash = 'token_data_id_hash', + /** column name */ + TransactionTimestamp = 'transaction_timestamp', + /** column name */ + TransactionVersion = 'transaction_version', + /** column name */ + TransferType = 'transfer_type' +} + +/** aggregate stddev on columns */ +export type Token_Activities_Stddev_Fields = { + __typename?: 'token_activities_stddev_fields'; + coin_amount?: Maybe; + event_creation_number?: Maybe; + event_index?: Maybe; + event_sequence_number?: Maybe; + property_version?: Maybe; + token_amount?: Maybe; + transaction_version?: Maybe; +}; + +/** aggregate stddev_pop on columns */ +export type Token_Activities_Stddev_Pop_Fields = { + __typename?: 'token_activities_stddev_pop_fields'; + coin_amount?: Maybe; + event_creation_number?: Maybe; + event_index?: Maybe; + event_sequence_number?: Maybe; + property_version?: Maybe; + token_amount?: Maybe; + transaction_version?: Maybe; +}; + +/** aggregate stddev_samp on columns */ +export type Token_Activities_Stddev_Samp_Fields = { + __typename?: 'token_activities_stddev_samp_fields'; + coin_amount?: Maybe; + event_creation_number?: Maybe; + event_index?: Maybe; + event_sequence_number?: Maybe; + property_version?: Maybe; + token_amount?: Maybe; + transaction_version?: Maybe; +}; + +/** Streaming cursor of the table "token_activities" */ +export type Token_Activities_Stream_Cursor_Input = { + /** Stream column input with initial value */ + initial_value: Token_Activities_Stream_Cursor_Value_Input; + /** cursor ordering */ + ordering?: InputMaybe; +}; + +/** Initial value of the column from where the streaming should start */ +export type Token_Activities_Stream_Cursor_Value_Input = { + coin_amount?: InputMaybe; + coin_type?: InputMaybe; + collection_data_id_hash?: InputMaybe; + collection_name?: InputMaybe; + creator_address?: InputMaybe; + event_account_address?: InputMaybe; + event_creation_number?: InputMaybe; + event_index?: InputMaybe; + event_sequence_number?: 
InputMaybe; + from_address?: InputMaybe; + name?: InputMaybe; + property_version?: InputMaybe; + to_address?: InputMaybe; + token_amount?: InputMaybe; + token_data_id_hash?: InputMaybe; + transaction_timestamp?: InputMaybe; + transaction_version?: InputMaybe; + transfer_type?: InputMaybe; +}; + +/** aggregate sum on columns */ +export type Token_Activities_Sum_Fields = { + __typename?: 'token_activities_sum_fields'; + coin_amount?: Maybe; + event_creation_number?: Maybe; + event_index?: Maybe; + event_sequence_number?: Maybe; + property_version?: Maybe; + token_amount?: Maybe; + transaction_version?: Maybe; +}; + +/** aggregate var_pop on columns */ +export type Token_Activities_Var_Pop_Fields = { + __typename?: 'token_activities_var_pop_fields'; + coin_amount?: Maybe; + event_creation_number?: Maybe; + event_index?: Maybe; + event_sequence_number?: Maybe; + property_version?: Maybe; + token_amount?: Maybe; + transaction_version?: Maybe; +}; + +/** aggregate var_samp on columns */ +export type Token_Activities_Var_Samp_Fields = { + __typename?: 'token_activities_var_samp_fields'; + coin_amount?: Maybe; + event_creation_number?: Maybe; + event_index?: Maybe; + event_sequence_number?: Maybe; + property_version?: Maybe; + token_amount?: Maybe; + transaction_version?: Maybe; +}; + +/** aggregate variance on columns */ +export type Token_Activities_Variance_Fields = { + __typename?: 'token_activities_variance_fields'; + coin_amount?: Maybe; + event_creation_number?: Maybe; + event_index?: Maybe; + event_sequence_number?: Maybe; + property_version?: Maybe; + token_amount?: Maybe; + transaction_version?: Maybe; +}; + +/** columns and relationships of "token_datas" */ +export type Token_Datas = { + __typename?: 'token_datas'; + collection_data_id_hash: Scalars['String']; + collection_name: Scalars['String']; + creator_address: Scalars['String']; + default_properties: Scalars['jsonb']; + description: Scalars['String']; + description_mutable: Scalars['Boolean']; + largest_property_version: Scalars['numeric']; + maximum: Scalars['numeric']; + maximum_mutable: Scalars['Boolean']; + metadata_uri: Scalars['String']; + name: Scalars['String']; + payee_address: Scalars['String']; + properties_mutable: Scalars['Boolean']; + royalty_mutable: Scalars['Boolean']; + royalty_points_denominator: Scalars['numeric']; + royalty_points_numerator: Scalars['numeric']; + supply: Scalars['numeric']; + token_data_id_hash: Scalars['String']; + transaction_timestamp: Scalars['timestamp']; + transaction_version: Scalars['bigint']; + uri_mutable: Scalars['Boolean']; +}; + + +/** columns and relationships of "token_datas" */ +export type Token_DatasDefault_PropertiesArgs = { + path?: InputMaybe; +}; + +/** Boolean expression to filter rows from the table "token_datas". All fields are combined with a logical 'AND'. 
*/ +export type Token_Datas_Bool_Exp = { + _and?: InputMaybe>; + _not?: InputMaybe; + _or?: InputMaybe>; + collection_data_id_hash?: InputMaybe; + collection_name?: InputMaybe; + creator_address?: InputMaybe; + default_properties?: InputMaybe; + description?: InputMaybe; + description_mutable?: InputMaybe; + largest_property_version?: InputMaybe; + maximum?: InputMaybe; + maximum_mutable?: InputMaybe; + metadata_uri?: InputMaybe; + name?: InputMaybe; + payee_address?: InputMaybe; + properties_mutable?: InputMaybe; + royalty_mutable?: InputMaybe; + royalty_points_denominator?: InputMaybe; + royalty_points_numerator?: InputMaybe; + supply?: InputMaybe; + token_data_id_hash?: InputMaybe; + transaction_timestamp?: InputMaybe; + transaction_version?: InputMaybe; + uri_mutable?: InputMaybe; +}; + +/** Ordering options when selecting data from "token_datas". */ +export type Token_Datas_Order_By = { + collection_data_id_hash?: InputMaybe; + collection_name?: InputMaybe; + creator_address?: InputMaybe; + default_properties?: InputMaybe; + description?: InputMaybe; + description_mutable?: InputMaybe; + largest_property_version?: InputMaybe; + maximum?: InputMaybe; + maximum_mutable?: InputMaybe; + metadata_uri?: InputMaybe; + name?: InputMaybe; + payee_address?: InputMaybe; + properties_mutable?: InputMaybe; + royalty_mutable?: InputMaybe; + royalty_points_denominator?: InputMaybe; + royalty_points_numerator?: InputMaybe; + supply?: InputMaybe; + token_data_id_hash?: InputMaybe; + transaction_timestamp?: InputMaybe; + transaction_version?: InputMaybe; + uri_mutable?: InputMaybe; +}; + +/** select columns of table "token_datas" */ +export enum Token_Datas_Select_Column { + /** column name */ + CollectionDataIdHash = 'collection_data_id_hash', + /** column name */ + CollectionName = 'collection_name', + /** column name */ + CreatorAddress = 'creator_address', + /** column name */ + DefaultProperties = 'default_properties', + /** column name */ + Description = 'description', + /** column name */ + DescriptionMutable = 'description_mutable', + /** column name */ + LargestPropertyVersion = 'largest_property_version', + /** column name */ + Maximum = 'maximum', + /** column name */ + MaximumMutable = 'maximum_mutable', + /** column name */ + MetadataUri = 'metadata_uri', + /** column name */ + Name = 'name', + /** column name */ + PayeeAddress = 'payee_address', + /** column name */ + PropertiesMutable = 'properties_mutable', + /** column name */ + RoyaltyMutable = 'royalty_mutable', + /** column name */ + RoyaltyPointsDenominator = 'royalty_points_denominator', + /** column name */ + RoyaltyPointsNumerator = 'royalty_points_numerator', + /** column name */ + Supply = 'supply', + /** column name */ + TokenDataIdHash = 'token_data_id_hash', + /** column name */ + TransactionTimestamp = 'transaction_timestamp', + /** column name */ + TransactionVersion = 'transaction_version', + /** column name */ + UriMutable = 'uri_mutable' +} + +/** Streaming cursor of the table "token_datas" */ +export type Token_Datas_Stream_Cursor_Input = { + /** Stream column input with initial value */ + initial_value: Token_Datas_Stream_Cursor_Value_Input; + /** cursor ordering */ + ordering?: InputMaybe; +}; + +/** Initial value of the column from where the streaming should start */ +export type Token_Datas_Stream_Cursor_Value_Input = { + collection_data_id_hash?: InputMaybe; + collection_name?: InputMaybe; + creator_address?: InputMaybe; + default_properties?: InputMaybe; + description?: InputMaybe; + description_mutable?: 
InputMaybe; + largest_property_version?: InputMaybe; + maximum?: InputMaybe; + maximum_mutable?: InputMaybe; + metadata_uri?: InputMaybe; + name?: InputMaybe; + payee_address?: InputMaybe; + properties_mutable?: InputMaybe; + royalty_mutable?: InputMaybe; + royalty_points_denominator?: InputMaybe; + royalty_points_numerator?: InputMaybe; + supply?: InputMaybe; + token_data_id_hash?: InputMaybe; + transaction_timestamp?: InputMaybe; + transaction_version?: InputMaybe; + uri_mutable?: InputMaybe; +}; + +/** columns and relationships of "token_ownerships" */ +export type Token_Ownerships = { + __typename?: 'token_ownerships'; + amount: Scalars['numeric']; + collection_data_id_hash: Scalars['String']; + collection_name: Scalars['String']; + creator_address: Scalars['String']; + name: Scalars['String']; + owner_address?: Maybe; + property_version: Scalars['numeric']; + table_handle: Scalars['String']; + table_type?: Maybe; + token_data_id_hash: Scalars['String']; + transaction_timestamp: Scalars['timestamp']; + transaction_version: Scalars['bigint']; +}; + +/** Boolean expression to filter rows from the table "token_ownerships". All fields are combined with a logical 'AND'. */ +export type Token_Ownerships_Bool_Exp = { + _and?: InputMaybe>; + _not?: InputMaybe; + _or?: InputMaybe>; + amount?: InputMaybe; + collection_data_id_hash?: InputMaybe; + collection_name?: InputMaybe; + creator_address?: InputMaybe; + name?: InputMaybe; + owner_address?: InputMaybe; + property_version?: InputMaybe; + table_handle?: InputMaybe; + table_type?: InputMaybe; + token_data_id_hash?: InputMaybe; + transaction_timestamp?: InputMaybe; + transaction_version?: InputMaybe; +}; + +/** Ordering options when selecting data from "token_ownerships". */ +export type Token_Ownerships_Order_By = { + amount?: InputMaybe; + collection_data_id_hash?: InputMaybe; + collection_name?: InputMaybe; + creator_address?: InputMaybe; + name?: InputMaybe; + owner_address?: InputMaybe; + property_version?: InputMaybe; + table_handle?: InputMaybe; + table_type?: InputMaybe; + token_data_id_hash?: InputMaybe; + transaction_timestamp?: InputMaybe; + transaction_version?: InputMaybe; +}; + +/** select columns of table "token_ownerships" */ +export enum Token_Ownerships_Select_Column { + /** column name */ + Amount = 'amount', + /** column name */ + CollectionDataIdHash = 'collection_data_id_hash', + /** column name */ + CollectionName = 'collection_name', + /** column name */ + CreatorAddress = 'creator_address', + /** column name */ + Name = 'name', + /** column name */ + OwnerAddress = 'owner_address', + /** column name */ + PropertyVersion = 'property_version', + /** column name */ + TableHandle = 'table_handle', + /** column name */ + TableType = 'table_type', + /** column name */ + TokenDataIdHash = 'token_data_id_hash', + /** column name */ + TransactionTimestamp = 'transaction_timestamp', + /** column name */ + TransactionVersion = 'transaction_version' +} + +/** Streaming cursor of the table "token_ownerships" */ +export type Token_Ownerships_Stream_Cursor_Input = { + /** Stream column input with initial value */ + initial_value: Token_Ownerships_Stream_Cursor_Value_Input; + /** cursor ordering */ + ordering?: InputMaybe; +}; + +/** Initial value of the column from where the streaming should start */ +export type Token_Ownerships_Stream_Cursor_Value_Input = { + amount?: InputMaybe; + collection_data_id_hash?: InputMaybe; + collection_name?: InputMaybe; + creator_address?: InputMaybe; + name?: InputMaybe; + owner_address?: InputMaybe; 
+ property_version?: InputMaybe; + table_handle?: InputMaybe; + table_type?: InputMaybe; + token_data_id_hash?: InputMaybe; + transaction_timestamp?: InputMaybe; + transaction_version?: InputMaybe; +}; + +/** columns and relationships of "tokens" */ +export type Tokens = { + __typename?: 'tokens'; + collection_data_id_hash: Scalars['String']; + collection_name: Scalars['String']; + creator_address: Scalars['String']; + name: Scalars['String']; + property_version: Scalars['numeric']; + token_data_id_hash: Scalars['String']; + token_properties: Scalars['jsonb']; + transaction_timestamp: Scalars['timestamp']; + transaction_version: Scalars['bigint']; +}; + + +/** columns and relationships of "tokens" */ +export type TokensToken_PropertiesArgs = { + path?: InputMaybe; +}; + +/** Boolean expression to filter rows from the table "tokens". All fields are combined with a logical 'AND'. */ +export type Tokens_Bool_Exp = { + _and?: InputMaybe>; + _not?: InputMaybe; + _or?: InputMaybe>; + collection_data_id_hash?: InputMaybe; + collection_name?: InputMaybe; + creator_address?: InputMaybe; + name?: InputMaybe; + property_version?: InputMaybe; + token_data_id_hash?: InputMaybe; + token_properties?: InputMaybe; + transaction_timestamp?: InputMaybe; + transaction_version?: InputMaybe; +}; + +/** Ordering options when selecting data from "tokens". */ +export type Tokens_Order_By = { + collection_data_id_hash?: InputMaybe; + collection_name?: InputMaybe; + creator_address?: InputMaybe; + name?: InputMaybe; + property_version?: InputMaybe; + token_data_id_hash?: InputMaybe; + token_properties?: InputMaybe; + transaction_timestamp?: InputMaybe; + transaction_version?: InputMaybe; +}; + +/** select columns of table "tokens" */ +export enum Tokens_Select_Column { + /** column name */ + CollectionDataIdHash = 'collection_data_id_hash', + /** column name */ + CollectionName = 'collection_name', + /** column name */ + CreatorAddress = 'creator_address', + /** column name */ + Name = 'name', + /** column name */ + PropertyVersion = 'property_version', + /** column name */ + TokenDataIdHash = 'token_data_id_hash', + /** column name */ + TokenProperties = 'token_properties', + /** column name */ + TransactionTimestamp = 'transaction_timestamp', + /** column name */ + TransactionVersion = 'transaction_version' +} + +/** Streaming cursor of the table "tokens" */ +export type Tokens_Stream_Cursor_Input = { + /** Stream column input with initial value */ + initial_value: Tokens_Stream_Cursor_Value_Input; + /** cursor ordering */ + ordering?: InputMaybe; +}; + +/** Initial value of the column from where the streaming should start */ +export type Tokens_Stream_Cursor_Value_Input = { + collection_data_id_hash?: InputMaybe; + collection_name?: InputMaybe; + creator_address?: InputMaybe; + name?: InputMaybe; + property_version?: InputMaybe; + token_data_id_hash?: InputMaybe; + token_properties?: InputMaybe; + transaction_timestamp?: InputMaybe; + transaction_version?: InputMaybe; +}; + +/** columns and relationships of "user_transactions" */ +export type User_Transactions = { + __typename?: 'user_transactions'; + block_height: Scalars['bigint']; + entry_function_id_str: Scalars['String']; + epoch: Scalars['bigint']; + expiration_timestamp_secs: Scalars['timestamp']; + gas_unit_price: Scalars['numeric']; + max_gas_amount: Scalars['numeric']; + parent_signature_type: Scalars['String']; + sender: Scalars['String']; + sequence_number: Scalars['bigint']; + timestamp: Scalars['timestamp']; + version: Scalars['bigint']; +}; + +/** 
Boolean expression to filter rows from the table "user_transactions". All fields are combined with a logical 'AND'. */ +export type User_Transactions_Bool_Exp = { + _and?: InputMaybe>; + _not?: InputMaybe; + _or?: InputMaybe>; + block_height?: InputMaybe; + entry_function_id_str?: InputMaybe; + epoch?: InputMaybe; + expiration_timestamp_secs?: InputMaybe; + gas_unit_price?: InputMaybe; + max_gas_amount?: InputMaybe; + parent_signature_type?: InputMaybe; + sender?: InputMaybe; + sequence_number?: InputMaybe; + timestamp?: InputMaybe; + version?: InputMaybe; +}; + +/** Ordering options when selecting data from "user_transactions". */ +export type User_Transactions_Order_By = { + block_height?: InputMaybe; + entry_function_id_str?: InputMaybe; + epoch?: InputMaybe; + expiration_timestamp_secs?: InputMaybe; + gas_unit_price?: InputMaybe; + max_gas_amount?: InputMaybe; + parent_signature_type?: InputMaybe; + sender?: InputMaybe; + sequence_number?: InputMaybe; + timestamp?: InputMaybe; + version?: InputMaybe; +}; + +/** select columns of table "user_transactions" */ +export enum User_Transactions_Select_Column { + /** column name */ + BlockHeight = 'block_height', + /** column name */ + EntryFunctionIdStr = 'entry_function_id_str', + /** column name */ + Epoch = 'epoch', + /** column name */ + ExpirationTimestampSecs = 'expiration_timestamp_secs', + /** column name */ + GasUnitPrice = 'gas_unit_price', + /** column name */ + MaxGasAmount = 'max_gas_amount', + /** column name */ + ParentSignatureType = 'parent_signature_type', + /** column name */ + Sender = 'sender', + /** column name */ + SequenceNumber = 'sequence_number', + /** column name */ + Timestamp = 'timestamp', + /** column name */ + Version = 'version' +} + +/** Streaming cursor of the table "user_transactions" */ +export type User_Transactions_Stream_Cursor_Input = { + /** Stream column input with initial value */ + initial_value: User_Transactions_Stream_Cursor_Value_Input; + /** cursor ordering */ + ordering?: InputMaybe; +}; + +/** Initial value of the column from where the streaming should start */ +export type User_Transactions_Stream_Cursor_Value_Input = { + block_height?: InputMaybe; + entry_function_id_str?: InputMaybe; + epoch?: InputMaybe; + expiration_timestamp_secs?: InputMaybe; + gas_unit_price?: InputMaybe; + max_gas_amount?: InputMaybe; + parent_signature_type?: InputMaybe; + sender?: InputMaybe; + sequence_number?: InputMaybe; + timestamp?: InputMaybe; + version?: InputMaybe; +}; diff --git a/m1/JavaScript-client/src/indexer/queries/getAccountCoinsData.graphql b/m1/JavaScript-client/src/indexer/queries/getAccountCoinsData.graphql new file mode 100644 index 00000000..15bb54c6 --- /dev/null +++ b/m1/JavaScript-client/src/indexer/queries/getAccountCoinsData.graphql @@ -0,0 +1,11 @@ +query getAccountCoinsData($owner_address: String, $offset: Int, $limit: Int) { + current_coin_balances(where: { owner_address: { _eq: $owner_address } }, offset: $offset, limit: $limit) { + amount + coin_type + coin_info { + name + decimals + symbol + } + } +} diff --git a/m1/JavaScript-client/src/indexer/queries/getAccountCurrentTokens.graphql b/m1/JavaScript-client/src/indexer/queries/getAccountCurrentTokens.graphql new file mode 100644 index 00000000..87dc86c5 --- /dev/null +++ b/m1/JavaScript-client/src/indexer/queries/getAccountCurrentTokens.graphql @@ -0,0 +1,38 @@ +query getAccountCurrentTokens($address: String!, $offset: Int, $limit: Int) { + current_token_ownerships( + where: { owner_address: { _eq: $address }, amount: { _gt: 0 } } 
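+ # keep only tokens the address currently owns (amount > 0)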
+ order_by: [{ last_transaction_version: desc }, { creator_address: asc }, { collection_name: asc }, { name: asc }] + offset: $offset + limit: $limit + ) { + amount + current_token_data { + ...TokenDataFields + } + current_collection_data { + ...CollectionDataFields + } + last_transaction_version + property_version + } +} + +fragment TokenDataFields on current_token_datas { + creator_address + collection_name + description + metadata_uri + name + token_data_id_hash + collection_data_id_hash +} + +fragment CollectionDataFields on current_collection_datas { + metadata_uri + supply + description + collection_name + collection_data_id_hash + table_handle + creator_address +} diff --git a/m1/JavaScript-client/src/indexer/queries/getAccountTokensCount.graphql b/m1/JavaScript-client/src/indexer/queries/getAccountTokensCount.graphql new file mode 100644 index 00000000..0ce7eb9d --- /dev/null +++ b/m1/JavaScript-client/src/indexer/queries/getAccountTokensCount.graphql @@ -0,0 +1,7 @@ +query getAccountTokensCount($owner_address: String) { + current_token_ownerships_aggregate(where: { owner_address: { _eq: $owner_address }, amount: { _gt: "0" } }) { + aggregate { + count + } + } +} diff --git a/m1/JavaScript-client/src/indexer/queries/getAccountTransactionsCount.graphql b/m1/JavaScript-client/src/indexer/queries/getAccountTransactionsCount.graphql new file mode 100644 index 00000000..e27c6e7a --- /dev/null +++ b/m1/JavaScript-client/src/indexer/queries/getAccountTransactionsCount.graphql @@ -0,0 +1,7 @@ +query getAccountTransactionsCount($address: String) { + move_resources_aggregate(where: { address: { _eq: $address } }, distinct_on: transaction_version) { + aggregate { + count + } + } +} diff --git a/m1/JavaScript-client/src/indexer/queries/getAccountTransactionsData.graphql b/m1/JavaScript-client/src/indexer/queries/getAccountTransactionsData.graphql new file mode 100644 index 00000000..3a9ac0ce --- /dev/null +++ b/m1/JavaScript-client/src/indexer/queries/getAccountTransactionsData.graphql @@ -0,0 +1,11 @@ +query getAccountTransactionsData($address: String, $limit: Int, $offset: Int) { + move_resources( + where: { address: { _eq: $address } } + order_by: { transaction_version: desc } + distinct_on: transaction_version + limit: $limit + offset: $offset + ) { + transaction_version + } +} diff --git a/m1/JavaScript-client/src/indexer/queries/getCurrentDelegatorBalancesCount.graphql b/m1/JavaScript-client/src/indexer/queries/getCurrentDelegatorBalancesCount.graphql new file mode 100644 index 00000000..a07f051e --- /dev/null +++ b/m1/JavaScript-client/src/indexer/queries/getCurrentDelegatorBalancesCount.graphql @@ -0,0 +1,10 @@ +query getCurrentDelegatorBalancesCount($poolAddress: String) { + current_delegator_balances_aggregate( + where: { pool_type: { _eq: "active_shares" }, pool_address: { _eq: $poolAddress }, amount: { _gt: "0" } } + distinct_on: delegator_address + ) { + aggregate { + count + } + } +} diff --git a/m1/JavaScript-client/src/indexer/queries/getDelegatedStakingActivities.graphql b/m1/JavaScript-client/src/indexer/queries/getDelegatedStakingActivities.graphql new file mode 100644 index 00000000..70083e24 --- /dev/null +++ b/m1/JavaScript-client/src/indexer/queries/getDelegatedStakingActivities.graphql @@ -0,0 +1,12 @@ +query getDelegatedStakingActivities($delegatorAddress: String, $poolAddress: String) { + delegated_staking_activities( + where: { delegator_address: { _eq: $delegatorAddress }, pool_address: { _eq: $poolAddress } } + ) { + amount + delegator_address + event_index + 
event_type + pool_address + transaction_version + } +} diff --git a/m1/JavaScript-client/src/indexer/queries/getLedgerInfo.graphql b/m1/JavaScript-client/src/indexer/queries/getLedgerInfo.graphql new file mode 100644 index 00000000..c4c6eb47 --- /dev/null +++ b/m1/JavaScript-client/src/indexer/queries/getLedgerInfo.graphql @@ -0,0 +1,5 @@ +query getIndexerLedgerInfo { + ledger_infos { + chain_id + } +} diff --git a/m1/JavaScript-client/src/indexer/queries/getTokenActivities.graphql b/m1/JavaScript-client/src/indexer/queries/getTokenActivities.graphql new file mode 100644 index 00000000..ab7af888 --- /dev/null +++ b/m1/JavaScript-client/src/indexer/queries/getTokenActivities.graphql @@ -0,0 +1,22 @@ +query getTokenActivities($idHash: String!, $offset: Int, $limit: Int) { + token_activities( + where: { token_data_id_hash: { _eq: $idHash } } + order_by: { transaction_version: desc } + offset: $offset + limit: $limit + ) { + creator_address + collection_name + name + token_data_id_hash + collection_data_id_hash + from_address + to_address + transaction_version + transaction_timestamp + property_version + transfer_type + event_sequence_number + token_amount + } +} diff --git a/m1/JavaScript-client/src/indexer/queries/getTokenActivitiesCount.graphql b/m1/JavaScript-client/src/indexer/queries/getTokenActivitiesCount.graphql new file mode 100644 index 00000000..6f8d2e99 --- /dev/null +++ b/m1/JavaScript-client/src/indexer/queries/getTokenActivitiesCount.graphql @@ -0,0 +1,7 @@ +query getTokenActivitiesCount($token_id: String) { + token_activities_aggregate(where: { token_data_id_hash: { _eq: $token_id } }) { + aggregate { + count + } + } +} diff --git a/m1/JavaScript-client/src/indexer/queries/getTokenData.graphql b/m1/JavaScript-client/src/indexer/queries/getTokenData.graphql new file mode 100644 index 00000000..3fec6d92 --- /dev/null +++ b/m1/JavaScript-client/src/indexer/queries/getTokenData.graphql @@ -0,0 +1,16 @@ +query getTokenData($token_id: String) { + current_token_datas(where: { token_data_id_hash: { _eq: $token_id } }) { + token_data_id_hash + name + collection_name + creator_address + default_properties + largest_property_version + maximum + metadata_uri + payee_address + royalty_points_denominator + royalty_points_numerator + supply + } +} diff --git a/m1/JavaScript-client/src/indexer/queries/getTokenOwnersData.graphql b/m1/JavaScript-client/src/indexer/queries/getTokenOwnersData.graphql new file mode 100644 index 00000000..3dcc3dec --- /dev/null +++ b/m1/JavaScript-client/src/indexer/queries/getTokenOwnersData.graphql @@ -0,0 +1,7 @@ +query getTokenOwnersData($token_id: String, $property_version: numeric) { + current_token_ownerships( + where: { token_data_id_hash: { _eq: $token_id }, property_version: { _eq: $property_version } } + ) { + owner_address + } +} diff --git a/m1/JavaScript-client/src/indexer/queries/getTopUserTransactions.graphql b/m1/JavaScript-client/src/indexer/queries/getTopUserTransactions.graphql new file mode 100644 index 00000000..b93cbc85 --- /dev/null +++ b/m1/JavaScript-client/src/indexer/queries/getTopUserTransactions.graphql @@ -0,0 +1,5 @@ +query getTopUserTransactions($limit: Int) { + user_transactions(limit: $limit, order_by: { version: desc }) { + version + } +} diff --git a/m1/JavaScript-client/src/indexer/queries/getUserTransactions.graphql b/m1/JavaScript-client/src/indexer/queries/getUserTransactions.graphql new file mode 100644 index 00000000..529f43ac --- /dev/null +++ b/m1/JavaScript-client/src/indexer/queries/getUserTransactions.graphql @@ -0,0 
+1,10 @@ +query getUserTransactions($limit: Int, $start_version: bigint, $offset: Int) { + user_transactions( + limit: $limit + order_by: { version: desc } + where: { version: { _lte: $start_version } } + offset: $offset + ) { + version + } +} diff --git a/m1/JavaScript-client/src/plugins/ans_client.ts b/m1/JavaScript-client/src/plugins/ans_client.ts new file mode 100644 index 00000000..7fe21411 --- /dev/null +++ b/m1/JavaScript-client/src/plugins/ans_client.ts @@ -0,0 +1,232 @@ +import { AptosClient, ApiError, Provider, OptionalTransactionArgs } from "../providers"; +import * as Gen from "../generated/index"; +import { AptosAccount } from "../account"; +import { TransactionBuilderRemoteABI } from "../transaction_builder"; + +const ansContractsMap: Record = { + testnet: "0x5f8fd2347449685cf41d4db97926ec3a096eaf381332be4f1318ad4d16a8497c", + mainnet: "0x867ed1f6bf916171b1de3ee92849b8978b7d1b9e0a8cc982a3d19d535dfd9c0c", +}; + +// Each name component can only have lowercase letters, number or hyphens, and cannot start or end with a hyphen. +const nameComponentPattern = /^[a-z\d][a-z\d-]{1,61}[a-z\d]$/; + +const namePattern = new RegExp( + "^" + + // Optional subdomain (cannot be followed by .apt) + "(?:(?[^.]+)\\.(?!apt$))?" + + // Domain + "(?[^.]+)" + + // Optional .apt suffix + "(?:\\.apt)?" + + "$", +); + +type ReverseLookupRegistryV1 = { + registry: { + handle: string; + }; +}; + +type NameRegistryV1 = { + registry: { + handle: string; + }; +}; + +export class AnsClient { + contractAddress: string; + + provider: Provider; + + /** + * Creates new AnsClient instance + * @param provider Provider instance + * @param contractAddress An optional contract address. + * If there is no contract address matching to the provided network + * then the AnsClient class expects a contract address - + * this is to support both mainnet/testnet networks and local development. + */ + constructor(provider: Provider, contractAddress?: string) { + this.provider = provider; + if (!ansContractsMap[this.provider.network] && !contractAddress) { + throw new Error("Error: For custom providers, you must pass in a contract address"); + } + this.contractAddress = ansContractsMap[this.provider.network] ?? contractAddress; + } + + /** + * Returns the primary name for the given account address + * @param address An account address + * @returns Account's primary name | null if there is no primary name defined + */ + async getPrimaryNameByAddress(address: string): Promise { + const ansResource: Gen.MoveResource = await this.provider.getAccountResource( + this.contractAddress, + `${this.contractAddress}::domains::ReverseLookupRegistryV1`, + ); + const data = ansResource.data as ReverseLookupRegistryV1; + const { handle } = data.registry; + const domainsTableItemRequest = { + key_type: "address", + value_type: `${this.contractAddress}::domains::NameRecordKeyV1`, + key: address, + }; + try { + const item = await this.provider.getTableItem(handle, domainsTableItemRequest); + return item.subdomain_name.vec[0] ? `${item.subdomain_name.vec[0]}.${item.domain_name}` : item.domain_name; + } catch (error: any) { + // if item not found, response is 404 error - meaning item not found + if (error.status === 404) { + return null; + } + throw new Error(error); + } + } + + /** + * Returns the target account address for the given name + * @param name ANS name + * @returns Account address | null + */ + async getAddressByName(name: string): Promise { + const { domain, subdomain } = name.match(namePattern)?.groups ?? 
{}; + if (!domain) return null; + if (subdomain) return this.getAddressBySubdomainName(domain, subdomain); + return this.getAddressByDomainName(domain); + } + + /** + * Mint a new Aptos name + * + * @param account AptosAccount where collection will be created + * @param domainName Aptos domain name to mint + * @param years year duration of the domain name + * @returns The hash of the pending transaction submitted to the API + */ + async mintAptosName( + account: AptosAccount, + domainName: string, + years: number = 1, + extraArgs?: OptionalTransactionArgs, + ): Promise { + // check if the name is valid + if (domainName.match(nameComponentPattern) === null) { + throw new ApiError(400, `Name ${domainName} is not valid`); + } + // check if the name is available + const address = await this.getAddressByName(domainName); + if (address !== null) { + throw new ApiError(400, `Name ${domainName} is not available`); + } + + const builder = new TransactionBuilderRemoteABI(this.provider.aptosClient, { + sender: account.address(), + ...extraArgs, + }); + const rawTxn = await builder.build(`${this.contractAddress}::domains::register_domain`, [], [domainName, years]); + + const bcsTxn = AptosClient.generateBCSTransaction(account, rawTxn); + const pendingTransaction = await this.provider.submitSignedBCSTransaction(bcsTxn); + + return pendingTransaction.hash; + } + + /** + * Initialize reverse lookup for contract owner + * + * @param owner the `aptos_names` AptosAccount + * @returns The hash of the pending transaction submitted to the API + */ + async initReverseLookupRegistry(owner: AptosAccount, extraArgs?: OptionalTransactionArgs): Promise { + const builder = new TransactionBuilderRemoteABI(this.provider.aptosClient, { + sender: owner.address(), + ...extraArgs, + }); + const rawTxn = await builder.build(`${this.contractAddress}::domains::init_reverse_lookup_registry_v1`, [], []); + + const bcsTxn = AptosClient.generateBCSTransaction(owner, rawTxn); + const pendingTransaction = await this.provider.submitSignedBCSTransaction(bcsTxn); + + return pendingTransaction.hash; + } + + /** + * Returns the account address for the given domain name + * @param domain domain name + * @example + * if name is `aptos.apt` + * domain = aptos + * + * @returns account address | null + */ + private async getAddressByDomainName(domain: string) { + if (domain.match(nameComponentPattern) === null) return null; + const ansResource: { type: Gen.MoveStructTag; data: any } = await this.provider.getAccountResource( + this.contractAddress, + `${this.contractAddress}::domains::NameRegistryV1`, + ); + const data = ansResource.data as NameRegistryV1; + const { handle } = data.registry; + const domainsTableItemRequest = { + key_type: `${this.contractAddress}::domains::NameRecordKeyV1`, + value_type: `${this.contractAddress}::domains::NameRecordV1`, + key: { + subdomain_name: { vec: [] }, + domain_name: domain, + }, + }; + + try { + const item = await this.provider.getTableItem(handle, domainsTableItemRequest); + return item.target_address.vec[0]; + } catch (error: any) { + // if item not found, response is 404 error - meaning item not found + if (error.status === 404) { + return null; + } + throw new Error(error); + } + } + + /** + * Returns the account address for the given subdomain_name + * @param domain domain name + * @param subdomain subdomain name + * @example + * if name is `dev.aptos.apt` + * domain = aptos + * subdomain = dev + * + * @returns account address | null + */ + private async getAddressBySubdomainName(domain: string, 
subdomain: string): Promise { + if (domain.match(nameComponentPattern) === null) return null; + if (subdomain.match(nameComponentPattern) === null) return null; + const ansResource: { type: Gen.MoveStructTag; data: any } = await this.provider.getAccountResource( + this.contractAddress, + `${this.contractAddress}::domains::NameRegistryV1`, + ); + const data = ansResource.data as NameRegistryV1; + const { handle } = data.registry; + const domainsTableItemRequest = { + key_type: `${this.contractAddress}::domains::NameRecordKeyV1`, + value_type: `${this.contractAddress}::domains::NameRecordV1`, + key: { + subdomain_name: { vec: [subdomain] }, + domain_name: domain, + }, + }; + + try { + const item = await this.provider.getTableItem(handle, domainsTableItemRequest); + return item.target_address.vec[0]; + } catch (error: any) { + // if item not found, response is 404 error - meaning item not found + if (error.status === 404) { + return null; + } + throw new Error(error); + } + } +} diff --git a/m1/JavaScript-client/src/plugins/aptos_token.ts b/m1/JavaScript-client/src/plugins/aptos_token.ts new file mode 100644 index 00000000..5d98bb8a --- /dev/null +++ b/m1/JavaScript-client/src/plugins/aptos_token.ts @@ -0,0 +1,490 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +/* eslint-disable max-len */ + +import { AptosAccount } from "../account/aptos_account"; +import { AnyNumber } from "../bcs"; +import { MAX_U64_BIG_INT } from "../bcs/consts"; +import { Provider } from "../providers"; +import { AptosClient, OptionalTransactionArgs } from "../providers/aptos_client"; +import { TransactionBuilderRemoteABI } from "../transaction_builder"; +import { HexString, MaybeHexString } from "../utils"; +import { getPropertyValueRaw, getSinglePropertyValueRaw } from "../utils/property_map_serde"; + +export interface CreateCollectionOptions { + royaltyNumerator?: number; + royaltyDenominator?: number; + mutableDescription?: boolean; + mutableRoyalty?: boolean; + mutableURI?: boolean; + mutableTokenDescription?: boolean; + mutableTokenName?: boolean; + mutableTokenProperties?: boolean; + mutableTokenURI?: boolean; + tokensBurnableByCreator?: boolean; + tokensFreezableByCreator?: boolean; +} + +const PropertyTypeMap = { + BOOLEAN: "bool", + U8: "u8", + U16: "u16", + U32: "u32", + U64: "u64", + U128: "u128", + U256: "u256", + ADDRESS: "address", + VECTOR: "vector", + STRING: "string", +}; + +export type PropertyType = keyof typeof PropertyTypeMap; + +/** + * Class for managing aptos_token + */ +export class AptosToken { + readonly provider: Provider; + + private readonly tokenType: string = "0x4::token::Token"; + + /** + * Creates new AptosToken instance + * + * @param provider Provider instance + */ + constructor(provider: Provider) { + this.provider = provider; + } + + private async submitTransaction( + account: AptosAccount, + funcName: string, + typeArgs: string[], + args: any[], + extraArgs?: OptionalTransactionArgs, + ) { + const builder = new TransactionBuilderRemoteABI(this.provider, { + sender: account.address(), + ...extraArgs, + }); + const rawTxn = await builder.build(`0x4::aptos_token::${funcName}`, typeArgs, args); + const bcsTxn = AptosClient.generateBCSTransaction(account, rawTxn); + const pendingTransaction = await this.provider.submitSignedBCSTransaction(bcsTxn); + return pendingTransaction.hash; + } + + /** + * Creates a new collection within the specified account + * + * @param creator AptosAccount where collection will be created + * @param description Collection 
description + * @param name Collection name + * @param uri URL to additional info about collection + * @param options CreateCollectionOptions type. By default all values set to `true` or `0` + * @returns The hash of the transaction submitted to the API + */ + async createCollection( + creator: AptosAccount, + description: string, + name: string, + uri: string, + maxSupply: AnyNumber = MAX_U64_BIG_INT, + options?: CreateCollectionOptions, + extraArgs?: OptionalTransactionArgs, + ): Promise { + return this.submitTransaction( + creator, + "create_collection", + [], + [ + description, + maxSupply, + name, + uri, + options?.mutableDescription ?? true, + options?.mutableRoyalty ?? true, + options?.mutableURI ?? true, + options?.mutableTokenDescription ?? true, + options?.mutableTokenName ?? true, + options?.mutableTokenProperties ?? true, + options?.mutableTokenURI ?? true, + options?.tokensBurnableByCreator ?? true, + options?.tokensFreezableByCreator ?? true, + options?.royaltyNumerator ?? 0, + options?.royaltyDenominator ?? 0, + ], + extraArgs, + ); + } + + /** + * Mint a new token within the specified account + * + * @param account AptosAccount where token will be created + * @param collection Name of collection, that token belongs to + * @param description Token description + * @param name Token name + * @param uri URL to additional info about token + * @param propertyKeys the property keys for storing on-chain properties + * @param propertyTypes the type of property values + * @param propertyValues the property values to be stored on-chain + * @returns The hash of the transaction submitted to the API + */ + async mint( + account: AptosAccount, + collection: string, + description: string, + name: string, + uri: string, + propertyKeys: Array = [], + propertyTypes: Array = [], + propertyValues: Array = [], + extraArgs?: OptionalTransactionArgs, + ): Promise { + return this.submitTransaction( + account, + "mint", + [], + [ + collection, + description, + name, + uri, + propertyKeys, + propertyTypes, + getPropertyValueRaw(propertyValues, propertyTypes), + ], + extraArgs, + ); + } + + /** + * Mint a soul bound token into a recipient's account + * + * @param account AptosAccount that mints the token + * @param collection Name of collection, that token belongs to + * @param description Token description + * @param name Token name + * @param uri URL to additional info about token + * @param recipient AptosAccount where token will be created + * @param propertyKeys the property keys for storing on-chain properties + * @param propertyTypes the type of property values + * @param propertyValues the property values to be stored on-chain + * @returns The hash of the transaction submitted to the API + */ + async mintSoulBound( + account: AptosAccount, + collection: string, + description: string, + name: string, + uri: string, + recipient: AptosAccount, + propertyKeys: Array = [], + propertyTypes: Array = [], + propertyValues: Array = [], + extraArgs?: OptionalTransactionArgs, + ): Promise { + return this.submitTransaction( + account, + "mint_soul_bound", + [], + [ + collection, + description, + name, + uri, + propertyKeys, + propertyTypes, + getPropertyValueRaw(propertyValues, propertyTypes), + recipient.address().hex(), + ], + extraArgs, + ); + } + + /** + * Burn a token by its creator + * @param creator Creator account + * @param token Token address + * @returns The hash of the transaction submitted to the API + */ + async burnToken( + creator: AptosAccount, + token: MaybeHexString, + tokenType?: string, 
+ extraArgs?: OptionalTransactionArgs, + ): Promise { + return this.submitTransaction( + creator, + "burn", + [tokenType || this.tokenType], + [HexString.ensure(token).hex()], + extraArgs, + ); + } + + /** + * Freeze token transfer ability + * @param creator Creator account + * @param token Token address + * @returns The hash of the transaction submitted to the API + */ + async freezeTokenTransafer( + creator: AptosAccount, + token: MaybeHexString, + tokenType?: string, + extraArgs?: OptionalTransactionArgs, + ): Promise { + return this.submitTransaction( + creator, + "freeze_transfer", + [tokenType || this.tokenType], + [HexString.ensure(token).hex()], + extraArgs, + ); + } + + /** + * Unfreeze token transfer ability + * @param creator Creator account + * @param token Token address + * @returns The hash of the transaction submitted to the API + */ + async unfreezeTokenTransafer( + creator: AptosAccount, + token: MaybeHexString, + tokenType?: string, + extraArgs?: OptionalTransactionArgs, + ): Promise { + return this.submitTransaction( + creator, + "unfreeze_transfer", + [tokenType || this.tokenType], + [HexString.ensure(token).hex()], + extraArgs, + ); + } + + /** + * Set token description + * @param creator Creator account + * @param token Token address + * @param description Token description + * @returns The hash of the transaction submitted to the API + */ + async setTokenDescription( + creator: AptosAccount, + token: MaybeHexString, + description: string, + tokenType?: string, + extraArgs?: OptionalTransactionArgs, + ): Promise { + return this.submitTransaction( + creator, + "set_description", + [tokenType || this.tokenType], + [HexString.ensure(token).hex(), description], + extraArgs, + ); + } + + /** + * Set token name + * @param creator Creator account + * @param token Token address + * @param name Token name + * @returns The hash of the transaction submitted to the API + */ + async setTokenName( + creator: AptosAccount, + token: MaybeHexString, + name: string, + tokenType?: string, + extraArgs?: OptionalTransactionArgs, + ): Promise { + return this.submitTransaction( + creator, + "set_name", + [tokenType || this.tokenType], + [HexString.ensure(token).hex(), name], + extraArgs, + ); + } + + /** + * Set token URI + * @param creator Creator account + * @param token Token address + * @param uri Token uri + * @returns The hash of the transaction submitted to the API + */ + async setTokenURI( + creator: AptosAccount, + token: MaybeHexString, + uri: string, + tokenType?: string, + extraArgs?: OptionalTransactionArgs, + ): Promise { + return this.submitTransaction( + creator, + "set_uri", + [tokenType || this.tokenType], + [HexString.ensure(token).hex(), uri], + extraArgs, + ); + } + + /** + * Add token property + * @param creator Creator account + * @param token Token address + * @param key the property key for storing on-chain property + * @param type the type of property value + * @param value the property value to be stored on-chain + * @returns The hash of the transaction submitted to the API + */ + async addTokenProperty( + creator: AptosAccount, + token: MaybeHexString, + propertyKey: string, + propertyType: PropertyType, + propertyValue: string, + tokenType?: string, + extraArgs?: OptionalTransactionArgs, + ): Promise { + return this.submitTransaction( + creator, + "add_property", + [tokenType || this.tokenType], + [ + HexString.ensure(token).hex(), + propertyKey, + PropertyTypeMap[propertyType], + getSinglePropertyValueRaw(propertyValue, PropertyTypeMap[propertyType]), + ], + 
extraArgs, + ); + } + + /** + * Remove token property + * @param creator Creator account + * @param token Token address + * @param key the property key stored on-chain + * @returns The hash of the transaction submitted to the API + */ + async removeTokenProperty( + creator: AptosAccount, + token: MaybeHexString, + propertyKey: string, + tokenType?: string, + extraArgs?: OptionalTransactionArgs, + ): Promise { + return this.submitTransaction( + creator, + "remove_property", + [tokenType || this.tokenType], + [HexString.ensure(token).hex(), propertyKey], + extraArgs, + ); + } + + /** + * Update token property + * @param creator Creator account + * @param token Token address + * @param key the property key stored on-chain + * @param type the property typed stored on-chain + * @param value the property value to be stored on-chain + * @returns The hash of the transaction submitted to the API + */ + async updateTokenProperty( + creator: AptosAccount, + token: MaybeHexString, + propertyKey: string, + propertyType: PropertyType, + propertyValue: string, + tokenType?: string, + extraArgs?: OptionalTransactionArgs, + ): Promise { + return this.submitTransaction( + creator, + "update_property", + [tokenType || this.tokenType], + [ + HexString.ensure(token).hex(), + propertyKey, + PropertyTypeMap[propertyType], + getSinglePropertyValueRaw(propertyValue, PropertyTypeMap[propertyType]), + ], + extraArgs, + ); + } + + async addTypedProperty( + creator: AptosAccount, + token: MaybeHexString, + propertyKey: string, + propertyType: PropertyType, + propertyValue: string, + tokenType?: string, + extraArgs?: OptionalTransactionArgs, + ) { + return this.submitTransaction( + creator, + "add_typed_property", + [tokenType || this.tokenType, PropertyTypeMap[propertyType]], + [HexString.ensure(token).hex(), propertyKey, propertyValue], + extraArgs, + ); + } + + async updateTypedProperty( + creator: AptosAccount, + token: MaybeHexString, + propertyKey: string, + propertyType: PropertyType, + propertyValue: string, + tokenType?: string, + extraArgs?: OptionalTransactionArgs, + ) { + return this.submitTransaction( + creator, + "update_typed_property", + [tokenType || this.tokenType, PropertyTypeMap[propertyType]], + [HexString.ensure(token).hex(), propertyKey, propertyValue], + extraArgs, + ); + } + + /** + * Transfer a token ownership. + * We can transfer a token only when the token is not frozen (i.e. 
owner transfer is not disabled such as for soul bound tokens) + * @param owner The account of the current token owner + * @param token Token address + * @param recipient Recipient address + * @returns The hash of the transaction submitted to the API + */ + async transferTokenOwnership( + owner: AptosAccount, + token: MaybeHexString, + recipient: MaybeHexString, + tokenType?: string, + extraArgs?: OptionalTransactionArgs, + ): Promise { + const builder = new TransactionBuilderRemoteABI(this.provider.aptosClient, { + sender: owner.address(), + ...extraArgs, + }); + const rawTxn = await builder.build( + "0x1::object::transfer", + [tokenType || this.tokenType], + [HexString.ensure(token).hex(), HexString.ensure(recipient).hex()], + ); + const bcsTxn = AptosClient.generateBCSTransaction(owner, rawTxn); + const pendingTransaction = await this.provider.aptosClient.submitSignedBCSTransaction(bcsTxn); + return pendingTransaction.hash; + } +} diff --git a/m1/JavaScript-client/src/plugins/coin_client.ts b/m1/JavaScript-client/src/plugins/coin_client.ts new file mode 100644 index 00000000..8eff1e09 --- /dev/null +++ b/m1/JavaScript-client/src/plugins/coin_client.ts @@ -0,0 +1,99 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +import { AptosAccount, getAddressFromAccountOrAddress } from "../account/aptos_account"; +import { AptosClient, OptionalTransactionArgs } from "../providers/aptos_client"; +import { MaybeHexString, APTOS_COIN } from "../utils"; +import { TransactionBuilderRemoteABI } from "../transaction_builder"; + +/** + * Class for working with the coin module, such as transferring coins and + * checking balances. + */ +export class CoinClient { + aptosClient: AptosClient; + + /** + * Creates new CoinClient instance + * @param aptosClient AptosClient instance + */ + constructor(aptosClient: AptosClient) { + this.aptosClient = aptosClient; + } + + /** + * Generate, sign, and submit a transaction to the Aptos blockchain API to + * transfer coins from one account to another. By default it transfers + * 0x1::aptos_coin::AptosCoin, but you can specify a different coin type + * with the `coinType` argument. + * + * You may set `createReceiverIfMissing` to true if you want to create the + * receiver account if it does not exist on chain yet. If you do not set + * this to true, the transaction will fail if the receiver account does not + * exist on-chain. + * + * @param from Account sending the coins + * @param to Account to receive the coins + * @param amount Number of coins to transfer + * @param extraArgs Extra args for building the transaction or configuring how + * the client should submit and wait for the transaction + * @returns The hash of the transaction submitted to the API + */ + // :!:>transfer + async transfer( + from: AptosAccount, + to: AptosAccount | MaybeHexString, + amount: number | bigint, + extraArgs?: OptionalTransactionArgs & { + // The coin type to use, defaults to 0x1::aptos_coin::AptosCoin + coinType?: string; + // If set, create the `receiver` account if it doesn't exist on-chain. + // This is done by calling `0x1::aptos_account::transfer` instead, which + // will create the account on-chain first if it doesn't exist before + // transferring the coins to it. + // If this is the first time an account has received the specified coinType, + // and this is set to false, the transaction would fail. + createReceiverIfMissing?: boolean; + }, + ): Promise { + // If none is explicitly given, use 0x1::aptos_coin::AptosCoin as the coin type. 
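+ // Note: `0x1::aptos_account::transfer_coins` (used when `createReceiverIfMissing` is set)
+ // registers the receiver for the coin type and creates the account if necessary, while
+ // `0x1::coin::transfer` aborts if the receiver has not yet registered the coin type.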
+ const coinTypeToTransfer = extraArgs?.coinType ?? APTOS_COIN; + + // If we should create the receiver account if it doesn't exist on-chain, + // use the `0x1::aptos_account::transfer` function. + const func = extraArgs?.createReceiverIfMissing ? "0x1::aptos_account::transfer_coins" : "0x1::coin::transfer"; + + // Get the receiver address from the AptosAccount or MaybeHexString. + const toAddress = getAddressFromAccountOrAddress(to); + + const builder = new TransactionBuilderRemoteABI(this.aptosClient, { sender: from.address(), ...extraArgs }); + const rawTxn = await builder.build(func, [coinTypeToTransfer], [toAddress, amount]); + + const bcsTxn = AptosClient.generateBCSTransaction(from, rawTxn); + const pendingTransaction = await this.aptosClient.submitSignedBCSTransaction(bcsTxn); + return pendingTransaction.hash; + } // <:!:transfer + + /** + * Get the balance of the account. By default it checks the balance of + * 0x1::aptos_coin::AptosCoin, but you can specify a different coin type. + * + * @param account Account that you want to get the balance of. + * @param extraArgs Extra args for checking the balance. + * @returns Promise that resolves to the balance as a bigint. + */ + // :!:>checkBalance + async checkBalance( + account: AptosAccount | MaybeHexString, + extraArgs?: { + // The coin type to use, defaults to 0x1::aptos_coin::AptosCoin + coinType?: string; + }, + ): Promise { + const coinType = extraArgs?.coinType ?? APTOS_COIN; + const typeTag = `0x1::coin::CoinStore<${coinType}>`; + const address = getAddressFromAccountOrAddress(account); + const accountResource = await this.aptosClient.getAccountResource(address, typeTag); + return BigInt((accountResource.data as any).coin.value); + } // <:!:checkBalance +} diff --git a/m1/JavaScript-client/src/plugins/faucet_client.ts b/m1/JavaScript-client/src/plugins/faucet_client.ts new file mode 100644 index 00000000..4f854205 --- /dev/null +++ b/m1/JavaScript-client/src/plugins/faucet_client.ts @@ -0,0 +1,69 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +/** Faucet creates and funds accounts. This is a thin wrapper around that. */ +import { AptosClient } from "../providers/aptos_client"; +import { OpenAPIConfig } from "../generated"; +import { AxiosHttpRequest } from "../generated/core/AxiosHttpRequest"; +import { HexString, MaybeHexString, DEFAULT_TXN_TIMEOUT_SEC } from "../utils"; + +/** + * Class for requsting tokens from faucet + */ +export class FaucetClient extends AptosClient { + faucetRequester: AxiosHttpRequest; + + /** + * Establishes a connection to Aptos node + * @param nodeUrl A url of the Aptos Node API endpoint + * @param faucetUrl A faucet url + * @param config An optional config for inner axios instance + * Detailed config description: {@link https://github.com/axios/axios#request-config} + */ + constructor(nodeUrl: string, faucetUrl: string, config?: Partial) { + super(nodeUrl, config); + + if (!faucetUrl) { + throw new Error("Faucet URL cannot be empty."); + } + // Build a requester configured to talk to the faucet. + this.faucetRequester = new AxiosHttpRequest({ + BASE: faucetUrl, + VERSION: config?.VERSION ?? "0.1.0", + WITH_CREDENTIALS: config?.WITH_CREDENTIALS ?? false, + CREDENTIALS: config?.CREDENTIALS ?? 
"include", + TOKEN: config?.TOKEN, + USERNAME: config?.USERNAME, + PASSWORD: config?.PASSWORD, + HEADERS: config?.HEADERS, + ENCODE_PATH: config?.ENCODE_PATH, + }); + } + + /** + * This creates an account if it does not exist and mints the specified amount of + * coins into that account + * @param address Hex-encoded 16 bytes Aptos account address wich mints tokens + * @param amount Amount of tokens to mint + * @param timeoutSecs + * @returns Hashes of submitted transactions + */ + async fundAccount(address: MaybeHexString, amount: number, timeoutSecs = DEFAULT_TXN_TIMEOUT_SEC): Promise { + const tnxHashes = await this.faucetRequester.request>({ + method: "POST", + url: "/mint", + query: { + address: HexString.ensure(address).noPrefix(), + amount, + }, + }); + + const promises: Promise[] = []; + for (let i = 0; i < tnxHashes.length; i += 1) { + const tnxHash = tnxHashes[i]; + promises.push(this.waitForTransaction(tnxHash, { timeoutSecs })); + } + await Promise.all(promises); + return tnxHashes; + } +} diff --git a/m1/JavaScript-client/src/plugins/index.ts b/m1/JavaScript-client/src/plugins/index.ts new file mode 100644 index 00000000..00f561a2 --- /dev/null +++ b/m1/JavaScript-client/src/plugins/index.ts @@ -0,0 +1,4 @@ +export * from "./token_client"; +export * from "./aptos_token"; +export * from "./coin_client"; +export * from "./faucet_client"; diff --git a/m1/JavaScript-client/src/plugins/token_client.ts b/m1/JavaScript-client/src/plugins/token_client.ts new file mode 100644 index 00000000..507f143a --- /dev/null +++ b/m1/JavaScript-client/src/plugins/token_client.ts @@ -0,0 +1,672 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +/* eslint-disable max-len */ + +import { AptosAccount } from "../account/aptos_account"; +import { AptosClient, OptionalTransactionArgs } from "../providers/aptos_client"; +import * as TokenTypes from "../aptos_types/token_types"; +import * as Gen from "../generated/index"; +import { HexString, MaybeHexString } from "../utils"; +import { TransactionBuilder, TransactionBuilderRemoteABI, TxnBuilderTypes } from "../transaction_builder"; +import { MAX_U64_BIG_INT } from "../bcs/consts"; +import { AnyNumber, bcsToBytes, Bytes } from "../bcs"; +import { getPropertyValueRaw, PropertyMap } from "../utils/property_map_serde"; +import { Token, TokenData } from "../aptos_types/token_types"; + +/** + * Class for creating, minting and managing minting NFT collections and tokens + */ +export class TokenClient { + aptosClient: AptosClient; + + /** + * Creates new TokenClient instance + * + * @param aptosClient AptosClient instance + */ + constructor(aptosClient: AptosClient) { + this.aptosClient = aptosClient; + } + + /** + * Creates a new NFT collection within the specified account + * + * @param account AptosAccount where collection will be created + * @param name Collection name + * @param description Collection description + * @param uri URL to additional info about collection + * @param maxAmount Maximum number of `token_data` allowed within this collection + * @returns The hash of the transaction submitted to the API + */ + // :!:>createCollection + async createCollection( + account: AptosAccount, + name: string, + description: string, + uri: string, + maxAmount: AnyNumber = MAX_U64_BIG_INT, + extraArgs?: OptionalTransactionArgs, + ): Promise { + // <:!:createCollection + + const builder = new TransactionBuilderRemoteABI(this.aptosClient, { sender: account.address(), ...extraArgs }); + const rawTxn = await builder.build( + 
"0x3::token::create_collection_script", + [], + [name, description, uri, maxAmount, [false, false, false]], + ); + + const bcsTxn = AptosClient.generateBCSTransaction(account, rawTxn); + const pendingTransaction = await this.aptosClient.submitSignedBCSTransaction(bcsTxn); + return pendingTransaction.hash; + } + + /** + * Creates a new NFT within the specified account + * + * @param account AptosAccount where token will be created + * @param collectionName Name of collection, that token belongs to + * @param name Token name + * @param description Token description + * @param supply Token supply + * @param uri URL to additional info about token + * @param max The maxium of tokens can be minted from this token + * @param royalty_payee_address the address to receive the royalty, the address can be a shared account address. + * @param royalty_points_denominator the denominator for calculating royalty + * @param royalty_points_numerator the numerator for calculating royalty + * @param property_keys the property keys for storing on-chain properties + * @param property_values the property values to be stored on-chain + * @param property_types the type of property values + * @returns The hash of the transaction submitted to the API + */ + // :!:>createToken + async createToken( + account: AptosAccount, + collectionName: string, + name: string, + description: string, + supply: number, + uri: string, + max: AnyNumber = MAX_U64_BIG_INT, + royalty_payee_address: MaybeHexString = account.address(), + royalty_points_denominator: number = 0, + royalty_points_numerator: number = 0, + property_keys: Array = [], + property_values: Array = [], + property_types: Array = [], + extraArgs?: OptionalTransactionArgs, + ): Promise { + // <:!:createToken + const builder = new TransactionBuilderRemoteABI(this.aptosClient, { sender: account.address(), ...extraArgs }); + + const rawTxn = await builder.build( + "0x3::token::create_token_script", + [], + [ + collectionName, + name, + description, + supply, + max, + uri, + royalty_payee_address, + royalty_points_denominator, + royalty_points_numerator, + [false, false, false, false, false], + property_keys, + getPropertyValueRaw(property_values, property_types), + property_types, + ], + ); + + const bcsTxn = AptosClient.generateBCSTransaction(account, rawTxn); + const pendingTransaction = await this.aptosClient.submitSignedBCSTransaction(bcsTxn); + return pendingTransaction.hash; + } + + /** + * Creates a new NFT within the specified account + * + * @param account AptosAccount where token will be created + * @param collectionName Name of collection, that token belongs to + * @param name Token name + * @param description Token description + * @param supply Token supply + * @param uri URL to additional info about token + * @param max The maxium of tokens can be minted from this token + * @param royalty_payee_address the address to receive the royalty, the address can be a shared account address. 
+ * @param royalty_points_denominator the denominator for calculating royalty + * @param royalty_points_numerator the numerator for calculating royalty + * @param property_keys the property keys for storing on-chain properties + * @param property_values the property values to be stored on-chain + * @param property_types the type of property values + * @param mutability_config configs which field is mutable + * @returns The hash of the transaction submitted to the API + */ + // :!:>createToken + async createTokenWithMutabilityConfig( + account: AptosAccount, + collectionName: string, + name: string, + description: string, + supply: AnyNumber, + uri: string, + max: AnyNumber = MAX_U64_BIG_INT, + royalty_payee_address: MaybeHexString = account.address(), + royalty_points_denominator: AnyNumber = 0, + royalty_points_numerator: AnyNumber = 0, + property_keys: Array = [], + property_values: Array = [], + property_types: Array = [], + mutability_config: Array = [false, false, false, false, false], + extraArgs?: OptionalTransactionArgs, + ): Promise { + // <:!:createToken + const builder = new TransactionBuilderRemoteABI(this.aptosClient, { sender: account.address(), ...extraArgs }); + const rawTxn = await builder.build( + "0x3::token::create_token_script", + [], + [ + collectionName, + name, + description, + supply, + max, + uri, + royalty_payee_address, + royalty_points_denominator, + royalty_points_numerator, + mutability_config, + property_keys, + property_values, + property_types, + ], + ); + + const bcsTxn = AptosClient.generateBCSTransaction(account, rawTxn); + const pendingTransaction = await this.aptosClient.submitSignedBCSTransaction(bcsTxn); + return pendingTransaction.hash; + } + + /** + * Transfers specified amount of tokens from account to receiver + * + * @param account AptosAccount where token from which tokens will be transfered + * @param receiver Hex-encoded 32 byte Aptos account address to which tokens will be transfered + * @param creator Hex-encoded 32 byte Aptos account address to which created tokens + * @param collectionName Name of collection where token is stored + * @param name Token name + * @param amount Amount of tokens which will be transfered + * @param property_version the version of token PropertyMap with a default value 0. + * @returns The hash of the transaction submitted to the API + */ + async offerToken( + account: AptosAccount, + receiver: MaybeHexString, + creator: MaybeHexString, + collectionName: string, + name: string, + amount: number, + property_version: number = 0, + extraArgs?: OptionalTransactionArgs, + ): Promise { + const builder = new TransactionBuilderRemoteABI(this.aptosClient, { sender: account.address(), ...extraArgs }); + const rawTxn = await builder.build( + "0x3::token_transfers::offer_script", + [], + [receiver, creator, collectionName, name, property_version, amount], + ); + + const bcsTxn = AptosClient.generateBCSTransaction(account, rawTxn); + const pendingTransaction = await this.aptosClient.submitSignedBCSTransaction(bcsTxn); + return pendingTransaction.hash; + } + + /** + * Claims a token on specified account + * + * @param account AptosAccount which will claim token + * @param sender Hex-encoded 32 byte Aptos account address which holds a token + * @param creator Hex-encoded 32 byte Aptos account address which created a token + * @param collectionName Name of collection where token is stored + * @param name Token name + * @param property_version the version of token PropertyMap with a default value 0. 
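+ * @example
+ * // A sketch assuming `tokenClient`, `alice` (creator/offerer) and `bob` (claimer) already exist:
+ * await tokenClient.offerToken(alice, bob.address(), alice.address(), "AliceCollection", "Alice Token", 1);
+ * await tokenClient.claimToken(bob, alice.address(), alice.address(), "AliceCollection", "Alice Token");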
+ * @returns The hash of the transaction submitted to the API + */ + async claimToken( + account: AptosAccount, + sender: MaybeHexString, + creator: MaybeHexString, + collectionName: string, + name: string, + property_version: number = 0, + extraArgs?: OptionalTransactionArgs, + ): Promise<string> { + const builder = new TransactionBuilderRemoteABI(this.aptosClient, { sender: account.address(), ...extraArgs }); + const rawTxn = await builder.build( + "0x3::token_transfers::claim_script", + [], + [sender, creator, collectionName, name, property_version], + ); + + const bcsTxn = AptosClient.generateBCSTransaction(account, rawTxn); + const pendingTransaction = await this.aptosClient.submitSignedBCSTransaction(bcsTxn); + return pendingTransaction.hash; + } + + /** + * Removes a token from the pending claims list + * + * @param account AptosAccount which will remove the token from the pending list + * @param receiver Hex-encoded 32 byte Aptos account address that was offered the token + * @param creator Hex-encoded 32 byte Aptos account address which created the token + * @param collectionName Name of collection where the token is stored + * @param name Token name + * @param property_version the version of token PropertyMap with a default value 0. + * @returns The hash of the transaction submitted to the API + */ + async cancelTokenOffer( + account: AptosAccount, + receiver: MaybeHexString, + creator: MaybeHexString, + collectionName: string, + name: string, + property_version: number = 0, + extraArgs?: OptionalTransactionArgs, + ): Promise<string> { + const builder = new TransactionBuilderRemoteABI(this.aptosClient, { sender: account.address(), ...extraArgs }); + const rawTxn = await builder.build( + "0x3::token_transfers::cancel_offer_script", + [], + [receiver, creator, collectionName, name, property_version], + ); + + const bcsTxn = AptosClient.generateBCSTransaction(account, rawTxn); + const pendingTransaction = await this.aptosClient.submitSignedBCSTransaction(bcsTxn); + return pendingTransaction.hash; + } + + /** + * Directly transfer the specified amount of tokens from account to receiver + * using a single multi-agent transaction. + * + * @param sender AptosAccount from which the tokens will be transferred + * @param receiver AptosAccount that receives the tokens and co-signs the multi-agent transaction + * @param creator Hex-encoded 32 byte Aptos account address which created the tokens + * @param collectionName Name of collection where token is stored + * @param name Token name + * @param amount Amount of tokens which will be transferred + * @param property_version the version of token PropertyMap with a default value 0.
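+ * @example
+ * // A sketch assuming `tokenClient`, `alice` (sender/creator) and `bob` (receiver) already exist;
+ * // both accounts sign, because this builds a multi-agent transaction.
+ * const txnHash = await tokenClient.directTransferToken(alice, bob, alice.address(), "AliceCollection", "Alice Token", 1);
+ * await tokenClient.aptosClient.waitForTransaction(txnHash, { checkSuccess: true });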
+ * @returns The hash of the transaction submitted to the API + */ + async directTransferToken( + sender: AptosAccount, + receiver: AptosAccount, + creator: MaybeHexString, + collectionName: string, + name: string, + amount: AnyNumber, + propertyVersion: AnyNumber = 0, + extraArgs?: OptionalTransactionArgs, + ): Promise { + const builder = new TransactionBuilderRemoteABI(this.aptosClient, { sender: sender.address(), ...extraArgs }); + const rawTxn = await builder.build( + "0x3::token::direct_transfer_script", + [], + [creator, collectionName, name, propertyVersion, amount], + ); + + const multiAgentTxn = new TxnBuilderTypes.MultiAgentRawTransaction(rawTxn, [ + TxnBuilderTypes.AccountAddress.fromHex(receiver.address()), + ]); + + const senderSignature = new TxnBuilderTypes.Ed25519Signature( + sender.signBuffer(TransactionBuilder.getSigningMessage(multiAgentTxn)).toUint8Array(), + ); + + const senderAuthenticator = new TxnBuilderTypes.AccountAuthenticatorEd25519( + new TxnBuilderTypes.Ed25519PublicKey(sender.signingKey.publicKey), + senderSignature, + ); + + const receiverSignature = new TxnBuilderTypes.Ed25519Signature( + receiver.signBuffer(TransactionBuilder.getSigningMessage(multiAgentTxn)).toUint8Array(), + ); + + const receiverAuthenticator = new TxnBuilderTypes.AccountAuthenticatorEd25519( + new TxnBuilderTypes.Ed25519PublicKey(receiver.signingKey.publicKey), + receiverSignature, + ); + + const multiAgentAuthenticator = new TxnBuilderTypes.TransactionAuthenticatorMultiAgent( + senderAuthenticator, + [TxnBuilderTypes.AccountAddress.fromHex(receiver.address())], // Secondary signer addresses + [receiverAuthenticator], // Secondary signer authenticators + ); + + const bcsTxn = bcsToBytes(new TxnBuilderTypes.SignedTransaction(rawTxn, multiAgentAuthenticator)); + + const transactionRes = await this.aptosClient.submitSignedBCSTransaction(bcsTxn); + + return transactionRes.hash; + } + + /** + * User opt-in or out direct transfer through a boolean flag + * + * @param sender AptosAccount where the token will be transferred + * @param optIn boolean value indicates user want to opt-in or out of direct transfer + * @returns The hash of the transaction submitted to the API + */ + async optInTokenTransfer(sender: AptosAccount, optIn: boolean, extraArgs?: OptionalTransactionArgs): Promise { + const builder = new TransactionBuilderRemoteABI(this.aptosClient, { sender: sender.address(), ...extraArgs }); + const rawTxn = await builder.build("0x3::token::opt_in_direct_transfer", [], [optIn]); + const bcsTxn = AptosClient.generateBCSTransaction(sender, rawTxn); + const pendingTransaction = await this.aptosClient.submitSignedBCSTransaction(bcsTxn); + return pendingTransaction.hash; + } + + /** + * Directly transfer token to a receiver. 
The receiver should have opted in to direct transfer + * + * @param sender AptosAccount where the token will be transferred + * @param creator address of the token creator + * @param collectionName Name of collection where token is stored + * @param name Token name + * @param property_version the version of token PropertyMap + * @param amount Amount of tokens which will be transfered + * @returns The hash of the transaction submitted to the API + */ + async transferWithOptIn( + sender: AptosAccount, + creator: MaybeHexString, + collectionName: string, + tokenName: string, + propertyVersion: AnyNumber, + receiver: MaybeHexString, + amount: AnyNumber, + extraArgs?: OptionalTransactionArgs, + ): Promise { + const builder = new TransactionBuilderRemoteABI(this.aptosClient, { sender: sender.address(), ...extraArgs }); + const rawTxn = await builder.build( + "0x3::token::transfer_with_opt_in", + [], + [creator, collectionName, tokenName, propertyVersion, receiver, amount], + ); + const bcsTxn = AptosClient.generateBCSTransaction(sender, rawTxn); + const pendingTransaction = await this.aptosClient.submitSignedBCSTransaction(bcsTxn); + return pendingTransaction.hash; + } + + /** + * BurnToken by Creator + * + * @param creator creator of the token + * @param ownerAddress address of the token owner + * @param collectionName Name of collection where token is stored + * @param name Token name + * @param amount Amount of tokens which will be transfered + * @param property_version the version of token PropertyMap + * @returns The hash of the transaction submitted to the API + */ + async burnByCreator( + creator: AptosAccount, + ownerAddress: MaybeHexString, + collection: String, + name: String, + PropertyVersion: AnyNumber, + amount: AnyNumber, + extraArgs?: OptionalTransactionArgs, + ): Promise { + const builder = new TransactionBuilderRemoteABI(this.aptosClient, { sender: creator.address(), ...extraArgs }); + const rawTxn = await builder.build( + "0x3::token::burn_by_creator", + [], + [ownerAddress, collection, name, PropertyVersion, amount], + ); + + const bcsTxn = AptosClient.generateBCSTransaction(creator, rawTxn); + const pendingTransaction = await this.aptosClient.submitSignedBCSTransaction(bcsTxn); + return pendingTransaction.hash; + } + + /** + * BurnToken by Owner + * + * @param owner creator of the token + * @param creatorAddress address of the token creator + * @param collectionName Name of collection where token is stored + * @param name Token name + * @param amount Amount of tokens which will be transfered + * @param property_version the version of token PropertyMap + * @returns The hash of the transaction submitted to the API + */ + async burnByOwner( + owner: AptosAccount, + creatorAddress: MaybeHexString, + collection: String, + name: String, + PropertyVersion: AnyNumber, + amount: AnyNumber, + extraArgs?: OptionalTransactionArgs, + ): Promise { + const builder = new TransactionBuilderRemoteABI(this.aptosClient, { sender: owner.address(), ...extraArgs }); + const rawTxn = await builder.build( + "0x3::token::burn", + [], + [creatorAddress, collection, name, PropertyVersion, amount], + ); + + const bcsTxn = AptosClient.generateBCSTransaction(owner, rawTxn); + const pendingTransaction = await this.aptosClient.submitSignedBCSTransaction(bcsTxn); + return pendingTransaction.hash; + } + + /** + * creator mutates the properties of the tokens + * + * @param account AptosAccount who modifies the token properties + * @param tokenOwner the address of account owning the token + * @param creator the 
creator of the token + * @param collection_name the name of the token collection + * @param tokenName the name of created token + * @param propertyVersion the property_version of the token to be modified + * @param amount the number of tokens to be modified + * + * @returns The hash of the transaction submitted to the API + */ + async mutateTokenProperties( + account: AptosAccount, + tokenOwner: HexString, + creator: HexString, + collection_name: string, + tokenName: string, + propertyVersion: AnyNumber, + amount: AnyNumber, + keys: Array, + values: Array, + types: Array, + extraArgs?: OptionalTransactionArgs, + ): Promise { + const builder = new TransactionBuilderRemoteABI(this.aptosClient, { sender: account.address(), ...extraArgs }); + const rawTxn = await builder.build( + "0x3::token::mutate_token_properties", + [], + [tokenOwner, creator, collection_name, tokenName, propertyVersion, amount, keys, values, types], + ); + + const bcsTxn = AptosClient.generateBCSTransaction(account, rawTxn); + const pendingTransaction = await this.aptosClient.submitSignedBCSTransaction(bcsTxn); + return pendingTransaction.hash; + } + + /** + * Queries collection data + * @param creator Hex-encoded 32 byte Aptos account address which created a collection + * @param collectionName Collection name + * @returns Collection data in below format + * ``` + * Collection { + * // Describes the collection + * description: string, + * // Unique name within this creators account for this collection + * name: string, + * // URL for additional information/media + * uri: string, + * // Total number of distinct Tokens tracked by the collection + * count: number, + * // Optional maximum number of tokens allowed within this collections + * maximum: number + * } + * ``` + */ + async getCollectionData(creator: MaybeHexString, collectionName: string): Promise { + const resources = await this.aptosClient.getAccountResources(creator); + const accountResource: { type: Gen.MoveStructTag; data: any } = resources.find( + (r) => r.type === "0x3::token::Collections", + )!; + const { handle }: { handle: string } = accountResource.data.collection_data; + const getCollectionTableItemRequest: Gen.TableItemRequest = { + key_type: "0x1::string::String", + value_type: "0x3::token::CollectionData", + key: collectionName, + }; + + const collectionTable = await this.aptosClient.getTableItem(handle, getCollectionTableItemRequest); + return collectionTable; + } + + /** + * Queries token data from collection + * + * @param creator Hex-encoded 32 byte Aptos account address which created a token + * @param collectionName Name of collection, which holds a token + * @param tokenName Token name + * @returns Token data in below format + * ``` + * TokenData { + * // Unique name within this creators account for this Token's collection + * collection: string; + * // Describes this Token + * description: string; + * // The name of this Token + * name: string; + * // Optional maximum number of this type of Token. + * maximum: number; + * // Total number of this type of Token + * supply: number; + * /// URL for additional information / media + * uri: string; + * } + * ``` + */ + // :!:>getTokenData + async getTokenData( + creator: MaybeHexString, + collectionName: string, + tokenName: string, + ): Promise { + const creatorHex = creator instanceof HexString ? 
creator.hex() : creator; + const collection: { type: Gen.MoveStructTag; data: any } = await this.aptosClient.getAccountResource( + creatorHex, + "0x3::token::Collections", + ); + const { handle } = collection.data.token_data; + const tokenDataId = { + creator: creatorHex, + collection: collectionName, + name: tokenName, + }; + + const getTokenTableItemRequest: Gen.TableItemRequest = { + key_type: "0x3::token::TokenDataId", + value_type: "0x3::token::TokenData", + key: tokenDataId, + }; + + // We know the response will be a struct containing TokenData, hence the + // implicit cast. + const rawTokenData = await this.aptosClient.getTableItem(handle, getTokenTableItemRequest); + return new TokenData( + rawTokenData.collection, + rawTokenData.description, + rawTokenData.name, + rawTokenData.maximum, + rawTokenData.supply, + rawTokenData.uri, + rawTokenData.default_properties, + rawTokenData.mutability_config, + ); + } // <:!:getTokenData + + /** + * Queries token balance for the token creator + */ + async getToken( + creator: MaybeHexString, + collectionName: string, + tokenName: string, + property_version: string = "0", + ): Promise { + const tokenDataId: TokenTypes.TokenDataId = { + creator: creator instanceof HexString ? creator.hex() : creator, + collection: collectionName, + name: tokenName, + }; + return this.getTokenForAccount(creator, { + token_data_id: tokenDataId, + property_version, + }); + } + + /** + * Queries token balance for a token account + * @param account Hex-encoded 32 byte Aptos account address which created a token + * @param tokenId token id + * + * TODO: Update this: + * @example + * ``` + * { + * creator: '0x1', + * collection: 'Some collection', + * name: 'Awesome token' + * } + * ``` + * @returns Token object in below format + * ``` + * Token { + * id: TokenId; + * value: number; + * } + * ``` + */ + async getTokenForAccount(account: MaybeHexString, tokenId: TokenTypes.TokenId): Promise { + const tokenStore: { type: Gen.MoveStructTag; data: any } = await this.aptosClient.getAccountResource( + account instanceof HexString ? 
account.hex() : account, + "0x3::token::TokenStore", + ); + const { handle } = tokenStore.data.tokens; + + const getTokenTableItemRequest: Gen.TableItemRequest = { + key_type: "0x3::token::TokenId", + value_type: "0x3::token::Token", + key: tokenId, + }; + + try { + const rawToken = await this.aptosClient.getTableItem(handle, getTokenTableItemRequest); + return new Token(rawToken.id, rawToken.amount, rawToken.token_properties); + } catch (error: any) { + if (error?.status === 404) { + return { + id: tokenId, + amount: "0", + token_properties: new PropertyMap(), + }; + } + return error; + } + } +} diff --git a/m1/JavaScript-client/src/providers/aptos_client.ts b/m1/JavaScript-client/src/providers/aptos_client.ts new file mode 100644 index 00000000..935e977a --- /dev/null +++ b/m1/JavaScript-client/src/providers/aptos_client.ts @@ -0,0 +1,1001 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +import { + clear, + DEFAULT_TXN_EXP_SEC_FROM_NOW, + DEFAULT_MAX_GAS_AMOUNT, + DEFAULT_TXN_TIMEOUT_SEC, + fixNodeUrl, + HexString, + paginateWithCursor, + MaybeHexString, + Memoize, + sleep, + APTOS_COIN, +} from "../utils"; +import { AptosAccount } from "../account/aptos_account"; +import * as Gen from "../generated/index"; +import { + TxnBuilderTypes, + TransactionBuilderEd25519, + TransactionBuilderRemoteABI, + RemoteABIBuilderConfig, + TransactionBuilderMultiEd25519, +} from "../transaction_builder"; +import { + bcsSerializeBytes, + bcsSerializeU8, + bcsToBytes, + Bytes, + Seq, + Serializer, + serializeVector, + Uint64, + AnyNumber, +} from "../bcs"; +import { Ed25519PublicKey, MultiEd25519PublicKey } from "../aptos_types"; + +export interface OptionalTransactionArgs { + maxGasAmount?: Uint64; + gasUnitPrice?: Uint64; + expireTimestamp?: Uint64; +} + +interface PaginationArgs { + start?: AnyNumber; + limit?: number; +} + +/** + * Provides methods for retrieving data from Aptos node. + * For more detailed API specification see {@link https://fullnode.devnet.aptoslabs.com/v1/spec} + */ +export class AptosClient { + client: Gen.AptosGeneratedClient; + + readonly nodeUrl: string; + + /** + * Build a client configured to connect to an Aptos node at the given URL. + * + * Note: If you forget to append `/v1` to the URL, the client constructor + * will automatically append it. If you don't want this URL processing to + * take place, set doNotFixNodeUrl to true. + * + * @param nodeUrl URL of the Aptos Node API endpoint. + * @param config Additional configuration options for the generated Axios client. + */ + constructor(nodeUrl: string, config?: Partial, doNotFixNodeUrl: boolean = false) { + if (!nodeUrl) { + throw new Error("Node URL cannot be empty."); + } + const conf = config === undefined || config === null ? {} : { ...config }; + + if (doNotFixNodeUrl) { + this.nodeUrl = nodeUrl; + } else { + this.nodeUrl = fixNodeUrl(nodeUrl); + } + conf.BASE = this.nodeUrl; + + // Do not carry cookies when `WITH_CREDENTIALS` is explicitly set to `false`. 
By default, cookies will be sent + if (config?.WITH_CREDENTIALS === false) { + conf.WITH_CREDENTIALS = false; + } else { + conf.WITH_CREDENTIALS = true; + } + this.client = new Gen.AptosGeneratedClient(conf); + } + + /** + * Queries an Aptos account by address + * @param accountAddress Hex-encoded 32 byte Aptos account address + * @returns Core account resource, used for identifying account and transaction execution + * @example An example of the returned account + * ``` + * { + * sequence_number: "1", + * authentication_key: "0x5307b5f4bc67829097a8ba9b43dba3b88261eeccd1f709d9bde240fc100fbb69" + * } + * ``` + */ + @parseApiError + async getAccount(accountAddress: MaybeHexString): Promise { + return this.client.accounts.getAccount(HexString.ensure(accountAddress).hex()); + } + + /** + * Queries transactions sent by given account + * @param accountAddress Hex-encoded 32 byte Aptos account address + * @param query Optional pagination object + * @param query.start The sequence number of the start transaction of the page. Default is 0. + * @param query.limit The max number of transactions should be returned for the page. Default is 25. + * @returns An array of on-chain transactions, sent by account + */ + @parseApiError + async getAccountTransactions(accountAddress: MaybeHexString, query?: PaginationArgs): Promise { + return this.client.transactions.getAccountTransactions( + HexString.ensure(accountAddress).hex(), + query?.start?.toString(), + query?.limit, + ); + } + + /** + * Queries modules associated with given account + * + * Note: In order to get all account modules, this function may call the API + * multiple times as it paginates. + * + * @param accountAddress Hex-encoded 32 byte Aptos account address + * @param query.ledgerVersion Specifies ledger version of transactions. By default latest version will be used + * @returns Account modules array for a specific ledger version. + * Module is represented by MoveModule interface. It contains module `bytecode` and `abi`, + * which is JSON representation of a module + */ + @parseApiError + async getAccountModules( + accountAddress: MaybeHexString, + query?: { ledgerVersion?: AnyNumber }, + ): Promise { + // Note: This function does not expose a `limit` parameter because it might + // be ambiguous how this is being used. Is it being passed to getAccountModules + // to limit the number of items per response, or does it limit the total output + // of this function? We avoid this confusion by not exposing the parameter at all. + const f = this.client.accounts.getAccountModules.bind({ httpRequest: this.client.request }); + const out = await paginateWithCursor(f, accountAddress, 1000, query); + return out; + } + + /** + * Queries module associated with given account by module name + * + * Note: In order to get all account resources, this function may call the API + * multiple times as it paginates. + * + * @param accountAddress Hex-encoded 32 byte Aptos account address + * @param moduleName The name of the module + * @param query.ledgerVersion Specifies ledger version of transactions. By default latest version will be used + * @returns Specified module. + * Module is represented by MoveModule interface. 
It contains module `bytecode` and `abi`, + * which JSON representation of a module + */ + @parseApiError + async getAccountModule( + accountAddress: MaybeHexString, + moduleName: string, + query?: { ledgerVersion?: AnyNumber }, + ): Promise { + return this.client.accounts.getAccountModule( + HexString.ensure(accountAddress).hex(), + moduleName, + query?.ledgerVersion?.toString(), + ); + } + + /** + * Queries all resources associated with given account + * @param accountAddress Hex-encoded 32 byte Aptos account address + * @param query.ledgerVersion Specifies ledger version of transactions. By default latest version will be used + * @returns Account resources for a specific ledger version + */ + @parseApiError + async getAccountResources( + accountAddress: MaybeHexString, + query?: { ledgerVersion?: AnyNumber }, + ): Promise { + const f = this.client.accounts.getAccountResources.bind({ httpRequest: this.client.request }); + const out = await paginateWithCursor(f, accountAddress, 9999, query); + return out; + } + + /** + * Queries resource associated with given account by resource type + * @param accountAddress Hex-encoded 32 byte Aptos account address + * @param resourceType String representation of an on-chain Move struct type + * @param query.ledgerVersion Specifies ledger version of transactions. By default latest version will be used + * @returns Account resource of specified type and ledger version + * @example An example of an account resource + * ``` + * { + * type: "0x1::aptos_coin::AptosCoin", + * data: { value: 6 } + * } + * ``` + */ + @parseApiError + async getAccountResource( + accountAddress: MaybeHexString, + resourceType: Gen.MoveStructTag, + query?: { ledgerVersion?: AnyNumber }, + ): Promise { + return this.client.accounts.getAccountResource( + HexString.ensure(accountAddress).hex(), + resourceType, + query?.ledgerVersion?.toString(), + ); + } + + /** Generates a signed transaction that can be submitted to the chain for execution. */ + static generateBCSTransaction(accountFrom: AptosAccount, rawTxn: TxnBuilderTypes.RawTransaction): Uint8Array { + const txnBuilder = new TransactionBuilderEd25519((signingMessage: TxnBuilderTypes.SigningMessage) => { + // @ts-ignore + const sigHexStr = accountFrom.signBuffer(signingMessage); + return new TxnBuilderTypes.Ed25519Signature(sigHexStr.toUint8Array()); + }, accountFrom.pubKey().toUint8Array()); + + return txnBuilder.sign(rawTxn); + } + + /** + * Note: Unless you have a specific reason for using this, it'll probably be simpler + * to use `simulateTransaction`. + * + * Generates a BCS transaction that can be submitted to the chain for simulation. + * + * @param accountFrom The account that will be used to send the transaction + * for simulation. + * @param rawTxn The raw transaction to be simulated, likely created by calling + * the `generateTransaction` function. + * @returns The BCS encoded signed transaction, which you should then pass into + * the `submitBCSSimulation` function. 
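+ *
+ * @example A minimal sketch of the simulation flow (illustrative only; `client` is an instance
+ * of this class, and `alice` and a `rawTxn` previously built with `generateTransaction` are assumed):
+ * ```
+ * const bcsSim = AptosClient.generateBCSSimulation(alice, rawTxn);
+ * const simulated = await client.submitBCSSimulation(bcsSim, { estimateMaxGasAmount: true });
+ * ```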
+ */
+ static generateBCSSimulation(accountFrom: AptosAccount, rawTxn: TxnBuilderTypes.RawTransaction): Uint8Array {
+ // eslint-disable-next-line @typescript-eslint/no-unused-vars
+ const txnBuilder = new TransactionBuilderEd25519((_signingMessage: TxnBuilderTypes.SigningMessage) => {
+ // @ts-ignore
+ const invalidSigBytes = new Uint8Array(64);
+ return new TxnBuilderTypes.Ed25519Signature(invalidSigBytes);
+ }, accountFrom.pubKey().toUint8Array());
+
+ return txnBuilder.sign(rawTxn);
+ }
+
+ /** Generates an entry function transaction request that can be used to produce a raw transaction,
+ * which can then be signed and submitted to the blockchain.
+ * This function fetches the remote ABI and uses it to serialize the data, therefore
+ * users don't need to handle serialization by themselves.
+ * @param sender Hex-encoded 32 byte Aptos account address of transaction sender
+ * @param payload Entry function transaction payload type
+ * @param options Options that overwrite the default transaction options.
+ * @returns A raw transaction object
+ */
+ async generateTransaction(
+ sender: MaybeHexString,
+ payload: Gen.EntryFunctionPayload,
+ options?: Partial<Gen.SubmitTransactionRequest>,
+ ): Promise<TxnBuilderTypes.RawTransaction> {
+ const config: RemoteABIBuilderConfig = { sender };
+ if (options?.sequence_number) {
+ config.sequenceNumber = options.sequence_number;
+ }
+
+ if (options?.gas_unit_price) {
+ config.gasUnitPrice = options.gas_unit_price;
+ }
+
+ if (options?.max_gas_amount) {
+ config.maxGasAmount = options.max_gas_amount;
+ }
+
+ if (options?.expiration_timestamp_secs) {
+ const timestamp = Number.parseInt(options.expiration_timestamp_secs, 10);
+ config.expSecFromNow = timestamp - Math.floor(Date.now() / 1000);
+ }
+
+ const builder = new TransactionBuilderRemoteABI(this, config);
+ return builder.build(payload.function, payload.type_arguments, payload.arguments);
+ }
+
+ /** Converts a transaction request produced by `generateTransaction` into a properly
+ * signed transaction, which can then be submitted to the blockchain
+ * @param accountFrom AptosAccount of transaction sender
+ * @param rawTransaction A raw transaction generated by `generateTransaction` method
+ * @returns A transaction, signed with sender account
+ */
+ // eslint-disable-next-line class-methods-use-this
+ async signTransaction(
+ accountFrom: AptosAccount,
+ rawTransaction: TxnBuilderTypes.RawTransaction,
+ ): Promise<Uint8Array> {
+ return Promise.resolve(AptosClient.generateBCSTransaction(accountFrom, rawTransaction));
+ }
+
+ /**
+ * Event types are globally identifiable by an account `address` and
+ * monotonically increasing `creation_number`, one per event type emitted
+ * to the given account. This API returns events corresponding to that
+ * event type.
+ * @param address Hex-encoded 32 byte Aptos account, with or without a `0x` prefix,
+ * for which events are queried. This refers to the account that events were emitted
+ * to, not the account hosting the move module that emits that event type.
+ * @param creationNumber Creation number corresponding to the event type.
+ * @returns Array of events associated with the given account and creation number.
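+ *
+ * @example A minimal sketch (illustrative only; `client` is an instance of this class and
+ * `accountAddress` is assumed):
+ * ```
+ * const events = await client.getEventsByCreationNumber(accountAddress, 0, { limit: 10 });
+ * ```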
+ */ + @parseApiError + async getEventsByCreationNumber( + address: MaybeHexString, + creationNumber: AnyNumber | string, + query?: PaginationArgs, + ): Promise { + return this.client.events.getEventsByCreationNumber( + HexString.ensure(address).hex(), + creationNumber.toString(), + query?.start?.toString(), + query?.limit, + ); + } + + /** + * This API uses the given account `address`, `eventHandle`, and `fieldName` + * to build a key that can globally identify an event types. It then uses this + * key to return events emitted to the given account matching that event type. + * @param address Hex-encoded 32 byte Aptos account, with or without a `0x` prefix, + * for which events are queried. This refers to the account that events were emitted + * to, not the account hosting the move module that emits that event type. + * @param eventHandleStruct String representation of an on-chain Move struct type. + * (e.g. `0x1::coin::CoinStore<0x1::aptos_coin::AptosCoin>`) + * @param fieldName The field name of the EventHandle in the struct + * @param query Optional query object + * @param query.start The start sequence number in the EVENT STREAM, defaulting to the latest event. + * The events are returned in the reverse order of sequence number + * @param query.limit The number of events to be returned. The default is 25. + * @returns Array of events + */ + @parseApiError + async getEventsByEventHandle( + address: MaybeHexString, + eventHandleStruct: Gen.MoveStructTag, + fieldName: string, + query?: PaginationArgs, + ): Promise { + return this.client.events.getEventsByEventHandle( + HexString.ensure(address).hex(), + eventHandleStruct, + fieldName, + query?.start?.toString(), + query?.limit, + ); + } + + /** + * Submits a signed transaction to the transaction endpoint. + * @param signedTxn A transaction, signed by `signTransaction` method + * @returns Transaction that is accepted and submitted to mempool + */ + async submitTransaction(signedTxn: Uint8Array): Promise { + return this.submitSignedBCSTransaction(signedTxn); + } + + /** + * Generates and submits a transaction to the transaction simulation + * endpoint. For this we generate a transaction with a fake signature. + * + * @param accountOrPubkey The sender or sender's public key. When private key is available, `AptosAccount` instance + * can be used to send the transaction for simulation. If private key is not available, sender's public key can be + * used to send the transaction for simulation. + * @param rawTransaction The raw transaction to be simulated, likely created + * by calling the `generateTransaction` function. + * @param query.estimateGasUnitPrice If set to true, the gas unit price in the + * transaction will be ignored and the estimated value will be used. + * @param query.estimateMaxGasAmount If set to true, the max gas value in the + * transaction will be ignored and the maximum possible gas will be used. + * @param query.estimatePrioritizedGasUnitPrice If set to true, the transaction will use a higher price than the + * original estimate. 
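+ *
+ * @example A minimal sketch (illustrative only; `client` is an instance of this class, and
+ * `alice` and a `rawTxn` built with `generateTransaction` are assumed):
+ * ```
+ * const simulated = await client.simulateTransaction(alice, rawTxn, {
+ *   estimateGasUnitPrice: true,
+ *   estimateMaxGasAmount: true,
+ *   estimatePrioritizedGasUnitPrice: false,
+ * });
+ * ```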
+ * @returns The BCS encoded signed transaction, which you should then provide + * + */ + async simulateTransaction( + accountOrPubkey: AptosAccount | Ed25519PublicKey | MultiEd25519PublicKey, + rawTransaction: TxnBuilderTypes.RawTransaction, + query?: { + estimateGasUnitPrice?: boolean; + estimateMaxGasAmount?: boolean; + estimatePrioritizedGasUnitPrice: boolean; + }, + ): Promise { + let signedTxn: Uint8Array; + + if (accountOrPubkey instanceof AptosAccount) { + signedTxn = AptosClient.generateBCSSimulation(accountOrPubkey, rawTransaction); + } else if (accountOrPubkey instanceof MultiEd25519PublicKey) { + const txnBuilder = new TransactionBuilderMultiEd25519(() => { + const { threshold } = accountOrPubkey; + const bits: Seq = []; + const signatures: TxnBuilderTypes.Ed25519Signature[] = []; + for (let i = 0; i < threshold; i += 1) { + bits.push(i); + signatures.push(new TxnBuilderTypes.Ed25519Signature(new Uint8Array(64))); + } + const bitmap = TxnBuilderTypes.MultiEd25519Signature.createBitmap(bits); + return new TxnBuilderTypes.MultiEd25519Signature(signatures, bitmap); + }, accountOrPubkey); + + signedTxn = txnBuilder.sign(rawTransaction); + } else { + const txnBuilder = new TransactionBuilderEd25519(() => { + const invalidSigBytes = new Uint8Array(64); + return new TxnBuilderTypes.Ed25519Signature(invalidSigBytes); + }, accountOrPubkey.toBytes()); + + signedTxn = txnBuilder.sign(rawTransaction); + } + return this.submitBCSSimulation(signedTxn, query); + } + + /** + * Submits a signed transaction to the endpoint that takes BCS payload + * + * @param signedTxn A BCS transaction representation + * @returns Transaction that is accepted and submitted to mempool + */ + @parseApiError + async submitSignedBCSTransaction(signedTxn: Uint8Array): Promise { + // Need to construct a customized post request for transactions in BCS payload + return this.client.request.request({ + url: "/transactions", + method: "POST", + body: signedTxn, + mediaType: "application/x.aptos.signed_transaction+bcs", + }); + } + + /** + * Submits the BCS serialization of a signed transaction to the simulation endpoint. + * + * @param bcsBody The output of `generateBCSSimulation`. + * @param query?.estimateGasUnitPrice If set to true, the gas unit price in the + * transaction will be ignored and the estimated value will be used. + * @param query?.estimateMaxGasAmount If set to true, the max gas value in the + * transaction will be ignored and the maximum possible gas will be used. + * @param query?.estimatePrioritizedGasUnitPrice If set to true, the transaction will use a higher price than the + * original estimate. + * @returns Simulation result in the form of UserTransaction. + */ + @parseApiError + async submitBCSSimulation( + bcsBody: Uint8Array, + query?: { + estimateGasUnitPrice?: boolean; + estimateMaxGasAmount?: boolean; + estimatePrioritizedGasUnitPrice?: boolean; + }, + ): Promise { + // Need to construct a customized post request for transactions in BCS payload. + const queryParams = { + estimate_gas_unit_price: query?.estimateGasUnitPrice ?? false, + estimate_max_gas_amount: query?.estimateMaxGasAmount ?? false, + estimate_prioritized_gas_unit_price: query?.estimatePrioritizedGasUnitPrice ?? false, + }; + return this.client.request.request({ + url: "/transactions/simulate", + query: queryParams, + method: "POST", + body: bcsBody, + mediaType: "application/x.aptos.signed_transaction+bcs", + }); + } + + /** + * Queries on-chain transactions. This function will not return pending + * transactions. 
For that, use `getTransactionsByHash`. + * + * @param query Optional pagination object + * @param query.start The start transaction version of the page. Default is the latest ledger version + * @param query.limit The max number of transactions should be returned for the page. Default is 25 + * @returns Array of on-chain transactions + */ + @parseApiError + async getTransactions(query?: PaginationArgs): Promise { + return this.client.transactions.getTransactions(query?.start?.toString(), query?.limit); + } + + /** + * @param txnHash - Transaction hash should be hex-encoded bytes string with 0x prefix. + * @returns Transaction from mempool (pending) or on-chain (committed) transaction + */ + @parseApiError + async getTransactionByHash(txnHash: string): Promise { + return this.client.transactions.getTransactionByHash(txnHash); + } + + /** + * @param txnVersion - Transaction version is an uint64 number. + * @returns On-chain transaction. Only on-chain transactions have versions, so this + * function cannot be used to query pending transactions. + */ + @parseApiError + async getTransactionByVersion(txnVersion: AnyNumber): Promise { + return this.client.transactions.getTransactionByVersion(txnVersion.toString()); + } + + /** + * Defines if specified transaction is currently in pending state + * @param txnHash A hash of transaction + * + * To create a transaction hash: + * + * 1. Create hash message bytes: "Aptos::Transaction" bytes + BCS bytes of Transaction. + * 2. Apply hash algorithm SHA3-256 to the hash message bytes. + * 3. Hex-encode the hash bytes with 0x prefix. + * + * @returns `true` if transaction is in pending state and `false` otherwise + */ + async transactionPending(txnHash: string): Promise { + try { + const response = await this.client.transactions.getTransactionByHash(txnHash); + return response.type === "pending_transaction"; + } catch (e: any) { + if (e?.status === 404) { + return true; + } + throw e; + } + } + + /** + * Wait for a transaction to move past pending state. + * + * There are 4 possible outcomes: + * 1. Transaction is processed and successfully committed to the blockchain. + * 2. Transaction is rejected for some reason, and is therefore not committed + * to the blockchain. + * 3. Transaction is committed but execution failed, meaning no changes were + * written to the blockchain state. + * 4. Transaction is not processed within the specified timeout. + * + * In case 1, this function resolves with the transaction response returned + * by the API. + * + * In case 2, the function will throw an ApiError, likely with an HTTP status + * code indicating some problem with the request (e.g. 400). + * + * In case 3, if `checkSuccess` is false (the default), this function returns + * the transaction response just like in case 1, in which the `success` field + * will be false. If `checkSuccess` is true, it will instead throw a + * FailedTransactionError. + * + * In case 4, this function throws a WaitForTransactionError. + * + * @param txnHash The hash of a transaction previously submitted to the blockchain. + * @param extraArgs.timeoutSecs Timeout in seconds. Defaults to 20 seconds. + * @param extraArgs.checkSuccess See above. Defaults to false. + * @returns See above. 
+ * + * @example + * ``` + * const rawTransaction = await this.generateRawTransaction(sender.address(), payload, extraArgs); + * const bcsTxn = AptosClient.generateBCSTransaction(sender, rawTransaction); + * const pendingTransaction = await this.submitSignedBCSTransaction(bcsTxn); + * const transasction = await this.aptosClient.waitForTransactionWithResult(pendingTransaction.hash); + * ``` + */ + async waitForTransactionWithResult( + txnHash: string, + extraArgs?: { timeoutSecs?: number; checkSuccess?: boolean }, + ): Promise { + const timeoutSecs = extraArgs?.timeoutSecs ?? DEFAULT_TXN_TIMEOUT_SEC; + const checkSuccess = extraArgs?.checkSuccess ?? false; + + let isPending = true; + let count = 0; + let lastTxn: Gen.Transaction | undefined; + while (isPending) { + if (count >= timeoutSecs) { + break; + } + try { + // eslint-disable-next-line no-await-in-loop + lastTxn = await this.client.transactions.getTransactionByHash(txnHash); + isPending = lastTxn.type === "pending_transaction"; + if (!isPending) { + break; + } + } catch (e) { + // In short, this means we will retry if it was an ApiError and the code was 404 or 5xx. + const isApiError = e instanceof Gen.ApiError; + const isRequestError = isApiError && e.status !== 404 && e.status >= 400 && e.status < 500; + if (!isApiError || isRequestError) { + throw e; + } + } + // eslint-disable-next-line no-await-in-loop + await sleep(1000); + count += 1; + } + + // There is a chance that lastTxn is still undefined. Let's throw some error here + if (lastTxn === undefined) { + throw new Error(`Waiting for transaction ${txnHash} failed`); + } + + if (isPending) { + throw new WaitForTransactionError( + `Waiting for transaction ${txnHash} timed out after ${timeoutSecs} seconds`, + lastTxn, + ); + } + if (!checkSuccess) { + return lastTxn; + } + if (!(lastTxn as any)?.success) { + throw new FailedTransactionError( + `Transaction ${txnHash} committed to the blockchain but execution failed`, + lastTxn, + ); + } + return lastTxn; + } + + /** + * This function works the same as `waitForTransactionWithResult` except it + * doesn't return the transaction in those cases, it returns nothing. For + * more information, see the documentation for `waitForTransactionWithResult`. + */ + async waitForTransaction( + txnHash: string, + extraArgs?: { timeoutSecs?: number; checkSuccess?: boolean }, + ): Promise { + await this.waitForTransactionWithResult(txnHash, extraArgs); + } + + /** + * Queries the latest ledger information + * @returns Latest ledger information + * @example Example of returned data + * ``` + * { + * chain_id: 15, + * epoch: 6, + * ledgerVersion: "2235883", + * ledger_timestamp:"1654580922321826" + * } + * ``` + */ + @parseApiError + async getLedgerInfo(): Promise { + return this.client.general.getLedgerInfo(); + } + + /** + * @returns Current chain id + */ + @Memoize() + async getChainId(): Promise { + const result = await this.getLedgerInfo(); + return result.chain_id; + } + + /** + * Gets a table item for a table identified by the handle and the key for the item. + * Key and value types need to be passed in to help with key serialization and value deserialization. + * @param handle A pointer to where that table is stored + * @param data Object, that describes table item + * @param data.key_type Move type of table key (e.g. `vector`) + * @param data.value_type Move type of table value (e.g. 
`u64`) + * @param data.key Value of table key + * @returns Table item value rendered in JSON + */ + @parseApiError + async getTableItem(handle: string, data: Gen.TableItemRequest, query?: { ledgerVersion?: AnyNumber }): Promise { + const tableItem = await this.client.tables.getTableItem(handle, data, query?.ledgerVersion?.toString()); + return tableItem; + } + + /** + * Generates a raw transaction out of a transaction payload + * @param accountFrom + * @param payload + * @param extraArgs + * @returns A raw transaction object + */ + async generateRawTransaction( + accountFrom: HexString, + payload: TxnBuilderTypes.TransactionPayload, + extraArgs?: OptionalTransactionArgs, + ): Promise { + const [{ sequence_number: sequenceNumber }, chainId, { gas_estimate: gasEstimate }] = await Promise.all([ + this.getAccount(accountFrom), + this.getChainId(), + extraArgs?.gasUnitPrice ? Promise.resolve({ gas_estimate: extraArgs.gasUnitPrice }) : this.estimateGasPrice(), + ]); + + const { maxGasAmount, gasUnitPrice, expireTimestamp } = { + maxGasAmount: BigInt(DEFAULT_MAX_GAS_AMOUNT), + gasUnitPrice: BigInt(gasEstimate), + expireTimestamp: BigInt(Math.floor(Date.now() / 1000) + DEFAULT_TXN_EXP_SEC_FROM_NOW), + ...extraArgs, + }; + + return new TxnBuilderTypes.RawTransaction( + TxnBuilderTypes.AccountAddress.fromHex(accountFrom), + BigInt(sequenceNumber), + payload, + maxGasAmount, + gasUnitPrice, + expireTimestamp, + new TxnBuilderTypes.ChainId(chainId), + ); + } + + /** + * Helper for generating, signing, and submitting a transaction. + * + * @param sender AptosAccount of transaction sender. + * @param payload Transaction payload. + * @param extraArgs Extra args for building the transaction payload. + * @returns The transaction response from the API. + */ + async generateSignSubmitTransaction( + sender: AptosAccount, + payload: TxnBuilderTypes.TransactionPayload, + extraArgs?: OptionalTransactionArgs, + ): Promise { + // :!:>generateSignSubmitTransactionInner + const rawTransaction = await this.generateRawTransaction(sender.address(), payload, extraArgs); + const bcsTxn = AptosClient.generateBCSTransaction(sender, rawTransaction); + const pendingTransaction = await this.submitSignedBCSTransaction(bcsTxn); + return pendingTransaction.hash; + // <:!:generateSignSubmitTransactionInner + } + + /** + * Publishes a move package. `packageMetadata` and `modules` can be generated with command + * `aptos move compile --save-metadata [ --included-artifacts=<...> ]`. + * @param sender + * @param packageMetadata package metadata bytes + * @param modules bytecodes of modules + * @param extraArgs + * @returns Transaction hash + */ + async publishPackage( + sender: AptosAccount, + packageMetadata: Bytes, + modules: Seq, + extraArgs?: OptionalTransactionArgs, + ): Promise { + const codeSerializer = new Serializer(); + serializeVector(modules, codeSerializer); + + const payload = new TxnBuilderTypes.TransactionPayloadEntryFunction( + TxnBuilderTypes.EntryFunction.natural( + "0x1::code", + "publish_package_txn", + [], + [bcsSerializeBytes(packageMetadata), codeSerializer.getBytes()], + ), + ); + + return this.generateSignSubmitTransaction(sender, payload, extraArgs); + } + + /** + * Helper for generating, submitting, and waiting for a transaction, and then + * checking whether it was committed successfully. Under the hood this is just + * `generateSignSubmitTransaction` and then `waitForTransactionWithResult`, see + * those for information about the return / error semantics of this function. 
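+ *
+ * @example A minimal sketch (illustrative only; `client` is an instance of this class, and
+ * `sender` and an entry-function `payload` of type `TxnBuilderTypes.TransactionPayload` are assumed):
+ * ```
+ * const txn = await client.generateSignSubmitWaitForTransaction(sender, payload, {
+ *   checkSuccess: true,
+ *   timeoutSecs: 30,
+ * });
+ * ```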
+ */ + async generateSignSubmitWaitForTransaction( + sender: AptosAccount, + payload: TxnBuilderTypes.TransactionPayload, + extraArgs?: OptionalTransactionArgs & { + checkSuccess?: boolean; + timeoutSecs?: number; + }, + ): Promise { + const txnHash = await this.generateSignSubmitTransaction(sender, payload, extraArgs); + return this.waitForTransactionWithResult(txnHash, extraArgs); + } + + @parseApiError + @Memoize({ + ttlMs: 5 * 60 * 1000, // cache result for 5min + tags: ["gas_estimates"], + }) + async estimateGasPrice(): Promise { + return this.client.transactions.estimateGasPrice(); + } + + @parseApiError + async estimateMaxGasAmount(forAccount: MaybeHexString): Promise { + // Only Aptos utility coin is accepted as gas + const typeTag = `0x1::coin::CoinStore<${APTOS_COIN}>`; + + const [{ gas_estimate: gasUnitPrice }, resources] = await Promise.all([ + this.estimateGasPrice(), + this.getAccountResources(forAccount), + ]); + + const accountResource = resources.find((r) => r.type === typeTag); + const balance = BigInt((accountResource!.data as any).coin.value); + return balance / BigInt(gasUnitPrice); + } + + /** + * Rotate an account's auth key. After rotation, only the new private key can be used to sign txns for + * the account. + * WARNING: You must create a new instance of AptosAccount after using this function. + * @param forAccount Account of which the auth key will be rotated + * @param toPrivateKeyBytes New private key + * @param extraArgs Extra args for building the transaction payload. + * @returns PendingTransaction + */ + async rotateAuthKeyEd25519( + forAccount: AptosAccount, + toPrivateKeyBytes: Uint8Array, + extraArgs?: OptionalTransactionArgs, + ): Promise { + const { sequence_number: sequenceNumber, authentication_key: authKey } = await this.getAccount( + forAccount.address(), + ); + + const helperAccount = new AptosAccount(toPrivateKeyBytes); + + const challenge = new TxnBuilderTypes.RotationProofChallenge( + TxnBuilderTypes.AccountAddress.CORE_CODE_ADDRESS, + "account", + "RotationProofChallenge", + BigInt(sequenceNumber), + TxnBuilderTypes.AccountAddress.fromHex(forAccount.address()), + new TxnBuilderTypes.AccountAddress(new HexString(authKey).toUint8Array()), + helperAccount.pubKey().toUint8Array(), + ); + + const challengeHex = HexString.fromUint8Array(bcsToBytes(challenge)); + + const proofSignedByCurrentPrivateKey = forAccount.signHexString(challengeHex); + + const proofSignedByNewPrivateKey = helperAccount.signHexString(challengeHex); + + const payload = new TxnBuilderTypes.TransactionPayloadEntryFunction( + TxnBuilderTypes.EntryFunction.natural( + "0x1::account", + "rotate_authentication_key", + [], + [ + bcsSerializeU8(0), // ed25519 scheme + bcsSerializeBytes(forAccount.pubKey().toUint8Array()), + bcsSerializeU8(0), // ed25519 scheme + bcsSerializeBytes(helperAccount.pubKey().toUint8Array()), + bcsSerializeBytes(proofSignedByCurrentPrivateKey.toUint8Array()), + bcsSerializeBytes(proofSignedByNewPrivateKey.toUint8Array()), + ], + ), + ); + + const rawTransaction = await this.generateRawTransaction(forAccount.address(), payload, extraArgs); + const bcsTxn = AptosClient.generateBCSTransaction(forAccount, rawTransaction); + return this.submitSignedBCSTransaction(bcsTxn); + } + + /** + * Lookup the original address by the current derived address + * @param addressOrAuthKey + * @returns original address + */ + async lookupOriginalAddress(addressOrAuthKey: MaybeHexString): Promise { + const resource = await this.getAccountResource("0x1", 
"0x1::account::OriginatingAddress"); + + const { + address_map: { handle }, + } = resource.data as any; + + const origAddress = await this.getTableItem(handle, { + key_type: "address", + value_type: "address", + key: HexString.ensure(addressOrAuthKey).hex(), + }); + + return new HexString(origAddress); + } + + /** + * Get block by height + * + * @param blockHeight Block height to lookup. Starts at 0 + * @param withTransactions If set to true, include all transactions in the block + * + * @returns Block + */ + @parseApiError + async getBlockByHeight(blockHeight: number, withTransactions?: boolean): Promise { + return this.client.blocks.getBlockByHeight(blockHeight, withTransactions); + } + + /** + * Get block by block transaction version + * + * @param version Ledger version to lookup block information for + * @param withTransactions If set to true, include all transactions in the block + * + * @returns Block + */ + @parseApiError + async getBlockByVersion(version: number, withTransactions?: boolean): Promise { + return this.client.blocks.getBlockByVersion(version, withTransactions); + } + + /** + * Call for a move view function + * + * @param payload Transaction payload + * @param version (optional) Ledger version to lookup block information for + * + * @returns MoveValue[] + */ + @parseApiError + async view(payload: Gen.ViewRequest, ledger_version?: string): Promise { + return this.client.view.view(payload, ledger_version); + } + + // eslint-disable-next-line class-methods-use-this + clearCache(tags: string[]) { + clear(tags); + } +} + +export class ApiError extends Error { + constructor( + public readonly status: number, + public readonly message: string, + public readonly errorCode?: string, + public readonly vmErrorCode?: string, + ) { + super(message); + } +} + +/** + * This error is used by `waitForTransactionWithResult` when waiting for a + * transaction times out. + */ +export class WaitForTransactionError extends Error { + public readonly lastSubmittedTransaction: Gen.Transaction | undefined; + + constructor(message: string, lastSubmittedTransaction: Gen.Transaction | undefined) { + super(message); + this.lastSubmittedTransaction = lastSubmittedTransaction; + } +} + +/** + * This error is used by `waitForTransactionWithResult` if `checkSuccess` is true. + * See that function for more information. + */ +export class FailedTransactionError extends Error { + public readonly transaction: Gen.Transaction; + + constructor(message: string, transaction: Gen.Transaction) { + super(message); + this.transaction = transaction; + } +} + +/** + * Creates a decorator to parse Gen.ApiError and return a wrapped error that is more developer friendly + */ +function parseApiError(target: unknown, propertyKey: string, descriptor: PropertyDescriptor) { + const childFunction = descriptor.value; + // eslint-disable-next-line no-param-reassign + descriptor.value = async function wrapper(...args: any[]) { + try { + // We need to explicitly await here so that the function is called and + // potentially throws an error. If we just return without awaiting, the + // promise is returned directly and the catch block cannot trigger. 
+ const res = await childFunction.apply(this, [...args]); + return res; + } catch (e) { + if (e instanceof Gen.ApiError) { + throw new ApiError( + e.status, + JSON.stringify({ message: e.message, ...e.body }), + e.body?.error_code, + e.body?.vm_error_code, + ); + } + throw e; + } + }; + return descriptor; +} diff --git a/m1/JavaScript-client/src/providers/index.ts b/m1/JavaScript-client/src/providers/index.ts new file mode 100644 index 00000000..a6528d3c --- /dev/null +++ b/m1/JavaScript-client/src/providers/index.ts @@ -0,0 +1,3 @@ +export * from "./indexer"; +export * from "./aptos_client"; +export * from "./provider"; diff --git a/m1/JavaScript-client/src/providers/indexer.ts b/m1/JavaScript-client/src/providers/indexer.ts new file mode 100644 index 00000000..1aed1580 --- /dev/null +++ b/m1/JavaScript-client/src/providers/indexer.ts @@ -0,0 +1,313 @@ +import axios from "axios"; + +import { AnyNumber } from "../bcs/types"; +import { HexString, MaybeHexString } from "../utils"; +import { + GetAccountTokensCountQuery, + GetAccountCoinsDataQuery, + GetAccountCurrentTokensQuery, + GetAccountTransactionsCountQuery, + GetAccountTransactionsDataQuery, + GetCurrentDelegatorBalancesCountQuery, + GetDelegatedStakingActivitiesQuery, + GetIndexerLedgerInfoQuery, + GetTokenActivitiesCountQuery, + GetTokenActivitiesQuery, + GetTokenDataQuery, + GetTokenOwnersDataQuery, + GetTopUserTransactionsQuery, + GetUserTransactionsQuery, +} from "../indexer/generated/operations"; +import { + GetAccountTokensCount, + GetAccountCoinsData, + GetAccountCurrentTokens, + GetAccountTransactionsCount, + GetAccountTransactionsData, + GetCurrentDelegatorBalancesCount, + GetDelegatedStakingActivities, + GetIndexerLedgerInfo, + GetTokenActivities, + GetTokenActivitiesCount, + GetTokenData, + GetTokenOwnersData, + GetTopUserTransactions, + GetUserTransactions, +} from "../indexer/generated/queries"; + +/** + * Controls the number of results that are returned and the starting position of those results. + * limit specifies the maximum number of items or records to return in a query result. + * offset parameter specifies the starting position of the query result within the set of data. + * For example, if you want to retrieve records 11-20, + * you would set the offset parameter to 10 (i.e., the index of the first record to retrieve is 10) + * and the limit parameter to 10 (i.e., the number of records to retrieve is 10)) + */ +interface PaginationArgs { + offset?: AnyNumber; + limit?: number; +} + +type GraphqlQuery = { + query: string; + variables?: {}; +}; +/** + * Provides methods for retrieving data from Aptos Indexer. + * For more detailed Queries specification see + * {@link https://cloud.hasura.io/public/graphiql?endpoint=https://indexer.mainnet.aptoslabs.com/v1/graphql} + */ +export class IndexerClient { + endpoint: string; + + /** + * @param endpoint URL of the Aptos Indexer API endpoint. + */ + constructor(endpoint: string) { + this.endpoint = endpoint; + } + + /** + * Indexer only accepts address in the long format, i.e a 66 chars long -> 0x<64 chars> + * This method makes sure address is 66 chars long. + * @param address + */ + static validateAddress(address: string): void { + if (address.length < 66) { + throw new Error("Address needs to be 66 chars long."); + } + } + + /** + * Builds a axios client call to fetch data from Aptos Indexer. + * + * @param graphqlQuery A GraphQL query to pass in the `data` axios call. 
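+ *
+ * @example A minimal sketch of a raw indexer query (illustrative only; `indexerClient` is an
+ * instance of this class and the GraphQL field names are assumed):
+ * ```
+ * const data = await indexerClient.queryIndexer({
+ *   query: `query LedgerInfo { ledger_infos { chain_id } }`,
+ *   variables: {},
+ * });
+ * ```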
+ */ + async queryIndexer(graphqlQuery: GraphqlQuery): Promise { + const { data } = await axios.post(this.endpoint, graphqlQuery); + if (data.errors) { + throw new Error(`Indexer data error ${JSON.stringify(data.errors, null, " ")}`); + } + return data.data; + } + + /** + * Queries Indexer Ledger Info + * + * @returns GetLedgerInfoQuery response type + */ + async getIndexerLedgerInfo(): Promise { + const graphqlQuery = { + query: GetIndexerLedgerInfo, + }; + return this.queryIndexer(graphqlQuery); + } + + /** + * Queries an Aptos account's NFTs by owner address + * + * @param ownerAddress Hex-encoded 32 byte Aptos account address + * @returns GetAccountCurrentTokensQuery response type + */ + async getAccountNFTs(ownerAddress: MaybeHexString, options?: PaginationArgs): Promise { + const address = HexString.ensure(ownerAddress).hex(); + IndexerClient.validateAddress(address); + const graphqlQuery = { + query: GetAccountCurrentTokens, + variables: { address, offset: options?.offset, limit: options?.limit }, + }; + + return this.queryIndexer(graphqlQuery); + } + + /** + * Queries a token activities by token id hash + * + * @param idHash token id hash + * @returns GetTokenActivitiesQuery response type + */ + async getTokenActivities(idHash: string, options?: PaginationArgs): Promise { + const graphqlQuery = { + query: GetTokenActivities, + variables: { idHash, offset: options?.offset, limit: options?.limit }, + }; + return this.queryIndexer(graphqlQuery); + } + + /** + * Queries an account coin data + * + * @param ownerAddress Owner address + * @returns GetAccountCoinsDataQuery response type + */ + async getAccountCoinsData(ownerAddress: MaybeHexString, options?: PaginationArgs): Promise { + const address = HexString.ensure(ownerAddress).hex(); + IndexerClient.validateAddress(address); + const graphqlQuery = { + query: GetAccountCoinsData, + variables: { owner_address: address, offset: options?.offset, limit: options?.limit }, + }; + return this.queryIndexer(graphqlQuery); + } + + /** + * Gets the count of tokens owned by an account + * + * @param ownerAddress Owner address + * @returns AccountTokensCountQuery response type + */ + async getAccountTokensCount(ownerAddress: MaybeHexString): Promise { + const address = HexString.ensure(ownerAddress).hex(); + IndexerClient.validateAddress(address); + const graphqlQuery = { + query: GetAccountTokensCount, + variables: { owner_address: address }, + }; + return this.queryIndexer(graphqlQuery); + } + + /** + * Gets the count of transactions submitted by an account + * + * @param address Account address + * @returns GetAccountTransactionsCountQuery response type + */ + async getAccountTransactionsCount(accountAddress: MaybeHexString): Promise { + const address = HexString.ensure(accountAddress).hex(); + IndexerClient.validateAddress(address); + const graphqlQuery = { + query: GetAccountTransactionsCount, + variables: { address }, + }; + return this.queryIndexer(graphqlQuery); + } + + /** + * Queries an account transactions data + * + * @param address Account address + * @returns GetAccountTransactionsDataQuery response type + */ + async getAccountTransactionsData( + accountAddress: MaybeHexString, + options?: PaginationArgs, + ): Promise { + const address = HexString.ensure(accountAddress).hex(); + IndexerClient.validateAddress(address); + const graphqlQuery = { + query: GetAccountTransactionsData, + variables: { address, offset: options?.offset, limit: options?.limit }, + }; + return this.queryIndexer(graphqlQuery); + } + + /** + * Queries delegated 
staking activities + * + * @param delegatorAddress Delegator address + * @param poolAddress Pool address + * @returns GetDelegatedStakingActivitiesQuery response type + */ + async getDelegatedStakingActivities( + delegatorAddress: MaybeHexString, + poolAddress: MaybeHexString, + ): Promise { + const delegator = HexString.ensure(delegatorAddress).hex(); + const pool = HexString.ensure(poolAddress).hex(); + IndexerClient.validateAddress(delegator); + IndexerClient.validateAddress(pool); + const graphqlQuery = { + query: GetDelegatedStakingActivities, + variables: { + delegatorAddress: delegator, + poolAddress: pool, + }, + }; + return this.queryIndexer(graphqlQuery); + } + + /** + * Gets the count of token's activities + * + * @param tokenId Token ID + * @returns GetTokenActivitiesCountQuery response type + */ + async getTokenActivitiesCount(tokenId: string): Promise { + const graphqlQuery = { + query: GetTokenActivitiesCount, + variables: { token_id: tokenId }, + }; + return this.queryIndexer(graphqlQuery); + } + + /** + * Queries token data + * + * @param tokenId Token ID + * @returns GetTokenDataQuery response type + */ + async getTokenData(tokenId: string): Promise { + const graphqlQuery = { + query: GetTokenData, + variables: { token_id: tokenId }, + }; + return this.queryIndexer(graphqlQuery); + } + + /** + * Queries token owners data + * + * @param tokenId Token ID + * @param propertyVersion Property version + * @returns GetTokenOwnersDataQuery response type + */ + async getTokenOwnersData(tokenId: string, propertyVersion: number): Promise { + const graphqlQuery = { + query: GetTokenOwnersData, + variables: { token_id: tokenId, property_version: propertyVersion }, + }; + return this.queryIndexer(graphqlQuery); + } + + /** + * Queries top user transactions + * + * @param limit + * @returns GetTopUserTransactionsQuery response type + */ + async getTopUserTransactions(limit: number): Promise { + const graphqlQuery = { + query: GetTopUserTransactions, + variables: { limit }, + }; + return this.queryIndexer(graphqlQuery); + } + + /** + * Queries top user transactions + * + * @returns GetUserTransactionsQuery response type + */ + async getUserTransactions(startVersion?: number, options?: PaginationArgs): Promise { + const graphqlQuery = { + query: GetUserTransactions, + variables: { start_version: startVersion, offset: options?.offset, limit: options?.limit }, + }; + return this.queryIndexer(graphqlQuery); + } + + /** + * Queries current delegator balances count + * + * @returns GetCurrentDelegatorBalancesCountQuery response type + */ + async getCurrentDelegatorBalancesCount(poolAddress: MaybeHexString): Promise { + const address = HexString.ensure(poolAddress).hex(); + IndexerClient.validateAddress(address); + const graphqlQuery = { + query: GetCurrentDelegatorBalancesCount, + variables: { poolAddress: address }, + }; + return this.queryIndexer(graphqlQuery); + } +} diff --git a/m1/JavaScript-client/src/providers/provider.ts b/m1/JavaScript-client/src/providers/provider.ts new file mode 100644 index 00000000..23de2527 --- /dev/null +++ b/m1/JavaScript-client/src/providers/provider.ts @@ -0,0 +1,95 @@ +import { AptosClient } from "./aptos_client"; +import { IndexerClient } from "./indexer"; + +import * as Gen from "../generated/index"; +import { CustomEndpoints, Network, NetworkToIndexerAPI, NetworkToNodeAPI } from "../utils"; + +type NetworkWithCustom = Network | "CUSTOM"; +/** + * Builds a Provider class with an aptos client configured to connect to an Aptos node + * and indexer client 
configured to connect to Aptos Indexer.
+ *
+ * It creates AptosClient and IndexerClient instances based on the network or custom endpoints provided.
+ *
+ * This class holds both AptosClient and IndexerClient classes' methods and properties so we
+ * can instantiate the Provider class and use it to query the full node and/or Indexer.
+ *
+ * @example An example of how to use this class
+ * ```
+ * const provider = new Provider(Network.DEVNET)
+ * const account = await provider.getAccount("0x123");
+ * const accountNFTs = await provider.getAccountNFTs("0x123");
+ * ```
+ *
+ * @param network enum of type Network - MAINNET | TESTNET | DEVNET or custom endpoints of type CustomEndpoints
+ * @param config AptosClient config arg - additional configuration options for the generated Axios client.
+ */
+export class Provider {
+  aptosClient: AptosClient;
+
+  indexerClient: IndexerClient;
+
+  network: NetworkWithCustom;
+
+  constructor(
+    network: Network | CustomEndpoints,
+    config?: Partial<Gen.OpenAPIConfig>,
+    doNotFixNodeUrl: boolean = false,
+  ) {
+    let fullNodeUrl = null;
+    let indexerUrl = null;
+
+    if (typeof network === "object" && isCustomEndpoints(network)) {
+      fullNodeUrl = network.fullnodeUrl;
+      indexerUrl = network.indexerUrl;
+      this.network = "CUSTOM";
+    } else {
+      fullNodeUrl = NetworkToNodeAPI[network];
+      indexerUrl = NetworkToIndexerAPI[network];
+      this.network = network;
+    }
+
+    if (!fullNodeUrl || !indexerUrl) {
+      throw new Error("network is not provided");
+    }
+
+    this.aptosClient = new AptosClient(fullNodeUrl, config, doNotFixNodeUrl);
+    this.indexerClient = new IndexerClient(indexerUrl);
+  }
+}
+
+export interface Provider extends AptosClient, IndexerClient {}
+
+/**
+In TypeScript, we can’t inherit or extend from more than one class;
+Mixins help us to get around that by creating partial classes
+that we can combine to form a single class that contains all the methods and properties from the partial classes.
+{@link https://www.typescriptlang.org/docs/handbook/mixins.html#alternative-pattern}
+
+Here, we combine AptosClient and IndexerClient classes into one Provider class that holds all
+methods and properties from both classes.
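+
+For example (an illustrative sketch only, not additional API surface), after the mixin wiring below a call such as
+
+  const provider = new Provider(Network.TESTNET);
+  await provider.getAccount("0x123");        // delegated to provider.aptosClient.getAccount
+  await provider.getAccountNFTs("0x123");    // delegated to provider.indexerClient.getAccountNFTs
+
+is forwarded through the property descriptors that applyMixin copies onto Provider.prototype.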
+*/ +function applyMixin(targetClass: any, baseClass: any, baseClassProp: string) { + Object.getOwnPropertyNames(baseClass.prototype).forEach((propertyName) => { + const propertyDescriptor = Object.getOwnPropertyDescriptor(baseClass.prototype, propertyName); + if (!propertyDescriptor) return; + // eslint-disable-next-line func-names + propertyDescriptor.value = function (...args: any) { + return (this as any)[baseClassProp][propertyName](...args); + }; + Object.defineProperty(targetClass.prototype, propertyName, propertyDescriptor); + }); +} + +applyMixin(Provider, AptosClient, "aptosClient"); +applyMixin(Provider, IndexerClient, "indexerClient"); + +// use exhaustive type predicates +function isCustomEndpoints(network: CustomEndpoints): network is CustomEndpoints { + return ( + network.fullnodeUrl !== undefined && + typeof network.fullnodeUrl === "string" && + network.indexerUrl !== undefined && + typeof network.indexerUrl === "string" + ); +} diff --git a/m1/JavaScript-client/src/tests/e2e/ans_client.test.ts b/m1/JavaScript-client/src/tests/e2e/ans_client.test.ts new file mode 100644 index 00000000..641058e5 --- /dev/null +++ b/m1/JavaScript-client/src/tests/e2e/ans_client.test.ts @@ -0,0 +1,149 @@ +import { AptosAccount } from "../../account"; +import { AnsClient } from "../../plugins/ans_client"; +import { Provider } from "../../providers"; +import { HexString, Network } from "../../utils"; +import { ANS_OWNER_ADDRESS, ANS_OWNER_PK, getFaucetClient, longTestTimeout, NODE_URL } from "../unit/test_helper.test"; + +const alice = new AptosAccount(); +const ACCOUNT_ADDRESS = alice.address().hex(); +// generate random name so we can run the test against local tesnet without the need to re-run it each time. +// This will produce a string anywhere between zero and 12 characters long, usually 11 characters, only lower-case and numbers +const DOMAIN_NAME = Math.random().toString(36).slice(2); + +describe("ANS", () => { + beforeAll(async () => { + const faucetClient = getFaucetClient(); + await faucetClient.fundAccount(alice.address(), 100_000_000_000); + }, longTestTimeout); + + test("fails to create a new ANS class instance", () => { + const provider = new Provider({ fullnodeUrl: "full-node-url", indexerUrl: "indexer-url" }); + expect(() => new AnsClient(provider)).toThrow("Error: For custom providers, you must pass in a contract address"); + }); + + test("creates a new ANS class instance", () => { + const provider = new Provider({ fullnodeUrl: "full-node-url", indexerUrl: "indexer-url" }); + const ans_client = new AnsClient(provider, ANS_OWNER_ADDRESS); + expect(ans_client).toHaveProperty("contractAddress"); + }); + + test("sets the contract address to be the provided one", () => { + const provider = new Provider({ fullnodeUrl: "full-node-url", indexerUrl: "indexer-url" }); + const ans_client = new AnsClient(provider, ANS_OWNER_ADDRESS); + expect(ans_client.contractAddress).toEqual(ANS_OWNER_ADDRESS); + }); + + test("sets the contract address to be the one that matches the provided node url", () => { + const provider = new Provider(Network.TESTNET); + const ans_client = new AnsClient(provider, ANS_OWNER_ADDRESS); + expect(ans_client.contractAddress).toEqual("0x5f8fd2347449685cf41d4db97926ec3a096eaf381332be4f1318ad4d16a8497c"); + }); + + test( + "init reverse lookup registry for contract admin", + async () => { + const owner = new AptosAccount(new HexString(ANS_OWNER_PK).toUint8Array()); + const provider = new Provider({ fullnodeUrl: NODE_URL, indexerUrl: NODE_URL }); + const ans_client = new 
AnsClient(provider, ANS_OWNER_ADDRESS); + const txnHash = await ans_client.initReverseLookupRegistry(owner); + await provider.waitForTransactionWithResult(txnHash, { checkSuccess: true }); + }, + longTestTimeout, + ); + + test( + "mint name", + async () => { + const provider = new Provider({ fullnodeUrl: NODE_URL, indexerUrl: NODE_URL }); + const ans = new AnsClient(provider, ANS_OWNER_ADDRESS); + + const txnHash = await ans.mintAptosName(alice, DOMAIN_NAME); + await provider.waitForTransactionWithResult(txnHash, { checkSuccess: true }); + }, + longTestTimeout, + ); + + test( + "get name by address", + async () => { + const provider = new Provider({ fullnodeUrl: NODE_URL, indexerUrl: NODE_URL }); + const ans = new AnsClient(provider, ANS_OWNER_ADDRESS); + + const name = await ans.getPrimaryNameByAddress(ACCOUNT_ADDRESS); + expect(name).toEqual(DOMAIN_NAME); + }, + longTestTimeout, + ); + + test( + "get address by name", + async () => { + const provider = new Provider({ fullnodeUrl: NODE_URL, indexerUrl: NODE_URL }); + const ans = new AnsClient(provider, ANS_OWNER_ADDRESS); + + const address = await ans.getAddressByName(DOMAIN_NAME); + expect(address).toEqual(ACCOUNT_ADDRESS); + }, + longTestTimeout, + ); + + test( + "get address by name with .apt", + async () => { + const provider = new Provider({ fullnodeUrl: NODE_URL, indexerUrl: NODE_URL }); + const ans = new AnsClient(provider, ANS_OWNER_ADDRESS); + + const address = await ans.getAddressByName(`${DOMAIN_NAME}.apt`); + expect(address).toEqual(ACCOUNT_ADDRESS); + }, + longTestTimeout, + ); + + test( + "get address by subdomain_name", + async () => { + const provider = new Provider({ fullnodeUrl: NODE_URL, indexerUrl: NODE_URL }); + const ans = new AnsClient(provider, ANS_OWNER_ADDRESS); + + const address = await ans.getAddressByName(`sub.${DOMAIN_NAME}`); + expect(address).toBeNull; + }, + longTestTimeout, + ); + + test( + "get address by subdomain_name with .apt", + async () => { + const provider = new Provider({ fullnodeUrl: NODE_URL, indexerUrl: NODE_URL }); + const ans = new AnsClient(provider, ANS_OWNER_ADDRESS); + + const address = await ans.getAddressByName(`sub.${DOMAIN_NAME}.apt`); + expect(address).toBeNull; + }, + longTestTimeout, + ); + + test( + "returns null for an invalid domain", + async () => { + const provider = new Provider({ fullnodeUrl: NODE_URL, indexerUrl: NODE_URL }); + const ans = new AnsClient(provider, ANS_OWNER_ADDRESS); + + const address = await ans.getAddressByName(`${DOMAIN_NAME}-`); + expect(address).toBeNull; + }, + longTestTimeout, + ); + + test( + "returns null for an invalid subdomain", + async () => { + const provider = new Provider({ fullnodeUrl: NODE_URL, indexerUrl: NODE_URL }); + const ans = new AnsClient(provider, ANS_OWNER_ADDRESS); + + const address = await ans.getAddressByName(`sub.${DOMAIN_NAME}.apt-`); + expect(address).toBeNull; + }, + longTestTimeout, + ); +}); diff --git a/m1/JavaScript-client/src/tests/e2e/aptos_client.test.ts b/m1/JavaScript-client/src/tests/e2e/aptos_client.test.ts new file mode 100644 index 00000000..de0a6e21 --- /dev/null +++ b/m1/JavaScript-client/src/tests/e2e/aptos_client.test.ts @@ -0,0 +1,658 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +import { AptosClient } from "../../providers/aptos_client"; +import * as Gen from "../../generated/index"; +import { AptosAccount } from "../../account/aptos_account"; +import { + TxnBuilderTypes, + TransactionBuilderMultiEd25519, + TransactionBuilderRemoteABI, +} from 
"../../transaction_builder"; +import { AptosToken, TokenClient } from "../../plugins"; +import { HexString } from "../../utils"; +import { getFaucetClient, longTestTimeout, NODE_URL, PROVIDER_LOCAL_NETWORK_CONFIG } from "../unit/test_helper.test"; +import { bcsSerializeUint64, bcsToBytes } from "../../bcs"; +import { AccountAddress, Ed25519PublicKey, stringStructTag, TypeTagStruct } from "../../aptos_types"; +import { Provider } from "../../providers"; +import { BCS } from "../.."; + +const account = "0x1::account::Account"; + +const aptosCoin = "0x1::coin::CoinStore<0x1::aptos_coin::AptosCoin>"; + +const coinTransferFunction = "0x1::coin::transfer"; + +test("node url empty", () => { + expect(() => { + const client = new AptosClient(""); + client.getAccount("0x1"); + }).toThrow("Node URL cannot be empty."); +}); + +test("gets genesis account", async () => { + const client = new AptosClient(NODE_URL); + const genesisAccount = await client.getAccount("0x1"); + expect(genesisAccount.authentication_key.length).toBe(66); + expect(genesisAccount.sequence_number).not.toBeNull(); +}); + +test("gets transactions", async () => { + const client = new AptosClient(NODE_URL); + const transactions = await client.getTransactions(); + expect(transactions.length).toBeGreaterThan(0); +}); + +test("gets genesis resources", async () => { + const client = new AptosClient(NODE_URL); + const resources = await client.getAccountResources("0x1"); + const accountResource = resources.find((r) => r.type === account); + expect(accountResource).toBeDefined(); +}); + +test("gets the Account resource", async () => { + const client = new AptosClient(NODE_URL); + const accountResource = await client.getAccountResource("0x1", account); + expect(accountResource).toBeDefined(); +}); + +test("gets ledger info", async () => { + const client = new AptosClient(NODE_URL); + const ledgerInfo = await client.getLedgerInfo(); + expect(ledgerInfo.chain_id).toBeGreaterThan(1); + expect(parseInt(ledgerInfo.ledger_version, 10)).toBeGreaterThan(0); +}); + +test("gets account modules", async () => { + const client = new AptosClient(NODE_URL); + const modules = await client.getAccountModules("0x1"); + const module = modules.find((r) => r.abi!.name === "aptos_coin"); + expect(module!.abi!.address).toBe("0x1"); +}); + +test("gets the AptosCoin module", async () => { + const client = new AptosClient(NODE_URL); + const module = await client.getAccountModule("0x1", "aptos_coin"); + expect(module!.abi!.address).toBe("0x1"); +}); + +test( + "submits bcs transaction", + async () => { + const client = new AptosClient(NODE_URL); + const faucetClient = getFaucetClient(); + + const account1 = new AptosAccount(); + await faucetClient.fundAccount(account1.address(), 100_000_000); + let resources = await client.getAccountResources(account1.address()); + let accountResource = resources.find((r) => r.type === aptosCoin); + expect((accountResource!.data as any).coin.value).toBe("100000000"); + + const account2 = new AptosAccount(); + await faucetClient.fundAccount(account2.address(), 0); + resources = await client.getAccountResources(account2.address()); + accountResource = resources.find((r) => r.type === aptosCoin); + expect((accountResource!.data as any).coin.value).toBe("0"); + + const token = new TxnBuilderTypes.TypeTagStruct(TxnBuilderTypes.StructTag.fromString("0x1::aptos_coin::AptosCoin")); + + const entryFunctionPayload = new TxnBuilderTypes.TransactionPayloadEntryFunction( + TxnBuilderTypes.EntryFunction.natural( + "0x1::coin", + "transfer", + 
[token], + [bcsToBytes(TxnBuilderTypes.AccountAddress.fromHex(account2.address())), bcsSerializeUint64(717)], + ), + ); + + const rawTxn = await client.generateRawTransaction(account1.address(), entryFunctionPayload); + + const bcsTxn = AptosClient.generateBCSTransaction(account1, rawTxn); + const transactionRes = await client.submitSignedBCSTransaction(bcsTxn); + + await client.waitForTransaction(transactionRes.hash); + + resources = await client.getAccountResources(account2.address()); + accountResource = resources.find((r) => r.type === aptosCoin); + expect((accountResource!.data as any).coin.value).toBe("717"); + }, + longTestTimeout, +); + +test( + "submits generic type bcs transaction", + async () => { + const provider = new Provider(PROVIDER_LOCAL_NETWORK_CONFIG); + const aptosToken = new AptosToken(provider); + const account1 = new AptosAccount(); + const faucetClient = getFaucetClient(); + + await faucetClient.fundAccount(account1.address(), 100_000_000); + let resources = await provider.getAccountResources(account1.address()); + let accountResource = resources.find((r) => r.type === aptosCoin); + expect((accountResource!.data as any).coin.value).toBe("100000000"); + + let tokenAddress = ""; + + await provider.waitForTransaction( + await aptosToken.createCollection(account1, "Collection description", "Collection Name", "https://aptos.dev", 5, { + royaltyNumerator: 10, + royaltyDenominator: 10, + }), + ); + const txn = await provider.waitForTransactionWithResult( + await aptosToken.mint( + account1, + "Collection Name", + "Token Description", + "Token Name", + "https://aptos.dev/img/nyan.jpeg", + ["key"], + ["bool"], + ["true"], + ), + { checkSuccess: true }, + ); + tokenAddress = (txn as Gen.UserTransaction).events[0].data.token; + console.log(tokenAddress); + + const token = new TxnBuilderTypes.TypeTagStruct(TxnBuilderTypes.StructTag.fromString("0x4::token::Token")); + const entryFunctionPayload = new TxnBuilderTypes.TransactionPayloadEntryFunction( + TxnBuilderTypes.EntryFunction.natural( + "0x4::aptos_token", + "add_typed_property", + [token, new TypeTagStruct(stringStructTag)], + [ + BCS.bcsToBytes(AccountAddress.fromHex(tokenAddress)), + BCS.bcsSerializeStr("bcsKey"), + BCS.bcsSerializeStr("bcs value"), + ], + ), + ); + const rawTxn = await provider.generateRawTransaction(account1.address(), entryFunctionPayload); + const bcsTxn = AptosClient.generateBCSTransaction(account1, rawTxn); + const transactionRes = await provider.submitSignedBCSTransaction(bcsTxn); + await provider.waitForTransaction(transactionRes.hash, { checkSuccess: true }); + }, + longTestTimeout, +); + +test( + "submits transaction with remote ABI", + async () => { + const client = new AptosClient(NODE_URL); + const faucetClient = getFaucetClient(); + + const account1 = new AptosAccount(); + await faucetClient.fundAccount(account1.address(), 100_000_000); + let resources = await client.getAccountResources(account1.address()); + let accountResource = resources.find((r) => r.type === aptosCoin); + expect((accountResource!.data as any).coin.value).toBe("100000000"); + + const account2 = new AptosAccount(); + await faucetClient.fundAccount(account2.address(), 0); + resources = await client.getAccountResources(account2.address()); + accountResource = resources.find((r) => r.type === aptosCoin); + expect((accountResource!.data as any).coin.value).toBe("0"); + + const builder = new TransactionBuilderRemoteABI(client, { sender: account1.address() }); + const rawTxn = await builder.build( + "0x1::coin::transfer", + 
["0x1::aptos_coin::AptosCoin"], + [account2.address(), 400], + ); + + const bcsTxn = AptosClient.generateBCSTransaction(account1, rawTxn); + const transactionRes = await client.submitSignedBCSTransaction(bcsTxn); + + await client.waitForTransaction(transactionRes.hash); + + resources = await client.getAccountResources(account2.address()); + accountResource = resources.find((r) => r.type === aptosCoin); + expect((accountResource!.data as any).coin.value).toBe("400"); + }, + longTestTimeout, +); + +test( + "submits multisig transaction simulation", + async () => { + const client = new AptosClient(NODE_URL); + const faucetClient = getFaucetClient(); + + const account1 = new AptosAccount(); + const account2 = new AptosAccount(); + const account3 = new AptosAccount(); + const multiSigPublicKey = new TxnBuilderTypes.MultiEd25519PublicKey( + [ + new TxnBuilderTypes.Ed25519PublicKey(account1.signingKey.publicKey), + new TxnBuilderTypes.Ed25519PublicKey(account2.signingKey.publicKey), + new TxnBuilderTypes.Ed25519PublicKey(account3.signingKey.publicKey), + ], + 2, + ); + + const authKey = TxnBuilderTypes.AuthenticationKey.fromMultiEd25519PublicKey(multiSigPublicKey); + + const mutisigAccountAddress = authKey.derivedAddress(); + await faucetClient.fundAccount(mutisigAccountAddress, 50000000); + + let resources = await client.getAccountResources(mutisigAccountAddress); + let accountResource = resources.find((r) => r.type === aptosCoin); + expect((accountResource!.data as any).coin.value).toBe("50000000"); + + const account4 = new AptosAccount(); + await faucetClient.fundAccount(account4.address(), 0); + resources = await client.getAccountResources(account4.address()); + accountResource = resources.find((r) => r.type === aptosCoin); + expect((accountResource!.data as any).coin.value).toBe("0"); + + const token = new TxnBuilderTypes.TypeTagStruct(TxnBuilderTypes.StructTag.fromString("0x1::aptos_coin::AptosCoin")); + + const entryFunctionPayload = new TxnBuilderTypes.TransactionPayloadEntryFunction( + TxnBuilderTypes.EntryFunction.natural( + "0x1::coin", + "transfer", + [token], + [bcsToBytes(TxnBuilderTypes.AccountAddress.fromHex(account4.address())), bcsSerializeUint64(123)], + ), + ); + + const rawTxn = await client.generateRawTransaction(mutisigAccountAddress, entryFunctionPayload); + + const txnBuilder = new TransactionBuilderMultiEd25519((signingMessage: TxnBuilderTypes.SigningMessage) => { + const sigHexStr1 = account1.signBuffer(signingMessage); + const sigHexStr3 = account3.signBuffer(signingMessage); + const bitmap = TxnBuilderTypes.MultiEd25519Signature.createBitmap([0, 2]); + + const muliEd25519Sig = new TxnBuilderTypes.MultiEd25519Signature( + [ + new TxnBuilderTypes.Ed25519Signature(sigHexStr1.toUint8Array()), + new TxnBuilderTypes.Ed25519Signature(sigHexStr3.toUint8Array()), + ], + bitmap, + ); + + return muliEd25519Sig; + }, multiSigPublicKey); + + // simulate transaction + const [simulateTransactionRes] = await client.simulateTransaction(multiSigPublicKey, rawTxn, { + estimateGasUnitPrice: true, + estimateMaxGasAmount: true, + estimatePrioritizedGasUnitPrice: true, + }); + + expect(parseInt(simulateTransactionRes.gas_used, 10) > 0); + expect(simulateTransactionRes.success); + + const bcsTxn = txnBuilder.sign(rawTxn); + const transactionRes = await client.submitSignedBCSTransaction(bcsTxn); + + await client.waitForTransaction(transactionRes.hash); + + resources = await client.getAccountResources(account4.address()); + accountResource = resources.find((r) => r.type === aptosCoin); + 
expect((accountResource!.data as any).coin.value).toBe("123"); + }, + longTestTimeout, +); + +test( + "submits json transaction simulation", + async () => { + const client = new AptosClient(NODE_URL); + const faucetClient = getFaucetClient(); + + const account1 = new AptosAccount(); + const account2 = new AptosAccount(); + const txns1 = await faucetClient.fundAccount(account1.address(), 1000000); + const txns2 = await faucetClient.fundAccount(account2.address(), 1000000); + const tx1 = await client.getTransactionByHash(txns1[0]); + const tx2 = await client.getTransactionByHash(txns2[0]); + expect(tx1.type).toBe("user_transaction"); + expect(tx2.type).toBe("user_transaction"); + const checkAptosCoin = async () => { + const resources1 = await client.getAccountResources(account1.address()); + const resources2 = await client.getAccountResources(account2.address()); + const account1Resource = resources1.find((r) => r.type === aptosCoin); + const account2Resource = resources2.find((r) => r.type === aptosCoin); + expect((account1Resource!.data as { coin: { value: string } }).coin.value).toBe("1000000"); + expect((account2Resource!.data as { coin: { value: string } }).coin.value).toBe("1000000"); + }; + await checkAptosCoin(); + + const payload: Gen.TransactionPayload = { + type: "entry_function_payload", + function: coinTransferFunction, + type_arguments: ["0x1::aptos_coin::AptosCoin"], + arguments: [account2.address().hex(), 100000], + }; + const txnRequest = await client.generateTransaction(account1.address(), payload); + [account1, new Ed25519PublicKey(account1.pubKey().toUint8Array())].forEach(async (accountOrAddress) => { + const transactionRes = ( + await client.simulateTransaction(accountOrAddress, txnRequest, { + estimateGasUnitPrice: true, + estimateMaxGasAmount: true, + estimatePrioritizedGasUnitPrice: true, + }) + )[0]; + expect(parseInt(transactionRes.gas_used, 10) > 0); + expect(transactionRes.success); + const account2AptosCoin = transactionRes.changes.filter((change) => { + if (change.type !== "write_resource") { + return false; + } + const write = change as Gen.WriteResource; + + return ( + write.address === account2.address().hex() && + write.data.type === aptosCoin && + (write.data.data as { coin: { value: string } }).coin.value === "1100000" + ); + }); + expect(account2AptosCoin).toHaveLength(1); + }); + await checkAptosCoin(); + }, + longTestTimeout, +); + +test( + "submits bcs transaction simulation", + async () => { + const client = new AptosClient(NODE_URL); + const faucetClient = getFaucetClient(); + + const account1 = new AptosAccount(); + const account2 = new AptosAccount(); + const txns1 = await faucetClient.fundAccount(account1.address(), 100_000_000); + const txns2 = await faucetClient.fundAccount(account2.address(), 100_000_000); + const tx1 = await client.getTransactionByHash(txns1[0]); + const tx2 = await client.getTransactionByHash(txns2[0]); + expect(tx1.type).toBe("user_transaction"); + expect(tx2.type).toBe("user_transaction"); + const checkAptosCoin = async () => { + const resources1 = await client.getAccountResources(account1.address()); + const resources2 = await client.getAccountResources(account2.address()); + const account1Resource = resources1.find((r) => r.type === aptosCoin); + const account2Resource = resources2.find((r) => r.type === aptosCoin); + expect((account1Resource!.data as { coin: { value: string } }).coin.value).toBe("100000000"); + expect((account2Resource!.data as { coin: { value: string } }).coin.value).toBe("100000000"); + }; + await 
checkAptosCoin(); + + const token = new TxnBuilderTypes.TypeTagStruct(TxnBuilderTypes.StructTag.fromString("0x1::aptos_coin::AptosCoin")); + const entryFunctionPayload = new TxnBuilderTypes.TransactionPayloadEntryFunction( + TxnBuilderTypes.EntryFunction.natural( + "0x1::coin", + "transfer", + [token], + [bcsToBytes(TxnBuilderTypes.AccountAddress.fromHex(account2.address())), bcsSerializeUint64(1000)], + ), + ); + + const rawTxn = await client.generateRawTransaction(account1.address(), entryFunctionPayload); + + const bcsTxn = AptosClient.generateBCSSimulation(account1, rawTxn); + const transactionRes = (await client.submitBCSSimulation(bcsTxn))[0]; + expect(parseInt(transactionRes.gas_used, 10) > 0); + expect(transactionRes.success); + const account2AptosCoin = transactionRes.changes.filter((change) => { + if (change.type !== "write_resource") { + return false; + } + const write = change as Gen.WriteResource; + + return ( + write.address === account2.address().toShortString() && + write.data.type === aptosCoin && + (write.data.data as { coin: { value: string } }).coin.value === "100001000" + ); + }); + expect(account2AptosCoin).toHaveLength(1); + await checkAptosCoin(); + }, + longTestTimeout, +); + +test( + "submits multiagent transaction", + async () => { + const client = new AptosClient(NODE_URL); + const faucetClient = getFaucetClient(); + const tokenClient = new TokenClient(client); + + const alice = new AptosAccount(); + const bob = new AptosAccount(); + + // Fund both Alice's and Bob's Account + await faucetClient.fundAccount(alice.address(), 100000000); + await faucetClient.fundAccount(bob.address(), 100000000); + + const collectionName = "AliceCollection"; + const tokenName = "Alice Token"; + + async function ensureTxnSuccess(txnHashPromise: Promise) { + const txnHash = await txnHashPromise; + const txn = await client.waitForTransactionWithResult(txnHash); + expect((txn as any)?.success).toBe(true); + } + + // Create collection and token on Alice's account + await ensureTxnSuccess( + tokenClient.createCollection(alice, collectionName, "Alice's simple collection", "https://aptos.dev"), + ); + + await ensureTxnSuccess( + tokenClient.createToken( + alice, + collectionName, + tokenName, + "Alice's simple token", + 1, + "https://aptos.dev/img/nyan.jpeg", + 1000, + alice.address(), + 0, + 0, + ["key"], + ["2"], + ["u64"], + ), + ); + + const propertyVersion = 0; + const tokenId = { + token_data_id: { + creator: alice.address().hex(), + collection: collectionName, + name: tokenName, + }, + property_version: `${propertyVersion}`, + }; + + // Transfer Token from Alice's Account to Bob's Account + await tokenClient.getCollectionData(alice.address().hex(), collectionName); + let aliceBalance = await tokenClient.getTokenForAccount(alice.address().hex(), tokenId); + expect(aliceBalance.amount).toBe("1"); + + const txnHash = await tokenClient.directTransferToken( + alice, + bob, + alice.address(), + collectionName, + tokenName, + 1, + propertyVersion, + ); + + await client.waitForTransaction(txnHash, { checkSuccess: true }); + + aliceBalance = await tokenClient.getTokenForAccount(alice.address().hex(), tokenId); + expect(aliceBalance.amount).toBe("0"); + + const bobBalance = await tokenClient.getTokenForAccount(bob.address().hex(), tokenId); + expect(bobBalance.amount).toBe("1"); + }, + longTestTimeout, +); + +test( + "publishes a package", + async () => { + const client = new AptosClient(NODE_URL); + const faucetClient = getFaucetClient(); + + const account1 = new AptosAccount( + new 
HexString("0x883fdd67576e5fdceb370ba665b8af8856d0cae63fd808b8d16077c6b008ea8c").toUint8Array(), + ); + await faucetClient.fundAccount(account1.address(), 100_000_000); + + const txnHash = await client.publishPackage( + account1, + new HexString( + // eslint-disable-next-line max-len + "084578616d706c657301000000000000000040314137344146383742383132393043323533323938383036373846304137444637393737373637383734334431434345443230413446354345334238464446388f011f8b08000000000002ff3dccc10ac2300c06e07b9ea2f46ee70b78f0a02f31c6886d74a55d5b1a51417c7713c1915c927cf9c7863ee18d2628b89239187b7ae1da32b18507758eb5e872efa42cc088217462269e60a19ceb7cc9d527bf60fcb9594da0462550f151d9b1dd2b9fbba43f6b4f82de465e302b776e90befe8f03aadd6db3351ff802f2294cdfa100000001076d6573736167658f051f8b08000000000002ff95555d6bdb30147dcfafb8ed20d8c52c1d8c3194a6ec83b0be747be8dec6108a7d9d983a5226c94dc3f07f9f2cd98e6cc769e787884857e79e73bfb4154991236c30cf055de5227e8c372ce3846c5129b646f83b01f3150a41e984109452c879774f656b8e834d2d33be3e6eb29d168aa6926d712fe423212c8e45c175dfc27979c2ea64329b910b722b518942c6682d0d6e116bb877f4ee449ea0840d53f088879a6cf5d5f409381e843cd835ea1b5023979bc57a5404ec4ac8b25aee184f72bca95d7db586f6e0d6c19486df8d21d8f23b41d0bb65592652ec226323247a6c5329b6f445ca5a9cb7291d81d96c063f37681c640ab86894c2cef03434ac4d2cb8d2b0fcfe83de2f1f1e3e7f5b12283ebc87055ccf1dc8ae58e5590c69c1618dbaf11bb0249104aa5fb311f669008bff149939eaa5e728942985525f04f89c29ad6e3a66b7163d8cc0d618215c689a9a1249028f6718ce5bb0abe94a18d33d5de762c5f293686f6be67e806a6d2616f260152a5fa12b4b23cd5675345649a1857a51708e1a6a485a11322176c0a6015c14a94883696de289cb52082e46c2e4e185a1e7ccd6b57842aa450b198d52eb75423476d06f91264284e3de6dd2cd68a71ca575f1cbb0fd5b02e60a7bc4aab819c24d5ae8c6b15f4027e5745be8b3d1990f40fd56337057d3a197a666ba97ebc980db4c3bd5c1d47887f1ebddb845a726c230193ebd6146fc09108bdde174eeca9eec718a260003ada5df2a6f7e6954ba89a931ff74fdfc2efc3dd646dc60d398717aa6a3c25736cdff34cbd8e342482c9169a44d51a44252a7a85b1d27f846d0767ca1d38fc1eaf2ae7a2423f8d2be9297d5341acc362ff6fdd119c262f10a543f9ddeec6b776be2e5a49cfc0325c63f11c007000000000300000000000000000000000000000000000000000000000000000000000000010e4170746f734672616d65776f726b00000000000000000000000000000000000000000000000000000000000000010b4170746f735374646c696200000000000000000000000000000000000000000000000000000000000000010a4d6f76655374646c696200", + ).toUint8Array(), + [ + new TxnBuilderTypes.Module( + new HexString( + // eslint-disable-next-line max-len + 
"a11ceb0b050000000c01000c020c12031e20043e04054228076ad50108bf024006ff020a108903450ace03150ce3035f0dc20404000001010102010301040105000606000007080005080700030e040106010009000100000a020300020f0404000410060000011106080106031209030106040705070105010802020c08020001030305080207080101060c010800010b0301090002070b030109000900076d657373616765076163636f756e74056572726f72056576656e74067369676e657206737472696e67124d6573736167654368616e67654576656e740d4d657373616765486f6c64657206537472696e670b6765745f6d6573736167650b7365745f6d6573736167650c66726f6d5f6d6573736167650a746f5f6d657373616765156d6573736167655f6368616e67655f6576656e74730b4576656e7448616e646c65096e6f745f666f756e640a616464726573735f6f66106e65775f6576656e745f68616e646c650a656d69745f6576656e746766284c3984add58e00d20ba272a40d33d5b4ea33c08a904254e28fdff97b9f000000000000000000000000000000000000000000000000000000000000000103080000000000000000126170746f733a3a6d657461646174615f7630310100000000000000000b454e4f5f4d4553534147451b5468657265206973206e6f206d6573736167652070726573656e740002020b08020c08020102020008020d0b030108000001000101030b0a002901030607001102270b002b0110001402010104010105210e0011030c020a022901200308050f0e000b010e00380012012d0105200b022a010c040a041000140c030a040f010b030a01120038010b010b040f0015020100010100", + ).toUint8Array(), + ), + ], + ); + + await client.waitForTransaction(txnHash); + + const txn = await client.getTransactionByHash(txnHash); + expect((txn as any).success).toBeTruthy(); + }, + longTestTimeout, +); + +test( + "rotates auth key ed25519", + async () => { + const client = new AptosClient(NODE_URL); + const faucetClient = getFaucetClient(); + + const alice = new AptosAccount(); + await faucetClient.fundAccount(alice.address(), 100_000_000); + + const helperAccount = new AptosAccount(); + + const pendingTxn = await client.rotateAuthKeyEd25519(alice, helperAccount.signingKey.secretKey); + + await client.waitForTransaction(pendingTxn.hash); + + const origAddressHex = await client.lookupOriginalAddress(helperAccount.address()); + // Sometimes the returned addresses do not have leading 0s. 
To be safe, converting hex addresses to AccountAddress + const origAddress = TxnBuilderTypes.AccountAddress.fromHex(origAddressHex); + const aliceAddress = TxnBuilderTypes.AccountAddress.fromHex(alice.address()); + + expect(HexString.fromUint8Array(bcsToBytes(origAddress)).hex()).toBe( + HexString.fromUint8Array(bcsToBytes(aliceAddress)).hex(), + ); + }, + longTestTimeout, +); + +test( + "gets block by height", + async () => { + const blockHeight = 100; + const client = new AptosClient(NODE_URL); + const block = await client.getBlockByHeight(blockHeight); + expect(block.block_height).toBe(blockHeight.toString()); + }, + longTestTimeout, +); + +test( + "gets block by version", + async () => { + const version = 100; + const client = new AptosClient(NODE_URL); + const block = await client.getBlockByVersion(version); + expect(parseInt(block.first_version, 10)).toBeLessThanOrEqual(version); + expect(parseInt(block.last_version, 10)).toBeGreaterThanOrEqual(version); + }, + longTestTimeout, +); + +test( + "estimates max gas amount", + async () => { + const client = new AptosClient(NODE_URL); + const faucetClient = getFaucetClient(); + + const alice = new AptosAccount(); + await faucetClient.fundAccount(alice.address(), 10000000); + + const maxGasAmount = await client.estimateMaxGasAmount(alice.address()); + + expect(maxGasAmount).toBeGreaterThan(BigInt(0)); + }, + longTestTimeout, +); + +test( + "view function", + async () => { + const client = new AptosClient(NODE_URL); + const faucetClient = getFaucetClient(); + + const alice = new AptosAccount(); + await faucetClient.fundAccount(alice.address(), 100_000_000); + + const payload: Gen.ViewRequest = { + function: "0x1::coin::balance", + type_arguments: ["0x1::aptos_coin::AptosCoin"], + arguments: [alice.address().hex()], + }; + + const balance = await client.view(payload); + + expect(balance[0]).toBe("100000000"); + }, + longTestTimeout, +); + +test( + "view function with a struct return type", + async () => { + // This test is just to show that the view function supports a struct return type. + // We test against get_collection_mutability_config although + // b/c at the time of writing this is the only view function on the move side + // that can easily test a struct return type. 
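+    // (Clarifying note: a struct returned from a view call is rendered by the node as a JSON object
+    // keyed by field name, which is why the assertion below matches against { description, maximum, uri }.)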
+ + const client = new AptosClient(NODE_URL); + const faucetClient = getFaucetClient(); + const tokenClient = new TokenClient(client); + + const alice = new AptosAccount(); + // Fund Alice's Account + await faucetClient.fundAccount(alice.address(), 100000000); + + const collectionName = "AliceCollection"; + + // Create collection on Alice's account + await client.waitForTransaction( + await tokenClient.createCollection(alice, collectionName, "Alice's simple collection", "https://aptos.dev"), + { checkSuccess: true }, + ); + + const payload: Gen.ViewRequest = { + function: "0x3::token::get_collection_mutability_config", + type_arguments: [], + arguments: [alice.address().hex(), collectionName], + }; + + const collection = await client.view(payload); + expect(collection[0]).toMatchObject({ description: false, maximum: false, uri: false }); + }, + longTestTimeout, +); diff --git a/m1/JavaScript-client/src/tests/e2e/aptos_token.test.ts b/m1/JavaScript-client/src/tests/e2e/aptos_token.test.ts new file mode 100644 index 00000000..525841a1 --- /dev/null +++ b/m1/JavaScript-client/src/tests/e2e/aptos_token.test.ts @@ -0,0 +1,203 @@ +import { AptosAccount } from "../../account"; +import { UserTransaction } from "../../generated"; +import { AptosToken } from "../../plugins"; +import { Provider } from "../../providers"; +import { PROVIDER_LOCAL_NETWORK_CONFIG, getFaucetClient, longTestTimeout } from "../unit/test_helper.test"; + +const provider = new Provider(PROVIDER_LOCAL_NETWORK_CONFIG); +const faucetClient = getFaucetClient(); +const aptosToken = new AptosToken(provider); + +const alice = new AptosAccount(); +const bob = new AptosAccount(); + +const collectionName = "AliceCollection"; +const tokenName = "Alice Token"; +let tokenAddress = ""; + +describe("token objects", () => { + beforeAll(async () => { + // Fund Alice's Account + await faucetClient.fundAccount(alice.address(), 100000000); + }, longTestTimeout); + + test( + "create collection", + async () => { + await provider.waitForTransaction( + await aptosToken.createCollection(alice, "Alice's simple collection", collectionName, "https://aptos.dev", 5, { + royaltyNumerator: 10, + royaltyDenominator: 10, + }), + { checkSuccess: true }, + ); + }, + longTestTimeout, + ); + + test( + "mint", + async () => { + const txn = await provider.waitForTransactionWithResult( + await aptosToken.mint( + alice, + collectionName, + "Alice's simple token", + tokenName, + "https://aptos.dev/img/nyan.jpeg", + ["key"], + ["bool"], + ["true"], + ), + { checkSuccess: true }, + ); + tokenAddress = (txn as UserTransaction).events[0].data.token; + }, + longTestTimeout, + ); + + test( + "mint soul bound", + async () => { + await provider.waitForTransaction( + await aptosToken.mintSoulBound( + alice, + collectionName, + "Alice's simple soul bound token", + "Alice's soul bound token", + "https://aptos.dev/img/nyan.jpeg", + bob, + ["key"], + ["bool"], + ["true"], + ), + { checkSuccess: true }, + ); + }, + longTestTimeout, + ); + + test( + "freeze transfer", + async () => { + await provider.waitForTransaction(await aptosToken.freezeTokenTransafer(alice, tokenAddress), { + checkSuccess: true, + }); + }, + longTestTimeout, + ); + + test( + "unfreeze token transfer", + async () => { + await provider.waitForTransaction(await aptosToken.unfreezeTokenTransafer(alice, tokenAddress), { + checkSuccess: true, + }); + }, + longTestTimeout, + ); + + test( + "set token description", + async () => { + await provider.waitForTransaction( + await aptosToken.setTokenDescription(alice, 
tokenAddress, "my updated token description"), + { checkSuccess: true }, + ); + }, + longTestTimeout, + ); + + test( + "set token name", + async () => { + await provider.waitForTransaction(await aptosToken.setTokenName(alice, tokenAddress, "my updated token name"), { + checkSuccess: true, + }); + }, + longTestTimeout, + ); + + test( + "set token uri", + async () => { + await provider.waitForTransaction( + await aptosToken.setTokenName(alice, tokenAddress, "https://aptos.dev/img/hero.jpg"), + { checkSuccess: true }, + ); + }, + longTestTimeout, + ); + + test( + "add token property", + async () => { + await provider.waitForTransaction( + await aptosToken.addTokenProperty(alice, tokenAddress, "newKey", "BOOLEAN", "true"), + { checkSuccess: true }, + ); + }, + longTestTimeout, + ); + + test( + "add typed property", + async () => { + await provider.waitForTransaction( + await aptosToken.addTypedProperty(alice, tokenAddress, "newTypedKey", "VECTOR", "[hello,world]"), + { checkSuccess: true }, + ); + }, + longTestTimeout, + ); + + test( + "update typed property", + async () => { + await provider.waitForTransaction( + await aptosToken.updateTypedProperty(alice, tokenAddress, "newTypedKey", "U8", "2"), + { checkSuccess: true }, + ); + }, + longTestTimeout, + ); + + test( + "update token property", + async () => { + await provider.waitForTransaction( + await aptosToken.updateTokenProperty(alice, tokenAddress, "newKey", "U8", "5"), + { checkSuccess: true }, + ); + }, + longTestTimeout, + ); + + test( + "remove token property", + async () => { + await provider.waitForTransaction(await aptosToken.removeTokenProperty(alice, tokenAddress, "newKey"), { + checkSuccess: true, + }); + }, + longTestTimeout, + ); + + test( + "transfer token ownership", + async () => { + await provider.waitForTransaction(await aptosToken.transferTokenOwnership(alice, tokenAddress, bob.address()), { + checkSuccess: true, + }); + }, + longTestTimeout, + ); + + test( + "burn token", + async () => { + await provider.waitForTransaction(await aptosToken.burnToken(alice, tokenAddress), { checkSuccess: true }); + }, + longTestTimeout, + ); +}); diff --git a/m1/JavaScript-client/src/tests/e2e/coin_client.test.ts b/m1/JavaScript-client/src/tests/e2e/coin_client.test.ts new file mode 100644 index 00000000..ba489bec --- /dev/null +++ b/m1/JavaScript-client/src/tests/e2e/coin_client.test.ts @@ -0,0 +1,35 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +import { AptosClient } from "../../providers/aptos_client"; +import { getFaucetClient, longTestTimeout, NODE_URL } from "../unit/test_helper.test"; +import { AptosAccount } from "../../account/aptos_account"; +import { CoinClient } from "../../plugins/coin_client"; + +test( + "transfer and checkBalance works", + async () => { + const client = new AptosClient(NODE_URL); + const faucetClient = getFaucetClient(); + const coinClient = new CoinClient(client); + + const alice = new AptosAccount(); + const bob = new AptosAccount(); + await faucetClient.fundAccount(alice.address(), 100_000_000); + await faucetClient.fundAccount(bob.address(), 0); + + await client.waitForTransaction(await coinClient.transfer(alice, bob, 42), { checkSuccess: true }); + + expect(await coinClient.checkBalance(bob)).toBe(BigInt(42)); + + // Test that `createReceiverIfMissing` works. 
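+    // (Added note: with createReceiverIfMissing set, the transfer is expected to be routed through
+    // 0x1::aptos_account::transfer, which creates the receiving account and its CoinStore on the fly,
+    // so jemima does not need to be created or funded beforehand.)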
+ const jemima = new AptosAccount(); + await client.waitForTransaction(await coinClient.transfer(alice, jemima, 717, { createReceiverIfMissing: true }), { + checkSuccess: true, + }); + + // Check that using a string address instead of an account works with `checkBalance`. + expect(await coinClient.checkBalance(jemima.address().hex())).toBe(BigInt(717)); + }, + longTestTimeout, +); diff --git a/m1/JavaScript-client/src/tests/e2e/faucet_client.test.ts b/m1/JavaScript-client/src/tests/e2e/faucet_client.test.ts new file mode 100644 index 00000000..d4dd6e20 --- /dev/null +++ b/m1/JavaScript-client/src/tests/e2e/faucet_client.test.ts @@ -0,0 +1,78 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +import { AptosClient } from "../../providers"; +import { FaucetClient } from "../../plugins"; +import { AptosAccount } from "../../account"; +import { HexString } from "../../utils"; +import * as Gen from "../../generated/index"; + +import { NODE_URL, getFaucetClient, longTestTimeout } from "../unit/test_helper.test"; + +const aptosCoin = "0x1::coin::CoinStore<0x1::aptos_coin::AptosCoin>"; + +test("faucet url empty", () => { + expect(() => { + const faucetClient = new FaucetClient("http://localhost:8080", ""); + faucetClient.getAccount("0x1"); + }).toThrow("Faucet URL cannot be empty."); +}); + +test( + "full tutorial faucet flow", + async () => { + const client = new AptosClient(NODE_URL); + const faucetClient = getFaucetClient(); + + const account1 = new AptosAccount(); + const txns = await faucetClient.fundAccount(account1.address(), 10000000); + const tx0 = await client.getTransactionByHash(txns[0]); + expect(tx0.type).toBe("user_transaction"); + let resources = await client.getAccountResources(account1.address()); + let accountResource = resources.find((r) => r.type === aptosCoin); + expect((accountResource!.data as { coin: { value: string } }).coin.value).toBe("10000000"); + + const account2 = new AptosAccount(); + await faucetClient.fundAccount(account2.address(), 0); + resources = await client.getAccountResources(account2.address()); + accountResource = resources.find((r) => r.type === aptosCoin); + expect((accountResource!.data as { coin: { value: string } }).coin.value).toBe("0"); + + const payload: Gen.TransactionPayload_EntryFunctionPayload = { + type: "entry_function_payload", + function: "0x1::coin::transfer", + type_arguments: ["0x1::aptos_coin::AptosCoin"], + arguments: [account2.address().hex(), 717], + }; + + const txnRequest = await client.generateTransaction(account1.address(), payload, { max_gas_amount: "2000" }); + const signedTxn = await client.signTransaction(account1, txnRequest); + const transactionRes = await client.submitTransaction(signedTxn); + const txn = await client.waitForTransactionWithResult(transactionRes.hash); + expect((txn as any)?.success).toBe(true); + + resources = await client.getAccountResources(account2.address()); + accountResource = resources.find((r) => r.type === aptosCoin); + expect((accountResource!.data as { coin: { value: string } }).coin.value).toBe("717"); + + const res = await client.getAccountTransactions(account1.address(), { start: BigInt(0) }); + const tx = res.find((e) => e.type === "user_transaction") as Gen.UserTransaction; + expect(new HexString(tx.sender).toShortString()).toBe(account1.address().toShortString()); + + const events = await client.getEventsByEventHandle(tx.sender, aptosCoin, "withdraw_events"); + expect(events[0].type).toBe("0x1::coin::WithdrawEvent"); + + const eventSubset = await 
client.getEventsByEventHandle(tx.sender, aptosCoin, "withdraw_events", { + start: BigInt(0), + limit: 1, + }); + expect(eventSubset[0].type).toBe("0x1::coin::WithdrawEvent"); + + const events2 = await client.getEventsByCreationNumber( + events[0].guid.account_address, + events[0].guid.creation_number, + ); + expect(events2[0].type).toBe("0x1::coin::WithdrawEvent"); + }, + longTestTimeout, +); diff --git a/m1/JavaScript-client/src/tests/e2e/indexer.test.ts b/m1/JavaScript-client/src/tests/e2e/indexer.test.ts new file mode 100644 index 00000000..d62a55bd --- /dev/null +++ b/m1/JavaScript-client/src/tests/e2e/indexer.test.ts @@ -0,0 +1,195 @@ +import { AptosAccount } from "../../account/aptos_account"; +import { AptosClient } from "../../providers/aptos_client"; +import { bcsSerializeBool } from "../../bcs"; +import { FaucetClient } from "../../plugins/faucet_client"; +import { IndexerClient } from "../../providers/indexer"; +import { TokenClient } from "../../plugins/token_client"; +import { FAUCET_AUTH_TOKEN, longTestTimeout } from "../unit/test_helper.test"; +import { Network, NetworkToIndexerAPI, NetworkToNodeAPI, sleep } from "../../utils"; + +const aptosClient = new AptosClient(NetworkToNodeAPI[Network.TESTNET]); +const faucetClient = new FaucetClient( + "https://fullnode.testnet.aptoslabs.com", + "https://faucet.testnet.aptoslabs.com", + { TOKEN: FAUCET_AUTH_TOKEN }, +); +const tokenClient = new TokenClient(aptosClient); +const alice = new AptosAccount(); +const collectionName = "AliceCollection"; +const tokenName = "Alice Token"; +const indexerClient = new IndexerClient(NetworkToIndexerAPI[Network.TESTNET]); + +describe("Indexer", () => { + it("should throw an error when account address is not valid", async () => { + expect(async () => { + await indexerClient.getAccountNFTs("702ca08576f66393140967fef983bb6bf160dafeb73de9c4ddac4d2dc"); + }).rejects.toThrow("Address needs to be 66 chars long."); + + expect(async () => { + await indexerClient.getAccountNFTs("0x702ca08576f66393140967fef983bb6bf160dafeb73de9c4ddac4d2dc"); + }).rejects.toThrow("Address needs to be 66 chars long."); + }); + + it("should not throw an error when account address is missing 0x", async () => { + expect(async () => { + await indexerClient.getAccountNFTs("790a34c702ca08576f66393140967fef983bb6bf160dafeb73de9c4ddac4d2dc"); + }).not.toThrow("Address needs to be 66 chars long."); + }); + + beforeAll(async () => { + await faucetClient.fundAccount(alice.address(), 100000000); + // Create collection and token on Alice's account + await aptosClient.waitForTransaction( + await tokenClient.createCollection(alice, collectionName, "Alice's simple collection", "https://aptos.dev"), + { checkSuccess: true }, + ); + await aptosClient.waitForTransaction( + await tokenClient.createTokenWithMutabilityConfig( + alice, + collectionName, + tokenName, + "Alice's simple token", + 1, + "https://aptos.dev/img/nyan.jpeg", + 1000, + alice.address(), + 1, + 0, + ["TOKEN_BURNABLE_BY_OWNER"], + [bcsSerializeBool(true)], + ["bool"], + [false, false, false, false, true], + ), + { checkSuccess: true }, + ); + }, longTestTimeout); + + describe("get data", () => { + jest.retryTimes(5); + beforeEach(async () => { + await sleep(1000); + }); + it( + "gets account NFTs", + async () => { + const accountNFTs = await indexerClient.getAccountNFTs(alice.address().hex()); + expect(accountNFTs.current_token_ownerships).toHaveLength(1); + expect(accountNFTs.current_token_ownerships[0]).toHaveProperty("current_token_data"); + 
expect(accountNFTs.current_token_ownerships[0]).toHaveProperty("current_collection_data"); + expect(accountNFTs.current_token_ownerships[0].current_token_data?.name).toBe("Alice Token"); + }, + longTestTimeout, + ); + + it( + "gets token activities", + async () => { + const accountNFTs = await indexerClient.getAccountNFTs(alice.address().hex()); + const tokenActivity = await indexerClient.getTokenActivities( + accountNFTs.current_token_ownerships[0].current_token_data!.token_data_id_hash, + ); + expect(tokenActivity.token_activities).toHaveLength(2); + expect(tokenActivity.token_activities[0]).toHaveProperty("from_address"); + expect(tokenActivity.token_activities[0]).toHaveProperty("to_address"); + }, + longTestTimeout, + ); + + it( + "gets account coin data", + async () => { + const accountCoinData = await indexerClient.getAccountCoinsData(alice.address().hex()); + expect(accountCoinData.current_coin_balances[0].coin_type).toEqual("0x1::aptos_coin::AptosCoin"); + }, + longTestTimeout, + ); + + it( + "gets account token count", + async () => { + const accountTokenCount = await indexerClient.getAccountTokensCount(alice.address().hex()); + expect(accountTokenCount.current_token_ownerships_aggregate.aggregate?.count).toEqual(1); + }, + longTestTimeout, + ); + + it( + "gets account transactions count", + async () => { + const accountTransactionsCount = await indexerClient.getAccountTransactionsCount(alice.address().hex()); + expect(accountTransactionsCount.move_resources_aggregate.aggregate?.count).toEqual(3); + }, + longTestTimeout, + ); + + it( + "gets account transactions data", + async () => { + const accountTransactionsData = await indexerClient.getAccountTransactionsData(alice.address().hex()); + expect(accountTransactionsData.move_resources[0]).toHaveProperty("transaction_version"); + }, + longTestTimeout, + ); + + it( + "gets token activities count", + async () => { + const accountNFTs = await indexerClient.getAccountNFTs(alice.address().hex()); + const tokenActivitiesCount = await indexerClient.getTokenActivitiesCount( + accountNFTs.current_token_ownerships[0].current_token_data!.token_data_id_hash, + ); + expect(tokenActivitiesCount.token_activities_aggregate.aggregate?.count).toBe(2); + }, + longTestTimeout, + ); + + it( + "gets token data", + async () => { + const accountNFTs = await indexerClient.getAccountNFTs(alice.address().hex()); + const tokenData = await indexerClient.getTokenData( + accountNFTs.current_token_ownerships[0].current_token_data!.token_data_id_hash, + ); + expect(tokenData.current_token_datas[0].name).toEqual("Alice Token"); + }, + longTestTimeout, + ); + + it( + "gets token owners data", + async () => { + const accountNFTs = await indexerClient.getAccountNFTs(alice.address().hex()); + const tokenOwnersData = await indexerClient.getTokenOwnersData( + accountNFTs.current_token_ownerships[0].current_token_data!.token_data_id_hash, + 0, + ); + expect(tokenOwnersData.current_token_ownerships[0].owner_address).toEqual(alice.address().hex()); + }, + longTestTimeout, + ); + + it( + "gets top user transactions", + async () => { + const topUserTransactions = await indexerClient.getTopUserTransactions(5); + expect(topUserTransactions.user_transactions.length).toEqual(5); + }, + longTestTimeout, + ); + + it( + "gets user transactions", + async () => { + const userTransactions = await indexerClient.getUserTransactions(482294669, { limit: 4 }); + expect(userTransactions.user_transactions[0].version).toEqual(482294669); + 
expect(userTransactions.user_transactions.length).toEqual(4); + }, + longTestTimeout, + ); + + test("gets indexer ledger info", async () => { + const ledgerInfo = await indexerClient.getIndexerLedgerInfo(); + expect(ledgerInfo.ledger_infos[0].chain_id).toBeGreaterThan(1); + }); + }); +}); diff --git a/m1/JavaScript-client/src/tests/e2e/provider.test.ts b/m1/JavaScript-client/src/tests/e2e/provider.test.ts new file mode 100644 index 00000000..d9667bfb --- /dev/null +++ b/m1/JavaScript-client/src/tests/e2e/provider.test.ts @@ -0,0 +1,99 @@ +import { AptosAccount } from "../../account/aptos_account"; +import { AptosClient } from "../../providers/aptos_client"; +import { bcsSerializeBool } from "../../bcs"; +import { Provider } from "../../providers/provider"; +import { FaucetClient } from "../../plugins/faucet_client"; +import { TokenClient } from "../../plugins/token_client"; +import { Network, NetworkToIndexerAPI, NetworkToNodeAPI, sleep } from "../../utils"; +import { FAUCET_AUTH_TOKEN, longTestTimeout } from "../unit/test_helper.test"; + +describe("Provider", () => { + const faucetClient = new FaucetClient( + "https://fullnode.testnet.aptoslabs.com", + "https://faucet.testnet.aptoslabs.com", + { TOKEN: FAUCET_AUTH_TOKEN }, + ); + const alice = new AptosAccount(); + + it("uses provided network as API", async () => { + const provider = new Provider(Network.TESTNET); + expect(provider.aptosClient.nodeUrl).toBe(NetworkToNodeAPI[Network.TESTNET]); + expect(provider.indexerClient.endpoint).toBe(NetworkToIndexerAPI[Network.TESTNET]); + }); + + it("uses custom endpoints as API", async () => { + const provider = new Provider({ fullnodeUrl: "full-node-url", indexerUrl: "indexer-url" }); + expect(provider.aptosClient.nodeUrl).toBe("full-node-url/v1"); + expect(provider.indexerClient.endpoint).toBe("indexer-url"); + }); + + it("throws error when endpoint not provided", async () => { + expect(() => { + new Provider({ fullnodeUrl: "", indexerUrl: "" }); + }).toThrow("network is not provided"); + }); + + describe("requests", () => { + beforeAll(async () => { + await faucetClient.fundAccount(alice.address(), 100000000); + }); + + describe("query full node", () => { + it("gets genesis account from fullnode", async () => { + const provider = new Provider(Network.TESTNET); + const genesisAccount = await provider.getAccount("0x1"); + expect(genesisAccount.authentication_key.length).toBe(66); + expect(genesisAccount.sequence_number).not.toBeNull(); + }); + }); + + describe("query indexer", () => { + const aptosClient = new AptosClient("https://fullnode.testnet.aptoslabs.com"); + const tokenClient = new TokenClient(aptosClient); + const collectionName = "AliceCollection"; + const tokenName = "Alice Token"; + + beforeAll(async () => { + // Create collection and token on Alice's account + await aptosClient.waitForTransaction( + await tokenClient.createCollection(alice, collectionName, "Alice's simple collection", "https://aptos.dev"), + { checkSuccess: true }, + ); + + await aptosClient.waitForTransaction( + await tokenClient.createTokenWithMutabilityConfig( + alice, + collectionName, + tokenName, + "Alice's simple token", + 1, + "https://aptos.dev/img/nyan.jpeg", + 1000, + alice.address(), + 1, + 0, + ["TOKEN_BURNABLE_BY_OWNER"], + [bcsSerializeBool(true)], + ["bool"], + [false, false, false, false, true], + ), + { checkSuccess: true }, + ); + }, longTestTimeout); + + jest.retryTimes(5); + beforeEach(async () => { + await sleep(1000); + }); + + it("gets account NFTs from indexer", async () => { + let provider = 
new Provider(Network.TESTNET); + const accountNFTs = await provider.getAccountNFTs(alice.address().hex(), { limit: 20, offset: 0 }); + expect(accountNFTs.current_token_ownerships).toHaveLength(1); + expect(accountNFTs.current_token_ownerships[0]).toHaveProperty("current_token_data"); + expect(accountNFTs.current_token_ownerships[0]).toHaveProperty("current_collection_data"); + expect(accountNFTs.current_token_ownerships[0].current_token_data?.name).toBe("Alice Token"); + }); + }); + }); +}); diff --git a/m1/JavaScript-client/src/tests/e2e/token_client.test.ts b/m1/JavaScript-client/src/tests/e2e/token_client.test.ts new file mode 100644 index 00000000..930b0cc2 --- /dev/null +++ b/m1/JavaScript-client/src/tests/e2e/token_client.test.ts @@ -0,0 +1,153 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +import { AptosAccount } from "../../account/aptos_account"; +import { AptosClient } from "../../providers/aptos_client"; +import { TokenClient } from "../../plugins/token_client"; + +import { getFaucetClient, longTestTimeout, NODE_URL } from "../unit/test_helper.test"; +import { bcsSerializeBool } from "../../bcs"; + +test( + "full tutorial nft token flow", + async () => { + const client = new AptosClient(NODE_URL); + const faucetClient = getFaucetClient(); + const tokenClient = new TokenClient(client); + + const alice = new AptosAccount(); + const bob = new AptosAccount(); + + // Fund both Alice's and Bob's Account + await faucetClient.fundAccount(alice.address(), 100000000); + await faucetClient.fundAccount(bob.address(), 100000000); + + const collectionName = "AliceCollection"; + const tokenName = "Alice Token"; + + // Create collection and token on Alice's account + await client.waitForTransaction( + await tokenClient.createCollection(alice, collectionName, "Alice's simple collection", "https://aptos.dev"), + { checkSuccess: true }, + ); + + await client.waitForTransaction( + await tokenClient.createTokenWithMutabilityConfig( + alice, + collectionName, + tokenName, + "Alice's simple token", + 2, + "https://aptos.dev/img/nyan.jpeg", + 1000, + alice.address(), + 1, + 0, + ["TOKEN_BURNABLE_BY_OWNER"], + [bcsSerializeBool(true)], + ["bool"], + [false, false, false, false, true], + ), + { checkSuccess: true }, + ); + + const tokenId = { + token_data_id: { + creator: alice.address().hex(), + collection: collectionName, + name: tokenName, + }, + property_version: "0", + }; + + // Transfer Token from Alice's Account to Bob's Account + await tokenClient.getCollectionData(alice.address().hex(), collectionName); + let aliceBalance = await tokenClient.getTokenForAccount(alice.address().hex(), tokenId); + expect(aliceBalance.amount).toBe("2"); + const tokenData = await tokenClient.getTokenData(alice.address().hex(), collectionName, tokenName); + expect(tokenData.name).toBe(tokenName); + + await client.waitForTransaction( + await tokenClient.offerToken(alice, bob.address().hex(), alice.address().hex(), collectionName, tokenName, 1), + { checkSuccess: true }, + ); + aliceBalance = await tokenClient.getTokenForAccount(alice.address().hex(), tokenId); + expect(aliceBalance.amount).toBe("1"); + + await client.waitForTransaction( + await tokenClient.cancelTokenOffer(alice, bob.address().hex(), alice.address().hex(), collectionName, tokenName), + { checkSuccess: true }, + ); + aliceBalance = await tokenClient.getTokenForAccount(alice.address().hex(), tokenId); + expect(aliceBalance.amount).toBe("2"); + + await client.waitForTransaction( + await tokenClient.offerToken(alice, 
bob.address().hex(), alice.address().hex(), collectionName, tokenName, 1), + { checkSuccess: true }, + ); + aliceBalance = await tokenClient.getTokenForAccount(alice.address().hex(), tokenId); + expect(aliceBalance.amount).toBe("1"); + + await client.waitForTransaction( + await tokenClient.claimToken(bob, alice.address().hex(), alice.address().hex(), collectionName, tokenName), + { checkSuccess: true }, + ); + + const bobBalance = await tokenClient.getTokenForAccount(bob.address().hex(), tokenId); + expect(bobBalance.amount).toBe("1"); + + // default token property is configured to be mutable and then alice can make bob burn token after token creation + // test mutate Bob's token properties and allow owner to burn this token + let a = await tokenClient.mutateTokenProperties( + alice, + bob.address(), + alice.address(), + collectionName, + tokenName, + 0, + 1, + ["test"], + [bcsSerializeBool(true)], + ["bool"], + ); + await client.waitForTransactionWithResult(a); + + const newTokenId = { + token_data_id: { + creator: alice.address().hex(), + collection: collectionName, + name: tokenName, + }, + property_version: "1", + }; + const mutated_token = await tokenClient.getTokenForAccount(bob.address().hex(), newTokenId); + // expect property map deserialization works + expect(mutated_token.token_properties.data["test"].value).toBe("true"); + expect(mutated_token.token_properties.data["TOKEN_BURNABLE_BY_OWNER"].value).toBe("true"); + + // burn the token by owner + var txn_hash = await tokenClient.burnByOwner(bob, alice.address(), collectionName, tokenName, 1, 1); + await client.waitForTransactionWithResult(txn_hash); + const newbalance = await tokenClient.getTokenForAccount(bob.address().hex(), newTokenId); + expect(newbalance.amount).toBe("0"); + + //bob opt_in directly transfer and alice transfer token to bob directly + txn_hash = await tokenClient.optInTokenTransfer(bob, true); + await client.waitForTransactionWithResult(txn_hash); + + // alice still have one token with property version 0. 
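+ // Because Bob opted in above, transferWithOptIn can send the remaining
+ // property-version-0 token from Alice to Bob directly, without the
+ // offer/claim handshake used earlier in this test.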
+ txn_hash = await tokenClient.transferWithOptIn( + alice, + alice.address(), + collectionName, + tokenName, + 0, + bob.address(), + 1, + ); + await client.waitForTransactionWithResult(txn_hash); + const balance = await tokenClient.getTokenForAccount(bob.address().hex(), tokenId); + expect(balance.amount).toBe("1"); + }, + longTestTimeout, +); diff --git a/m1/JavaScript-client/src/tests/unit/abi.test.ts b/m1/JavaScript-client/src/tests/unit/abi.test.ts new file mode 100644 index 00000000..3755e796 --- /dev/null +++ b/m1/JavaScript-client/src/tests/unit/abi.test.ts @@ -0,0 +1,67 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +import { HexString } from "../../utils"; +import { Deserializer, Serializer } from "../../bcs"; +import { ScriptABI, EntryFunctionABI, TransactionScriptABI, ArgumentABI } from "../../aptos_types/abi"; +import { TypeTagAddress, TypeTagU64 } from "../../aptos_types"; +import { ModuleId } from "../../aptos_types"; + +// eslint-disable-next-line operator-linebreak +const SCRIPT_FUNCTION_ABI = + // eslint-disable-next-line max-len + "010E6372656174655F6163636F756E740000000000000000000000000000000000000000000000000000000000000001074163636F756E7420204261736963206163636F756E74206372656174696F6E206D6574686F64732E000108617574685F6B657904"; + +// eslint-disable-next-line operator-linebreak +const TRANSACTION_SCRIPT_ABI = + // eslint-disable-next-line max-len + "00046D61696E0F20412074657374207363726970742E8B01A11CEB0B050000000501000403040A050E0B071924083D200000000101020301000003010400020C0301050001060C0101074163636F756E74065369676E65720A616464726573735F6F66096578697374735F617400000000000000000000000000000000000000000000000000000000000000010000010A0E0011000C020B021101030705090B0127020001016902"; + +describe("ABI", () => { + it("parses create_account successfully", async () => { + const name = "create_account"; + const doc = " Basic account creation methods."; + const typeArgABIs = [new ArgumentABI("auth_key", new TypeTagAddress())]; + + const abi = new EntryFunctionABI(name, ModuleId.fromStr("0x1::Account"), doc, [], typeArgABIs); + const serializer = new Serializer(); + abi.serialize(serializer); + expect(HexString.fromUint8Array(serializer.getBytes()).noPrefix()).toBe(SCRIPT_FUNCTION_ABI.toLowerCase()); + + const deserializer = new Deserializer(new HexString(SCRIPT_FUNCTION_ABI).toUint8Array()); + const entryFunctionABI = ScriptABI.deserialize(deserializer) as EntryFunctionABI; + const { address: moduleAddress, name: moduleName } = entryFunctionABI.module_name; + expect(entryFunctionABI.name).toBe("create_account"); + expect(HexString.fromUint8Array(moduleAddress.address).toShortString()).toBe("0x1"); + expect(moduleName.value).toBe("Account"); + expect(entryFunctionABI.doc.trim()).toBe("Basic account creation methods."); + + const arg = entryFunctionABI.args[0]; + expect(arg.name).toBe("auth_key"); + expect(arg.type_tag instanceof TypeTagAddress).toBeTruthy(); + }); + + it("parses script abi successfully", async () => { + const name = "main"; + // eslint-disable-next-line max-len + const code = + "0xa11ceb0b050000000501000403040a050e0b071924083d200000000101020301000003010400020c0301050001060c0101074163636f756e74065369676e65720a616464726573735f6f66096578697374735f617400000000000000000000000000000000000000000000000000000000000000010000010a0e0011000c020b021101030705090b012702"; + const doc = " A test script."; + const typeArgABIs = [new ArgumentABI("i", new TypeTagU64())]; + const abi = new TransactionScriptABI(name, doc, 
HexString.ensure(code).toUint8Array(), [], typeArgABIs); + const serializer = new Serializer(); + abi.serialize(serializer); + expect(HexString.fromUint8Array(serializer.getBytes()).noPrefix()).toBe(TRANSACTION_SCRIPT_ABI.toLowerCase()); + + const deserializer = new Deserializer(new HexString(TRANSACTION_SCRIPT_ABI).toUint8Array()); + const transactionScriptABI = ScriptABI.deserialize(deserializer) as TransactionScriptABI; + expect(transactionScriptABI.name).toBe("main"); + expect(transactionScriptABI.doc.trim()).toBe("A test script."); + + expect(HexString.fromUint8Array(transactionScriptABI.code).hex()).toBe(code); + + const arg = transactionScriptABI.args[0]; + expect(arg.name).toBe("i"); + expect(arg.type_tag instanceof TypeTagU64).toBeTruthy(); + }); +}); diff --git a/m1/JavaScript-client/src/tests/unit/account_address.test.ts b/m1/JavaScript-client/src/tests/unit/account_address.test.ts new file mode 100644 index 00000000..687c531f --- /dev/null +++ b/m1/JavaScript-client/src/tests/unit/account_address.test.ts @@ -0,0 +1,80 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +import { AccountAddress } from "../../aptos_types"; + +const ADDRESS_LONG = "000000000000000000000000000000000000000000000000000000000a550c18"; +const ADDRESS_SHORT = "a550c18"; + +describe("AccountAddress", () => { + it("gets created from full hex string", async () => { + const addr = AccountAddress.fromHex(ADDRESS_LONG); + expect(Buffer.from(addr.address).toString("hex")).toBe(ADDRESS_LONG); + }); + + it("gets created from short hex string", async () => { + const addr = AccountAddress.fromHex(ADDRESS_SHORT); + expect(Buffer.from(addr.address).toString("hex")).toBe(ADDRESS_LONG); + }); + + it("gets created from prefixed full hex string", async () => { + const addr = AccountAddress.fromHex(`0x${ADDRESS_LONG}`); + expect(Buffer.from(addr.address).toString("hex")).toBe(ADDRESS_LONG); + }); + + it("gets created from prefixed short hex string", async () => { + const addr = AccountAddress.fromHex(`0x${ADDRESS_SHORT}`); + expect(Buffer.from(addr.address).toString("hex")).toBe(ADDRESS_LONG); + }); + + it("gets created from prefixed short hex string with leading 0s", async () => { + const addr = AccountAddress.fromHex(`0x000${ADDRESS_SHORT}`); + expect(Buffer.from(addr.address).toString("hex")).toBe(ADDRESS_LONG); + }); + + it("throws exception when initiating from a long hex string", async () => { + expect(() => { + AccountAddress.fromHex(`1${ADDRESS_LONG}`); + // eslint-disable-next-line quotes + }).toThrow("Hex string is too long. Address's length is 32 bytes."); + }); + + it("throws exception when initiating from a long hex string", async () => { + expect(() => { + AccountAddress.fromHex(`1${ADDRESS_LONG}`); + // eslint-disable-next-line quotes + }).toThrow("Hex string is too long. 
Address's length is 32 bytes."); + }); + + it("isValid short with 0x", async () => { + expect(AccountAddress.isValid(`0x${ADDRESS_SHORT}`)).toBe(true); + }); + + it("isValid short with leading 0s 0x", async () => { + expect(AccountAddress.isValid(`0x000${ADDRESS_SHORT}`)).toBe(true); + }); + + it("isValid short with leading 0s 0x", async () => { + expect(AccountAddress.isValid(`0x000${ADDRESS_SHORT}`)).toBe(true); + }); + + it("isValid long with leading 0s without 0x", async () => { + expect(AccountAddress.isValid(`${ADDRESS_LONG}`)).toBe(true); + }); + + it("isValid long with leading 0s with 0x", async () => { + expect(AccountAddress.isValid(`0x${ADDRESS_LONG}`)).toBe(true); + }); + + it("not isValid empty string", async () => { + expect(AccountAddress.isValid("")).toBe(false); + }); + + it("not isValid too long without 0x", async () => { + expect(AccountAddress.isValid(`00${ADDRESS_LONG}`)).toBe(false); + }); + + it("not isValid too long with 0x", async () => { + expect(AccountAddress.isValid(`0x00${ADDRESS_LONG}`)).toBe(false); + }); +}); diff --git a/m1/JavaScript-client/src/tests/unit/aptos_account.test.ts b/m1/JavaScript-client/src/tests/unit/aptos_account.test.ts new file mode 100644 index 00000000..cdbaf82e --- /dev/null +++ b/m1/JavaScript-client/src/tests/unit/aptos_account.test.ts @@ -0,0 +1,89 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +import { AptosAccount, AptosAccountObject, getAddressFromAccountOrAddress } from "../../account"; +import { HexString } from "../../utils"; +import nacl from "tweetnacl"; + +const aptosAccountObject: AptosAccountObject = { + address: "0x978c213990c4833df71548df7ce49d54c759d6b6d932de22b24d56060b7af2aa", + privateKeyHex: + // eslint-disable-next-line max-len + "0xc5338cd251c22daa8c9c9cc94f498cc8a5c7e1d2e75287a5dda91096fe64efa5de19e5d1880cac87d57484ce9ed2e84cf0f9599f12e7cc3a52e4e7657a763f2c", + publicKeyHex: "0xde19e5d1880cac87d57484ce9ed2e84cf0f9599f12e7cc3a52e4e7657a763f2c", +}; + +const mnemonic = "shoot island position soft burden budget tooth cruel issue economy destroy above"; + +test("generates random accounts", () => { + const a1 = new AptosAccount(); + const a2 = new AptosAccount(); + expect(a1.authKey()).not.toBe(a2.authKey()); + expect(a1.address().hex()).not.toBe(a2.address().hex()); +}); + +test("generates derive path accounts", () => { + const address = "0x07968dab936c1bad187c60ce4082f307d030d780e91e694ae03aef16aba73f30"; + const a1 = AptosAccount.fromDerivePath("m/44'/637'/0'/0'/0'", mnemonic); + expect(a1.address().hex()).toBe(address); +}); + +test("generates derive path accounts", () => { + expect(() => { + AptosAccount.fromDerivePath("", mnemonic); + }).toThrow(new Error("Invalid derivation path")); +}); + +test("accepts custom address", () => { + const address = "0x777"; + const a1 = new AptosAccount(undefined, address); + expect(a1.address().hex()).toBe(address); +}); + +test("Deserializes from AptosAccountObject", () => { + const a1 = AptosAccount.fromAptosAccountObject(aptosAccountObject); + expect(a1.address().hex()).toBe(aptosAccountObject.address); + expect(a1.pubKey().hex()).toBe(aptosAccountObject.publicKeyHex); +}); + +test("Deserializes from AptosAccountObject without address", () => { + const privateKeyObject = { privateKeyHex: aptosAccountObject.privateKeyHex }; + const a1 = AptosAccount.fromAptosAccountObject(privateKeyObject); + expect(a1.address().hex()).toBe(aptosAccountObject.address); + expect(a1.pubKey().hex()).toBe(aptosAccountObject.publicKeyHex); +}); + 
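+// For reference (illustrative note on the fixture above): privateKeyHex is a
+// 64-byte ed25519 signing key whose trailing 32 bytes equal publicKeyHex, and
+// the previous test shows that the address and public key can be recovered
+// from the private key alone, e.g.:
+//
+//   const restored = AptosAccount.fromAptosAccountObject({ privateKeyHex: aptosAccountObject.privateKeyHex });
+//   // restored.address().hex() === aptosAccountObject.address
+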
+test("Serializes/Deserializes", () => { + const a1 = new AptosAccount(); + const a2 = AptosAccount.fromAptosAccountObject(a1.toPrivateKeyObject()); + expect(a1.authKey().hex()).toBe(a2.authKey().hex()); + expect(a1.address().hex()).toBe(a2.address().hex()); +}); + +test("Signs and verifies strings", () => { + const a1 = AptosAccount.fromAptosAccountObject(aptosAccountObject); + const messageHex = "0x7777"; + const expectedSignedMessage = + "0xc5de9e40ac00b371cd83b1c197fa5b665b7449b33cd3cdd305bb78222e06a671a49625ab9aea8a039d4bb70e275768084d62b094bc1b31964f2357b7c1af7e0d"; + expect(a1.signHexString(messageHex).hex()).toBe(expectedSignedMessage); + expect(a1.verifySignature(messageHex, expectedSignedMessage)).toBe(true); + expect(a1.verifySignature(messageHex + "00", expectedSignedMessage)).toBe(false); +}); + +test("Gets the resource account address", () => { + const sourceAddress = "0xca843279e3427144cead5e4d5999a3d0"; + const seed = new Uint8Array([1]); + + expect(AptosAccount.getResourceAccountAddress(sourceAddress, seed).hex()).toBe( + "0xcbed05b37b6981a57f535c1f5d136734df822abaf4cd30c51c9b4d60eae79d5d", + ); +}); + +test("Test getAddressFromAccountOrAddress", () => { + const account = AptosAccount.fromAptosAccountObject(aptosAccountObject); + expect(getAddressFromAccountOrAddress(aptosAccountObject.address!).toString()).toBe(aptosAccountObject.address); + expect(getAddressFromAccountOrAddress(HexString.ensure(aptosAccountObject.address!)).toString()).toBe( + aptosAccountObject.address, + ); + expect(getAddressFromAccountOrAddress(account).toString()).toBe(aptosAccountObject.address); +}); diff --git a/m1/JavaScript-client/src/tests/unit/builder.test.ts b/m1/JavaScript-client/src/tests/unit/builder.test.ts new file mode 100644 index 00000000..6e66ee6c --- /dev/null +++ b/m1/JavaScript-client/src/tests/unit/builder.test.ts @@ -0,0 +1,234 @@ +import { TransactionBuilderRemoteABI } from "../../transaction_builder"; +import { AptosClient } from "../../providers"; +import { getFaucetClient, longTestTimeout, NODE_URL } from "./test_helper.test"; +import { AptosAccount } from "../../account"; +import { + RawTransaction, + TransactionPayloadEntryFunction, + TypeTagBool, + TypeTagStruct, + TypeTagU8, + TypeTagVector, +} from "../../aptos_types"; +import { HexString } from "../../utils"; + +describe.only("TransactionBuilderRemoteABI", () => { + test( + "generates raw txn from an entry function", + async () => { + const client = new AptosClient(NODE_URL); + const alice = new AptosAccount(); + const faucetClient = getFaucetClient(); + await faucetClient.fundAccount(alice.address(), 100000000); + // Create an instance of the class + const builder = new TransactionBuilderRemoteABI(client, { sender: alice.address() }); + + // Spy on the fetchABI method + const fetchABISpy = jest.spyOn(builder, "fetchABI"); + + // Mock the implementation of the fetchABI method to return a mock data + const abi = new Map(); + abi.set("0x1::some_modules::SomeName", { + fullName: "0x1::some_modules::SomeName", + name: "SomeName", + is_entry: true, + is_view: false, + generic_type_params: [], + params: ["&signer", "0x1::string::String"], + return: [], + visibility: "public", + }); + fetchABISpy.mockResolvedValue(abi); + + // Call the build method with some arguments + const rawTxn = await builder.build("0x1::some_modules::SomeName", [], ["key"]); + expect(rawTxn instanceof RawTransaction).toBeTruthy(); + expect(rawTxn.sender.address).toEqual(new HexString(alice.address().hex()).toUint8Array()); + expect(rawTxn.payload 
instanceof TransactionPayloadEntryFunction).toBeTruthy(); + expect((rawTxn.payload as TransactionPayloadEntryFunction).value.module_name.name.value).toBe("some_modules"); + expect((rawTxn.payload as TransactionPayloadEntryFunction).value.function_name.value).toBe("SomeName"); + expect((rawTxn.payload as TransactionPayloadEntryFunction).value.ty_args).toHaveLength(0); + expect((rawTxn.payload as TransactionPayloadEntryFunction).value.args).toHaveLength(1); + + // Restore the original implementation of the fetch method + fetchABISpy.mockRestore(); + }, + longTestTimeout, + ); + + test( + "generates raw txn from an entry function with Object type", + async () => { + const client = new AptosClient(NODE_URL); + const alice = new AptosAccount(); + const faucetClient = getFaucetClient(); + await faucetClient.fundAccount(alice.address(), 100000000); + // Create an instance of the class + const builder = new TransactionBuilderRemoteABI(client, { sender: alice.address() }); + + // Spy on the fetchABI method + const fetchABISpy = jest.spyOn(builder, "fetchABI"); + + // Mock the implementation of the fetchABI method to return a mock data + const abi = new Map(); + abi.set("0x1::some_modules::SomeName", { + fullName: "0x1::some_modules::SomeName", + name: "SomeName", + is_entry: true, + is_view: false, + generic_type_params: [ + { + constraints: ["key"], + }, + ], + params: ["&signer", "0x1::object::Object", "0x1::string::String"], + return: [], + visibility: "public", + }); + fetchABISpy.mockResolvedValue(abi); + + // Call the build method with some arguments + const rawTxn = await builder.build( + "0x1::some_modules::SomeName", + ["0x1::type::SomeType"], + ["0x2b4d540735a4e128fda896f988415910a45cab41c9ddd802b32dd16e8f9ca3cd", "key"], + ); + + expect(rawTxn instanceof RawTransaction).toBeTruthy(); + expect(rawTxn.sender.address).toEqual(new HexString(alice.address().hex()).toUint8Array()); + expect(rawTxn.payload instanceof TransactionPayloadEntryFunction).toBeTruthy(); + expect((rawTxn.payload as TransactionPayloadEntryFunction).value.module_name.name.value).toBe("some_modules"); + expect((rawTxn.payload as TransactionPayloadEntryFunction).value.function_name.value).toBe("SomeName"); + expect((rawTxn.payload as TransactionPayloadEntryFunction).value.ty_args).toHaveLength(1); + expect((rawTxn.payload as TransactionPayloadEntryFunction).value.args).toHaveLength(2); + + // Restore the original implementation of the fetch method + fetchABISpy.mockRestore(); + }, + longTestTimeout, + ); + + test( + "generates raw txn from a generic entry function", + async () => { + const client = new AptosClient(NODE_URL); + const alice = new AptosAccount(); + const faucetClient = getFaucetClient(); + await faucetClient.fundAccount(alice.address(), 100000000); + // Create an instance of the class + const builder = new TransactionBuilderRemoteABI(client, { sender: alice.address() }); + + // Spy on the fetchABI method + const fetchABISpy = jest.spyOn(builder, "fetchABI"); + + // Mock the implementation of the fetchABI method to return a mock data + const abi = new Map(); + abi.set("0x1::some_modules::SomeName", { + fullName: "0x1::some_modules::SomeName", + name: "SomeName", + is_entry: true, + is_view: false, + generic_type_params: [ + { + constraints: ["key"], + }, + { + constraints: ["drop"], + }, + ], + params: ["&signer", "0x1::object::Object", "0x1::string::String", "T1"], + return: [], + visibility: "public", + }); + fetchABISpy.mockResolvedValue(abi); + + // Call the build method with some arguments + const rawTxn 
= await builder.build( + "0x1::some_modules::SomeName", + ["0x1::type::SomeType", "vector"], + ["0x2b4d540735a4e128fda896f988415910a45cab41c9ddd802b32dd16e8f9ca3cd", "key", "[hello,world]"], + ); + + expect(rawTxn instanceof RawTransaction).toBeTruthy(); + expect(rawTxn.sender.address).toEqual(new HexString(alice.address().hex()).toUint8Array()); + expect(rawTxn.payload instanceof TransactionPayloadEntryFunction).toBeTruthy(); + expect((rawTxn.payload as TransactionPayloadEntryFunction).value.module_name.name.value).toBe("some_modules"); + expect((rawTxn.payload as TransactionPayloadEntryFunction).value.function_name.value).toBe("SomeName"); + expect((rawTxn.payload as TransactionPayloadEntryFunction).value.ty_args).toHaveLength(2); + expect( + (rawTxn.payload as TransactionPayloadEntryFunction).value.ty_args[0] instanceof TypeTagStruct, + ).toBeTruthy(); + expect( + (rawTxn.payload as TransactionPayloadEntryFunction).value.ty_args[1] instanceof TypeTagVector, + ).toBeTruthy(); + expect((rawTxn.payload as TransactionPayloadEntryFunction).value.args).toHaveLength(3); + + // Restore the original implementation of the fetch method + fetchABISpy.mockRestore(); + }, + longTestTimeout, + ); + + test( + "generates raw txn from an entry function with multiple generic params", + async () => { + const client = new AptosClient(NODE_URL); + const alice = new AptosAccount(); + const faucetClient = getFaucetClient(); + await faucetClient.fundAccount(alice.address(), 100000000); + // Create an instance of the class + const builder = new TransactionBuilderRemoteABI(client, { sender: alice.address() }); + + // Spy on the fetchABI method + const fetchABISpy = jest.spyOn(builder, "fetchABI"); + + // Mock the implementation of the fetchABI method to return a mock data + const abi = new Map(); + abi.set("0x1::some_modules::SomeName", { + fullName: "0x1::some_modules::SomeName", + name: "SomeName", + is_entry: true, + is_view: false, + generic_type_params: [ + { + constraints: ["key"], + }, + { + constraints: ["drop"], + }, + { + constraints: ["key"], + }, + ], + params: ["&signer", "T0", "0x1::string::String", "T1", "T2"], + return: [], + visibility: "public", + }); + fetchABISpy.mockResolvedValue(abi); + + // Call the build method with some arguments + const rawTxn = await builder.build( + "0x1::some_modules::SomeName", + ["bool", "vector", "u8"], + ["true", "key", "[hello,world]", "6"], + ); + + expect(rawTxn instanceof RawTransaction).toBeTruthy(); + expect(rawTxn.sender.address).toEqual(new HexString(alice.address().hex()).toUint8Array()); + expect(rawTxn.payload instanceof TransactionPayloadEntryFunction).toBeTruthy(); + expect((rawTxn.payload as TransactionPayloadEntryFunction).value.module_name.name.value).toBe("some_modules"); + expect((rawTxn.payload as TransactionPayloadEntryFunction).value.function_name.value).toBe("SomeName"); + expect((rawTxn.payload as TransactionPayloadEntryFunction).value.ty_args).toHaveLength(3); + expect((rawTxn.payload as TransactionPayloadEntryFunction).value.ty_args[0] instanceof TypeTagBool).toBeTruthy(); + expect( + (rawTxn.payload as TransactionPayloadEntryFunction).value.ty_args[1] instanceof TypeTagVector, + ).toBeTruthy(); + expect((rawTxn.payload as TransactionPayloadEntryFunction).value.ty_args[2] instanceof TypeTagU8).toBeTruthy(); + expect((rawTxn.payload as TransactionPayloadEntryFunction).value.args).toHaveLength(4); + + // Restore the original implementation of the fetch method + fetchABISpy.mockRestore(); + }, + longTestTimeout, + ); +}); diff --git 
a/m1/JavaScript-client/src/tests/unit/builder_utils.test.ts b/m1/JavaScript-client/src/tests/unit/builder_utils.test.ts new file mode 100644 index 00000000..885765d2 --- /dev/null +++ b/m1/JavaScript-client/src/tests/unit/builder_utils.test.ts @@ -0,0 +1,399 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +import { HexString } from "../../utils"; +import { + AccountAddress, + Identifier, + StructTag, + TransactionArgumentAddress, + TransactionArgumentBool, + TransactionArgumentU128, + TransactionArgumentU64, + TransactionArgumentU8, + TransactionArgumentU8Vector, + TypeTagAddress, + TypeTagBool, + TypeTagParser, + TypeTagStruct, + TypeTagU128, + TypeTagU16, + TypeTagU256, + TypeTagU32, + TypeTagU64, + TypeTagU8, + TypeTagVector, +} from "../../aptos_types"; +import { Serializer } from "../../bcs"; +import { + argToTransactionArgument, + serializeArg, + ensureBoolean, + ensureNumber, + ensureBigInt, +} from "../../transaction_builder/builder_utils"; + +describe("BuilderUtils", () => { + it("parses a bool TypeTag", async () => { + expect(new TypeTagParser("bool").parseTypeTag() instanceof TypeTagBool).toBeTruthy(); + }); + + it("parses a u8 TypeTag", async () => { + expect(new TypeTagParser("u8").parseTypeTag() instanceof TypeTagU8).toBeTruthy(); + }); + + it("parses a u16 TypeTag", async () => { + expect(new TypeTagParser("u16").parseTypeTag() instanceof TypeTagU16).toBeTruthy(); + }); + + it("parses a u32 TypeTag", async () => { + expect(new TypeTagParser("u32").parseTypeTag() instanceof TypeTagU32).toBeTruthy(); + }); + + it("parses a u64 TypeTag", async () => { + expect(new TypeTagParser("u64").parseTypeTag() instanceof TypeTagU64).toBeTruthy(); + }); + + it("parses a u128 TypeTag", async () => { + expect(new TypeTagParser("u128").parseTypeTag() instanceof TypeTagU128).toBeTruthy(); + }); + + it("parses a u256 TypeTag", async () => { + expect(new TypeTagParser("u256").parseTypeTag() instanceof TypeTagU256).toBeTruthy(); + }); + + it("parses a address TypeTag", async () => { + expect(new TypeTagParser("address").parseTypeTag() instanceof TypeTagAddress).toBeTruthy(); + }); + + it("parses a vector TypeTag", async () => { + const vectorAddress = new TypeTagParser("vector
<address>
").parseTypeTag(); + expect(vectorAddress instanceof TypeTagVector).toBeTruthy(); + expect((vectorAddress as TypeTagVector).value instanceof TypeTagAddress).toBeTruthy(); + + const vectorU64 = new TypeTagParser(" vector < u64 > ").parseTypeTag(); + expect(vectorU64 instanceof TypeTagVector).toBeTruthy(); + expect((vectorU64 as TypeTagVector).value instanceof TypeTagU64).toBeTruthy(); + }); + + it("parses a sturct TypeTag", async () => { + const assertStruct = (struct: TypeTagStruct, accountAddress: string, moduleName: string, structName: string) => { + expect(HexString.fromUint8Array(struct.value.address.address).toShortString()).toBe(accountAddress); + expect(struct.value.module_name.value).toBe(moduleName); + expect(struct.value.name.value).toBe(structName); + }; + const coin = new TypeTagParser("0x1::test_coin::Coin").parseTypeTag(); + expect(coin instanceof TypeTagStruct).toBeTruthy(); + assertStruct(coin as TypeTagStruct, "0x1", "test_coin", "Coin"); + + const aptosCoin = new TypeTagParser( + "0x1::coin::CoinStore < 0x1::test_coin::AptosCoin1 , 0x1::test_coin::AptosCoin2 > ", + ).parseTypeTag(); + expect(aptosCoin instanceof TypeTagStruct).toBeTruthy(); + assertStruct(aptosCoin as TypeTagStruct, "0x1", "coin", "CoinStore"); + + const aptosCoinTrailingComma = new TypeTagParser( + "0x1::coin::CoinStore < 0x1::test_coin::AptosCoin1 , 0x1::test_coin::AptosCoin2, > ", + ).parseTypeTag(); + expect(aptosCoinTrailingComma instanceof TypeTagStruct).toBeTruthy(); + assertStruct(aptosCoinTrailingComma as TypeTagStruct, "0x1", "coin", "CoinStore"); + + const structTypeTags = (aptosCoin as TypeTagStruct).value.type_args; + expect(structTypeTags.length).toBe(2); + + const structTypeTag1 = structTypeTags[0]; + assertStruct(structTypeTag1 as TypeTagStruct, "0x1", "test_coin", "AptosCoin1"); + + const structTypeTag2 = structTypeTags[1]; + assertStruct(structTypeTag2 as TypeTagStruct, "0x1", "test_coin", "AptosCoin2"); + + const coinComplex = new TypeTagParser( + // eslint-disable-next-line max-len + "0x1::coin::CoinStore < 0x2::coin::LPCoin < 0x1::test_coin::AptosCoin1 , vector<0x1::test_coin::AptosCoin2 > > >", + ).parseTypeTag(); + + expect(coinComplex instanceof TypeTagStruct).toBeTruthy(); + assertStruct(coinComplex as TypeTagStruct, "0x1", "coin", "CoinStore"); + const coinComplexTypeTag = (coinComplex as TypeTagStruct).value.type_args[0]; + assertStruct(coinComplexTypeTag as TypeTagStruct, "0x2", "coin", "LPCoin"); + + expect(() => { + new TypeTagParser("0x1::test_coin").parseTypeTag(); + }).toThrow("Invalid type tag."); + + expect(() => { + new TypeTagParser("0x1::test_coin::CoinStore<0x1::test_coin::AptosCoin").parseTypeTag(); + }).toThrow("Invalid type tag."); + + expect(() => { + new TypeTagParser("0x1::test_coin::CoinStore<0x1::test_coin>").parseTypeTag(); + }).toThrow("Invalid type tag."); + + expect(() => { + new TypeTagParser("0x1:test_coin::AptosCoin").parseTypeTag(); + }).toThrow("Unrecognized token."); + + expect(() => { + new TypeTagParser("0x!::test_coin::AptosCoin").parseTypeTag(); + }).toThrow("Unrecognized token."); + + expect(() => { + new TypeTagParser("0x1::test_coin::AptosCoin<").parseTypeTag(); + }).toThrow("Invalid type tag."); + + expect(() => { + new TypeTagParser("0x1::test_coin::CoinStore<0x1::test_coin::AptosCoin,").parseTypeTag(); + }).toThrow("Invalid type tag."); + + expect(() => { + new TypeTagParser("").parseTypeTag(); + }).toThrow("Invalid type tag."); + + expect(() => { + new TypeTagParser("0x1::<::CoinStore<0x1::test_coin::AptosCoin,").parseTypeTag(); + 
}).toThrow("Invalid type tag."); + + expect(() => { + new TypeTagParser("0x1::test_coin::><0x1::test_coin::AptosCoin,").parseTypeTag(); + }).toThrow("Invalid type tag."); + + expect(() => { + new TypeTagParser("u3").parseTypeTag(); + }).toThrow("Invalid type tag."); + }); + + it("serializes a boolean arg", async () => { + let serializer = new Serializer(); + serializeArg(true, new TypeTagBool(), serializer); + expect(serializer.getBytes()).toEqual(new Uint8Array([0x01])); + serializer = new Serializer(); + expect(() => { + serializeArg(123, new TypeTagBool(), serializer); + }).toThrow(/Invalid arg/); + }); + + it("serializes a u8 arg", async () => { + let serializer = new Serializer(); + serializeArg(255, new TypeTagU8(), serializer); + expect(serializer.getBytes()).toEqual(new Uint8Array([0xff])); + + serializer = new Serializer(); + expect(() => { + serializeArg("u8", new TypeTagU8(), serializer); + }).toThrow(/Invalid number string/); + }); + + it("serializes a u16 arg", async () => { + let serializer = new Serializer(); + serializeArg(0x7fff, new TypeTagU16(), serializer); + expect(serializer.getBytes()).toEqual(new Uint8Array([0xff, 0x7f])); + + serializer = new Serializer(); + expect(() => { + serializeArg("u16", new TypeTagU16(), serializer); + }).toThrow(/Invalid number string/); + }); + + it("serializes a u32 arg", async () => { + let serializer = new Serializer(); + serializeArg(0x01020304, new TypeTagU32(), serializer); + expect(serializer.getBytes()).toEqual(new Uint8Array([0x04, 0x03, 0x02, 0x01])); + + serializer = new Serializer(); + expect(() => { + serializeArg("u32", new TypeTagU32(), serializer); + }).toThrow(/Invalid number string/); + }); + + it("serializes a u64 arg", async () => { + let serializer = new Serializer(); + serializeArg(BigInt("18446744073709551615"), new TypeTagU64(), serializer); + expect(serializer.getBytes()).toEqual(new Uint8Array([0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff])); + + serializer = new Serializer(); + expect(() => { + serializeArg("u64", new TypeTagU64(), serializer); + }).toThrow(/^Cannot convert/); + }); + + it("serializes a u128 arg", async () => { + let serializer = new Serializer(); + serializeArg(BigInt("340282366920938463463374607431768211455"), new TypeTagU128(), serializer); + expect(serializer.getBytes()).toEqual( + new Uint8Array([0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]), + ); + + serializer = new Serializer(); + expect(() => { + serializeArg("u128", new TypeTagU128(), serializer); + }).toThrow(/^Cannot convert/); + }); + + it("serializes a u256 arg", async () => { + let serializer = new Serializer(); + serializeArg( + BigInt("0x0001020304050607080910111213141516171819202122232425262728293031"), + new TypeTagU256(), + serializer, + ); + expect(serializer.getBytes()).toEqual( + new Uint8Array([ + 0x31, 0x30, 0x29, 0x28, 0x27, 0x26, 0x25, 0x24, 0x23, 0x22, 0x21, 0x20, 0x19, 0x18, 0x17, 0x16, 0x15, 0x14, + 0x13, 0x12, 0x11, 0x10, 0x09, 0x08, 0x07, 0x06, 0x05, 0x04, 0x03, 0x02, 0x01, 0x00, + ]), + ); + + serializer = new Serializer(); + expect(() => { + serializeArg("u256", new TypeTagU256(), serializer); + }).toThrow(/^Cannot convert/); + }); + + it("serializes an AccountAddress arg", async () => { + let serializer = new Serializer(); + serializeArg("0x1", new TypeTagAddress(), serializer); + expect(HexString.fromUint8Array(serializer.getBytes()).toShortString()).toEqual("0x1"); + + serializer = new Serializer(); + serializeArg(AccountAddress.fromHex("0x1"), new 
TypeTagAddress(), serializer); + expect(HexString.fromUint8Array(serializer.getBytes()).toShortString()).toEqual("0x1"); + + serializer = new Serializer(); + expect(() => { + serializeArg(123456, new TypeTagAddress(), serializer); + }).toThrow("Invalid account address."); + }); + + it("serializes a vector arg", async () => { + let serializer = new Serializer(); + serializeArg([255], new TypeTagVector(new TypeTagU8()), serializer); + expect(serializer.getBytes()).toEqual(new Uint8Array([0x1, 0xff])); + + serializer = new Serializer(); + serializeArg("abc", new TypeTagVector(new TypeTagU8()), serializer); + expect(serializer.getBytes()).toEqual(new Uint8Array([0x3, 0x61, 0x62, 0x63])); + + serializer = new Serializer(); + serializeArg(new Uint8Array([0x61, 0x62, 0x63]), new TypeTagVector(new TypeTagU8()), serializer); + expect(serializer.getBytes()).toEqual(new Uint8Array([0x3, 0x61, 0x62, 0x63])); + + serializer = new Serializer(); + expect(() => { + serializeArg(123456, new TypeTagVector(new TypeTagU8()), serializer); + }).toThrow("Invalid vector args."); + }); + + it("serializes a struct arg", async () => { + let serializer = new Serializer(); + serializeArg( + "abc", + new TypeTagStruct( + new StructTag(AccountAddress.fromHex("0x1"), new Identifier("string"), new Identifier("String"), []), + ), + serializer, + ); + expect(serializer.getBytes()).toEqual(new Uint8Array([0x3, 0x61, 0x62, 0x63])); + + serializer = new Serializer(); + expect(() => { + serializeArg( + "abc", + new TypeTagStruct( + new StructTag(AccountAddress.fromHex("0x3"), new Identifier("token"), new Identifier("Token"), []), + ), + serializer, + ); + }).toThrow("The only supported struct arg is of type 0x1::string::String"); + }); + + it("throws at unrecognized arg types", async () => { + const serializer = new Serializer(); + expect(() => { + // @ts-ignore + serializeArg(123456, "unknown_type", serializer); + }).toThrow("Unsupported arg type."); + }); + + it("converts a boolean TransactionArgument", async () => { + const res = argToTransactionArgument(true, new TypeTagBool()); + expect((res as TransactionArgumentBool).value).toEqual(true); + expect(() => { + argToTransactionArgument(123, new TypeTagBool()); + }).toThrow(/Invalid arg/); + }); + + it("converts a u8 TransactionArgument", async () => { + const res = argToTransactionArgument(123, new TypeTagU8()); + expect((res as TransactionArgumentU8).value).toEqual(123); + expect(() => { + argToTransactionArgument("u8", new TypeTagBool()); + }).toThrow(/Invalid boolean string/); + }); + + it("converts a u64 TransactionArgument", async () => { + const res = argToTransactionArgument(123, new TypeTagU64()); + expect((res as TransactionArgumentU64).value).toEqual(BigInt(123)); + expect(() => { + argToTransactionArgument("u64", new TypeTagU64()); + }).toThrow(/Cannot convert/); + }); + + it("converts a u128 TransactionArgument", async () => { + const res = argToTransactionArgument(123, new TypeTagU128()); + expect((res as TransactionArgumentU128).value).toEqual(BigInt(123)); + expect(() => { + argToTransactionArgument("u128", new TypeTagU128()); + }).toThrow(/Cannot convert/); + }); + + it("converts an AccountAddress TransactionArgument", async () => { + let res = argToTransactionArgument("0x1", new TypeTagAddress()) as TransactionArgumentAddress; + expect(HexString.fromUint8Array(res.value.address).toShortString()).toEqual("0x1"); + + res = argToTransactionArgument(AccountAddress.fromHex("0x2"), new TypeTagAddress()) as TransactionArgumentAddress; + 
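+ // Both a hex string ("0x1") and an AccountAddress instance are accepted here;
+ // each is converted to a TransactionArgumentAddress wrapping the parsed address bytes.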
expect(HexString.fromUint8Array(res.value.address).toShortString()).toEqual("0x2"); + + expect(() => { + argToTransactionArgument(123456, new TypeTagAddress()); + }).toThrow("Invalid account address."); + }); + + it("converts a vector TransactionArgument", async () => { + const res = argToTransactionArgument( + new Uint8Array([0x1]), + new TypeTagVector(new TypeTagU8()), + ) as TransactionArgumentU8Vector; + expect(res.value).toEqual(new Uint8Array([0x1])); + + expect(() => { + argToTransactionArgument(123456, new TypeTagVector(new TypeTagU8())); + }).toThrow(/.*should be an instance of Uint8Array$/); + }); + + it("throws at unrecognized TransactionArgument types", async () => { + expect(() => { + // @ts-ignore + argToTransactionArgument(123456, "unknown_type"); + }).toThrow("Unknown type for TransactionArgument."); + }); + + it("ensures a boolean", async () => { + expect(ensureBoolean(false)).toBe(false); + expect(ensureBoolean(true)).toBe(true); + expect(ensureBoolean("true")).toBe(true); + expect(ensureBoolean("false")).toBe(false); + expect(() => ensureBoolean("True")).toThrow("Invalid boolean string."); + }); + + it("ensures a number", async () => { + expect(ensureNumber(10)).toBe(10); + expect(ensureNumber("123")).toBe(123); + expect(() => ensureNumber("True")).toThrow("Invalid number string."); + }); + + it("ensures a bigint", async () => { + expect(ensureBigInt(10)).toBe(BigInt(10)); + expect(ensureBigInt("123")).toBe(BigInt(123)); + expect(() => ensureBigInt("True")).toThrow(/^Cannot convert/); + }); +}); diff --git a/m1/JavaScript-client/src/tests/unit/deserializer.test.ts b/m1/JavaScript-client/src/tests/unit/deserializer.test.ts new file mode 100644 index 00000000..5baee449 --- /dev/null +++ b/m1/JavaScript-client/src/tests/unit/deserializer.test.ts @@ -0,0 +1,132 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +import { Deserializer } from "../../bcs/deserializer"; + +describe("BCS Deserializer", () => { + it("deserializes a non-empty string", () => { + const deserializer = new Deserializer( + new Uint8Array([ + 24, 0xc3, 0xa7, 0xc3, 0xa5, 0xe2, 0x88, 0x9e, 0xe2, 0x89, 0xa0, 0xc2, 0xa2, 0xc3, 0xb5, 0xc3, 0x9f, 0xe2, 0x88, + 0x82, 0xc6, 0x92, 0xe2, 0x88, 0xab, + ]), + ); + expect(deserializer.deserializeStr()).toBe("çå∞≠¢õß∂ƒ∫"); + }); + + it("deserializes an empty string", () => { + const deserializer = new Deserializer(new Uint8Array([0])); + expect(deserializer.deserializeStr()).toBe(""); + }); + + it("deserializes dynamic length bytes", () => { + const deserializer = new Deserializer(new Uint8Array([5, 0x41, 0x70, 0x74, 0x6f, 0x73])); + expect(deserializer.deserializeBytes()).toEqual(new Uint8Array([0x41, 0x70, 0x74, 0x6f, 0x73])); + }); + + it("deserializes dynamic length bytes with zero elements", () => { + const deserializer = new Deserializer(new Uint8Array([0])); + expect(deserializer.deserializeBytes()).toEqual(new Uint8Array([])); + }); + + it("deserializes fixed length bytes", () => { + const deserializer = new Deserializer(new Uint8Array([0x41, 0x70, 0x74, 0x6f, 0x73])); + expect(deserializer.deserializeFixedBytes(5)).toEqual(new Uint8Array([0x41, 0x70, 0x74, 0x6f, 0x73])); + }); + + it("deserializes fixed length bytes with zero element", () => { + const deserializer = new Deserializer(new Uint8Array([])); + expect(deserializer.deserializeFixedBytes(0)).toEqual(new Uint8Array([])); + }); + + it("deserializes a boolean value", () => { + let deserializer = new Deserializer(new Uint8Array([0x01])); + 
expect(deserializer.deserializeBool()).toEqual(true); + deserializer = new Deserializer(new Uint8Array([0x00])); + expect(deserializer.deserializeBool()).toEqual(false); + }); + + it("throws when dserializing a boolean with disallowed values", () => { + expect(() => { + const deserializer = new Deserializer(new Uint8Array([0x12])); + deserializer.deserializeBool(); + }).toThrow("Invalid boolean value"); + }); + + it("deserializes a uint8", () => { + const deserializer = new Deserializer(new Uint8Array([0xff])); + expect(deserializer.deserializeU8()).toEqual(255); + }); + + it("deserializes a uint16", () => { + let deserializer = new Deserializer(new Uint8Array([0xff, 0xff])); + expect(deserializer.deserializeU16()).toEqual(65535); + deserializer = new Deserializer(new Uint8Array([0x34, 0x12])); + expect(deserializer.deserializeU16()).toEqual(4660); + }); + + it("deserializes a uint32", () => { + let deserializer = new Deserializer(new Uint8Array([0xff, 0xff, 0xff, 0xff])); + expect(deserializer.deserializeU32()).toEqual(4294967295); + deserializer = new Deserializer(new Uint8Array([0x78, 0x56, 0x34, 0x12])); + expect(deserializer.deserializeU32()).toEqual(305419896); + }); + + it("deserializes a uint64", () => { + let deserializer = new Deserializer(new Uint8Array([0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff])); + expect(deserializer.deserializeU64()).toEqual(BigInt("18446744073709551615")); + deserializer = new Deserializer(new Uint8Array([0x00, 0xef, 0xcd, 0xab, 0x78, 0x56, 0x34, 0x12])); + expect(deserializer.deserializeU64()).toEqual(BigInt("1311768467750121216")); + }); + + it("deserializes a uint128", () => { + let deserializer = new Deserializer( + new Uint8Array([0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]), + ); + expect(deserializer.deserializeU128()).toEqual(BigInt("340282366920938463463374607431768211455")); + deserializer = new Deserializer( + new Uint8Array([0x00, 0xef, 0xcd, 0xab, 0x78, 0x56, 0x34, 0x12, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]), + ); + expect(deserializer.deserializeU128()).toEqual(BigInt("1311768467750121216")); + }); + it("deserializes a uint256", () => { + let deserializer = new Deserializer( + new Uint8Array([ + 0x31, 0x30, 0x29, 0x28, 0x27, 0x26, 0x25, 0x24, 0x23, 0x22, 0x21, 0x20, 0x19, 0x18, 0x17, 0x16, 0x15, 0x14, + 0x13, 0x12, 0x11, 0x10, 0x09, 0x08, 0x07, 0x06, 0x05, 0x04, 0x03, 0x02, 0x01, 0x00, + ]), + ); + expect(deserializer.deserializeU256()).toEqual( + BigInt("0x0001020304050607080910111213141516171819202122232425262728293031"), + ); + }); + + it("deserializes a uleb128", () => { + let deserializer = new Deserializer(new Uint8Array([0xcd, 0xea, 0xec, 0x31])); + expect(deserializer.deserializeUleb128AsU32()).toEqual(104543565); + + deserializer = new Deserializer(new Uint8Array([0xff, 0xff, 0xff, 0xff, 0x0f])); + expect(deserializer.deserializeUleb128AsU32()).toEqual(4294967295); + }); + + it("throws when deserializing a uleb128 with out ranged value", () => { + expect(() => { + const deserializer = new Deserializer(new Uint8Array([0x80, 0x80, 0x80, 0x80, 0x10])); + deserializer.deserializeUleb128AsU32(); + }).toThrow("Overflow while parsing uleb128-encoded uint32 value"); + }); + + it("throws when deserializing against buffer that has been drained", () => { + expect(() => { + const deserializer = new Deserializer( + new Uint8Array([ + 24, 0xc3, 0xa7, 0xc3, 0xa5, 0xe2, 0x88, 0x9e, 0xe2, 0x89, 0xa0, 0xc2, 0xa2, 0xc3, 0xb5, 0xc3, 0x9f, 0xe2, + 0x88, 0x82, 0xc6, 0x92, 0xe2, 0x88, 
0xab, + ]), + ); + + deserializer.deserializeStr(); + deserializer.deserializeStr(); + }).toThrow("Reached to the end of buffer"); + }); +}); diff --git a/m1/JavaScript-client/src/tests/unit/helper.test.ts b/m1/JavaScript-client/src/tests/unit/helper.test.ts new file mode 100644 index 00000000..fc524184 --- /dev/null +++ b/m1/JavaScript-client/src/tests/unit/helper.test.ts @@ -0,0 +1,96 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +import { AccountAddress } from "../../aptos_types"; +import { Deserializer } from "../../bcs/deserializer"; +import { + bcsSerializeBool, + bcsSerializeBytes, + bcsSerializeFixedBytes, + bcsSerializeStr, + bcsSerializeU128, + bcsSerializeU16, + bcsSerializeU32, + bcsSerializeU8, + bcsSerializeUint64, + bcsToBytes, + deserializeVector, + serializeVector, + serializeVectorWithFunc, +} from "../../bcs/helper"; +import { Serializer } from "../../bcs/serializer"; + +test("serializes and deserializes a vector of serializables", () => { + const address0 = AccountAddress.fromHex("0x1"); + const address1 = AccountAddress.fromHex("0x2"); + + const serializer = new Serializer(); + serializeVector([address0, address1], serializer); + + const addresses: AccountAddress[] = deserializeVector(new Deserializer(serializer.getBytes()), AccountAddress); + + expect(addresses[0].address).toEqual(address0.address); + expect(addresses[1].address).toEqual(address1.address); +}); + +test("bcsToBytes", () => { + const address = AccountAddress.fromHex("0x1"); + bcsToBytes(address); + + expect(bcsToBytes(address)).toEqual( + new Uint8Array([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1]), + ); +}); + +test("bcsSerializeU8", () => { + expect(bcsSerializeU8(255)).toEqual(new Uint8Array([0xff])); +}); + +test("bcsSerializeU16", () => { + expect(bcsSerializeU16(65535)).toEqual(new Uint8Array([0xff, 0xff])); +}); + +test("bcsSerializeU32", () => { + expect(bcsSerializeU32(4294967295)).toEqual(new Uint8Array([0xff, 0xff, 0xff, 0xff])); +}); + +test("bcsSerializeU64", () => { + expect(bcsSerializeUint64(BigInt("18446744073709551615"))).toEqual( + new Uint8Array([0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]), + ); +}); + +test("bcsSerializeU128", () => { + expect(bcsSerializeU128(BigInt("340282366920938463463374607431768211455"))).toEqual( + new Uint8Array([0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]), + ); +}); + +test("bcsSerializeBool", () => { + expect(bcsSerializeBool(true)).toEqual(new Uint8Array([0x01])); +}); + +test("bcsSerializeStr", () => { + expect(bcsSerializeStr("çå∞≠¢õß∂ƒ∫")).toEqual( + new Uint8Array([ + 24, 0xc3, 0xa7, 0xc3, 0xa5, 0xe2, 0x88, 0x9e, 0xe2, 0x89, 0xa0, 0xc2, 0xa2, 0xc3, 0xb5, 0xc3, 0x9f, 0xe2, 0x88, + 0x82, 0xc6, 0x92, 0xe2, 0x88, 0xab, + ]), + ); +}); + +test("bcsSerializeBytes", () => { + expect(bcsSerializeBytes(new Uint8Array([0x41, 0x70, 0x74, 0x6f, 0x73]))).toEqual( + new Uint8Array([5, 0x41, 0x70, 0x74, 0x6f, 0x73]), + ); +}); + +test("bcsSerializeFixedBytes", () => { + expect(bcsSerializeFixedBytes(new Uint8Array([0x41, 0x70, 0x74, 0x6f, 0x73]))).toEqual( + new Uint8Array([0x41, 0x70, 0x74, 0x6f, 0x73]), + ); +}); + +test("serializeVectorWithFunc", () => { + expect(serializeVectorWithFunc([false, true], "serializeBool")).toEqual(new Uint8Array([0x2, 0x0, 0x1])); +}); diff --git a/m1/JavaScript-client/src/tests/unit/hex_string.test.ts b/m1/JavaScript-client/src/tests/unit/hex_string.test.ts new file mode 100644 index 
00000000..60b6cd48 --- /dev/null +++ b/m1/JavaScript-client/src/tests/unit/hex_string.test.ts @@ -0,0 +1,47 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +import { HexString } from "../../utils"; + +const withoutPrefix = "007711b4d0"; +const withPrefix = `0x${withoutPrefix}`; + +function validate(hexString: HexString) { + expect(hexString.hex()).toBe(withPrefix); + expect(hexString.toString()).toBe(withPrefix); + expect(`${hexString}`).toBe(withPrefix); + expect(hexString.noPrefix()).toBe(withoutPrefix); +} + +test("from/to Uint8Array", () => { + const hs = new HexString(withoutPrefix); + expect(HexString.fromUint8Array(hs.toUint8Array()).hex()).toBe(withPrefix); +}); + +test("accepts input without prefix", () => { + const hs = new HexString(withoutPrefix); + validate(hs); +}); + +test("accepts input with prefix", () => { + const hs = new HexString(withPrefix); + validate(hs); +}); + +test("ensures input when string", () => { + const hs = HexString.ensure(withoutPrefix); + validate(hs); +}); + +test("ensures input when HexString", () => { + const hs1 = new HexString(withPrefix); + const hs = HexString.ensure(hs1); + validate(hs); +}); + +test("short address form correct", () => { + const hs1 = new HexString(withoutPrefix); + expect(hs1.toShortString()).toBe("0x7711b4d0"); + const hs2 = new HexString("0x2185b82cef9bc46249ff2dbc56c265f6a0e3bdb7b9498cc45e4f6e429530fdc0"); + expect(hs2.toShortString()).toBe("0x2185b82cef9bc46249ff2dbc56c265f6a0e3bdb7b9498cc45e4f6e429530fdc0"); +}); diff --git a/m1/JavaScript-client/src/tests/unit/misc.test.ts b/m1/JavaScript-client/src/tests/unit/misc.test.ts new file mode 100644 index 00000000..958f940e --- /dev/null +++ b/m1/JavaScript-client/src/tests/unit/misc.test.ts @@ -0,0 +1,12 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +import { AptosClient } from "../../providers/aptos_client"; + +test("test fixNodeUrl", () => { + expect(new AptosClient("https://test.com").client.request.config.BASE).toBe("https://test.com/v1"); + expect(new AptosClient("https://test.com/").client.request.config.BASE).toBe("https://test.com/v1"); + expect(new AptosClient("https://test.com/v1").client.request.config.BASE).toBe("https://test.com/v1"); + expect(new AptosClient("https://test.com/v1/").client.request.config.BASE).toBe("https://test.com/v1"); + expect(new AptosClient("https://test.com", {}, true).client.request.config.BASE).toBe("https://test.com"); +}); diff --git a/m1/JavaScript-client/src/tests/unit/multi_ed25519.test.ts b/m1/JavaScript-client/src/tests/unit/multi_ed25519.test.ts new file mode 100644 index 00000000..7e489c47 --- /dev/null +++ b/m1/JavaScript-client/src/tests/unit/multi_ed25519.test.ts @@ -0,0 +1,111 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +/* eslint-disable max-len */ +import { HexString } from "../../utils"; +import { bcsToBytes, Deserializer } from "../../bcs"; +import { Ed25519PublicKey, Ed25519Signature } from "../../aptos_types/ed25519"; +import { MultiEd25519PublicKey, MultiEd25519Signature } from "../../aptos_types/multi_ed25519"; + +describe("MultiEd25519", () => { + it("public key serializes to bytes correctly", async () => { + const publicKey1 = "b9c6ee1630ef3e711144a648db06bbb2284f7274cfbee53ffcee503cc1a49200"; + const publicKey2 = "aef3f4a4b8eca1dfc343361bf8e436bd42de9259c04b8314eb8e2054dd6e82ab"; + const publicKey3 = "8a5762e21ac1cdb3870442c77b4c3af58c7cedb8779d0270e6d4f1e2f7367d74"; + + const pubKeyMultiSig = new MultiEd25519PublicKey( 
+ [ + new Ed25519PublicKey(new HexString(publicKey1).toUint8Array()), + new Ed25519PublicKey(new HexString(publicKey2).toUint8Array()), + new Ed25519PublicKey(new HexString(publicKey3).toUint8Array()), + ], + 2, + ); + + expect(HexString.fromUint8Array(pubKeyMultiSig.toBytes()).noPrefix()).toEqual( + "b9c6ee1630ef3e711144a648db06bbb2284f7274cfbee53ffcee503cc1a49200aef3f4a4b8eca1dfc343361bf8e436bd42de9259c04b8314eb8e2054dd6e82ab8a5762e21ac1cdb3870442c77b4c3af58c7cedb8779d0270e6d4f1e2f7367d7402", + ); + }); + + it("public key deserializes from bytes correctly", async () => { + const publicKey1 = "b9c6ee1630ef3e711144a648db06bbb2284f7274cfbee53ffcee503cc1a49200"; + const publicKey2 = "aef3f4a4b8eca1dfc343361bf8e436bd42de9259c04b8314eb8e2054dd6e82ab"; + const publicKey3 = "8a5762e21ac1cdb3870442c77b4c3af58c7cedb8779d0270e6d4f1e2f7367d74"; + + const pubKeyMultiSig = new MultiEd25519PublicKey( + [ + new Ed25519PublicKey(new HexString(publicKey1).toUint8Array()), + new Ed25519PublicKey(new HexString(publicKey2).toUint8Array()), + new Ed25519PublicKey(new HexString(publicKey3).toUint8Array()), + ], + 2, + ); + const deserialzed = MultiEd25519PublicKey.deserialize(new Deserializer(bcsToBytes(pubKeyMultiSig))); + expect(HexString.fromUint8Array(deserialzed.toBytes()).noPrefix()).toEqual( + HexString.fromUint8Array(pubKeyMultiSig.toBytes()).noPrefix(), + ); + }); + + it("signature serializes to bytes correctly", async () => { + // eslint-disable-next-line operator-linebreak + const sig1 = + "e6f3ba05469b2388492397840183945d4291f0dd3989150de3248e06b4cefe0ddf6180a80a0f04c045ee8f362870cb46918478cd9b56c66076f94f3efd5a8805"; + // eslint-disable-next-line operator-linebreak + const sig2 = + "2ae0818b7e51b853f1e43dc4c89a1f5fabc9cb256030a908f9872f3eaeb048fb1e2b4ffd5a9d5d1caedd0c8b7d6155ed8071e913536fa5c5a64327b6f2d9a102"; + const bitmap = "c0000000"; + + const multisig = new MultiEd25519Signature( + [ + new Ed25519Signature(new HexString(sig1).toUint8Array()), + new Ed25519Signature(new HexString(sig2).toUint8Array()), + ], + new HexString(bitmap).toUint8Array(), + ); + + expect(HexString.fromUint8Array(multisig.toBytes()).noPrefix()).toEqual( + "e6f3ba05469b2388492397840183945d4291f0dd3989150de3248e06b4cefe0ddf6180a80a0f04c045ee8f362870cb46918478cd9b56c66076f94f3efd5a88052ae0818b7e51b853f1e43dc4c89a1f5fabc9cb256030a908f9872f3eaeb048fb1e2b4ffd5a9d5d1caedd0c8b7d6155ed8071e913536fa5c5a64327b6f2d9a102c0000000", + ); + }); + + it("signature deserializes from bytes correctly", async () => { + // eslint-disable-next-line operator-linebreak + const sig1 = + "e6f3ba05469b2388492397840183945d4291f0dd3989150de3248e06b4cefe0ddf6180a80a0f04c045ee8f362870cb46918478cd9b56c66076f94f3efd5a8805"; + // eslint-disable-next-line operator-linebreak + const sig2 = + "2ae0818b7e51b853f1e43dc4c89a1f5fabc9cb256030a908f9872f3eaeb048fb1e2b4ffd5a9d5d1caedd0c8b7d6155ed8071e913536fa5c5a64327b6f2d9a102"; + const bitmap = "c0000000"; + + const multisig = new MultiEd25519Signature( + [ + new Ed25519Signature(new HexString(sig1).toUint8Array()), + new Ed25519Signature(new HexString(sig2).toUint8Array()), + ], + new HexString(bitmap).toUint8Array(), + ); + + const deserialzed = MultiEd25519Signature.deserialize(new Deserializer(bcsToBytes(multisig))); + expect(HexString.fromUint8Array(deserialzed.toBytes()).noPrefix()).toEqual( + HexString.fromUint8Array(multisig.toBytes()).noPrefix(), + ); + }); + + it("creates a valid bitmap", () => { + expect(MultiEd25519Signature.createBitmap([0, 2, 31])).toEqual( + new Uint8Array([0b10100000, 0b00000000, 
0b00000000, 0b00000001]), + ); + }); + + it("throws exception when creating a bitmap with wrong bits", async () => { + expect(() => { + MultiEd25519Signature.createBitmap([32]); + }).toThrow("Invalid bit value 32."); + }); + + it("throws exception when creating a bitmap with duplicate bits", async () => { + expect(() => { + MultiEd25519Signature.createBitmap([2, 2]); + }).toThrow("Duplicated bits detected."); + }); +}); diff --git a/m1/JavaScript-client/src/tests/unit/property_map_serde.test.ts b/m1/JavaScript-client/src/tests/unit/property_map_serde.test.ts new file mode 100644 index 00000000..ae64deb3 --- /dev/null +++ b/m1/JavaScript-client/src/tests/unit/property_map_serde.test.ts @@ -0,0 +1,55 @@ +import { deserializeValueBasedOnTypeTag, getPropertyType, getPropertyValueRaw } from "../../utils/property_map_serde"; +import { + bcsSerializeBool, + bcsSerializeStr, + bcsSerializeU128, + bcsSerializeU8, + bcsSerializeUint64, + bcsToBytes, + Bytes, +} from "../../bcs"; +import { AccountAddress } from "../../aptos_types"; +import { HexString } from "../../utils"; + +test("test property_map_serializer", () => { + function isSame(array1: Bytes, array2: Bytes): boolean { + return array1.length === array2.length && array1.every((element, index) => element === array2[index]); + } + const values = [ + "false", + "10", + "18446744073709551615", + "340282366920938463463374607431768211455", + "hello", + "0x1", + "I am a string", + ]; + const types = ["bool", "u8", "u64", "u128", "0x1::string::String", "address", "string"]; + const newValues = getPropertyValueRaw(values, types); + expect(isSame(newValues[0], bcsSerializeBool(false))).toBe(true); + expect(isSame(newValues[1], bcsSerializeU8(10))).toBe(true); + expect(isSame(newValues[2], bcsSerializeUint64(18446744073709551615n))).toBe(true); + expect(isSame(newValues[3], bcsSerializeU128(340282366920938463463374607431768211455n))).toBe(true); + expect(isSame(newValues[4], bcsSerializeStr(values[4]))).toBe(true); + expect(isSame(newValues[5], bcsToBytes(AccountAddress.fromHex(new HexString("0x1"))))).toBe(true); +}); + +test("test propertymap deserializer", () => { + function toHexString(data: Bytes): string { + return HexString.fromUint8Array(data).hex(); + } + const values = [ + "false", + "10", + "18446744073709551615", + "340282366920938463463374607431768211455", + "hello", + "0x0000000000000000000000000000000000000000000000000000000000000001", + "I am a string", + ]; + const types = ["bool", "u8", "u64", "u128", "0x1::string::String", "address", "string"]; + const newValues = getPropertyValueRaw(values, types); + for (let i = 0; i < values.length; i += 1) { + expect(deserializeValueBasedOnTypeTag(getPropertyType(types[i]), toHexString(newValues[i]))).toBe(values[i]); + } +}); diff --git a/m1/JavaScript-client/src/tests/unit/serializer.test.ts b/m1/JavaScript-client/src/tests/unit/serializer.test.ts new file mode 100644 index 00000000..4d6942a3 --- /dev/null +++ b/m1/JavaScript-client/src/tests/unit/serializer.test.ts @@ -0,0 +1,179 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +import { Serializer } from "../../bcs/serializer"; + +describe("BCS Serializer", () => { + let serializer: Serializer; + + beforeEach(() => { + serializer = new Serializer(); + }); + + it("serializes a non-empty string", () => { + serializer.serializeStr("çå∞≠¢õß∂ƒ∫"); + expect(serializer.getBytes()).toEqual( + new Uint8Array([ + 24, 0xc3, 0xa7, 0xc3, 0xa5, 0xe2, 0x88, 0x9e, 0xe2, 0x89, 0xa0, 0xc2, 0xa2, 0xc3, 0xb5, 0xc3, 0x9f, 0xe2, 0x88, + 
0x82, 0xc6, 0x92, 0xe2, 0x88, 0xab, + ]), + ); + }); + + it("serializes an empty string", () => { + serializer.serializeStr(""); + expect(serializer.getBytes()).toEqual(new Uint8Array([0])); + }); + + it("serializes dynamic length bytes", () => { + serializer.serializeBytes(new Uint8Array([0x41, 0x70, 0x74, 0x6f, 0x73])); + expect(serializer.getBytes()).toEqual(new Uint8Array([5, 0x41, 0x70, 0x74, 0x6f, 0x73])); + }); + + it("serializes dynamic length bytes with zero elements", () => { + serializer.serializeBytes(new Uint8Array([])); + expect(serializer.getBytes()).toEqual(new Uint8Array([0])); + }); + + it("serializes fixed length bytes", () => { + serializer.serializeFixedBytes(new Uint8Array([0x41, 0x70, 0x74, 0x6f, 0x73])); + expect(serializer.getBytes()).toEqual(new Uint8Array([0x41, 0x70, 0x74, 0x6f, 0x73])); + }); + + it("serializes fixed length bytes with zero element", () => { + serializer.serializeFixedBytes(new Uint8Array([])); + expect(serializer.getBytes()).toEqual(new Uint8Array([])); + }); + + it("serializes a boolean value", () => { + serializer.serializeBool(true); + expect(serializer.getBytes()).toEqual(new Uint8Array([0x01])); + + serializer = new Serializer(); + serializer.serializeBool(false); + expect(serializer.getBytes()).toEqual(new Uint8Array([0x00])); + }); + + it("throws when serializing a boolean value with wrong data type", () => { + expect(() => { + // @ts-ignore + serializer.serializeBool(12); + }).toThrow("Value needs to be a boolean"); + }); + + it("serializes a uint8", () => { + serializer.serializeU8(255); + expect(serializer.getBytes()).toEqual(new Uint8Array([0xff])); + }); + + it("throws when serializing uint8 with out of range value", () => { + expect(() => { + serializer.serializeU8(256); + }).toThrow("Value is out of range"); + + expect(() => { + serializer = new Serializer(); + serializer.serializeU8(-1); + }).toThrow("Value is out of range"); + }); + + it("serializes a uint16", () => { + serializer.serializeU16(65535); + expect(serializer.getBytes()).toEqual(new Uint8Array([0xff, 0xff])); + + serializer = new Serializer(); + serializer.serializeU16(4660); + expect(serializer.getBytes()).toEqual(new Uint8Array([0x34, 0x12])); + }); + + it("throws when serializing uint16 with out of range value", () => { + expect(() => { + serializer.serializeU16(65536); + }).toThrow("Value is out of range"); + + expect(() => { + serializer = new Serializer(); + serializer.serializeU16(-1); + }).toThrow("Value is out of range"); + }); + + it("serializes a uint32", () => { + serializer.serializeU32(4294967295); + expect(serializer.getBytes()).toEqual(new Uint8Array([0xff, 0xff, 0xff, 0xff])); + + serializer = new Serializer(); + serializer.serializeU32(305419896); + expect(serializer.getBytes()).toEqual(new Uint8Array([0x78, 0x56, 0x34, 0x12])); + }); + + it("throws when serializing uint32 with out of range value", () => { + expect(() => { + serializer.serializeU32(4294967296); + }).toThrow("Value is out of range"); + + expect(() => { + serializer = new Serializer(); + serializer.serializeU32(-1); + }).toThrow("Value is out of range"); + }); + + it("serializes a uint64", () => { + serializer.serializeU64(BigInt("18446744073709551615")); + expect(serializer.getBytes()).toEqual(new Uint8Array([0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff])); + + serializer = new Serializer(); + serializer.serializeU64(BigInt("1311768467750121216")); + expect(serializer.getBytes()).toEqual(new Uint8Array([0x00, 0xef, 0xcd, 0xab, 0x78, 0x56, 0x34, 0x12])); + }); + + it("throws when 
serializing uint64 with out of range value", () => { + expect(() => { + serializer.serializeU64(BigInt("18446744073709551616")); + }).toThrow("Value is out of range"); + + expect(() => { + serializer = new Serializer(); + serializer.serializeU64(-1); + }).toThrow("Value is out of range"); + }); + + it("serializes a uint128", () => { + serializer.serializeU128(BigInt("340282366920938463463374607431768211455")); + expect(serializer.getBytes()).toEqual( + new Uint8Array([0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]), + ); + + serializer = new Serializer(); + serializer.serializeU128(BigInt("1311768467750121216")); + expect(serializer.getBytes()).toEqual( + new Uint8Array([0x00, 0xef, 0xcd, 0xab, 0x78, 0x56, 0x34, 0x12, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]), + ); + }); + + it("throws when serializing uint128 with out of range value", () => { + expect(() => { + serializer.serializeU128(BigInt("340282366920938463463374607431768211456")); + }).toThrow("Value is out of range"); + + expect(() => { + serializer = new Serializer(); + serializer.serializeU128(-1); + }).toThrow("Value is out of range"); + }); + + it("serializes a uleb128", () => { + serializer.serializeU32AsUleb128(104543565); + expect(serializer.getBytes()).toEqual(new Uint8Array([0xcd, 0xea, 0xec, 0x31])); + }); + + it("throws when serializing uleb128 with out of range value", () => { + expect(() => { + serializer.serializeU32AsUleb128(4294967296); + }).toThrow("Value is out of range"); + + expect(() => { + serializer = new Serializer(); + serializer.serializeU32AsUleb128(-1); + }).toThrow("Value is out of range"); + }); +}); diff --git a/m1/JavaScript-client/src/tests/unit/test_helper.test.ts b/m1/JavaScript-client/src/tests/unit/test_helper.test.ts new file mode 100644 index 00000000..679e1d77 --- /dev/null +++ b/m1/JavaScript-client/src/tests/unit/test_helper.test.ts @@ -0,0 +1,35 @@ +import { FaucetClient } from "../../plugins/faucet_client"; +import { OpenAPIConfig } from "../../generated"; +import { CustomEndpoints } from "../../utils/api-endpoints"; + +export const NODE_URL = process.env.APTOS_NODE_URL!; +export const FAUCET_URL = process.env.APTOS_FAUCET_URL!; +export const API_TOKEN = process.env.API_TOKEN!; +export const FAUCET_AUTH_TOKEN = process.env.FAUCET_AUTH_TOKEN!; +export const PROVIDER_LOCAL_NETWORK_CONFIG: CustomEndpoints = { fullnodeUrl: NODE_URL, indexerUrl: NODE_URL }; + +// account to use for ANS tests, this account matches the one in sdk-release.yaml +export const ANS_OWNER_ADDRESS = "0x585fc9f0f0c54183b039ffc770ca282ebd87307916c215a3e692f2f8e4305e82"; +export const ANS_OWNER_PK = "0x37368b46ce665362562c6d1d4ec01a08c8644c488690df5a17e13ba163e20221"; + +/** + * Returns an instance of a FaucetClient with NODE_URL and FAUCET_URL from the + * environment. If the FAUCET_AUTH_TOKEN environment variable is set, it will + * pass that along in the header in the format the faucet expects. + */ +export function getFaucetClient(): FaucetClient { + const config: Partial = {}; + if (process.env.FAUCET_AUTH_TOKEN) { + config.HEADERS = { Authorization: `Bearer ${process.env.FAUCET_AUTH_TOKEN}` }; + } + return new FaucetClient(NODE_URL, FAUCET_URL, config); +} + +test("noop", () => { + // All TS files are compiled by default into the npm package + // Adding this empty test allows us to: + // 1. Guarantee that this test library won't get compiled + // 2. 
Prevent jest from exploding when it finds a file with no tests in it +}); + +export const longTestTimeout = 120 * 1000; diff --git a/m1/JavaScript-client/src/tests/unit/transaction_builder.test.ts b/m1/JavaScript-client/src/tests/unit/transaction_builder.test.ts new file mode 100644 index 00000000..a154638d --- /dev/null +++ b/m1/JavaScript-client/src/tests/unit/transaction_builder.test.ts @@ -0,0 +1,295 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +/* eslint-disable max-len */ +import nacl from "tweetnacl"; +import { bytesToHex, hexToBytes } from "@noble/hashes/utils"; +import { bcsSerializeBool, bcsSerializeUint64, bcsToBytes, Bytes } from "../../bcs"; +import { HexString } from "../../utils"; + +import { TransactionBuilderEd25519, TransactionBuilder } from "../../transaction_builder/index"; +import { + ChainId, + Ed25519Signature, + RawTransaction, + Script, + EntryFunction, + StructTag, + TransactionArgumentAddress, + TransactionArgumentU8, + TransactionArgumentU8Vector, + TransactionPayloadScript, + TransactionPayloadEntryFunction, + TypeTagStruct, + TransactionArgumentU16, + TransactionArgumentU32, + TransactionArgumentU256, + AccountAddress, + TypeTagBool, +} from "../../aptos_types"; + +const ADDRESS_1 = "0x1222"; +const ADDRESS_2 = "0xdd"; +const ADDRESS_3 = "0x0a550c18"; +const ADDRESS_4 = "0x01"; +const PRIVATE_KEY = "9bf49a6a0755f953811fce125f2683d50429c3bb49e074147e0089a52eae155f"; +const TXN_EXPIRE = "18446744073709551615"; + +function hexSignedTxn(signedTxn: Uint8Array): string { + return bytesToHex(signedTxn); +} + +function sign(rawTxn: RawTransaction): Bytes { + const privateKeyBytes = new HexString(PRIVATE_KEY).toUint8Array(); + const signingKey = nacl.sign.keyPair.fromSeed(privateKeyBytes.slice(0, 32)); + const { publicKey } = signingKey; + + const txnBuilder = new TransactionBuilderEd25519( + (signingMessage) => new Ed25519Signature(nacl.sign(signingMessage, signingKey.secretKey).slice(0, 64)), + publicKey, + ); + + return txnBuilder.sign(rawTxn); +} + +test("throws when preparing signing message with invalid payload", () => { + expect(() => { + // @ts-ignore + TransactionBuilder.getSigningMessage("invalid"); + }).toThrow("Unknown transaction type."); +}); + +test("serialize entry function payload with no type args", () => { + const entryFunctionPayload = new TransactionPayloadEntryFunction( + EntryFunction.natural( + `${ADDRESS_1}::aptos_coin`, + "transfer", + [], + [bcsToBytes(AccountAddress.fromHex(ADDRESS_2)), bcsSerializeUint64(1)], + ), + ); + + const rawTxn = new RawTransaction( + AccountAddress.fromHex(new HexString(ADDRESS_3)), + BigInt(0), + entryFunctionPayload, + BigInt(2000), + BigInt(0), + BigInt(TXN_EXPIRE), + new ChainId(4), + ); + + const signedTxn = sign(rawTxn); + + expect(hexSignedTxn(signedTxn)).toBe( + "000000000000000000000000000000000000000000000000000000000a550c1800000000000000000200000000000000000000000000000000000000000000000000000000000012220a6170746f735f636f696e087472616e7366657200022000000000000000000000000000000000000000000000000000000000000000dd080100000000000000d0070000000000000000000000000000ffffffffffffffff040020b9c6ee1630ef3e711144a648db06bbb2284f7274cfbee53ffcee503cc1a49200409c570996380897f38b8d7008d726fb45d6ded0689216e56b73f523492cba92deb6671c27e9a44d2a6fdfdb497420d00c621297a23d6d0298895e0d58cff6060c", + ); +}); + +test("serialize entry function payload with type args", () => { + const token = new TypeTagStruct(StructTag.fromString(`${ADDRESS_4}::aptos_coin::AptosCoin`)); + + const 
entryFunctionPayload = new TransactionPayloadEntryFunction( + EntryFunction.natural( + `${ADDRESS_1}::coin`, + "transfer", + [token], + [bcsToBytes(AccountAddress.fromHex(ADDRESS_2)), bcsSerializeUint64(1)], + ), + ); + + const rawTxn = new RawTransaction( + AccountAddress.fromHex(ADDRESS_3), + BigInt(0), + entryFunctionPayload, + BigInt(2000), + BigInt(0), + BigInt(TXN_EXPIRE), + new ChainId(4), + ); + + const signedTxn = sign(rawTxn); + + expect(hexSignedTxn(signedTxn)).toBe( + "000000000000000000000000000000000000000000000000000000000a550c18000000000000000002000000000000000000000000000000000000000000000000000000000000122204636f696e087472616e73666572010700000000000000000000000000000000000000000000000000000000000000010a6170746f735f636f696e094170746f73436f696e00022000000000000000000000000000000000000000000000000000000000000000dd080100000000000000d0070000000000000000000000000000ffffffffffffffff040020b9c6ee1630ef3e711144a648db06bbb2284f7274cfbee53ffcee503cc1a4920040112162f543ca92b4f14c1b09b7f52894a127f5428b0d407c09c8efb3a136cff50e550aea7da1226f02571d79230b80bd79096ea0d796789ad594b8fbde695404", + ); +}); + +test("serialize entry function payload with type args but no function args", () => { + const token = new TypeTagStruct(StructTag.fromString(`${ADDRESS_4}::aptos_coin::AptosCoin`)); + + const entryFunctionPayload = new TransactionPayloadEntryFunction( + EntryFunction.natural(`${ADDRESS_1}::coin`, "fake_func", [token], []), + ); + + const rawTxn = new RawTransaction( + AccountAddress.fromHex(ADDRESS_3), + BigInt(0), + entryFunctionPayload, + BigInt(2000), + BigInt(0), + BigInt(TXN_EXPIRE), + new ChainId(4), + ); + + const signedTxn = sign(rawTxn); + + expect(hexSignedTxn(signedTxn)).toBe( + "000000000000000000000000000000000000000000000000000000000a550c18000000000000000002000000000000000000000000000000000000000000000000000000000000122204636f696e0966616b655f66756e63010700000000000000000000000000000000000000000000000000000000000000010a6170746f735f636f696e094170746f73436f696e0000d0070000000000000000000000000000ffffffffffffffff040020b9c6ee1630ef3e711144a648db06bbb2284f7274cfbee53ffcee503cc1a49200400e2d1cc4a27893cbae36d8b6a7150977c7620e065f359840413c5478a25f20a383250a9cdcb4fd71f7d171856f38972da30a9d10072e164614d96379004aa500", + ); +}); + +test("serialize entry function payload with generic type args and function args", () => { + const token = new TypeTagStruct(StructTag.fromString(`0x14::token::Token`)); + + const entryFunctionPayload = new TransactionPayloadEntryFunction( + EntryFunction.natural( + `${ADDRESS_1}::aptos_token`, + "fake_typed_func", + [token, new TypeTagBool()], + [bcsToBytes(AccountAddress.fromHex(ADDRESS_2)), bcsSerializeBool(true)], + ), + ); + + const rawTxn = new RawTransaction( + AccountAddress.fromHex(ADDRESS_3), + BigInt(0), + entryFunctionPayload, + BigInt(2000), + BigInt(0), + BigInt(TXN_EXPIRE), + new ChainId(4), + ); + + const signedTxn = sign(rawTxn); + + expect(hexSignedTxn(signedTxn)).toBe( + 
"000000000000000000000000000000000000000000000000000000000a550c1800000000000000000200000000000000000000000000000000000000000000000000000000000012220b6170746f735f746f6b656e0f66616b655f74797065645f66756e630207000000000000000000000000000000000000000000000000000000000000001405746f6b656e05546f6b656e0000022000000000000000000000000000000000000000000000000000000000000000dd0101d0070000000000000000000000000000ffffffffffffffff040020b9c6ee1630ef3e711144a648db06bbb2284f7274cfbee53ffcee503cc1a4920040367085186aeef58a0256fc64ecb86b88a86f8a8e42151e0e9aae1ab6d426c4968f2cab664261ea6bb868869154fe6e946c082774741d5143e57a1d802fd1b700", + ); +}); + +test("serialize script payload with no type args and no function args", () => { + const script = hexToBytes("a11ceb0b030000000105000100000000050601000000000000000600000000000000001a0102"); + + const scriptPayload = new TransactionPayloadScript(new Script(script, [], [])); + + const rawTxn = new RawTransaction( + AccountAddress.fromHex(ADDRESS_3), + BigInt(0), + scriptPayload, + BigInt(2000), + BigInt(0), + BigInt(TXN_EXPIRE), + new ChainId(4), + ); + + const signedTxn = sign(rawTxn); + + expect(hexSignedTxn(signedTxn)).toBe( + "000000000000000000000000000000000000000000000000000000000a550c1800000000000000000026a11ceb0b030000000105000100000000050601000000000000000600000000000000001a01020000d0070000000000000000000000000000ffffffffffffffff040020b9c6ee1630ef3e711144a648db06bbb2284f7274cfbee53ffcee503cc1a4920040266935990105df40f3a82a3f41ad9ceb4b79451495403dd976191382bb07f8c9b401702968a64b5176762e62036f75c6fc2b770a0988716e41d469fff2349a08", + ); +}); + +test("serialize script payload with type args but no function args", () => { + const token = new TypeTagStruct(StructTag.fromString(`${ADDRESS_4}::aptos_coin::AptosCoin`)); + + const script = hexToBytes("a11ceb0b030000000105000100000000050601000000000000000600000000000000001a0102"); + + const scriptPayload = new TransactionPayloadScript(new Script(script, [token], [])); + + const rawTxn = new RawTransaction( + AccountAddress.fromHex(ADDRESS_3), + BigInt(0), + scriptPayload, + BigInt(2000), + BigInt(0), + BigInt(TXN_EXPIRE), + new ChainId(4), + ); + + const signedTxn = sign(rawTxn); + + expect(hexSignedTxn(signedTxn)).toBe( + "000000000000000000000000000000000000000000000000000000000a550c1800000000000000000026a11ceb0b030000000105000100000000050601000000000000000600000000000000001a0102010700000000000000000000000000000000000000000000000000000000000000010a6170746f735f636f696e094170746f73436f696e0000d0070000000000000000000000000000ffffffffffffffff040020b9c6ee1630ef3e711144a648db06bbb2284f7274cfbee53ffcee503cc1a4920040bd241a6f31dfdfca0031ca5874fbf81800b5f632642321a11c41b4fead4b41d808617e91dd655fde7e9f263127f07bb5d56c7c925fe797728dcc9b55be120604", + ); +}); + +test("serialize script payload with type arg and function arg", () => { + const token = new TypeTagStruct(StructTag.fromString(`${ADDRESS_4}::aptos_coin::AptosCoin`)); + + const argU8 = new TransactionArgumentU8(2); + + const script = hexToBytes("a11ceb0b030000000105000100000000050601000000000000000600000000000000001a0102"); + + const scriptPayload = new TransactionPayloadScript(new Script(script, [token], [argU8])); + const rawTxn = new RawTransaction( + AccountAddress.fromHex(ADDRESS_3), + BigInt(0), + scriptPayload, + BigInt(2000), + BigInt(0), + BigInt(TXN_EXPIRE), + new ChainId(4), + ); + + const signedTxn = sign(rawTxn); + + expect(hexSignedTxn(signedTxn)).toBe( + 
"000000000000000000000000000000000000000000000000000000000a550c1800000000000000000026a11ceb0b030000000105000100000000050601000000000000000600000000000000001a0102010700000000000000000000000000000000000000000000000000000000000000010a6170746f735f636f696e094170746f73436f696e00010002d0070000000000000000000000000000ffffffffffffffff040020b9c6ee1630ef3e711144a648db06bbb2284f7274cfbee53ffcee503cc1a49200409936b8d22cec685e720761f6c6135e020911f1a26e220e2a0f3317f5a68942531987259ac9e8688158c77df3e7136637056047d9524edad88ee45d61a9346602", + ); +}); + +test("serialize script payload with one type arg and two function args", () => { + const token = new TypeTagStruct(StructTag.fromString(`${ADDRESS_4}::aptos_coin::AptosCoin`)); + + const argU8Vec = new TransactionArgumentU8Vector(bcsSerializeUint64(1)); + const argAddress = new TransactionArgumentAddress(AccountAddress.fromHex("0x01")); + + const script = hexToBytes("a11ceb0b030000000105000100000000050601000000000000000600000000000000001a0102"); + + const scriptPayload = new TransactionPayloadScript(new Script(script, [token], [argU8Vec, argAddress])); + + const rawTxn = new RawTransaction( + AccountAddress.fromHex(ADDRESS_3), + BigInt(0), + scriptPayload, + BigInt(2000), + BigInt(0), + BigInt(TXN_EXPIRE), + new ChainId(4), + ); + + const signedTxn = sign(rawTxn); + + expect(hexSignedTxn(signedTxn)).toBe( + "000000000000000000000000000000000000000000000000000000000a550c1800000000000000000026a11ceb0b030000000105000100000000050601000000000000000600000000000000001a0102010700000000000000000000000000000000000000000000000000000000000000010a6170746f735f636f696e094170746f73436f696e000204080100000000000000030000000000000000000000000000000000000000000000000000000000000001d0070000000000000000000000000000ffffffffffffffff040020b9c6ee1630ef3e711144a648db06bbb2284f7274cfbee53ffcee503cc1a492004055c7499795ea68d7acfa64a58f19efa2ba3b977fa58ae93ae8c0732c0f6d6dd084d92bbe4edc2a0d687031cae90da117abfac16ebd902e764bdc38a2154a2102", + ); +}); + +test("serialize script payload with new integer types (u16, u32, u256) as args", () => { + const argU16 = new TransactionArgumentU16(0xf111); + const argU32 = new TransactionArgumentU32(0xf1111111); + const argU256 = new TransactionArgumentU256( + BigInt("0xf111111111111111111111111111111111111111111111111111111111111111"), + ); + + const script = hexToBytes(""); + + const scriptPayload = new TransactionPayloadScript(new Script(script, [], [argU16, argU32, argU256])); + + const rawTxn = new RawTransaction( + AccountAddress.fromHex(ADDRESS_3), + BigInt(0), + scriptPayload, + BigInt(2000), + BigInt(0), + BigInt(TXN_EXPIRE), + new ChainId(4), + ); + + const signedTxn = sign(rawTxn); + + expect(hexSignedTxn(signedTxn)).toBe( + "000000000000000000000000000000000000000000000000000000000a550c180000000000000000000000030611f107111111f10811111111111111111111111111111111111111111111111111111111111111f1d0070000000000000000000000000000ffffffffffffffff040020b9c6ee1630ef3e711144a648db06bbb2284f7274cfbee53ffcee503cc1a49200409402b773f66cf5444efe4de38a026cf9b34e0327798ea01f0695db8e8e0888e20387b08f504b620dcffbc382e3ac141c0ec9a820c5f58b5da2eec589a9e86b0b", + ); +}); diff --git a/m1/JavaScript-client/src/tests/unit/transaction_vector.test.ts b/m1/JavaScript-client/src/tests/unit/transaction_vector.test.ts new file mode 100644 index 00000000..861fbaf0 --- /dev/null +++ b/m1/JavaScript-client/src/tests/unit/transaction_vector.test.ts @@ -0,0 +1,226 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +/** + * Do fuzzing tests with test 
vectors. The test vectors are produced by the same code + * used by the Aptos Blockchain. The test vectors are arrays of JSON objects. + * Each JSON object contains randomized inputs to Transaction Builder and BCS and + * the expected outputs. + */ + +import path from "path"; +import nacl from "tweetnacl"; +import fs from "fs"; +import { bytesToHex } from "@noble/hashes/utils"; +import { + AccountAddress, + ChainId, + RawTransaction, + EntryFunction, + StructTag, + TypeTag, + TypeTagVector, + TransactionPayloadEntryFunction, + Identifier, + TypeTagStruct, + TypeTagAddress, + TypeTagBool, + TypeTagU8, + TypeTagU64, + TypeTagU128, + TypeTagSigner, + Ed25519Signature, + TransactionPayloadScript, + Script, + TransactionArgument, + TransactionArgumentBool, + TransactionArgumentU8, + TransactionArgumentU64, + TransactionArgumentAddress, + TransactionArgumentU8Vector, + TransactionArgumentU128, +} from "../../aptos_types"; +import { HexString } from "../../utils"; +import { TransactionBuilderEd25519 } from "../../transaction_builder/builder"; + +// eslint-disable-next-line operator-linebreak +const VECTOR_FILES_ROOT_DIR = + process.env.VECTOR_FILES_ROOT_DIR || path.resolve(__dirname, "..", "..", "..", "..", "..", "..", "api", "goldens"); + +const ENTRY_FUNCTION_VECTOR = path.join( + VECTOR_FILES_ROOT_DIR, + "aptos_api__tests__transaction_vector_test__test_entry_function_payload.json", +); + +const SCRIPT_VECTOR = path.join( + VECTOR_FILES_ROOT_DIR, + "aptos_api__tests__transaction_vector_test__test_script_payload.json", +); + +function parseTypeTag(typeTag: any): TypeTag { + if (typeTag.vector) { + return new TypeTagVector(parseTypeTag(typeTag.vector)); + } + + if (typeTag.struct) { + const { + address, + module, + name, + // eslint-disable-next-line @typescript-eslint/naming-convention + type_args, + }: { + address: string; + module: string; + name: string; + type_args: any[]; + } = typeTag.struct; + + const typeArgs = type_args.map((arg) => parseTypeTag(arg)); + const structTag = new StructTag( + AccountAddress.fromHex(address), + new Identifier(module), + new Identifier(name), + typeArgs, + ); + + return new TypeTagStruct(structTag); + } + + switch (typeTag) { + case "bool": + return new TypeTagBool(); + case "u8": + return new TypeTagU8(); + case "u64": + return new TypeTagU64(); + case "u128": + return new TypeTagU128(); + case "address": + return new TypeTagAddress(); + case "signer": + return new TypeTagSigner(); + default: + throw new Error("Unknown type tag"); + } +} + +function parseTransactionArgument(arg: any): TransactionArgument { + const argHasOwnProperty = (propertyName: string) => Object.prototype.hasOwnProperty.call(arg, propertyName); + if (argHasOwnProperty("U8")) { + // arg.U8 is a number + return new TransactionArgumentU8(arg.U8); + } + + if (argHasOwnProperty("U64")) { + // arg.U64 is a string literal + return new TransactionArgumentU64(BigInt(arg.U64)); + } + + if (argHasOwnProperty("U128")) { + // arg.U128 is a string literal + return new TransactionArgumentU128(BigInt(arg.U128)); + } + + if (argHasOwnProperty("Address")) { + // arg.Address is a hex string + return new TransactionArgumentAddress(AccountAddress.fromHex(arg.Address)); + } + + if (argHasOwnProperty("U8Vector")) { + // arg.U8Vector is a hex string + return new TransactionArgumentU8Vector(new HexString(arg.U8Vector).toUint8Array()); + } + + if (argHasOwnProperty("Bool")) { + return new TransactionArgumentBool(arg.Bool); + } + + throw new Error("Invalid Transaction Argument"); +} + +function sign(rawTxn: 
RawTransaction, privateKey: string): string { + const privateKeyBytes = new HexString(privateKey).toUint8Array(); + const signingKey = nacl.sign.keyPair.fromSeed(privateKeyBytes.slice(0, 32)); + const { publicKey } = signingKey; + + const txnBuilder = new TransactionBuilderEd25519( + (signingMessage) => new Ed25519Signature(nacl.sign(signingMessage, signingKey.secretKey).slice(0, 64)), + publicKey, + ); + + return bytesToHex(txnBuilder.sign(rawTxn)); +} + +type IRawTxn = { + // hex string for an AccountAddress + sender: string; + // u64 string literal + sequence_number: string; + // u64 string literal + max_gas_amount: string; + // u64 string literal + gas_unit_price: string; + // u64 string literal + expiration_timestamp_secs: string; + + chain_id: number; +}; + +function verify( + raw_txn: IRawTxn, + payload: TransactionPayloadEntryFunction | TransactionPayloadScript, + private_key: string, + expected_output: string, +) { + const rawTxn = new RawTransaction( + AccountAddress.fromHex(raw_txn.sender), + BigInt(raw_txn.sequence_number), + payload, + BigInt(raw_txn.max_gas_amount), + BigInt(raw_txn.gas_unit_price), + BigInt(raw_txn.expiration_timestamp_secs), + new ChainId(raw_txn.chain_id), + ); + + const signedTxn = sign(rawTxn, private_key); + + expect(signedTxn).toBe(expected_output); +} + +describe("Transaction builder vector test", () => { + it("should pass on entry function payload", () => { + const vector: any[] = JSON.parse(fs.readFileSync(ENTRY_FUNCTION_VECTOR, "utf8")); + vector.forEach(({ raw_txn, signed_txn_bcs, private_key }) => { + const payload = raw_txn.payload.EntryFunction; + const entryFunctionPayload = new TransactionPayloadEntryFunction( + EntryFunction.natural( + `${payload.module.address}::${payload.module.name}`, + payload.function, + payload.ty_args.map((tag: any) => parseTypeTag(tag)), + payload.args.map((arg: any) => new HexString(arg).toUint8Array()), + ), + ); + + verify(raw_txn, entryFunctionPayload, private_key, signed_txn_bcs); + }); + }); + + it("should pass on script payload", () => { + const vector: any[] = JSON.parse(fs.readFileSync(SCRIPT_VECTOR, "utf8")); + vector.forEach(({ raw_txn, signed_txn_bcs, private_key }) => { + const payload = raw_txn.payload.Script; + // payload.code is hex string + const code = new HexString(payload.code).toUint8Array(); + const scriptPayload = new TransactionPayloadScript( + new Script( + code, + payload.ty_args.map((tag: any) => parseTypeTag(tag)), + payload.args.map((arg: any) => parseTransactionArgument(arg)), + ), + ); + + verify(raw_txn, scriptPayload, private_key, signed_txn_bcs); + }); + }); +}); diff --git a/m1/JavaScript-client/src/tests/unit/type_tag.test.ts b/m1/JavaScript-client/src/tests/unit/type_tag.test.ts new file mode 100644 index 00000000..8b14a1dc --- /dev/null +++ b/m1/JavaScript-client/src/tests/unit/type_tag.test.ts @@ -0,0 +1,139 @@ +import { + StructTag, + TypeTagAddress, + TypeTagBool, + TypeTagParser, + TypeTagParserError, + TypeTagStruct, +} from "../../aptos_types/type_tag"; + +const expectedTypeTag = { + string: "0x0000000000000000000000000000000000000000000000000000000000000001::some_module::SomeResource", + address: "0x0000000000000000000000000000000000000000000000000000000000000001", + module_name: "some_module", + name: "SomeResource", +}; + +describe("StructTag", () => { + test("make sure StructTag.fromString works with un-nested type tag", () => { + const structTag = StructTag.fromString(expectedTypeTag.string); + expect(structTag.address.toHexString()).toEqual(expectedTypeTag.address); + 
expect(structTag.module_name.value).toEqual(expectedTypeTag.module_name); + expect(structTag.name.value).toEqual(expectedTypeTag.name); + expect(structTag.type_args.length).toEqual(0); + }); + + test("make sure StructTag.fromString works with nested type tag", () => { + const structTag = StructTag.fromString( + `${expectedTypeTag.string}<${expectedTypeTag.string}, ${expectedTypeTag.string}>`, + ); + expect(structTag.address.toHexString()).toEqual(expectedTypeTag.address); + expect(structTag.module_name.value).toEqual(expectedTypeTag.module_name); + expect(structTag.name.value).toEqual(expectedTypeTag.name); + expect(structTag.type_args.length).toEqual(2); + + // make sure the nested type tag is correct + for (const typeArg of structTag.type_args) { + const nestedTypeTag = typeArg as TypeTagStruct; + expect(nestedTypeTag.value.address.toHexString()).toEqual(expectedTypeTag.address); + expect(nestedTypeTag.value.module_name.value).toEqual(expectedTypeTag.module_name); + expect(nestedTypeTag.value.name.value).toEqual(expectedTypeTag.name); + expect(nestedTypeTag.value.type_args.length).toEqual(0); + } + }); +}); + +describe("TypeTagParser", () => { + test("make sure parseTypeTag throws TypeTagParserError 'Invalid type tag' if invalid format", () => { + let typeTag = "0x000"; + let parser = new TypeTagParser(typeTag); + + try { + parser.parseTypeTag(); + } catch (error) { + expect(error).toBeInstanceOf(TypeTagParserError); + const typeTagError = error as TypeTagParserError; + expect(typeTagError.message).toEqual("Invalid type tag."); + } + + typeTag = "0x1::some_module::SomeResource<0x1>"; + parser = new TypeTagParser(typeTag); + expect(() => parser.parseTypeTag()).toThrowError("Invalid type tag."); + }); + + test("make sure parseTypeTag works with un-nested type tag", () => { + const parser = new TypeTagParser(expectedTypeTag.string); + const result = parser.parseTypeTag() as TypeTagStruct; + expect(result.value.address.toHexString()).toEqual(expectedTypeTag.address); + expect(result.value.module_name.value).toEqual(expectedTypeTag.module_name); + expect(result.value.name.value).toEqual(expectedTypeTag.name); + expect(result.value.type_args.length).toEqual(0); + }); + + test("make sure parseTypeTag works with nested type tag", () => { + const typeTag = "0x1::some_module::SomeResource<0x1::some_module::SomeResource, 0x1::some_module::SomeResource>"; + const parser = new TypeTagParser(typeTag); + const result = parser.parseTypeTag() as TypeTagStruct; + expect(result.value.address.toHexString()).toEqual(expectedTypeTag.address); + expect(result.value.module_name.value).toEqual(expectedTypeTag.module_name); + expect(result.value.name.value).toEqual(expectedTypeTag.name); + expect(result.value.type_args.length).toEqual(2); + + // make sure the nested type tag is correct + for (const typeArg of result.value.type_args) { + const nestedTypeTag = typeArg as TypeTagStruct; + expect(nestedTypeTag.value.address.toHexString()).toEqual(expectedTypeTag.address); + expect(nestedTypeTag.value.module_name.value).toEqual(expectedTypeTag.module_name); + expect(nestedTypeTag.value.name.value).toEqual(expectedTypeTag.name); + expect(nestedTypeTag.value.type_args.length).toEqual(0); + } + }); + + describe("parse Object type", () => { + test("TypeTagParser successfully parses an Object type", () => { + const typeTag = "0x1::object::Object"; + const parser = new TypeTagParser(typeTag); + const result = parser.parseTypeTag(); + expect(result instanceof TypeTagAddress).toBeTruthy(); + }); + + test("TypeTagParser 
successfully parses a struct with a nested Object type", () => { + const typeTag = "0x1::some_module::SomeResource<0x1::object::Object>"; + const parser = new TypeTagParser(typeTag); + const result = parser.parseTypeTag() as TypeTagStruct; + expect(result.value.address.toHexString()).toEqual(expectedTypeTag.address); + expect(result.value.module_name.value).toEqual("some_module"); + expect(result.value.name.value).toEqual("SomeResource"); + expect(result.value.type_args[0] instanceof TypeTagAddress).toBeTruthy(); + }); + + test("TypeTagParser successfully parses a struct with a nested Object and Struct types", () => { + const typeTag = "0x1::some_module::SomeResource<0x4::object::Object, 0x1::some_module::SomeResource>"; + const parser = new TypeTagParser(typeTag); + const result = parser.parseTypeTag() as TypeTagStruct; + expect(result.value.address.toHexString()).toEqual(expectedTypeTag.address); + expect(result.value.module_name.value).toEqual("some_module"); + expect(result.value.name.value).toEqual("SomeResource"); + expect(result.value.type_args.length).toEqual(2); + expect(result.value.type_args[0] instanceof TypeTagAddress).toBeTruthy(); + expect(result.value.type_args[1] instanceof TypeTagStruct).toBeTruthy(); + }); + }); + + describe("supports generic types", () => { + test("throws an error when the type to use is not provided", () => { + const typeTag = "T0"; + const parser = new TypeTagParser(typeTag); + expect(() => { + parser.parseTypeTag(); + }).toThrow("Can't convert generic type since no typeTags were specified."); + }); + + test("successfully parses a generic type tag to the provided type", () => { + const typeTag = "T0"; + const parser = new TypeTagParser(typeTag, ["bool"]); + const result = parser.parseTypeTag(); + expect(result instanceof TypeTagBool).toBeTruthy(); + }); + }); +}); diff --git a/m1/JavaScript-client/src/transaction_builder/builder.ts b/m1/JavaScript-client/src/transaction_builder/builder.ts new file mode 100644 index 00000000..62f0821a --- /dev/null +++ b/m1/JavaScript-client/src/transaction_builder/builder.ts @@ -0,0 +1,437 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +import { sha3_256 as sha3Hash } from "@noble/hashes/sha3"; +import { + Ed25519PublicKey, + Ed25519Signature, + MultiEd25519PublicKey, + MultiEd25519Signature, + RawTransaction, + SignedTransaction, + TransactionAuthenticatorEd25519, + TransactionAuthenticatorMultiEd25519, + SigningMessage, + MultiAgentRawTransaction, + AccountAddress, + EntryFunction, + Identifier, + ChainId, + Script, + TransactionPayload, + TransactionArgument, + TransactionPayloadEntryFunction, + TransactionPayloadScript, + ModuleId, + TypeTagParser, +} from "../aptos_types"; +import { bcsToBytes, Bytes, Deserializer, Serializer, Uint64, Uint8 } from "../bcs"; +import { ArgumentABI, EntryFunctionABI, ScriptABI, TransactionScriptABI, TypeArgumentABI } from "../aptos_types/abi"; +import { argToTransactionArgument, serializeArg } from "./builder_utils"; +import * as Gen from "../generated/index"; +import { + DEFAULT_TXN_EXP_SEC_FROM_NOW, + DEFAULT_MAX_GAS_AMOUNT, + HexString, + MaybeHexString, + MemoizeExpiring, +} from "../utils"; + +export { TypeTagParser } from "../aptos_types"; + +const RAW_TRANSACTION_SALT = "APTOS::RawTransaction"; +const RAW_TRANSACTION_WITH_DATA_SALT = "APTOS::RawTransactionWithData"; + +type AnyRawTransaction = RawTransaction | MultiAgentRawTransaction; + +/** + * Function that takes in a Signing Message (serialized raw transaction) + * and returns a signature +
*/ +export type SigningFn = (txn: SigningMessage) => Ed25519Signature | MultiEd25519Signature; + +export class TransactionBuilder<F extends SigningFn> { + protected readonly signingFunction: F; + + constructor(signingFunction: F, public readonly rawTxnBuilder?: TransactionBuilderABI) { + this.signingFunction = signingFunction; + } + + /** + * Builds a RawTransaction. Relays the call to TransactionBuilderABI.build + * @param func + * @param ty_tags + * @param args + */ + build(func: string, ty_tags: string[], args: any[]): RawTransaction { + if (!this.rawTxnBuilder) { + throw new Error("this.rawTxnBuilder doesn't exist."); + } + + return this.rawTxnBuilder.build(func, ty_tags, args); + } + + /** Generates a Signing Message out of a raw transaction. */ + static getSigningMessage(rawTxn: AnyRawTransaction): SigningMessage { + const hash = sha3Hash.create(); + if (rawTxn instanceof RawTransaction) { + hash.update(RAW_TRANSACTION_SALT); + } else if (rawTxn instanceof MultiAgentRawTransaction) { + hash.update(RAW_TRANSACTION_WITH_DATA_SALT); + } else { + throw new Error("Unknown transaction type."); + } + + const prefix = hash.digest(); + + const body = bcsToBytes(rawTxn); + + const mergedArray = new Uint8Array(prefix.length + body.length); + mergedArray.set(prefix); + mergedArray.set(body, prefix.length); + + return mergedArray; + } +} + +/** + * Provides signing method for signing a raw transaction with single public key. + */ +export class TransactionBuilderEd25519 extends TransactionBuilder<SigningFn> { + private readonly publicKey: Uint8Array; + + constructor(signingFunction: SigningFn, publicKey: Uint8Array, rawTxnBuilder?: TransactionBuilderABI) { + super(signingFunction, rawTxnBuilder); + this.publicKey = publicKey; + } + + rawToSigned(rawTxn: RawTransaction): SignedTransaction { + const signingMessage = TransactionBuilder.getSigningMessage(rawTxn); + const signature = this.signingFunction(signingMessage); + + const authenticator = new TransactionAuthenticatorEd25519( + new Ed25519PublicKey(this.publicKey), + signature as Ed25519Signature, + ); + + return new SignedTransaction(rawTxn, authenticator); + } + + /** Signs a raw transaction and returns a bcs serialized transaction. */ + sign(rawTxn: RawTransaction): Bytes { + return bcsToBytes(this.rawToSigned(rawTxn)); + } +} + +/** + * Provides signing method for signing a raw transaction with multisig public key. + */ +export class TransactionBuilderMultiEd25519 extends TransactionBuilder<SigningFn> { + private readonly publicKey: MultiEd25519PublicKey; + + constructor(signingFunction: SigningFn, publicKey: MultiEd25519PublicKey) { + super(signingFunction); + this.publicKey = publicKey; + } + + rawToSigned(rawTxn: RawTransaction): SignedTransaction { + const signingMessage = TransactionBuilder.getSigningMessage(rawTxn); + const signature = this.signingFunction(signingMessage); + + const authenticator = new TransactionAuthenticatorMultiEd25519(this.publicKey, signature as MultiEd25519Signature); + + return new SignedTransaction(rawTxn, authenticator); + } + + /** Signs a raw transaction and returns a bcs serialized transaction. */ + sign(rawTxn: RawTransaction): Bytes { + return bcsToBytes(this.rawToSigned(rawTxn)); + } +} + +/** + * Config for creating raw transactions.
+ */ +interface ABIBuilderConfig { + sender: MaybeHexString | AccountAddress; + sequenceNumber: Uint64 | string; + gasUnitPrice: Uint64 | string; + maxGasAmount?: Uint64 | string; + expSecFromNow?: number | string; + chainId: Uint8 | string; +} + +/** + * Builds raw transactions based on ABI + */ +export class TransactionBuilderABI { + private readonly abiMap: Map<string, ScriptABI>; + + private readonly builderConfig: Partial<ABIBuilderConfig>; + + /** + * Constructs a TransactionBuilderABI instance + * @param abis List of binary ABIs. + * @param builderConfig Configs for creating a raw transaction. + */ + constructor(abis: Bytes[], builderConfig?: ABIBuilderConfig) { + this.abiMap = new Map(); + + abis.forEach((abi) => { + const deserializer = new Deserializer(abi); + const scriptABI = ScriptABI.deserialize(deserializer); + let k: string; + if (scriptABI instanceof EntryFunctionABI) { + const funcABI = scriptABI as EntryFunctionABI; + const { address: addr, name: moduleName } = funcABI.module_name; + k = `${HexString.fromUint8Array(addr.address).toShortString()}::${moduleName.value}::${funcABI.name}`; + } else { + const funcABI = scriptABI as TransactionScriptABI; + k = funcABI.name; + } + + if (this.abiMap.has(k)) { + throw new Error("Found conflicting ABI interfaces"); + } + + this.abiMap.set(k, scriptABI); + }); + + this.builderConfig = { + maxGasAmount: BigInt(DEFAULT_MAX_GAS_AMOUNT), + expSecFromNow: DEFAULT_TXN_EXP_SEC_FROM_NOW, + ...builderConfig, + }; + } + + private static toBCSArgs(abiArgs: any[], args: any[]): Bytes[] { + if (abiArgs.length !== args.length) { + throw new Error("Wrong number of args provided."); + } + + return args.map((arg, i) => { + const serializer = new Serializer(); + serializeArg(arg, abiArgs[i].type_tag, serializer); + return serializer.getBytes(); + }); + } + + private static toTransactionArguments(abiArgs: any[], args: any[]): TransactionArgument[] { + if (abiArgs.length !== args.length) { + throw new Error("Wrong number of args provided."); + } + + return args.map((arg, i) => argToTransactionArgument(arg, abiArgs[i].type_tag)); + } + + setSequenceNumber(seqNumber: Uint64 | string) { + this.builderConfig.sequenceNumber = BigInt(seqNumber); + } + + /** + * Builds a TransactionPayload. For dApps, chain ID and account sequence numbers are only known to the wallet. + * Instead of building a RawTransaction (requires chainID and sequenceNumber), dApps can build a TransactionPayload + * and pass the payload to the wallet for signing and sending. + * @param func Fully qualified func names, e.g.
0x1::Coin::transfer + * @param ty_tags TypeTag strings + * @param args Function arguments + * @returns TransactionPayload + */ + buildTransactionPayload(func: string, ty_tags: string[], args: any[]): TransactionPayload { + const typeTags = ty_tags.map((ty_arg) => new TypeTagParser(ty_arg).parseTypeTag()); + + let payload: TransactionPayload; + + if (!this.abiMap.has(func)) { + throw new Error(`Cannot find function: ${func}`); + } + + const scriptABI = this.abiMap.get(func); + + if (scriptABI instanceof EntryFunctionABI) { + const funcABI = scriptABI as EntryFunctionABI; + const bcsArgs = TransactionBuilderABI.toBCSArgs(funcABI.args, args); + payload = new TransactionPayloadEntryFunction( + new EntryFunction(funcABI.module_name, new Identifier(funcABI.name), typeTags, bcsArgs), + ); + } else if (scriptABI instanceof TransactionScriptABI) { + const funcABI = scriptABI as TransactionScriptABI; + const scriptArgs = TransactionBuilderABI.toTransactionArguments(funcABI.args, args); + + payload = new TransactionPayloadScript(new Script(funcABI.code, typeTags, scriptArgs)); + } else { + /* istanbul ignore next */ + throw new Error("Unknown ABI format."); + } + + return payload; + } + + /** + * Builds a RawTransaction + * @param func Fully qualified func names, e.g. 0x1::Coin::transfer + * @param ty_tags TypeTag strings. + * @example Below are valid value examples + * ``` + * // Structs are in format `AccountAddress::ModuleName::StructName` + * 0x1::aptos_coin::AptosCoin + * // Vectors are in format `vector` + * vector<0x1::aptos_coin::AptosCoin> + * bool + * u8 + * u16 + * u32 + * u64 + * u128 + * u256 + * address + * ``` + * @param args Function arguments + * @returns RawTransaction + */ + build(func: string, ty_tags: string[], args: any[]): RawTransaction { + const { sender, sequenceNumber, gasUnitPrice, maxGasAmount, expSecFromNow, chainId } = this.builderConfig; + + if (!gasUnitPrice) { + throw new Error("No gasUnitPrice provided."); + } + + const senderAccount = sender instanceof AccountAddress ? sender : AccountAddress.fromHex(sender!); + const expTimestampSec = BigInt(Math.floor(Date.now() / 1000) + Number(expSecFromNow)); + const payload = this.buildTransactionPayload(func, ty_tags, args); + + if (payload) { + return new RawTransaction( + senderAccount, + BigInt(sequenceNumber!), + payload, + BigInt(maxGasAmount!), + BigInt(gasUnitPrice!), + expTimestampSec, + new ChainId(Number(chainId)), + ); + } + + throw new Error("Invalid ABI."); + } +} + +export type RemoteABIBuilderConfig = Partial> & { + sender: MaybeHexString | AccountAddress; +}; + +interface AptosClientInterface { + getAccountModules: (accountAddress: MaybeHexString) => Promise; + getAccount: (accountAddress: MaybeHexString) => Promise; + getChainId: () => Promise; + estimateGasPrice: () => Promise; +} + +/** + * This transaction builder downloads JSON ABIs from the fullnodes. + * It then translates the JSON ABIs to the format that is accepted by TransactionBuilderABI + */ +export class TransactionBuilderRemoteABI { + // We don't want the builder to depend on the actual AptosClient. There might be circular dependencies. 
+ constructor( + private readonly aptosClient: AptosClientInterface, + private readonly builderConfig: RemoteABIBuilderConfig, + ) {} + + // Cache for 10 minutes + @MemoizeExpiring(10 * 60 * 1000) + async fetchABI(addr: string) { + const modules = await this.aptosClient.getAccountModules(addr); + const abis = modules + .map((module) => module.abi) + .flatMap((abi) => + abi!.exposed_functions + .filter((ef) => ef.is_entry) + .map( + (ef) => + ({ + fullName: `${abi!.address}::${abi!.name}::${ef.name}`, + ...ef, + } as Gen.MoveFunction & { fullName: string }), + ), + ); + + const abiMap = new Map(); + abis.forEach((abi) => { + abiMap.set(abi.fullName, abi); + }); + + return abiMap; + } + + /** + * Builds a raw transaction. Only support script function a.k.a entry function payloads + * + * @param func fully qualified function name in format
<address>::<module name>::<function name>, e.g. 0x1::coins::transfer + * @param ty_tags + * @param args + * @returns RawTransaction + */ + async build(func: Gen.EntryFunctionId, ty_tags: Gen.MoveType[], args: any[]): Promise<RawTransaction> { + /* eslint no-param-reassign: ["off"] */ + const normalize = (s: string) => s.replace(/^0[xX]0*/g, "0x"); + func = normalize(func); + const funcNameParts = func.split("::"); + if (funcNameParts.length !== 3) { + throw new Error( + // eslint-disable-next-line max-len + "'func' needs to be a fully qualified function name in format
<address>::<module name>::<function name>, e.g. 0x1::coins::transfer", + ); + } + + const [addr, module] = func.split("::"); + + // Downloads the JSON abi + const abiMap = await this.fetchABI(addr); + if (!abiMap.has(func)) { + throw new Error(`${func} doesn't exist.`); + } + + const funcAbi = abiMap.get(func); + + // Remove all `signer` and `&signer` from argument list because the Move VM injects those arguments. Clients do not + // need to care about those args. `signer` and `&signer` are required to be in the front of the argument list. But we + // just loop through all arguments and filter out `signer` and `&signer`. + const abiArgs = funcAbi!.params.filter((param) => param !== "signer" && param !== "&signer"); + + // Convert abi string arguments to TypeArgumentABI + const typeArgABIs = abiArgs.map( + (abiArg, i) => new ArgumentABI(`var${i}`, new TypeTagParser(abiArg, ty_tags).parseTypeTag()), + ); + + const entryFunctionABI = new EntryFunctionABI( + funcAbi!.name, + ModuleId.fromStr(`${addr}::${module}`), + "", // Doc string + funcAbi!.generic_type_params.map((_, i) => new TypeArgumentABI(`${i}`)), + typeArgABIs, + ); + + const { sender, ...rest } = this.builderConfig; + + const senderAddress = sender instanceof AccountAddress ? HexString.fromUint8Array(sender.address) : sender; + + const [{ sequence_number: sequenceNumber }, chainId, { gas_estimate: gasUnitPrice }] = await Promise.all([ + rest?.sequenceNumber + ? Promise.resolve({ sequence_number: rest?.sequenceNumber }) + : this.aptosClient.getAccount(senderAddress), + rest?.chainId ? Promise.resolve(rest?.chainId) : this.aptosClient.getChainId(), + rest?.gasUnitPrice ? Promise.resolve({ gas_estimate: rest?.gasUnitPrice }) : this.aptosClient.estimateGasPrice(), + ]); + + const builderABI = new TransactionBuilderABI([bcsToBytes(entryFunctionABI)], { + sender, + sequenceNumber, + chainId, + gasUnitPrice: BigInt(gasUnitPrice), + ...rest, + }); + + return builderABI.build(func, ty_tags, args); + } +} diff --git a/m1/JavaScript-client/src/transaction_builder/builder_utils.ts b/m1/JavaScript-client/src/transaction_builder/builder_utils.ts new file mode 100644 index 00000000..c8d6c278 --- /dev/null +++ b/m1/JavaScript-client/src/transaction_builder/builder_utils.ts @@ -0,0 +1,196 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +import { HexString } from "../utils"; +import { + TypeTag, + TypeTagBool, + TypeTagU8, + TypeTagU16, + TypeTagU32, + TypeTagU64, + TypeTagU128, + TypeTagU256, + TypeTagAddress, + AccountAddress, + TypeTagVector, + TypeTagStruct, + TransactionArgument, + TransactionArgumentBool, + TransactionArgumentU16, + TransactionArgumentU32, + TransactionArgumentU64, + TransactionArgumentU128, + TransactionArgumentU256, + TransactionArgumentAddress, + TransactionArgumentU8, + TransactionArgumentU8Vector, +} from "../aptos_types"; +import { Serializer } from "../bcs"; + +function assertType(val: any, types: string[] | string, message?: string) { + if (!types?.includes(typeof val)) { + throw new Error( + message || `Invalid arg: ${val} type should be ${types instanceof Array ?
types.join(" or ") : types}`, + ); + } +} + +export function ensureBoolean(val: boolean | string): boolean { + assertType(val, ["boolean", "string"]); + if (typeof val === "boolean") { + return val; + } + + if (val === "true") { + return true; + } + if (val === "false") { + return false; + } + + throw new Error("Invalid boolean string."); +} + +export function ensureNumber(val: number | string): number { + assertType(val, ["number", "string"]); + if (typeof val === "number") { + return val; + } + + const res = Number.parseInt(val, 10); + if (Number.isNaN(res)) { + throw new Error("Invalid number string."); + } + + return res; +} + +export function ensureBigInt(val: number | bigint | string): bigint { + assertType(val, ["number", "bigint", "string"]); + return BigInt(val); +} + +export function serializeArg(argVal: any, argType: TypeTag, serializer: Serializer) { + if (argType instanceof TypeTagBool) { + serializer.serializeBool(ensureBoolean(argVal)); + return; + } + if (argType instanceof TypeTagU8) { + serializer.serializeU8(ensureNumber(argVal)); + return; + } + if (argType instanceof TypeTagU16) { + serializer.serializeU16(ensureNumber(argVal)); + return; + } + if (argType instanceof TypeTagU32) { + serializer.serializeU32(ensureNumber(argVal)); + return; + } + if (argType instanceof TypeTagU64) { + serializer.serializeU64(ensureBigInt(argVal)); + return; + } + if (argType instanceof TypeTagU128) { + serializer.serializeU128(ensureBigInt(argVal)); + return; + } + if (argType instanceof TypeTagU256) { + serializer.serializeU256(ensureBigInt(argVal)); + return; + } + if (argType instanceof TypeTagAddress) { + let addr: AccountAddress; + if (typeof argVal === "string" || argVal instanceof HexString) { + addr = AccountAddress.fromHex(argVal); + } else if (argVal instanceof AccountAddress) { + addr = argVal; + } else { + throw new Error("Invalid account address."); + } + addr.serialize(serializer); + return; + } + if (argType instanceof TypeTagVector) { + // We are serializing a vector + if (argType.value instanceof TypeTagU8) { + if (argVal instanceof Uint8Array) { + serializer.serializeBytes(argVal); + return; + } + + if (typeof argVal === "string") { + serializer.serializeStr(argVal); + return; + } + } + + if (!Array.isArray(argVal)) { + throw new Error("Invalid vector args."); + } + + serializer.serializeU32AsUleb128(argVal.length); + + argVal.forEach((arg) => serializeArg(arg, argType.value, serializer)); + return; + } + + if (argType instanceof TypeTagStruct) { + const { address, module_name: moduleName, name } = (argType as TypeTagStruct).value; + if ( + `${HexString.fromUint8Array(address.address).toShortString()}::${moduleName.value}::${name.value}` !== + "0x1::string::String" + ) { + throw new Error("The only supported struct arg is of type 0x1::string::String"); + } + assertType(argVal, ["string"]); + + serializer.serializeStr(argVal); + return; + } + throw new Error("Unsupported arg type."); +} + +export function argToTransactionArgument(argVal: any, argType: TypeTag): TransactionArgument { + if (argType instanceof TypeTagBool) { + return new TransactionArgumentBool(ensureBoolean(argVal)); + } + if (argType instanceof TypeTagU8) { + return new TransactionArgumentU8(ensureNumber(argVal)); + } + if (argType instanceof TypeTagU16) { + return new TransactionArgumentU16(ensureNumber(argVal)); + } + if (argType instanceof TypeTagU32) { + return new TransactionArgumentU32(ensureNumber(argVal)); + } + if (argType instanceof TypeTagU64) { + return new 
TransactionArgumentU64(ensureBigInt(argVal)); + } + if (argType instanceof TypeTagU128) { + return new TransactionArgumentU128(ensureBigInt(argVal)); + } + if (argType instanceof TypeTagU256) { + return new TransactionArgumentU256(ensureBigInt(argVal)); + } + if (argType instanceof TypeTagAddress) { + let addr: AccountAddress; + if (typeof argVal === "string" || argVal instanceof HexString) { + addr = AccountAddress.fromHex(argVal); + } else if (argVal instanceof AccountAddress) { + addr = argVal; + } else { + throw new Error("Invalid account address."); + } + return new TransactionArgumentAddress(addr); + } + if (argType instanceof TypeTagVector && argType.value instanceof TypeTagU8) { + if (!(argVal instanceof Uint8Array)) { + throw new Error(`${argVal} should be an instance of Uint8Array`); + } + return new TransactionArgumentU8Vector(argVal); + } + + throw new Error("Unknown type for TransactionArgument."); +} diff --git a/m1/JavaScript-client/src/transaction_builder/index.ts b/m1/JavaScript-client/src/transaction_builder/index.ts new file mode 100644 index 00000000..eea089f9 --- /dev/null +++ b/m1/JavaScript-client/src/transaction_builder/index.ts @@ -0,0 +1,5 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +export * from "./builder"; +export * as TxnBuilderTypes from "../aptos_types"; diff --git a/m1/JavaScript-client/src/utils/api-endpoints.ts b/m1/JavaScript-client/src/utils/api-endpoints.ts new file mode 100644 index 00000000..7645b827 --- /dev/null +++ b/m1/JavaScript-client/src/utils/api-endpoints.ts @@ -0,0 +1,22 @@ +export const NetworkToIndexerAPI: Record = { + mainnet: "https://indexer.mainnet.aptoslabs.com/v1/graphql", + testnet: "https://indexer-testnet.staging.gcp.aptosdev.com/v1/graphql", + devnet: "https://indexer-devnet.staging.gcp.aptosdev.com/v1/graphql", +}; + +export const NetworkToNodeAPI: Record = { + mainnet: "https://fullnode.mainnet.aptoslabs.com/v1", + testnet: "https://fullnode.testnet.aptoslabs.com/v1", + devnet: "https://fullnode.devnet.aptoslabs.com/v1", +}; + +export enum Network { + MAINNET = "mainnet", + TESTNET = "testnet", + DEVNET = "devnet", +} + +export interface CustomEndpoints { + fullnodeUrl: string; + indexerUrl: string; +} diff --git a/m1/JavaScript-client/src/utils/hd-key.ts b/m1/JavaScript-client/src/utils/hd-key.ts new file mode 100644 index 00000000..32ffd932 --- /dev/null +++ b/m1/JavaScript-client/src/utils/hd-key.ts @@ -0,0 +1,79 @@ +import nacl from "tweetnacl"; +import { hmac } from "@noble/hashes/hmac"; +import { sha512 } from "@noble/hashes/sha512"; +import { hexToBytes } from "@noble/hashes/utils"; + +type Hex = string; +type Path = string; + +type Keys = { + key: Uint8Array; + chainCode: Uint8Array; +}; + +const pathRegex = /^m(\/[0-9]+')+$/; + +const replaceDerive = (val: string): string => val.replace("'", ""); + +const HMAC_KEY = "ed25519 seed"; +const HARDENED_OFFSET = 0x80000000; + +export const getMasterKeyFromSeed = (seed: Hex): Keys => { + const h = hmac.create(sha512, HMAC_KEY); + const I = h.update(hexToBytes(seed)).digest(); + const IL = I.slice(0, 32); + const IR = I.slice(32); + return { + key: IL, + chainCode: IR, + }; +}; + +export const CKDPriv = ({ key, chainCode }: Keys, index: number): Keys => { + const buffer = new ArrayBuffer(4); + new DataView(buffer).setUint32(0, index); + const indexBytes = new Uint8Array(buffer); + const zero = new Uint8Array([0]); + const data = new Uint8Array([...zero, ...key, ...indexBytes]); + + const I = hmac.create(sha512, 
chainCode).update(data).digest(); + const IL = I.slice(0, 32); + const IR = I.slice(32); + return { + key: IL, + chainCode: IR, + }; +}; + +export const getPublicKey = (privateKey: Uint8Array, withZeroByte = true): Uint8Array => { + const keyPair = nacl.sign.keyPair.fromSeed(privateKey); + const signPk = keyPair.secretKey.subarray(32); + const zero = new Uint8Array([0]); + return withZeroByte ? new Uint8Array([...zero, ...signPk]) : signPk; +}; + +export const isValidPath = (path: string): boolean => { + if (!pathRegex.test(path)) { + return false; + } + return !path + .split("/") + .slice(1) + .map(replaceDerive) + .some(Number.isNaN as any); +}; + +export const derivePath = (path: Path, seed: Hex, offset = HARDENED_OFFSET): Keys => { + if (!isValidPath(path)) { + throw new Error("Invalid derivation path"); + } + + const { key, chainCode } = getMasterKeyFromSeed(seed); + const segments = path + .split("/") + .slice(1) + .map(replaceDerive) + .map((el) => parseInt(el, 10)); + + return segments.reduce((parentKeys, segment) => CKDPriv(parentKeys, segment + offset), { key, chainCode }); +}; diff --git a/m1/JavaScript-client/src/utils/hex_string.ts b/m1/JavaScript-client/src/utils/hex_string.ts new file mode 100644 index 00000000..2fa228b6 --- /dev/null +++ b/m1/JavaScript-client/src/utils/hex_string.ts @@ -0,0 +1,122 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +import { bytesToHex, hexToBytes } from "@noble/hashes/utils"; +import { HexEncodedBytes } from "../generated"; + +// eslint-disable-next-line no-use-before-define +export type MaybeHexString = HexString | string | HexEncodedBytes; + +/** + * A util class for working with hex strings. + * Hex strings are strings that are prefixed with `0x` + */ +export class HexString { + /// We want to make sure this hexString has the `0x` hex prefix + private readonly hexString: string; + + /** + * Creates new hex string from Buffer + * @param buffer A buffer to convert + * @returns New HexString + */ + static fromBuffer(buffer: Uint8Array): HexString { + return HexString.fromUint8Array(buffer); + } + + /** + * Creates new hex string from Uint8Array + * @param arr Uint8Array to convert + * @returns New HexString + */ + static fromUint8Array(arr: Uint8Array): HexString { + return new HexString(bytesToHex(arr)); + } + + /** + * Ensures `hexString` is instance of `HexString` class + * @param hexString String to check + * @returns New HexString if `hexString` is regular string or `hexString` if it is HexString instance + * @example + * ``` + * const regularString = "string"; + * const hexString = new HexString("string"); // "0xstring" + * HexString.ensure(regularString); // "0xstring" + * HexString.ensure(hexString); // "0xstring" + * ``` + */ + static ensure(hexString: MaybeHexString): HexString { + if (typeof hexString === "string") { + return new HexString(hexString); + } + return hexString; + } + + /** + * Creates new HexString instance from regular string. 
If specified string already starts with "0x" prefix, + * it will not add another one + * @param hexString String to convert + * @example + * ``` + * const string = "string"; + * new HexString(string); // "0xstring" + * ``` + */ + constructor(hexString: string | HexEncodedBytes) { + if (hexString.startsWith("0x")) { + this.hexString = hexString; + } else { + this.hexString = `0x${hexString}`; + } + } + + /** + * Getter for inner hexString + * @returns Inner hex string + */ + hex(): string { + return this.hexString; + } + + /** + * Getter for inner hexString without prefix + * @returns Inner hex string without prefix + * @example + * ``` + * const hexString = new HexString("string"); // "0xstring" + * hexString.noPrefix(); // "string" + * ``` + */ + noPrefix(): string { + return this.hexString.slice(2); + } + + /** + * Overrides default `toString` method + * @returns Inner hex string + */ + toString(): string { + return this.hex(); + } + + /** + * Trims extra zeroes in the beginning of a string + * @returns Inner hexString without leading zeroes + * @example + * ``` + * new HexString("0x000000string").toShortString(); // result = "0xstring" + * ``` + */ + toShortString(): string { + const trimmed = this.hexString.replace(/^0x0*/, ""); + return `0x${trimmed}`; + } + + /** + * Converts hex string to a Uint8Array + * @returns Uint8Array from inner hexString without prefix + */ + toUint8Array(): Uint8Array { + return Uint8Array.from(hexToBytes(this.noPrefix())); + } +} diff --git a/m1/JavaScript-client/src/utils/index.ts b/m1/JavaScript-client/src/utils/index.ts new file mode 100644 index 00000000..dfeaacc6 --- /dev/null +++ b/m1/JavaScript-client/src/utils/index.ts @@ -0,0 +1,5 @@ +export * from "./misc"; +export * from "./memoize-decorator"; +export * from "./pagination_helpers"; +export * from "./api-endpoints"; +export * from "./hex_string"; diff --git a/m1/JavaScript-client/src/utils/memoize-decorator.ts b/m1/JavaScript-client/src/utils/memoize-decorator.ts new file mode 100644 index 00000000..291c8781 --- /dev/null +++ b/m1/JavaScript-client/src/utils/memoize-decorator.ts @@ -0,0 +1,151 @@ +/** + * Credits to https://github.com/darrylhodgins/typescript-memoize + */ + +/* eslint-disable no-param-reassign */ +/* eslint-disable no-restricted-syntax */ + +interface MemoizeArgs { + // ttl in milliseconds for cached items. After `ttlMs`, cached items are evicted automatically. If no `ttlMs` + // is provided, cached items won't get auto-evicted. + ttlMs?: number; + // produces the cache key based on `args`. + hashFunction?: boolean | ((...args: any[]) => any); + // cached items can be tagged with `tags`.
`tags` can be used to evict cached items + tags?: string[]; +} + +export function Memoize(args?: MemoizeArgs | MemoizeArgs["hashFunction"]) { + let hashFunction: MemoizeArgs["hashFunction"]; + let ttlMs: MemoizeArgs["ttlMs"]; + let tags: MemoizeArgs["tags"]; + + if (typeof args === "object") { + hashFunction = args.hashFunction; + ttlMs = args.ttlMs; + tags = args.tags; + } else { + hashFunction = args; + } + + return (target: Object, propertyKey: string, descriptor: TypedPropertyDescriptor) => { + if (descriptor.value != null) { + descriptor.value = getNewFunction(descriptor.value, hashFunction, ttlMs, tags); + } else if (descriptor.get != null) { + descriptor.get = getNewFunction(descriptor.get, hashFunction, ttlMs, tags); + } else { + throw new Error("Only put a Memoize() decorator on a method or get accessor."); + } + }; +} + +export function MemoizeExpiring(ttlMs: number, hashFunction?: MemoizeArgs["hashFunction"]) { + return Memoize({ + ttlMs, + hashFunction, + }); +} + +const clearCacheTagsMap: Map[]> = new Map(); + +export function clear(tags: string[]): number { + const cleared: Set> = new Set(); + for (const tag of tags) { + const maps = clearCacheTagsMap.get(tag); + if (maps) { + for (const mp of maps) { + if (!cleared.has(mp)) { + mp.clear(); + cleared.add(mp); + } + } + } + } + return cleared.size; +} + +function getNewFunction( + originalMethod: () => void, + hashFunction?: MemoizeArgs["hashFunction"], + ttlMs: number = 0, + tags?: MemoizeArgs["tags"], +) { + const propMapName = Symbol("__memoized_map__"); + + // The function returned here gets called instead of originalMethod. + // eslint-disable-next-line func-names + return function (...args: any[]) { + let returnedValue: any; + + // @ts-ignore + const that: any = this; + + // Get or create map + // eslint-disable-next-line no-prototype-builtins + if (!that.hasOwnProperty(propMapName)) { + Object.defineProperty(that, propMapName, { + configurable: false, + enumerable: false, + writable: false, + value: new Map(), + }); + } + const myMap: Map = that[propMapName]; + + if (Array.isArray(tags)) { + for (const tag of tags) { + if (clearCacheTagsMap.has(tag)) { + clearCacheTagsMap.get(tag)!.push(myMap); + } else { + clearCacheTagsMap.set(tag, [myMap]); + } + } + } + + if (hashFunction || args.length > 0 || ttlMs > 0) { + let hashKey: any; + + // If true is passed as first parameter, will automatically use every argument, passed to string + if (hashFunction === true) { + hashKey = args.map((a) => a.toString()).join("!"); + } else if (hashFunction) { + hashKey = hashFunction.apply(that, args); + } else { + // eslint-disable-next-line prefer-destructuring + hashKey = args[0]; + } + + const timestampKey = `${hashKey}__timestamp`; + let isExpired: boolean = false; + if (ttlMs > 0) { + if (!myMap.has(timestampKey)) { + // "Expired" since it was never called before + isExpired = true; + } else { + const timestamp = myMap.get(timestampKey); + isExpired = Date.now() - timestamp > ttlMs; + } + } + + if (myMap.has(hashKey) && !isExpired) { + returnedValue = myMap.get(hashKey); + } else { + returnedValue = originalMethod.apply(that, args as any); + myMap.set(hashKey, returnedValue); + if (ttlMs > 0) { + myMap.set(timestampKey, Date.now()); + } + } + } else { + const hashKey = that; + if (myMap.has(hashKey)) { + returnedValue = myMap.get(hashKey); + } else { + returnedValue = originalMethod.apply(that, args as any); + myMap.set(hashKey, returnedValue); + } + } + + return returnedValue; + }; +} diff --git 
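// A brief sketch of how the Memoize decorators above could be applied. The class and methods
// below are hypothetical; "experimentalDecorators" is already enabled in this package's tsconfig.
import { Memoize, MemoizeExpiring, clear } from "./memoize-decorator";

class NodeInfoCache {
  // Cache per URL for 10 seconds (hashFunction: true keys the cache on the stringified arguments)
  // and tag the entries so they can be evicted as a group.
  @Memoize({ ttlMs: 10_000, tags: ["node-info"], hashFunction: true })
  async ledgerVersion(nodeUrl: string): Promise<string> {
    const res = await fetch(nodeUrl); // placeholder call to the node's index endpoint
    return (await res.json()).ledger_version;
  }

  // Shorthand for @Memoize({ ttlMs: 60_000 }).
  @MemoizeExpiring(60_000)
  chainId(): number {
    return 1; // placeholder value
  }
}

// Evict every cached entry tagged "node-info"; returns the number of caches cleared.
clear(["node-info"]);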
a/m1/JavaScript-client/src/utils/misc.ts b/m1/JavaScript-client/src/utils/misc.ts new file mode 100644 index 00000000..fca18b0f --- /dev/null +++ b/m1/JavaScript-client/src/utils/misc.ts @@ -0,0 +1,32 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +export type Nullable = { [P in keyof T]: T[P] | null }; + +export type AnyObject = { [key: string]: any }; + +export async function sleep(timeMs: number): Promise { + return new Promise((resolve) => { + setTimeout(resolve, timeMs); + }); +} + +export const DEFAULT_VERSION_PATH_BASE = "/v1"; + +export function fixNodeUrl(nodeUrl: string): string { + let out = `${nodeUrl}`; + if (out.endsWith("/")) { + out = out.substring(0, out.length - 1); + } + if (!out.endsWith(DEFAULT_VERSION_PATH_BASE)) { + out = `${out}${DEFAULT_VERSION_PATH_BASE}`; + } + return out; +} + +export const DEFAULT_MAX_GAS_AMOUNT = 200000; +// Transaction expiration time, in seconds from now +export const DEFAULT_TXN_EXP_SEC_FROM_NOW = 20; +// How long the SDK waits for a txn to finish +export const DEFAULT_TXN_TIMEOUT_SEC = 20; +export const APTOS_COIN = "0x1::aptos_coin::AptosCoin"; diff --git a/m1/JavaScript-client/src/utils/pagination_helpers.ts b/m1/JavaScript-client/src/utils/pagination_helpers.ts new file mode 100644 index 00000000..3d903715 --- /dev/null +++ b/m1/JavaScript-client/src/utils/pagination_helpers.ts @@ -0,0 +1,40 @@ +import { AnyNumber } from "../bcs"; +import { HexString, MaybeHexString } from "./hex_string"; + +/// This function is a helper for paginating using a function wrapping an API +export async function paginateWithCursor( + apiFunction: ( + address: string, + ledgerVersion?: string | undefined, + start?: string | undefined, + limit?: number | undefined, + ) => Promise, + accountAddress: MaybeHexString, + limitPerRequest: number, + query?: { ledgerVersion?: AnyNumber }, +): Promise { + const out = []; + let cursor: string | undefined; + // eslint-disable-next-line no-constant-condition + while (true) { + // eslint-disable-next-line no-await-in-loop + const response = await apiFunction( + HexString.ensure(accountAddress).hex(), + query?.ledgerVersion?.toString(), + cursor, + limitPerRequest, + ); + // Response is the main response, i.e. the T[]. Attached to that are the headers as `__headers`. + // eslint-disable-next-line no-underscore-dangle + cursor = (response as any).__headers["x-aptos-cursor"]; + // Now that we have the cursor (if any), we remove the headers before + // adding these to the output of this function. 
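// A tiny sketch of the misc helpers above; the node URL is a placeholder.
import { fixNodeUrl, sleep, DEFAULT_TXN_TIMEOUT_SEC } from "./misc";

console.log(fixNodeUrl("https://fullnode.example.com")); // "https://fullnode.example.com/v1"
console.log(fixNodeUrl("https://fullnode.example.com/v1/")); // trailing slash stripped, "/v1" kept

(async () => {
  // sleep is a promisified setTimeout; here we pause for 2 seconds.
  await sleep(DEFAULT_TXN_TIMEOUT_SEC * 100);
})();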
+ // eslint-disable-next-line no-underscore-dangle + delete (response as any).__headers; + out.push(...response); + if (cursor === null || cursor === undefined) { + break; + } + } + return out; +} diff --git a/m1/JavaScript-client/src/utils/property_map_serde.ts b/m1/JavaScript-client/src/utils/property_map_serde.ts new file mode 100644 index 00000000..b3b0d3d7 --- /dev/null +++ b/m1/JavaScript-client/src/utils/property_map_serde.ts @@ -0,0 +1,119 @@ +import { Bytes, Deserializer, Serializer } from "../bcs"; +import { serializeArg } from "../transaction_builder/builder_utils"; +import { + stringStructTag, + TypeTag, + TypeTagAddress, + TypeTagBool, + TypeTagParser, + TypeTagStruct, + TypeTagU128, + TypeTagU64, + TypeTagU8, +} from "../aptos_types"; +import { HexString } from "./hex_string"; + +export class PropertyValue { + type: string; + + value: any; + + constructor(type: string, value: string) { + this.type = type; + this.value = value; + } +} + +export class PropertyMap { + data: { [key: string]: PropertyValue }; + + constructor() { + this.data = {}; + } + + setProperty(key: string, value: PropertyValue) { + this.data[key] = value; + } +} + +export function getPropertyType(typ: string): TypeTag { + let typeTag: TypeTag; + if (typ === "string" || typ === "String") { + typeTag = new TypeTagStruct(stringStructTag); + } else { + typeTag = new TypeTagParser(typ).parseTypeTag(); + } + return typeTag; +} + +export function getPropertyValueRaw(values: Array, types: Array): Array { + if (values.length !== types.length) { + throw new Error("Lengths of property values and types do not match"); + } + + const results = new Array(); + types.forEach((typ, index) => { + try { + const typeTag = getPropertyType(typ); + const serializer = new Serializer(); + serializeArg(values[index], typeTag, serializer); + results.push(serializer.getBytes()); + } catch (error) { + // if the type is not supported, just use the raw string bytes + results.push(new TextEncoder().encode(values[index])); + } + }); + return results; +} + +export function getSinglePropertyValueRaw(value: string, type: string): Uint8Array { + if (!value || !type) { + throw new Error("value or type cannot be empty"); + } + + try { + const typeTag = getPropertyType(type); + const serializer = new Serializer(); + serializeArg(value, typeTag, serializer); + return serializer.getBytes(); + } catch (error) { + // if the type is not supported, just use the raw string bytes + return new TextEncoder().encode(value); + } +} + +export function deserializePropertyMap(rawPropertyMap: any): PropertyMap { + const entries = rawPropertyMap.map.data; + const pm = new PropertyMap(); + entries.forEach((prop: any) => { + const { key } = prop; + const val: string = prop.value.value; + const typ: string = prop.value.type; + const typeTag = getPropertyType(typ); + const newValue = deserializeValueBasedOnTypeTag(typeTag, val); + const pv = new PropertyValue(typ, newValue); + pm.setProperty(key, pv); + }); + return pm; +} + +export function deserializeValueBasedOnTypeTag(tag: TypeTag, val: string): string { + const de = new Deserializer(new HexString(val).toUint8Array()); + let res: string = ""; + if (tag instanceof TypeTagU8) { + res = de.deserializeU8().toString(); + } else if (tag instanceof TypeTagU64) { + res = de.deserializeU64().toString(); + } else if (tag instanceof TypeTagU128) { + res = de.deserializeU128().toString(); + } else if (tag instanceof TypeTagBool) { + res = de.deserializeBool() ? 
"true" : "false"; + } else if (tag instanceof TypeTagAddress) { + res = HexString.fromUint8Array(de.deserializeFixedBytes(32)).hex(); + } else if (tag instanceof TypeTagStruct && (tag as TypeTagStruct).isStringTypeTag()) { + res = de.deserializeStr(); + } else { + res = val; + } + return res; +} diff --git a/m1/JavaScript-client/src/version.ts b/m1/JavaScript-client/src/version.ts new file mode 100644 index 00000000..c58a1f0a --- /dev/null +++ b/m1/JavaScript-client/src/version.ts @@ -0,0 +1,2 @@ +// hardcoded for now, we would want to have it injected dynamically +export const VERSION = "1.8.5"; diff --git a/m1/JavaScript-client/tsconfig.json b/m1/JavaScript-client/tsconfig.json new file mode 100644 index 00000000..f7996b6d --- /dev/null +++ b/m1/JavaScript-client/tsconfig.json @@ -0,0 +1,19 @@ +{ + "compilerOptions": { + "allowSyntheticDefaultImports": true, + "allowJs": true, + "declaration": true, + "declarationMap": true, + "esModuleInterop": true, + "experimentalDecorators": true, + "module": "esnext", + "moduleResolution": "node", + "noImplicitAny": true, + "outDir": "./dist", + "sourceMap": true, + "strict": true, + "target": "es2020", + "pretty": true + }, + "include": ["src"] +} diff --git a/m1/JavaScript-client/tsup.config.js b/m1/JavaScript-client/tsup.config.js new file mode 100644 index 00000000..b02abb27 --- /dev/null +++ b/m1/JavaScript-client/tsup.config.js @@ -0,0 +1,8 @@ +import { defineConfig } from "tsup"; + +export default defineConfig({ + entry: ["src/index.ts"], + splitting: false, + sourcemap: true, + target: "es2018", +}); diff --git a/m1/JavaScript-client/yarn.lock b/m1/JavaScript-client/yarn.lock new file mode 100644 index 00000000..cbd0bd57 --- /dev/null +++ b/m1/JavaScript-client/yarn.lock @@ -0,0 +1,6110 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
+# yarn lockfile v1 + + +"@ampproject/remapping@^2.1.0": + version "2.2.0" + resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.2.0.tgz#56c133824780de3174aed5ab6834f3026790154d" + integrity sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w== + dependencies: + "@jridgewell/gen-mapping" "^0.1.0" + "@jridgewell/trace-mapping" "^0.3.9" + +"@ampproject/remapping@^2.2.0": + version "2.2.1" + resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.2.1.tgz#99e8e11851128b8702cd57c33684f1d0f260b630" + integrity sha512-lFMjJTrFL3j7L9yBxwYfCq2k6qqwHyzuUl/XBnif78PWTJYyL/dfowQHWE3sp6U6ZzqWiiIZnpTMO96zhkjwtg== + dependencies: + "@jridgewell/gen-mapping" "^0.3.0" + "@jridgewell/trace-mapping" "^0.3.9" + +"@apidevtools/json-schema-ref-parser@9.0.9": + version "9.0.9" + resolved "https://registry.yarnpkg.com/@apidevtools/json-schema-ref-parser/-/json-schema-ref-parser-9.0.9.tgz#d720f9256e3609621280584f2b47ae165359268b" + integrity sha512-GBD2Le9w2+lVFoc4vswGI/TjkNIZSVp7+9xPf+X3uidBfWnAeUWmquteSyt0+VCrhNMWj/FTABISQrD3Z/YA+w== + dependencies: + "@jsdevtools/ono" "^7.1.3" + "@types/json-schema" "^7.0.6" + call-me-maybe "^1.0.1" + js-yaml "^4.1.0" + +"@ardatan/relay-compiler@12.0.0": + version "12.0.0" + resolved "https://registry.yarnpkg.com/@ardatan/relay-compiler/-/relay-compiler-12.0.0.tgz#2e4cca43088e807adc63450e8cab037020e91106" + integrity sha512-9anThAaj1dQr6IGmzBMcfzOQKTa5artjuPmw8NYK/fiGEMjADbSguBY2FMDykt+QhilR3wc9VA/3yVju7JHg7Q== + dependencies: + "@babel/core" "^7.14.0" + "@babel/generator" "^7.14.0" + "@babel/parser" "^7.14.0" + "@babel/runtime" "^7.0.0" + "@babel/traverse" "^7.14.0" + "@babel/types" "^7.0.0" + babel-preset-fbjs "^3.4.0" + chalk "^4.0.0" + fb-watchman "^2.0.0" + fbjs "^3.0.0" + glob "^7.1.1" + immutable "~3.7.6" + invariant "^2.2.4" + nullthrows "^1.1.1" + relay-runtime "12.0.0" + signedsource "^1.0.0" + yargs "^15.3.1" + +"@ardatan/sync-fetch@^0.0.1": + version "0.0.1" + resolved "https://registry.yarnpkg.com/@ardatan/sync-fetch/-/sync-fetch-0.0.1.tgz#3385d3feedceb60a896518a1db857ec1e945348f" + integrity sha512-xhlTqH0m31mnsG0tIP4ETgfSB6gXDaYYsUWTrlUV93fFQPI9dd8hE0Ot6MHLCtqgB32hwJAC3YZMWlXZw7AleA== + dependencies: + node-fetch "^2.6.1" + +"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.18.6.tgz#3b25d38c89600baa2dcc219edfa88a74eb2c427a" + integrity sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q== + dependencies: + "@babel/highlight" "^7.18.6" + +"@babel/code-frame@^7.21.4": + version "7.21.4" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.21.4.tgz#d0fa9e4413aca81f2b23b9442797bda1826edb39" + integrity sha512-LYvhNKfwWSPpocw8GI7gpK2nq3HSDuEPC/uSYaALSJu9xjsalaaYFOq0Pwt5KmVqwEbZlDu81aLXwBOmD/Fv9g== + dependencies: + "@babel/highlight" "^7.18.6" + +"@babel/compat-data@^7.20.0": + version "7.20.1" + resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.20.1.tgz#f2e6ef7790d8c8dbf03d379502dcc246dcce0b30" + integrity sha512-EWZ4mE2diW3QALKvDMiXnbZpRvlj+nayZ112nK93SnhqOtpdsbVD4W+2tEoT3YNBAG9RBR0ISY758ZkOgsn6pQ== + +"@babel/compat-data@^7.20.5", "@babel/compat-data@^7.21.4": + version "7.21.4" + resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.21.4.tgz#457ffe647c480dff59c2be092fc3acf71195c87f" + integrity 
sha512-/DYyDpeCfaVinT40FPGdkkb+lYSKvsVuMjDAG7jPOWWiM1ibOaB9CXJAlc4d1QpP/U2q2P9jbrSlClKSErd55g== + +"@babel/core@^7.11.6", "@babel/core@^7.12.3": + version "7.20.2" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.20.2.tgz#8dc9b1620a673f92d3624bd926dc49a52cf25b92" + integrity sha512-w7DbG8DtMrJcFOi4VrLm+8QM4az8Mo+PuLBKLp2zrYRCow8W/f9xiXm5sN53C8HksCyDQwCKha9JiDoIyPjT2g== + dependencies: + "@ampproject/remapping" "^2.1.0" + "@babel/code-frame" "^7.18.6" + "@babel/generator" "^7.20.2" + "@babel/helper-compilation-targets" "^7.20.0" + "@babel/helper-module-transforms" "^7.20.2" + "@babel/helpers" "^7.20.1" + "@babel/parser" "^7.20.2" + "@babel/template" "^7.18.10" + "@babel/traverse" "^7.20.1" + "@babel/types" "^7.20.2" + convert-source-map "^1.7.0" + debug "^4.1.0" + gensync "^1.0.0-beta.2" + json5 "^2.2.1" + semver "^6.3.0" + +"@babel/core@^7.14.0": + version "7.21.4" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.21.4.tgz#c6dc73242507b8e2a27fd13a9c1814f9fa34a659" + integrity sha512-qt/YV149Jman/6AfmlxJ04LMIu8bMoyl3RB91yTFrxQmgbrSvQMy7cI8Q62FHx1t8wJ8B5fu0UDoLwHAhUo1QA== + dependencies: + "@ampproject/remapping" "^2.2.0" + "@babel/code-frame" "^7.21.4" + "@babel/generator" "^7.21.4" + "@babel/helper-compilation-targets" "^7.21.4" + "@babel/helper-module-transforms" "^7.21.2" + "@babel/helpers" "^7.21.0" + "@babel/parser" "^7.21.4" + "@babel/template" "^7.20.7" + "@babel/traverse" "^7.21.4" + "@babel/types" "^7.21.4" + convert-source-map "^1.7.0" + debug "^4.1.0" + gensync "^1.0.0-beta.2" + json5 "^2.2.2" + semver "^6.3.0" + +"@babel/generator@^7.14.0", "@babel/generator@^7.18.13", "@babel/generator@^7.21.4": + version "7.21.4" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.21.4.tgz#64a94b7448989f421f919d5239ef553b37bb26bc" + integrity sha512-NieM3pVIYW2SwGzKoqfPrQsf4xGs9M9AIG3ThppsSRmO+m7eQhmI6amajKMUeIO37wFfsvnvcxQFx6x6iqxDnA== + dependencies: + "@babel/types" "^7.21.4" + "@jridgewell/gen-mapping" "^0.3.2" + "@jridgewell/trace-mapping" "^0.3.17" + jsesc "^2.5.1" + +"@babel/generator@^7.20.1", "@babel/generator@^7.20.2", "@babel/generator@^7.7.2": + version "7.20.2" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.20.2.tgz#c2e89e22613a039285c1e7b749e2cd0b30b9a481" + integrity sha512-SD75PMIK6i9H8G/tfGvB4KKl4Nw6Ssos9nGgYwxbgyTP0iX/Z55DveoH86rmUB/YHTQQ+ZC0F7xxaY8l2OF44Q== + dependencies: + "@babel/types" "^7.20.2" + "@jridgewell/gen-mapping" "^0.3.2" + jsesc "^2.5.1" + +"@babel/helper-annotate-as-pure@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.18.6.tgz#eaa49f6f80d5a33f9a5dd2276e6d6e451be0a6bb" + integrity sha512-duORpUiYrEpzKIop6iNbjnwKLAKnJ47csTyRACyEmWj0QdUrm5aqNJGHSSEQSUAvNW0ojX0dOmK9dZduvkfeXA== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-compilation-targets@^7.18.9", "@babel/helper-compilation-targets@^7.20.7", "@babel/helper-compilation-targets@^7.21.4": + version "7.21.4" + resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.21.4.tgz#770cd1ce0889097ceacb99418ee6934ef0572656" + integrity sha512-Fa0tTuOXZ1iL8IeDFUWCzjZcn+sJGd9RZdH9esYVjEejGmzf+FFYQpMi/kZUk2kPy/q1H3/GPw7np8qar/stfg== + dependencies: + "@babel/compat-data" "^7.21.4" + "@babel/helper-validator-option" "^7.21.0" + browserslist "^4.21.3" + lru-cache "^5.1.1" + semver "^6.3.0" + +"@babel/helper-compilation-targets@^7.20.0": + version "7.20.0" + resolved 
"https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.20.0.tgz#6bf5374d424e1b3922822f1d9bdaa43b1a139d0a" + integrity sha512-0jp//vDGp9e8hZzBc6N/KwA5ZK3Wsm/pfm4CrY7vzegkVxc65SgSn6wYOnwHe9Js9HRQ1YTCKLGPzDtaS3RoLQ== + dependencies: + "@babel/compat-data" "^7.20.0" + "@babel/helper-validator-option" "^7.18.6" + browserslist "^4.21.3" + semver "^6.3.0" + +"@babel/helper-create-class-features-plugin@^7.18.6": + version "7.21.4" + resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.21.4.tgz#3a017163dc3c2ba7deb9a7950849a9586ea24c18" + integrity sha512-46QrX2CQlaFRF4TkwfTt6nJD7IHq8539cCL7SDpqWSDeJKY1xylKKY5F/33mJhLZ3mFvKv2gGrVS6NkyF6qs+Q== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-function-name" "^7.21.0" + "@babel/helper-member-expression-to-functions" "^7.21.0" + "@babel/helper-optimise-call-expression" "^7.18.6" + "@babel/helper-replace-supers" "^7.20.7" + "@babel/helper-skip-transparent-expression-wrappers" "^7.20.0" + "@babel/helper-split-export-declaration" "^7.18.6" + +"@babel/helper-environment-visitor@^7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz#0c0cee9b35d2ca190478756865bb3528422f51be" + integrity sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg== + +"@babel/helper-function-name@^7.18.9", "@babel/helper-function-name@^7.21.0": + version "7.21.0" + resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.21.0.tgz#d552829b10ea9f120969304023cd0645fa00b1b4" + integrity sha512-HfK1aMRanKHpxemaY2gqBmL04iAPOPRj7DxtNbiDOrJK+gdwkiNRVpCpUJYbUT+aZyemKN8brqTOxzCaG6ExRg== + dependencies: + "@babel/template" "^7.20.7" + "@babel/types" "^7.21.0" + +"@babel/helper-function-name@^7.19.0": + version "7.19.0" + resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.19.0.tgz#941574ed5390682e872e52d3f38ce9d1bef4648c" + integrity sha512-WAwHBINyrpqywkUH0nTnNgI5ina5TFn85HKS0pbPDfxFfhyR/aNQEn4hGi1P1JyT//I0t4OgXUlofzWILRvS5w== + dependencies: + "@babel/template" "^7.18.10" + "@babel/types" "^7.19.0" + +"@babel/helper-hoist-variables@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz#d4d2c8fb4baeaa5c68b99cc8245c56554f926678" + integrity sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-member-expression-to-functions@^7.20.7", "@babel/helper-member-expression-to-functions@^7.21.0": + version "7.21.0" + resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.21.0.tgz#319c6a940431a133897148515877d2f3269c3ba5" + integrity sha512-Muu8cdZwNN6mRRNG6lAYErJ5X3bRevgYR2O8wN0yn7jJSnGDu6eG59RfT29JHxGUovyfrh6Pj0XzmR7drNVL3Q== + dependencies: + "@babel/types" "^7.21.0" + +"@babel/helper-module-imports@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.18.6.tgz#1e3ebdbbd08aad1437b428c50204db13c5a3ca6e" + integrity sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-module-transforms@^7.20.2": + 
version "7.20.2" + resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.20.2.tgz#ac53da669501edd37e658602a21ba14c08748712" + integrity sha512-zvBKyJXRbmK07XhMuujYoJ48B5yvvmM6+wcpv6Ivj4Yg6qO7NOZOSnvZN9CRl1zz1Z4cKf8YejmCMh8clOoOeA== + dependencies: + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-module-imports" "^7.18.6" + "@babel/helper-simple-access" "^7.20.2" + "@babel/helper-split-export-declaration" "^7.18.6" + "@babel/helper-validator-identifier" "^7.19.1" + "@babel/template" "^7.18.10" + "@babel/traverse" "^7.20.1" + "@babel/types" "^7.20.2" + +"@babel/helper-module-transforms@^7.21.2": + version "7.21.2" + resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.21.2.tgz#160caafa4978ac8c00ac66636cb0fa37b024e2d2" + integrity sha512-79yj2AR4U/Oqq/WOV7Lx6hUjau1Zfo4cI+JLAVYeMV5XIlbOhmjEk5ulbTc9fMpmlojzZHkUUxAiK+UKn+hNQQ== + dependencies: + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-module-imports" "^7.18.6" + "@babel/helper-simple-access" "^7.20.2" + "@babel/helper-split-export-declaration" "^7.18.6" + "@babel/helper-validator-identifier" "^7.19.1" + "@babel/template" "^7.20.7" + "@babel/traverse" "^7.21.2" + "@babel/types" "^7.21.2" + +"@babel/helper-optimise-call-expression@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.18.6.tgz#9369aa943ee7da47edab2cb4e838acf09d290ffe" + integrity sha512-HP59oD9/fEHQkdcbgFCnbmgH5vIQTJbxh2yf+CdM89/glUNnuzr87Q8GIjGEnOktTROemO0Pe0iPAYbqZuOUiA== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.18.6", "@babel/helper-plugin-utils@^7.18.9", "@babel/helper-plugin-utils@^7.19.0", "@babel/helper-plugin-utils@^7.20.2", "@babel/helper-plugin-utils@^7.8.0": + version "7.20.2" + resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.20.2.tgz#d1b9000752b18d0877cff85a5c376ce5c3121629" + integrity sha512-8RvlJG2mj4huQ4pZ+rU9lqKi9ZKiRmuvGuM2HlWmkmgOhbs6zEAw6IEiJ5cQqGbDzGZOhwuOQNtZMi/ENLjZoQ== + +"@babel/helper-replace-supers@^7.18.6", "@babel/helper-replace-supers@^7.20.7": + version "7.20.7" + resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.20.7.tgz#243ecd2724d2071532b2c8ad2f0f9f083bcae331" + integrity sha512-vujDMtB6LVfNW13jhlCrp48QNslK6JXi7lQG736HVbHz/mbf4Dc7tIRh1Xf5C0rF7BP8iiSxGMCmY6Ci1ven3A== + dependencies: + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-member-expression-to-functions" "^7.20.7" + "@babel/helper-optimise-call-expression" "^7.18.6" + "@babel/template" "^7.20.7" + "@babel/traverse" "^7.20.7" + "@babel/types" "^7.20.7" + +"@babel/helper-simple-access@^7.20.2": + version "7.20.2" + resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.20.2.tgz#0ab452687fe0c2cfb1e2b9e0015de07fc2d62dd9" + integrity sha512-+0woI/WPq59IrqDYbVGfshjT5Dmk/nnbdpcF8SnMhhXObpTq2KNBdLFRFrkVdbDOyUmHBCxzm5FHV1rACIkIbA== + dependencies: + "@babel/types" "^7.20.2" + +"@babel/helper-skip-transparent-expression-wrappers@^7.20.0": + version "7.20.0" + resolved "https://registry.yarnpkg.com/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.20.0.tgz#fbe4c52f60518cab8140d77101f0e63a8a230684" + integrity 
sha512-5y1JYeNKfvnT8sZcK9DVRtpTbGiomYIHviSP3OQWmDPU3DeH4a1ZlT/N2lyQ5P8egjcRaT/Y9aNqUxK0WsnIIg== + dependencies: + "@babel/types" "^7.20.0" + +"@babel/helper-split-export-declaration@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.18.6.tgz#7367949bc75b20c6d5a5d4a97bba2824ae8ef075" + integrity sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-string-parser@^7.19.4": + version "7.19.4" + resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.19.4.tgz#38d3acb654b4701a9b77fb0615a96f775c3a9e63" + integrity sha512-nHtDoQcuqFmwYNYPz3Rah5ph2p8PFeFCsZk9A/48dPc/rGocJ5J3hAAZ7pb76VWX3fZKu+uEr/FhH5jLx7umrw== + +"@babel/helper-validator-identifier@^7.18.6", "@babel/helper-validator-identifier@^7.19.1": + version "7.19.1" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz#7eea834cf32901ffdc1a7ee555e2f9c27e249ca2" + integrity sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w== + +"@babel/helper-validator-option@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.18.6.tgz#bf0d2b5a509b1f336099e4ff36e1a63aa5db4db8" + integrity sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw== + +"@babel/helper-validator-option@^7.21.0": + version "7.21.0" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.21.0.tgz#8224c7e13ace4bafdc4004da2cf064ef42673180" + integrity sha512-rmL/B8/f0mKS2baE9ZpyTcTavvEuWhTTW8amjzXNvYG4AwBsqTLikfXsEofsJEfKHf+HQVQbFOHy6o+4cnC/fQ== + +"@babel/helpers@^7.20.1": + version "7.20.1" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.20.1.tgz#2ab7a0fcb0a03b5bf76629196ed63c2d7311f4c9" + integrity sha512-J77mUVaDTUJFZ5BpP6mMn6OIl3rEWymk2ZxDBQJUG3P+PbmyMcF3bYWvz0ma69Af1oobDqT/iAsvzhB58xhQUg== + dependencies: + "@babel/template" "^7.18.10" + "@babel/traverse" "^7.20.1" + "@babel/types" "^7.20.0" + +"@babel/helpers@^7.21.0": + version "7.21.0" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.21.0.tgz#9dd184fb5599862037917cdc9eecb84577dc4e7e" + integrity sha512-XXve0CBtOW0pd7MRzzmoyuSj0e3SEzj8pgyFxnTT1NJZL38BD1MK7yYrm8yefRPIDvNNe14xR4FdbHwpInD4rA== + dependencies: + "@babel/template" "^7.20.7" + "@babel/traverse" "^7.21.0" + "@babel/types" "^7.21.0" + +"@babel/highlight@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.18.6.tgz#81158601e93e2563795adcbfbdf5d64be3f2ecdf" + integrity sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g== + dependencies: + "@babel/helper-validator-identifier" "^7.18.6" + chalk "^2.0.0" + js-tokens "^4.0.0" + +"@babel/parser@^7.1.0", "@babel/parser@^7.14.7", "@babel/parser@^7.18.10", "@babel/parser@^7.20.1", "@babel/parser@^7.20.2": + version "7.20.2" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.20.2.tgz#9aeb9b92f64412b5f81064d46f6a1ac0881337f4" + integrity sha512-afk318kh2uKbo7BEj2QtEi8HVCGrwHUffrYDy7dgVcSa2j9lY3LDjPzcyGdpX7xgm35aWqvciZJ4WKmdF/SxYg== + +"@babel/parser@^7.14.0", "@babel/parser@^7.16.8", "@babel/parser@^7.20.7", "@babel/parser@^7.21.4": + version "7.21.4" + resolved 
"https://registry.yarnpkg.com/@babel/parser/-/parser-7.21.4.tgz#94003fdfc520bbe2875d4ae557b43ddb6d880f17" + integrity sha512-alVJj7k7zIxqBZ7BTRhz0IqJFxW1VJbm6N8JbcYhQ186df9ZBPbZBmWSqAMXwHGsCJdYks7z/voa3ibiS5bCIw== + +"@babel/plugin-proposal-class-properties@^7.0.0": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.18.6.tgz#b110f59741895f7ec21a6fff696ec46265c446a3" + integrity sha512-cumfXOF0+nzZrrN8Rf0t7M+tF6sZc7vhQwYQck9q1/5w2OExlD+b4v4RpMJFaV1Z7WcDRgO6FqvxqxGlwo+RHQ== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.18.6" + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-proposal-object-rest-spread@^7.0.0": + version "7.20.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.20.7.tgz#aa662940ef425779c75534a5c41e9d936edc390a" + integrity sha512-d2S98yCiLxDVmBmE8UjGcfPvNEUbA1U5q5WxaWFUGRzJSVAZqm5W6MbPct0jxnegUZ0niLeNX+IOzEs7wYg9Dg== + dependencies: + "@babel/compat-data" "^7.20.5" + "@babel/helper-compilation-targets" "^7.20.7" + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-transform-parameters" "^7.20.7" + +"@babel/plugin-syntax-async-generators@^7.8.4": + version "7.8.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz#a983fb1aeb2ec3f6ed042a210f640e90e786fe0d" + integrity sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-bigint@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz#4c9a6f669f5d0cdf1b90a1671e9a146be5300cea" + integrity sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-class-properties@^7.0.0", "@babel/plugin-syntax-class-properties@^7.8.3": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz#b5c987274c4a3a82b89714796931a6b53544ae10" + integrity sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA== + dependencies: + "@babel/helper-plugin-utils" "^7.12.13" + +"@babel/plugin-syntax-flow@^7.0.0", "@babel/plugin-syntax-flow@^7.18.6": + version "7.21.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-flow/-/plugin-syntax-flow-7.21.4.tgz#3e37fca4f06d93567c1cd9b75156422e90a67107" + integrity sha512-l9xd3N+XG4fZRxEP3vXdK6RW7vN1Uf5dxzRC/09wV86wqZ/YYQooBIGNsiRdfNR3/q2/5pPzV4B54J/9ctX5jw== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + +"@babel/plugin-syntax-import-assertions@^7.20.0": + version "7.20.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.20.0.tgz#bb50e0d4bea0957235390641209394e87bdb9cc4" + integrity sha512-IUh1vakzNoWalR8ch/areW7qFopR2AEw03JlG7BbrDqmQ4X3q9uuipQwSGrUn7oGiemKjtSLDhNtQHzMHr1JdQ== + dependencies: + "@babel/helper-plugin-utils" "^7.19.0" + +"@babel/plugin-syntax-import-meta@^7.8.3": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz#ee601348c370fa334d2207be158777496521fd51" + integrity 
sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-json-strings@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz#01ca21b668cd8218c9e640cb6dd88c5412b2c96a" + integrity sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-jsx@^7.0.0", "@babel/plugin-syntax-jsx@^7.18.6": + version "7.21.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.21.4.tgz#f264ed7bf40ffc9ec239edabc17a50c4f5b6fea2" + integrity sha512-5hewiLct5OKyh6PLKEYaFclcqtIgCb6bmELouxjF6up5q3Sov7rOayW4RwhbaBL0dit8rA80GNfY+UuDp2mBbQ== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + +"@babel/plugin-syntax-logical-assignment-operators@^7.8.3": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz#ca91ef46303530448b906652bac2e9fe9941f699" + integrity sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz#167ed70368886081f74b5c36c65a88c03b66d1a9" + integrity sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-numeric-separator@^7.8.3": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz#b9b070b3e33570cd9fd07ba7fa91c0dd37b9af97" + integrity sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-object-rest-spread@^7.0.0", "@babel/plugin-syntax-object-rest-spread@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871" + integrity sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-optional-catch-binding@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz#6111a265bcfb020eb9efd0fdfd7d26402b9ed6c1" + integrity sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-optional-chaining@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz#4f69c2ab95167e0180cd5336613f8c5788f7d48a" + integrity sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-top-level-await@^7.8.3": + version "7.14.5" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz#c1cfdadc35a646240001f06138247b741c34d94c" + integrity sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-typescript@^7.7.2": + version "7.20.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.20.0.tgz#4e9a0cfc769c85689b77a2e642d24e9f697fc8c7" + integrity sha512-rd9TkG+u1CExzS4SM1BlMEhMXwFLKVjOAFFCDx9PbX5ycJWDoWMcwdJH9RhkPu1dOgn5TrxLot/Gx6lWFuAUNQ== + dependencies: + "@babel/helper-plugin-utils" "^7.19.0" + +"@babel/plugin-transform-arrow-functions@^7.0.0": + version "7.20.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.20.7.tgz#bea332b0e8b2dab3dafe55a163d8227531ab0551" + integrity sha512-3poA5E7dzDomxj9WXWwuD6A5F3kc7VXwIJO+E+J8qtDtS+pXPAhrgEyh+9GBwBgPq1Z+bB+/JD60lp5jsN7JPQ== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + +"@babel/plugin-transform-block-scoped-functions@^7.0.0": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.18.6.tgz#9187bf4ba302635b9d70d986ad70f038726216a8" + integrity sha512-ExUcOqpPWnliRcPqves5HJcJOvHvIIWfuS4sroBUenPuMdmW+SMHDakmtS7qOo13sVppmUijqeTv7qqGsvURpQ== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-block-scoping@^7.0.0": + version "7.21.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.21.0.tgz#e737b91037e5186ee16b76e7ae093358a5634f02" + integrity sha512-Mdrbunoh9SxwFZapeHVrwFmri16+oYotcZysSzhNIVDwIAb1UV+kvnxULSYq9J3/q5MDG+4X6w8QVgD1zhBXNQ== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + +"@babel/plugin-transform-classes@^7.0.0": + version "7.21.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.21.0.tgz#f469d0b07a4c5a7dbb21afad9e27e57b47031665" + integrity sha512-RZhbYTCEUAe6ntPehC4hlslPWosNHDox+vAs4On/mCLRLfoDVHf6hVEd7kuxr1RnHwJmxFfUM3cZiZRmPxJPXQ== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-compilation-targets" "^7.20.7" + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-function-name" "^7.21.0" + "@babel/helper-optimise-call-expression" "^7.18.6" + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/helper-replace-supers" "^7.20.7" + "@babel/helper-split-export-declaration" "^7.18.6" + globals "^11.1.0" + +"@babel/plugin-transform-computed-properties@^7.0.0": + version "7.20.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.20.7.tgz#704cc2fd155d1c996551db8276d55b9d46e4d0aa" + integrity sha512-Lz7MvBK6DTjElHAmfu6bfANzKcxpyNPeYBGEafyA6E5HtRpjpZwU+u7Qrgz/2OR0z+5TvKYbPdphfSaAcZBrYQ== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/template" "^7.20.7" + +"@babel/plugin-transform-destructuring@^7.0.0": + version "7.21.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.21.3.tgz#73b46d0fd11cd6ef57dea8a381b1215f4959d401" + integrity sha512-bp6hwMFzuiE4HqYEyoGJ/V2LeIWn+hLVKc4pnj++E5XQptwhtcGmSayM029d/j2X1bPKGTlsyPwAubuU22KhMA== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + 
+"@babel/plugin-transform-flow-strip-types@^7.0.0": + version "7.21.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-flow-strip-types/-/plugin-transform-flow-strip-types-7.21.0.tgz#6aeca0adcb81dc627c8986e770bfaa4d9812aff5" + integrity sha512-FlFA2Mj87a6sDkW4gfGrQQqwY/dLlBAyJa2dJEZ+FHXUVHBflO2wyKvg+OOEzXfrKYIa4HWl0mgmbCzt0cMb7w== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/plugin-syntax-flow" "^7.18.6" + +"@babel/plugin-transform-for-of@^7.0.0": + version "7.21.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.21.0.tgz#964108c9988de1a60b4be2354a7d7e245f36e86e" + integrity sha512-LlUYlydgDkKpIY7mcBWvyPPmMcOphEyYA27Ef4xpbh1IiDNLr0kZsos2nf92vz3IccvJI25QUwp86Eo5s6HmBQ== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + +"@babel/plugin-transform-function-name@^7.0.0": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.18.9.tgz#cc354f8234e62968946c61a46d6365440fc764e0" + integrity sha512-WvIBoRPaJQ5yVHzcnJFor7oS5Ls0PYixlTYE63lCj2RtdQEl15M68FXQlxnG6wdraJIXRdR7KI+hQ7q/9QjrCQ== + dependencies: + "@babel/helper-compilation-targets" "^7.18.9" + "@babel/helper-function-name" "^7.18.9" + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-literals@^7.0.0": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.18.9.tgz#72796fdbef80e56fba3c6a699d54f0de557444bc" + integrity sha512-IFQDSRoTPnrAIrI5zoZv73IFeZu2dhu6irxQjY9rNjTT53VmKg9fenjvoiOWOkJ6mm4jKVPtdMzBY98Fp4Z4cg== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/plugin-transform-member-expression-literals@^7.0.0": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.18.6.tgz#ac9fdc1a118620ac49b7e7a5d2dc177a1bfee88e" + integrity sha512-qSF1ihLGO3q+/g48k85tUjD033C29TNTVB2paCwZPVmOsjn9pClvYYrM2VeJpBY2bcNkuny0YUyTNRyRxJ54KA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-modules-commonjs@^7.0.0": + version "7.21.2" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.21.2.tgz#6ff5070e71e3192ef2b7e39820a06fb78e3058e7" + integrity sha512-Cln+Yy04Gxua7iPdj6nOV96smLGjpElir5YwzF0LBPKoPlLDNJePNlrGGaybAJkd0zKRnOVXOgizSqPYMNYkzA== + dependencies: + "@babel/helper-module-transforms" "^7.21.2" + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/helper-simple-access" "^7.20.2" + +"@babel/plugin-transform-object-super@^7.0.0": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.18.6.tgz#fb3c6ccdd15939b6ff7939944b51971ddc35912c" + integrity sha512-uvGz6zk+pZoS1aTZrOvrbj6Pp/kK2mp45t2B+bTDre2UgsZZ8EZLSJtUg7m/no0zOJUWgFONpB7Zv9W2tSaFlA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-replace-supers" "^7.18.6" + +"@babel/plugin-transform-parameters@^7.0.0", "@babel/plugin-transform-parameters@^7.20.7": + version "7.21.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.21.3.tgz#18fc4e797cf6d6d972cb8c411dbe8a809fa157db" + integrity sha512-Wxc+TvppQG9xWFYatvCGPvZ6+SIUxQ2ZdiBP+PHYMIjnPXD+uThCshaz4NZOnODAtBjjcVQQ/3OKs9LW28purQ== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + 
+"@babel/plugin-transform-property-literals@^7.0.0": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.18.6.tgz#e22498903a483448e94e032e9bbb9c5ccbfc93a3" + integrity sha512-cYcs6qlgafTud3PAzrrRNbQtfpQ8+y/+M5tKmksS9+M1ckbH6kzY8MrexEM9mcA6JDsukE19iIRvAyYl463sMg== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-react-display-name@^7.0.0": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.18.6.tgz#8b1125f919ef36ebdfff061d664e266c666b9415" + integrity sha512-TV4sQ+T013n61uMoygyMRm+xf04Bd5oqFpv2jAEQwSZ8NwQA7zeRPg1LMVg2PWi3zWBz+CLKD+v5bcpZ/BS0aA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-react-jsx@^7.0.0": + version "7.21.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.21.0.tgz#656b42c2fdea0a6d8762075d58ef9d4e3c4ab8a2" + integrity sha512-6OAWljMvQrZjR2DaNhVfRz6dkCAVV+ymcLUmaf8bccGOHn2v5rHJK3tTpij0BuhdYWP4LLaqj5lwcdlpAAPuvg== + dependencies: + "@babel/helper-annotate-as-pure" "^7.18.6" + "@babel/helper-module-imports" "^7.18.6" + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/plugin-syntax-jsx" "^7.18.6" + "@babel/types" "^7.21.0" + +"@babel/plugin-transform-shorthand-properties@^7.0.0": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.18.6.tgz#6d6df7983d67b195289be24909e3f12a8f664dc9" + integrity sha512-eCLXXJqv8okzg86ywZJbRn19YJHU4XUa55oz2wbHhaQVn/MM+XhukiT7SYqp/7o00dg52Rj51Ny+Ecw4oyoygw== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/plugin-transform-spread@^7.0.0": + version "7.20.7" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.20.7.tgz#c2d83e0b99d3bf83e07b11995ee24bf7ca09401e" + integrity sha512-ewBbHQ+1U/VnH1fxltbJqDeWBU1oNLG8Dj11uIv3xVf7nrQu0bPGe5Rf716r7K5Qz+SqtAOVswoVunoiBtGhxw== + dependencies: + "@babel/helper-plugin-utils" "^7.20.2" + "@babel/helper-skip-transparent-expression-wrappers" "^7.20.0" + +"@babel/plugin-transform-template-literals@^7.0.0": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.18.9.tgz#04ec6f10acdaa81846689d63fae117dd9c243a5e" + integrity sha512-S8cOWfT82gTezpYOiVaGHrCbhlHgKhQt8XH5ES46P2XWmX92yisoZywf5km75wv5sYcXDUCLMmMxOLCtthDgMA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.9" + +"@babel/runtime@^7.0.0": + version "7.21.0" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.21.0.tgz#5b55c9d394e5fcf304909a8b00c07dc217b56673" + integrity sha512-xwII0//EObnq89Ji5AKYQaRYiW/nZ3llSv29d49IuxPhKbtJoLP+9QUUZ4nVragQVtaVGeZrpB+ZtG/Pdy/POw== + dependencies: + regenerator-runtime "^0.13.11" + +"@babel/template@^7.18.10", "@babel/template@^7.3.3": + version "7.18.10" + resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.18.10.tgz#6f9134835970d1dbf0835c0d100c9f38de0c5e71" + integrity sha512-TI+rCtooWHr3QJ27kJxfjutghu44DLnasDMwpDqCXVTal9RLp3RSYNh4NdBrRP2cQAoG9A8juOQl6P6oZG4JxA== + dependencies: + "@babel/code-frame" "^7.18.6" + "@babel/parser" "^7.18.10" + "@babel/types" "^7.18.10" + +"@babel/template@^7.20.7": + version "7.20.7" + resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.20.7.tgz#a15090c2839a83b02aa996c0b4994005841fd5a8" + 
integrity sha512-8SegXApWe6VoNw0r9JHpSteLKTpTiLZ4rMlGIm9JQ18KiCtyQiAMEazujAHrUS5flrcqYZa75ukev3P6QmUwUw== + dependencies: + "@babel/code-frame" "^7.18.6" + "@babel/parser" "^7.20.7" + "@babel/types" "^7.20.7" + +"@babel/traverse@^7.14.0", "@babel/traverse@^7.16.8", "@babel/traverse@^7.20.7", "@babel/traverse@^7.21.0", "@babel/traverse@^7.21.2", "@babel/traverse@^7.21.4": + version "7.21.4" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.21.4.tgz#a836aca7b116634e97a6ed99976236b3282c9d36" + integrity sha512-eyKrRHKdyZxqDm+fV1iqL9UAHMoIg0nDaGqfIOd8rKH17m5snv7Gn4qgjBoFfLz9APvjFU/ICT00NVCv1Epp8Q== + dependencies: + "@babel/code-frame" "^7.21.4" + "@babel/generator" "^7.21.4" + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-function-name" "^7.21.0" + "@babel/helper-hoist-variables" "^7.18.6" + "@babel/helper-split-export-declaration" "^7.18.6" + "@babel/parser" "^7.21.4" + "@babel/types" "^7.21.4" + debug "^4.1.0" + globals "^11.1.0" + +"@babel/traverse@^7.20.1", "@babel/traverse@^7.7.2": + version "7.20.1" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.20.1.tgz#9b15ccbf882f6d107eeeecf263fbcdd208777ec8" + integrity sha512-d3tN8fkVJwFLkHkBN479SOsw4DMZnz8cdbL/gvuDuzy3TS6Nfw80HuQqhw1pITbIruHyh7d1fMA47kWzmcUEGA== + dependencies: + "@babel/code-frame" "^7.18.6" + "@babel/generator" "^7.20.1" + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-function-name" "^7.19.0" + "@babel/helper-hoist-variables" "^7.18.6" + "@babel/helper-split-export-declaration" "^7.18.6" + "@babel/parser" "^7.20.1" + "@babel/types" "^7.20.0" + debug "^4.1.0" + globals "^11.1.0" + +"@babel/types@^7.0.0", "@babel/types@^7.18.10", "@babel/types@^7.18.6", "@babel/types@^7.19.0", "@babel/types@^7.20.0", "@babel/types@^7.20.2", "@babel/types@^7.3.0", "@babel/types@^7.3.3": + version "7.20.2" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.20.2.tgz#67ac09266606190f496322dbaff360fdaa5e7842" + integrity sha512-FnnvsNWgZCr232sqtXggapvlkk/tuwR/qhGzcmxI0GXLCjmPYQPzio2FbdlWuY6y1sHFfQKk+rRbUZ9VStQMog== + dependencies: + "@babel/helper-string-parser" "^7.19.4" + "@babel/helper-validator-identifier" "^7.19.1" + to-fast-properties "^2.0.0" + +"@babel/types@^7.16.8", "@babel/types@^7.18.13", "@babel/types@^7.20.7", "@babel/types@^7.21.0", "@babel/types@^7.21.2", "@babel/types@^7.21.4": + version "7.21.4" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.21.4.tgz#2d5d6bb7908699b3b416409ffd3b5daa25b030d4" + integrity sha512-rU2oY501qDxE8Pyo7i/Orqma4ziCOrby0/9mvbDUGEfvZjb279Nk9k19e2fiCxHbRRpY2ZyrgW1eq22mvmOIzA== + dependencies: + "@babel/helper-string-parser" "^7.19.4" + "@babel/helper-validator-identifier" "^7.19.1" + to-fast-properties "^2.0.0" + +"@bcoe/v8-coverage@^0.2.3": + version "0.2.3" + resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" + integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== + +"@cspotcode/source-map-support@^0.8.0": + version "0.8.1" + resolved "https://registry.yarnpkg.com/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz#00629c35a688e05a88b1cda684fb9d5e73f000a1" + integrity sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw== + dependencies: + "@jridgewell/trace-mapping" "0.3.9" + +"@esbuild/android-arm@0.15.13": + version "0.15.13" + resolved 
"https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.15.13.tgz#ce11237a13ee76d5eae3908e47ba4ddd380af86a" + integrity sha512-RY2fVI8O0iFUNvZirXaQ1vMvK0xhCcl0gqRj74Z6yEiO1zAUa7hbsdwZM1kzqbxHK7LFyMizipfXT3JME+12Hw== + +"@esbuild/linux-loong64@0.15.13": + version "0.15.13" + resolved "https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.15.13.tgz#64e8825bf0ce769dac94ee39d92ebe6272020dfc" + integrity sha512-+BoyIm4I8uJmH/QDIH0fu7MG0AEx9OXEDXnqptXCwKOlOqZiS4iraH1Nr7/ObLMokW3sOCeBNyD68ATcV9b9Ag== + +"@eslint/eslintrc@^1.3.1": + version "1.3.3" + resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-1.3.3.tgz#2b044ab39fdfa75b4688184f9e573ce3c5b0ff95" + integrity sha512-uj3pT6Mg+3t39fvLrj8iuCIJ38zKO9FpGtJ4BBJebJhEwjoT+KLVNCcHT5QC9NGRIEi7fZ0ZR8YRb884auB4Lg== + dependencies: + ajv "^6.12.4" + debug "^4.3.2" + espree "^9.4.0" + globals "^13.15.0" + ignore "^5.2.0" + import-fresh "^3.2.1" + js-yaml "^4.1.0" + minimatch "^3.1.2" + strip-json-comments "^3.1.1" + +"@graphql-codegen/add@^3.2.1": + version "3.2.3" + resolved "https://registry.yarnpkg.com/@graphql-codegen/add/-/add-3.2.3.tgz#f1ecee085987e7c21841edc4b1fd48877c663e1a" + integrity sha512-sQOnWpMko4JLeykwyjFTxnhqjd/3NOG2OyMuvK76Wnnwh8DRrNf2VEs2kmSvLl7MndMlOj7Kh5U154dVcvhmKQ== + dependencies: + "@graphql-codegen/plugin-helpers" "^3.1.1" + tslib "~2.4.0" + +"@graphql-codegen/cli@^2.13.5": + version "2.16.5" + resolved "https://registry.yarnpkg.com/@graphql-codegen/cli/-/cli-2.16.5.tgz#b3b5eeec357af01c1cb72f6a4ea96e52bd49e662" + integrity sha512-XYPIp+q7fB0xAGSAoRykiTe4oY80VU+z+dw5nuv4mLY0+pv7+pa2C6Nwhdw7a65lXOhFviBApWCCZeqd54SMnA== + dependencies: + "@babel/generator" "^7.18.13" + "@babel/template" "^7.18.10" + "@babel/types" "^7.18.13" + "@graphql-codegen/core" "^2.6.8" + "@graphql-codegen/plugin-helpers" "^3.1.2" + "@graphql-tools/apollo-engine-loader" "^7.3.6" + "@graphql-tools/code-file-loader" "^7.3.13" + "@graphql-tools/git-loader" "^7.2.13" + "@graphql-tools/github-loader" "^7.3.20" + "@graphql-tools/graphql-file-loader" "^7.5.0" + "@graphql-tools/json-file-loader" "^7.4.1" + "@graphql-tools/load" "^7.8.0" + "@graphql-tools/prisma-loader" "^7.2.49" + "@graphql-tools/url-loader" "^7.13.2" + "@graphql-tools/utils" "^9.0.0" + "@whatwg-node/fetch" "^0.6.0" + chalk "^4.1.0" + chokidar "^3.5.2" + cosmiconfig "^7.0.0" + cosmiconfig-typescript-loader "^4.3.0" + debounce "^1.2.0" + detect-indent "^6.0.0" + graphql-config "^4.4.0" + inquirer "^8.0.0" + is-glob "^4.0.1" + json-to-pretty-yaml "^1.2.2" + listr2 "^4.0.5" + log-symbols "^4.0.0" + shell-quote "^1.7.3" + string-env-interpolation "^1.0.1" + ts-log "^2.2.3" + ts-node "^10.9.1" + tslib "^2.4.0" + yaml "^1.10.0" + yargs "^17.0.0" + +"@graphql-codegen/core@^2.6.8": + version "2.6.8" + resolved "https://registry.yarnpkg.com/@graphql-codegen/core/-/core-2.6.8.tgz#00c4011e3619ddbc6af5e41b2f254d6f6759556e" + integrity sha512-JKllNIipPrheRgl+/Hm/xuWMw9++xNQ12XJR/OHHgFopOg4zmN3TdlRSyYcv/K90hCFkkIwhlHFUQTfKrm8rxQ== + dependencies: + "@graphql-codegen/plugin-helpers" "^3.1.1" + "@graphql-tools/schema" "^9.0.0" + "@graphql-tools/utils" "^9.1.1" + tslib "~2.4.0" + +"@graphql-codegen/import-types-preset@^2.2.3": + version "2.2.6" + resolved "https://registry.yarnpkg.com/@graphql-codegen/import-types-preset/-/import-types-preset-2.2.6.tgz#aab292d93f79bffc09bc12a51843c3272b526475" + integrity sha512-Lo2ITOln3UVdyyEPiijj8bVhVg0Ghp/JzHXA2LXxrJVCRbXizQhVC2vjiaWTjMskPt9Zub0yIoce4+RrbsXKcg== + dependencies: + "@graphql-codegen/add" "^3.2.1" + 
"@graphql-codegen/plugin-helpers" "^2.7.2" + "@graphql-codegen/visitor-plugin-common" "2.13.1" + tslib "~2.4.0" + +"@graphql-codegen/plugin-helpers@^2.7.2": + version "2.7.2" + resolved "https://registry.yarnpkg.com/@graphql-codegen/plugin-helpers/-/plugin-helpers-2.7.2.tgz#6544f739d725441c826a8af6a49519f588ff9bed" + integrity sha512-kln2AZ12uii6U59OQXdjLk5nOlh1pHis1R98cDZGFnfaiAbX9V3fxcZ1MMJkB7qFUymTALzyjZoXXdyVmPMfRg== + dependencies: + "@graphql-tools/utils" "^8.8.0" + change-case-all "1.0.14" + common-tags "1.8.2" + import-from "4.0.0" + lodash "~4.17.0" + tslib "~2.4.0" + +"@graphql-codegen/plugin-helpers@^3.0.0", "@graphql-codegen/plugin-helpers@^3.1.1", "@graphql-codegen/plugin-helpers@^3.1.2": + version "3.1.2" + resolved "https://registry.yarnpkg.com/@graphql-codegen/plugin-helpers/-/plugin-helpers-3.1.2.tgz#69a2e91178f478ea6849846ade0a59a844d34389" + integrity sha512-emOQiHyIliVOIjKVKdsI5MXj312zmRDwmHpyUTZMjfpvxq/UVAHUJIVdVf+lnjjrI+LXBTgMlTWTgHQfmICxjg== + dependencies: + "@graphql-tools/utils" "^9.0.0" + change-case-all "1.0.15" + common-tags "1.8.2" + import-from "4.0.0" + lodash "~4.17.0" + tslib "~2.4.0" + +"@graphql-codegen/schema-ast@^2.6.1": + version "2.6.1" + resolved "https://registry.yarnpkg.com/@graphql-codegen/schema-ast/-/schema-ast-2.6.1.tgz#8ba1b38827c034b51ecd3ce88622c2ae6cd3fe1a" + integrity sha512-5TNW3b1IHJjCh07D2yQNGDQzUpUl2AD+GVe1Dzjqyx/d2Fn0TPMxLsHsKPS4Plg4saO8FK/QO70wLsP7fdbQ1w== + dependencies: + "@graphql-codegen/plugin-helpers" "^3.1.2" + "@graphql-tools/utils" "^9.0.0" + tslib "~2.4.0" + +"@graphql-codegen/typescript-graphql-request@^4.5.8": + version "4.5.9" + resolved "https://registry.yarnpkg.com/@graphql-codegen/typescript-graphql-request/-/typescript-graphql-request-4.5.9.tgz#d8a9488b7419cabf2ca98ae3936e82b9d244f2e9" + integrity sha512-Vtv5qymUXcR4UFdHOlJHzK5TN+CZUwMwFDGb3n4Gjcr4yln1BWbUb7DXgD0GHzpXwDIj5G2XmJnFtr0jihBfrg== + dependencies: + "@graphql-codegen/plugin-helpers" "^3.0.0" + "@graphql-codegen/visitor-plugin-common" "2.13.1" + auto-bind "~4.0.0" + tslib "~2.4.0" + +"@graphql-codegen/typescript-operations@^2.5.3": + version "2.5.13" + resolved "https://registry.yarnpkg.com/@graphql-codegen/typescript-operations/-/typescript-operations-2.5.13.tgz#f286c37f9c023356aacaa983ebd32e9e021a05ca" + integrity sha512-3vfR6Rx6iZU0JRt29GBkFlrSNTM6t+MSLF86ChvL4d/Jfo/JYAGuB3zNzPhirHYzJPCvLOAx2gy9ID1ltrpYiw== + dependencies: + "@graphql-codegen/plugin-helpers" "^3.1.2" + "@graphql-codegen/typescript" "^2.8.8" + "@graphql-codegen/visitor-plugin-common" "2.13.8" + auto-bind "~4.0.0" + tslib "~2.4.0" + +"@graphql-codegen/typescript@^2.7.3", "@graphql-codegen/typescript@^2.8.8": + version "2.8.8" + resolved "https://registry.yarnpkg.com/@graphql-codegen/typescript/-/typescript-2.8.8.tgz#8c3b9153e334db43c65f8f31ced69b4c60d14861" + integrity sha512-A0oUi3Oy6+DormOlrTC4orxT9OBZkIglhbJBcDmk34jAKKUgesukXRd4yOhmTrnbchpXz2T8IAOFB3FWIaK4Rw== + dependencies: + "@graphql-codegen/plugin-helpers" "^3.1.2" + "@graphql-codegen/schema-ast" "^2.6.1" + "@graphql-codegen/visitor-plugin-common" "2.13.8" + auto-bind "~4.0.0" + tslib "~2.4.0" + +"@graphql-codegen/visitor-plugin-common@2.13.1": + version "2.13.1" + resolved "https://registry.yarnpkg.com/@graphql-codegen/visitor-plugin-common/-/visitor-plugin-common-2.13.1.tgz#2228660f6692bcdb96b1f6d91a0661624266b76b" + integrity sha512-mD9ufZhDGhyrSaWQGrU1Q1c5f01TeWtSWy/cDwXYjJcHIj1Y/DG2x0tOflEfCvh5WcnmHNIw4lzDsg1W7iFJEg== + dependencies: + "@graphql-codegen/plugin-helpers" "^2.7.2" + "@graphql-tools/optimize" "^1.3.0" + 
"@graphql-tools/relay-operation-optimizer" "^6.5.0" + "@graphql-tools/utils" "^8.8.0" + auto-bind "~4.0.0" + change-case-all "1.0.14" + dependency-graph "^0.11.0" + graphql-tag "^2.11.0" + parse-filepath "^1.0.2" + tslib "~2.4.0" + +"@graphql-codegen/visitor-plugin-common@2.13.8": + version "2.13.8" + resolved "https://registry.yarnpkg.com/@graphql-codegen/visitor-plugin-common/-/visitor-plugin-common-2.13.8.tgz#09bc6317b227e5a278f394f4cef0d6c2d1910597" + integrity sha512-IQWu99YV4wt8hGxIbBQPtqRuaWZhkQRG2IZKbMoSvh0vGeWb3dB0n0hSgKaOOxDY+tljtOf9MTcUYvJslQucMQ== + dependencies: + "@graphql-codegen/plugin-helpers" "^3.1.2" + "@graphql-tools/optimize" "^1.3.0" + "@graphql-tools/relay-operation-optimizer" "^6.5.0" + "@graphql-tools/utils" "^9.0.0" + auto-bind "~4.0.0" + change-case-all "1.0.15" + dependency-graph "^0.11.0" + graphql-tag "^2.11.0" + parse-filepath "^1.0.2" + tslib "~2.4.0" + +"@graphql-tools/apollo-engine-loader@^7.3.6": + version "7.3.26" + resolved "https://registry.yarnpkg.com/@graphql-tools/apollo-engine-loader/-/apollo-engine-loader-7.3.26.tgz#91e54460d5579933e42a2010b8688c3459c245d8" + integrity sha512-h1vfhdJFjnCYn9b5EY1Z91JTF0KB3hHVJNQIsiUV2mpQXZdeOXQoaWeYEKaiI5R6kwBw5PP9B0fv3jfUIG8LyQ== + dependencies: + "@ardatan/sync-fetch" "^0.0.1" + "@graphql-tools/utils" "^9.2.1" + "@whatwg-node/fetch" "^0.8.0" + tslib "^2.4.0" + +"@graphql-tools/batch-execute@^8.5.21": + version "8.5.21" + resolved "https://registry.yarnpkg.com/@graphql-tools/batch-execute/-/batch-execute-8.5.21.tgz#dce327cab43bf6d7389e607f603369724c93ce06" + integrity sha512-DDyCPUSUjc0he/I9byguxohW/owQyVEO/gJcXLFAbHtTjORci3gRaRwLw24j0WaP+ZAlxYTMQs1HSlyJFaUArA== + dependencies: + "@graphql-tools/utils" "^9.2.1" + dataloader "^2.2.2" + tslib "^2.4.0" + value-or-promise "^1.0.12" + +"@graphql-tools/code-file-loader@^7.3.13": + version "7.3.23" + resolved "https://registry.yarnpkg.com/@graphql-tools/code-file-loader/-/code-file-loader-7.3.23.tgz#33793f9a1f8e74981f8ae6ec4ab7061f9713db15" + integrity sha512-8Wt1rTtyTEs0p47uzsPJ1vAtfAx0jmxPifiNdmo9EOCuUPyQGEbMaik/YkqZ7QUFIEYEQu+Vgfo8tElwOPtx5Q== + dependencies: + "@graphql-tools/graphql-tag-pluck" "7.5.2" + "@graphql-tools/utils" "^9.2.1" + globby "^11.0.3" + tslib "^2.4.0" + unixify "^1.0.0" + +"@graphql-tools/delegate@^9.0.31": + version "9.0.34" + resolved "https://registry.yarnpkg.com/@graphql-tools/delegate/-/delegate-9.0.34.tgz#50a877b3645bf5275d12c6f8a422cefdfe16bdad" + integrity sha512-qVTeq+0nxiDo54f28r0tOoXE6nipzGyDp5uuNMJJRvmIWjFGiyOBI8IsynPvLb2nCwhcpaTVJ2Bqswk/3DNO4w== + dependencies: + "@graphql-tools/batch-execute" "^8.5.21" + "@graphql-tools/executor" "^0.0.19" + "@graphql-tools/schema" "^9.0.19" + "@graphql-tools/utils" "^9.2.1" + dataloader "^2.2.2" + tslib "^2.5.0" + value-or-promise "^1.0.12" + +"@graphql-tools/executor-graphql-ws@^0.0.14": + version "0.0.14" + resolved "https://registry.yarnpkg.com/@graphql-tools/executor-graphql-ws/-/executor-graphql-ws-0.0.14.tgz#e0f53fc4cfc8a06cc461b2bc1edb4bb9a8e837ed" + integrity sha512-P2nlkAsPZKLIXImFhj0YTtny5NQVGSsKnhi7PzXiaHSXc6KkzqbWZHKvikD4PObanqg+7IO58rKFpGXP7eeO+w== + dependencies: + "@graphql-tools/utils" "^9.2.1" + "@repeaterjs/repeater" "3.0.4" + "@types/ws" "^8.0.0" + graphql-ws "5.12.1" + isomorphic-ws "5.0.0" + tslib "^2.4.0" + ws "8.13.0" + +"@graphql-tools/executor-http@^0.1.7", "@graphql-tools/executor-http@^0.1.9": + version "0.1.9" + resolved "https://registry.yarnpkg.com/@graphql-tools/executor-http/-/executor-http-0.1.9.tgz#ddd74ef376b4a2ed59c622acbcca068890854a30" + integrity 
sha512-tNzMt5qc1ptlHKfpSv9wVBVKCZ7gks6Yb/JcYJluxZIT4qRV+TtOFjpptfBU63usgrGVOVcGjzWc/mt7KhmmpQ== + dependencies: + "@graphql-tools/utils" "^9.2.1" + "@repeaterjs/repeater" "^3.0.4" + "@whatwg-node/fetch" "^0.8.1" + dset "^3.1.2" + extract-files "^11.0.0" + meros "^1.2.1" + tslib "^2.4.0" + value-or-promise "^1.0.12" + +"@graphql-tools/executor-legacy-ws@^0.0.11": + version "0.0.11" + resolved "https://registry.yarnpkg.com/@graphql-tools/executor-legacy-ws/-/executor-legacy-ws-0.0.11.tgz#a1e12be8279e92a363a23d4105461a34cd9e389e" + integrity sha512-4ai+NnxlNfvIQ4c70hWFvOZlSUN8lt7yc+ZsrwtNFbFPH/EroIzFMapAxM9zwyv9bH38AdO3TQxZ5zNxgBdvUw== + dependencies: + "@graphql-tools/utils" "^9.2.1" + "@types/ws" "^8.0.0" + isomorphic-ws "5.0.0" + tslib "^2.4.0" + ws "8.13.0" + +"@graphql-tools/executor@^0.0.19": + version "0.0.19" + resolved "https://registry.yarnpkg.com/@graphql-tools/executor/-/executor-0.0.19.tgz#df3e0aa03923cebc6a4ff9bad76a8fd4c8e34eb5" + integrity sha512-JYxuxseH7GGQ9olamrK73xUA05q/bKZ1efnYglSD6/05pb+Gz+VXDK8Y3pWha10aM6c529t//6hzJ5T/99Be5Q== + dependencies: + "@graphql-tools/utils" "^9.2.1" + "@graphql-typed-document-node/core" "3.2.0" + "@repeaterjs/repeater" "^3.0.4" + tslib "^2.4.0" + value-or-promise "^1.0.12" + +"@graphql-tools/git-loader@^7.2.13": + version "7.2.22" + resolved "https://registry.yarnpkg.com/@graphql-tools/git-loader/-/git-loader-7.2.22.tgz#b937273adae69a992d5d1d2e43bc1df21b6654d3" + integrity sha512-9rpHggHiOeqA7/ZlKD3c5yXk5bPGw0zkIgKMerjCmFAQAZ6CEVfsa7nAzEWQxn6rpdaBft4/0A56rPMrsUwGBA== + dependencies: + "@graphql-tools/graphql-tag-pluck" "7.5.2" + "@graphql-tools/utils" "^9.2.1" + is-glob "4.0.3" + micromatch "^4.0.4" + tslib "^2.4.0" + unixify "^1.0.0" + +"@graphql-tools/github-loader@^7.3.20": + version "7.3.28" + resolved "https://registry.yarnpkg.com/@graphql-tools/github-loader/-/github-loader-7.3.28.tgz#a7166b136e8442bd8b3ab943ad3b66c84bcabfcf" + integrity sha512-OK92Lf9pmxPQvjUNv05b3tnVhw0JRfPqOf15jZjyQ8BfdEUrJoP32b4dRQQem/wyRL24KY4wOfArJNqzpsbwCA== + dependencies: + "@ardatan/sync-fetch" "^0.0.1" + "@graphql-tools/executor-http" "^0.1.9" + "@graphql-tools/graphql-tag-pluck" "^7.4.6" + "@graphql-tools/utils" "^9.2.1" + "@whatwg-node/fetch" "^0.8.0" + tslib "^2.4.0" + value-or-promise "^1.0.12" + +"@graphql-tools/graphql-file-loader@^7.3.7", "@graphql-tools/graphql-file-loader@^7.5.0": + version "7.5.17" + resolved "https://registry.yarnpkg.com/@graphql-tools/graphql-file-loader/-/graphql-file-loader-7.5.17.tgz#7c281617ea3ab4db4d42a2bdb49850f2b937f0f9" + integrity sha512-hVwwxPf41zOYgm4gdaZILCYnKB9Zap7Ys9OhY1hbwuAuC4MMNY9GpUjoTU3CQc3zUiPoYStyRtUGkHSJZ3HxBw== + dependencies: + "@graphql-tools/import" "6.7.18" + "@graphql-tools/utils" "^9.2.1" + globby "^11.0.3" + tslib "^2.4.0" + unixify "^1.0.0" + +"@graphql-tools/graphql-tag-pluck@7.5.2", "@graphql-tools/graphql-tag-pluck@^7.4.6": + version "7.5.2" + resolved "https://registry.yarnpkg.com/@graphql-tools/graphql-tag-pluck/-/graphql-tag-pluck-7.5.2.tgz#502f1e066e19d832ebdeba5f571d7636dc27572d" + integrity sha512-RW+H8FqOOLQw0BPXaahYepVSRjuOHw+7IL8Opaa5G5uYGOBxoXR7DceyQ7BcpMgktAOOmpDNQ2WtcboChOJSRA== + dependencies: + "@babel/parser" "^7.16.8" + "@babel/plugin-syntax-import-assertions" "^7.20.0" + "@babel/traverse" "^7.16.8" + "@babel/types" "^7.16.8" + "@graphql-tools/utils" "^9.2.1" + tslib "^2.4.0" + +"@graphql-tools/import@6.7.18": + version "6.7.18" + resolved "https://registry.yarnpkg.com/@graphql-tools/import/-/import-6.7.18.tgz#ad092d8a4546bb6ffc3e871e499eec7ac368680b" + integrity 
sha512-XQDdyZTp+FYmT7as3xRWH/x8dx0QZA2WZqfMF5EWb36a0PiH7WwlRQYIdyYXj8YCLpiWkeBXgBRHmMnwEYR8iQ== + dependencies: + "@graphql-tools/utils" "^9.2.1" + resolve-from "5.0.0" + tslib "^2.4.0" + +"@graphql-tools/json-file-loader@^7.3.7", "@graphql-tools/json-file-loader@^7.4.1": + version "7.4.18" + resolved "https://registry.yarnpkg.com/@graphql-tools/json-file-loader/-/json-file-loader-7.4.18.tgz#d78ae40979bde51cfc59717757354afc9e35fba2" + integrity sha512-AJ1b6Y1wiVgkwsxT5dELXhIVUPs/u3VZ8/0/oOtpcoyO/vAeM5rOvvWegzicOOnQw8G45fgBRMkkRfeuwVt6+w== + dependencies: + "@graphql-tools/utils" "^9.2.1" + globby "^11.0.3" + tslib "^2.4.0" + unixify "^1.0.0" + +"@graphql-tools/load@^7.5.5", "@graphql-tools/load@^7.8.0": + version "7.8.14" + resolved "https://registry.yarnpkg.com/@graphql-tools/load/-/load-7.8.14.tgz#f2356f9a5f658a42e33934ae036e4b2cadf2d1e9" + integrity sha512-ASQvP+snHMYm+FhIaLxxFgVdRaM0vrN9wW2BKInQpktwWTXVyk+yP5nQUCEGmn0RTdlPKrffBaigxepkEAJPrg== + dependencies: + "@graphql-tools/schema" "^9.0.18" + "@graphql-tools/utils" "^9.2.1" + p-limit "3.1.0" + tslib "^2.4.0" + +"@graphql-tools/merge@^8.2.6", "@graphql-tools/merge@^8.4.1": + version "8.4.1" + resolved "https://registry.yarnpkg.com/@graphql-tools/merge/-/merge-8.4.1.tgz#52879e5f73565f504ceea04fcd9ef90a6e733c62" + integrity sha512-hssnPpZ818mxgl5+GfyOOSnnflAxiaTn1A1AojZcIbh4J52sS1Q0gSuBR5VrnUDjuxiqoCotpXdAQl+K+U6KLQ== + dependencies: + "@graphql-tools/utils" "^9.2.1" + tslib "^2.4.0" + +"@graphql-tools/optimize@^1.3.0": + version "1.4.0" + resolved "https://registry.yarnpkg.com/@graphql-tools/optimize/-/optimize-1.4.0.tgz#20d6a9efa185ef8fc4af4fd409963e0907c6e112" + integrity sha512-dJs/2XvZp+wgHH8T5J2TqptT9/6uVzIYvA6uFACha+ufvdMBedkfR4b4GbT8jAKLRARiqRTxy3dctnwkTM2tdw== + dependencies: + tslib "^2.4.0" + +"@graphql-tools/prisma-loader@^7.2.49": + version "7.2.71" + resolved "https://registry.yarnpkg.com/@graphql-tools/prisma-loader/-/prisma-loader-7.2.71.tgz#7540bfabcc9717c10c1ebdb359d265f721205199" + integrity sha512-FuIvhRrkduqPdj3QX0/anCxGViEETfoZ/1NvotfM6iVO1XxR75VXvP/iyKGbK6XvYRXwSstgj2DetlQnqdgXhA== + dependencies: + "@graphql-tools/url-loader" "^7.17.18" + "@graphql-tools/utils" "^9.2.1" + "@types/js-yaml" "^4.0.0" + "@types/json-stable-stringify" "^1.0.32" + "@whatwg-node/fetch" "^0.8.2" + chalk "^4.1.0" + debug "^4.3.1" + dotenv "^16.0.0" + graphql-request "^6.0.0" + http-proxy-agent "^5.0.0" + https-proxy-agent "^5.0.0" + jose "^4.11.4" + js-yaml "^4.0.0" + json-stable-stringify "^1.0.1" + lodash "^4.17.20" + scuid "^1.1.0" + tslib "^2.4.0" + yaml-ast-parser "^0.0.43" + +"@graphql-tools/relay-operation-optimizer@^6.5.0": + version "6.5.18" + resolved "https://registry.yarnpkg.com/@graphql-tools/relay-operation-optimizer/-/relay-operation-optimizer-6.5.18.tgz#a1b74a8e0a5d0c795b8a4d19629b654cf66aa5ab" + integrity sha512-mc5VPyTeV+LwiM+DNvoDQfPqwQYhPV/cl5jOBjTgSniyaq8/86aODfMkrE2OduhQ5E00hqrkuL2Fdrgk0w1QJg== + dependencies: + "@ardatan/relay-compiler" "12.0.0" + "@graphql-tools/utils" "^9.2.1" + tslib "^2.4.0" + +"@graphql-tools/schema@^9.0.0", "@graphql-tools/schema@^9.0.18", "@graphql-tools/schema@^9.0.19": + version "9.0.19" + resolved "https://registry.yarnpkg.com/@graphql-tools/schema/-/schema-9.0.19.tgz#c4ad373b5e1b8a0cf365163435b7d236ebdd06e7" + integrity sha512-oBRPoNBtCkk0zbUsyP4GaIzCt8C0aCI4ycIRUL67KK5pOHljKLBBtGT+Jr6hkzA74C8Gco8bpZPe7aWFjiaK2w== + dependencies: + "@graphql-tools/merge" "^8.4.1" + "@graphql-tools/utils" "^9.2.1" + tslib "^2.4.0" + value-or-promise "^1.0.12" + +"@graphql-tools/url-loader@^7.13.2", 
"@graphql-tools/url-loader@^7.17.18", "@graphql-tools/url-loader@^7.9.7": + version "7.17.18" + resolved "https://registry.yarnpkg.com/@graphql-tools/url-loader/-/url-loader-7.17.18.tgz#3e253594d23483e4c0dd3a4c3dd2ad5cd0141192" + integrity sha512-ear0CiyTj04jCVAxi7TvgbnGDIN2HgqzXzwsfcqiVg9cvjT40NcMlZ2P1lZDgqMkZ9oyLTV8Bw6j+SyG6A+xPw== + dependencies: + "@ardatan/sync-fetch" "^0.0.1" + "@graphql-tools/delegate" "^9.0.31" + "@graphql-tools/executor-graphql-ws" "^0.0.14" + "@graphql-tools/executor-http" "^0.1.7" + "@graphql-tools/executor-legacy-ws" "^0.0.11" + "@graphql-tools/utils" "^9.2.1" + "@graphql-tools/wrap" "^9.4.2" + "@types/ws" "^8.0.0" + "@whatwg-node/fetch" "^0.8.0" + isomorphic-ws "^5.0.0" + tslib "^2.4.0" + value-or-promise "^1.0.11" + ws "^8.12.0" + +"@graphql-tools/utils@^8.8.0": + version "8.13.1" + resolved "https://registry.yarnpkg.com/@graphql-tools/utils/-/utils-8.13.1.tgz#b247607e400365c2cd87ff54654d4ad25a7ac491" + integrity sha512-qIh9yYpdUFmctVqovwMdheVNJqFh+DQNWIhX87FJStfXYnmweBUDATok9fWPleKeFwxnW8IapKmY8m8toJEkAw== + dependencies: + tslib "^2.4.0" + +"@graphql-tools/utils@^9.0.0", "@graphql-tools/utils@^9.1.1", "@graphql-tools/utils@^9.2.1": + version "9.2.1" + resolved "https://registry.yarnpkg.com/@graphql-tools/utils/-/utils-9.2.1.tgz#1b3df0ef166cfa3eae706e3518b17d5922721c57" + integrity sha512-WUw506Ql6xzmOORlriNrD6Ugx+HjVgYxt9KCXD9mHAak+eaXSwuGGPyE60hy9xaDEoXKBsG7SkG69ybitaVl6A== + dependencies: + "@graphql-typed-document-node/core" "^3.1.1" + tslib "^2.4.0" + +"@graphql-tools/wrap@^9.4.2": + version "9.4.2" + resolved "https://registry.yarnpkg.com/@graphql-tools/wrap/-/wrap-9.4.2.tgz#30835587c4c73be1780908a7cb077d8013aa2703" + integrity sha512-DFcd9r51lmcEKn0JW43CWkkI2D6T9XI1juW/Yo86i04v43O9w2/k4/nx2XTJv4Yv+iXwUw7Ok81PGltwGJSDSA== + dependencies: + "@graphql-tools/delegate" "^9.0.31" + "@graphql-tools/schema" "^9.0.18" + "@graphql-tools/utils" "^9.2.1" + tslib "^2.4.0" + value-or-promise "^1.0.12" + +"@graphql-typed-document-node/core@3.2.0", "@graphql-typed-document-node/core@^3.1.1", "@graphql-typed-document-node/core@^3.2.0": + version "3.2.0" + resolved "https://registry.yarnpkg.com/@graphql-typed-document-node/core/-/core-3.2.0.tgz#5f3d96ec6b2354ad6d8a28bf216a1d97b5426861" + integrity sha512-mB9oAsNCm9aM3/SOv4YtBMqZbYj10R7dkq8byBqxGY/ncFwhf2oQzMV+LCRlWoDSEBJ3COiR1yeDvMtsoOsuFQ== + +"@humanwhocodes/config-array@^0.10.4": + version "0.10.7" + resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.10.7.tgz#6d53769fd0c222767e6452e8ebda825c22e9f0dc" + integrity sha512-MDl6D6sBsaV452/QSdX+4CXIjZhIcI0PELsxUjk4U828yd58vk3bTIvk/6w5FY+4hIy9sLW0sfrV7K7Kc++j/w== + dependencies: + "@humanwhocodes/object-schema" "^1.2.1" + debug "^4.1.1" + minimatch "^3.0.4" + +"@humanwhocodes/gitignore-to-minimatch@^1.0.2": + version "1.0.2" + resolved "https://registry.yarnpkg.com/@humanwhocodes/gitignore-to-minimatch/-/gitignore-to-minimatch-1.0.2.tgz#316b0a63b91c10e53f242efb4ace5c3b34e8728d" + integrity sha512-rSqmMJDdLFUsyxR6FMtD00nfQKKLFb1kv+qBbOVKqErvloEIJLo5bDTJTQNTYgeyp78JsA7u/NPi5jT1GR/MuA== + +"@humanwhocodes/module-importer@^1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz#af5b2691a22b44be847b0ca81641c5fb6ad0172c" + integrity sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA== + +"@humanwhocodes/object-schema@^1.2.1": + version "1.2.1" + resolved 
"https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz#b520529ec21d8e5945a1851dfd1c32e94e39ff45" + integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA== + +"@istanbuljs/load-nyc-config@^1.0.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced" + integrity sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ== + dependencies: + camelcase "^5.3.1" + find-up "^4.1.0" + get-package-type "^0.1.0" + js-yaml "^3.13.1" + resolve-from "^5.0.0" + +"@istanbuljs/schema@^0.1.2": + version "0.1.3" + resolved "https://registry.yarnpkg.com/@istanbuljs/schema/-/schema-0.1.3.tgz#e45e384e4b8ec16bce2fd903af78450f6bf7ec98" + integrity sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA== + +"@jest/console@^28.1.3": + version "28.1.3" + resolved "https://registry.yarnpkg.com/@jest/console/-/console-28.1.3.tgz#2030606ec03a18c31803b8a36382762e447655df" + integrity sha512-QPAkP5EwKdK/bxIr6C1I4Vs0rm2nHiANzj/Z5X2JQkrZo6IqvC4ldZ9K95tF0HdidhA8Bo6egxSzUFPYKcEXLw== + dependencies: + "@jest/types" "^28.1.3" + "@types/node" "*" + chalk "^4.0.0" + jest-message-util "^28.1.3" + jest-util "^28.1.3" + slash "^3.0.0" + +"@jest/core@^28.1.3": + version "28.1.3" + resolved "https://registry.yarnpkg.com/@jest/core/-/core-28.1.3.tgz#0ebf2bd39840f1233cd5f2d1e6fc8b71bd5a1ac7" + integrity sha512-CIKBrlaKOzA7YG19BEqCw3SLIsEwjZkeJzf5bdooVnW4bH5cktqe3JX+G2YV1aK5vP8N9na1IGWFzYaTp6k6NA== + dependencies: + "@jest/console" "^28.1.3" + "@jest/reporters" "^28.1.3" + "@jest/test-result" "^28.1.3" + "@jest/transform" "^28.1.3" + "@jest/types" "^28.1.3" + "@types/node" "*" + ansi-escapes "^4.2.1" + chalk "^4.0.0" + ci-info "^3.2.0" + exit "^0.1.2" + graceful-fs "^4.2.9" + jest-changed-files "^28.1.3" + jest-config "^28.1.3" + jest-haste-map "^28.1.3" + jest-message-util "^28.1.3" + jest-regex-util "^28.0.2" + jest-resolve "^28.1.3" + jest-resolve-dependencies "^28.1.3" + jest-runner "^28.1.3" + jest-runtime "^28.1.3" + jest-snapshot "^28.1.3" + jest-util "^28.1.3" + jest-validate "^28.1.3" + jest-watcher "^28.1.3" + micromatch "^4.0.4" + pretty-format "^28.1.3" + rimraf "^3.0.0" + slash "^3.0.0" + strip-ansi "^6.0.0" + +"@jest/environment@^28.1.3": + version "28.1.3" + resolved "https://registry.yarnpkg.com/@jest/environment/-/environment-28.1.3.tgz#abed43a6b040a4c24fdcb69eab1f97589b2d663e" + integrity sha512-1bf40cMFTEkKyEf585R9Iz1WayDjHoHqvts0XFYEqyKM3cFWDpeMoqKKTAF9LSYQModPUlh8FKptoM2YcMWAXA== + dependencies: + "@jest/fake-timers" "^28.1.3" + "@jest/types" "^28.1.3" + "@types/node" "*" + jest-mock "^28.1.3" + +"@jest/expect-utils@^28.1.3": + version "28.1.3" + resolved "https://registry.yarnpkg.com/@jest/expect-utils/-/expect-utils-28.1.3.tgz#58561ce5db7cd253a7edddbc051fb39dda50f525" + integrity sha512-wvbi9LUrHJLn3NlDW6wF2hvIMtd4JUl2QNVrjq+IBSHirgfrR3o9RnVtxzdEGO2n9JyIWwHnLfby5KzqBGg2YA== + dependencies: + jest-get-type "^28.0.2" + +"@jest/expect@^28.1.3": + version "28.1.3" + resolved "https://registry.yarnpkg.com/@jest/expect/-/expect-28.1.3.tgz#9ac57e1d4491baca550f6bdbd232487177ad6a72" + integrity sha512-lzc8CpUbSoE4dqT0U+g1qODQjBRHPpCPXissXD4mS9+sWQdmmpeJ9zSH1rS1HEkrsMN0fb7nKrJ9giAR1d3wBw== + dependencies: + expect "^28.1.3" + jest-snapshot "^28.1.3" + +"@jest/fake-timers@^28.1.3": + version "28.1.3" + resolved 
"https://registry.yarnpkg.com/@jest/fake-timers/-/fake-timers-28.1.3.tgz#230255b3ad0a3d4978f1d06f70685baea91c640e" + integrity sha512-D/wOkL2POHv52h+ok5Oj/1gOG9HSywdoPtFsRCUmlCILXNn5eIWmcnd3DIiWlJnpGvQtmajqBP95Ei0EimxfLw== + dependencies: + "@jest/types" "^28.1.3" + "@sinonjs/fake-timers" "^9.1.2" + "@types/node" "*" + jest-message-util "^28.1.3" + jest-mock "^28.1.3" + jest-util "^28.1.3" + +"@jest/globals@^28.1.3": + version "28.1.3" + resolved "https://registry.yarnpkg.com/@jest/globals/-/globals-28.1.3.tgz#a601d78ddc5fdef542728309894895b4a42dc333" + integrity sha512-XFU4P4phyryCXu1pbcqMO0GSQcYe1IsalYCDzRNyhetyeyxMcIxa11qPNDpVNLeretItNqEmYYQn1UYz/5x1NA== + dependencies: + "@jest/environment" "^28.1.3" + "@jest/expect" "^28.1.3" + "@jest/types" "^28.1.3" + +"@jest/reporters@^28.1.3": + version "28.1.3" + resolved "https://registry.yarnpkg.com/@jest/reporters/-/reporters-28.1.3.tgz#9adf6d265edafc5fc4a434cfb31e2df5a67a369a" + integrity sha512-JuAy7wkxQZVNU/V6g9xKzCGC5LVXx9FDcABKsSXp5MiKPEE2144a/vXTEDoyzjUpZKfVwp08Wqg5A4WfTMAzjg== + dependencies: + "@bcoe/v8-coverage" "^0.2.3" + "@jest/console" "^28.1.3" + "@jest/test-result" "^28.1.3" + "@jest/transform" "^28.1.3" + "@jest/types" "^28.1.3" + "@jridgewell/trace-mapping" "^0.3.13" + "@types/node" "*" + chalk "^4.0.0" + collect-v8-coverage "^1.0.0" + exit "^0.1.2" + glob "^7.1.3" + graceful-fs "^4.2.9" + istanbul-lib-coverage "^3.0.0" + istanbul-lib-instrument "^5.1.0" + istanbul-lib-report "^3.0.0" + istanbul-lib-source-maps "^4.0.0" + istanbul-reports "^3.1.3" + jest-message-util "^28.1.3" + jest-util "^28.1.3" + jest-worker "^28.1.3" + slash "^3.0.0" + string-length "^4.0.1" + strip-ansi "^6.0.0" + terminal-link "^2.0.0" + v8-to-istanbul "^9.0.1" + +"@jest/schemas@^28.1.3": + version "28.1.3" + resolved "https://registry.yarnpkg.com/@jest/schemas/-/schemas-28.1.3.tgz#ad8b86a66f11f33619e3d7e1dcddd7f2d40ff905" + integrity sha512-/l/VWsdt/aBXgjshLWOFyFt3IVdYypu5y2Wn2rOO1un6nkqIn8SLXzgIMYXFyYsRWDyF5EthmKJMIdJvk08grg== + dependencies: + "@sinclair/typebox" "^0.24.1" + +"@jest/source-map@^28.1.2": + version "28.1.2" + resolved "https://registry.yarnpkg.com/@jest/source-map/-/source-map-28.1.2.tgz#7fe832b172b497d6663cdff6c13b0a920e139e24" + integrity sha512-cV8Lx3BeStJb8ipPHnqVw/IM2VCMWO3crWZzYodSIkxXnRcXJipCdx1JCK0K5MsJJouZQTH73mzf4vgxRaH9ww== + dependencies: + "@jridgewell/trace-mapping" "^0.3.13" + callsites "^3.0.0" + graceful-fs "^4.2.9" + +"@jest/test-result@^28.1.3": + version "28.1.3" + resolved "https://registry.yarnpkg.com/@jest/test-result/-/test-result-28.1.3.tgz#5eae945fd9f4b8fcfce74d239e6f725b6bf076c5" + integrity sha512-kZAkxnSE+FqE8YjW8gNuoVkkC9I7S1qmenl8sGcDOLropASP+BkcGKwhXoyqQuGOGeYY0y/ixjrd/iERpEXHNg== + dependencies: + "@jest/console" "^28.1.3" + "@jest/types" "^28.1.3" + "@types/istanbul-lib-coverage" "^2.0.0" + collect-v8-coverage "^1.0.0" + +"@jest/test-sequencer@^28.1.3": + version "28.1.3" + resolved "https://registry.yarnpkg.com/@jest/test-sequencer/-/test-sequencer-28.1.3.tgz#9d0c283d906ac599c74bde464bc0d7e6a82886c3" + integrity sha512-NIMPEqqa59MWnDi1kvXXpYbqsfQmSJsIbnd85mdVGkiDfQ9WQQTXOLsvISUfonmnBT+w85WEgneCigEEdHDFxw== + dependencies: + "@jest/test-result" "^28.1.3" + graceful-fs "^4.2.9" + jest-haste-map "^28.1.3" + slash "^3.0.0" + +"@jest/transform@^28.1.3": + version "28.1.3" + resolved "https://registry.yarnpkg.com/@jest/transform/-/transform-28.1.3.tgz#59d8098e50ab07950e0f2fc0fc7ec462371281b0" + integrity sha512-u5dT5di+oFI6hfcLOHGTAfmUxFRrjK+vnaP0kkVow9Md/M7V/MxqQMOz/VV25UZO8pzeA9PjfTpOu6BDuwSPQA== + 
dependencies: + "@babel/core" "^7.11.6" + "@jest/types" "^28.1.3" + "@jridgewell/trace-mapping" "^0.3.13" + babel-plugin-istanbul "^6.1.1" + chalk "^4.0.0" + convert-source-map "^1.4.0" + fast-json-stable-stringify "^2.0.0" + graceful-fs "^4.2.9" + jest-haste-map "^28.1.3" + jest-regex-util "^28.0.2" + jest-util "^28.1.3" + micromatch "^4.0.4" + pirates "^4.0.4" + slash "^3.0.0" + write-file-atomic "^4.0.1" + +"@jest/types@^28.1.3": + version "28.1.3" + resolved "https://registry.yarnpkg.com/@jest/types/-/types-28.1.3.tgz#b05de80996ff12512bc5ceb1d208285a7d11748b" + integrity sha512-RyjiyMUZrKz/c+zlMFO1pm70DcIlST8AeWTkoUdZevew44wcNZQHsEVOiCVtgVnlFFD82FPaXycys58cf2muVQ== + dependencies: + "@jest/schemas" "^28.1.3" + "@types/istanbul-lib-coverage" "^2.0.0" + "@types/istanbul-reports" "^3.0.0" + "@types/node" "*" + "@types/yargs" "^17.0.8" + chalk "^4.0.0" + +"@jridgewell/gen-mapping@^0.1.0": + version "0.1.1" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.1.1.tgz#e5d2e450306a9491e3bd77e323e38d7aff315996" + integrity sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w== + dependencies: + "@jridgewell/set-array" "^1.0.0" + "@jridgewell/sourcemap-codec" "^1.4.10" + +"@jridgewell/gen-mapping@^0.3.0": + version "0.3.3" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz#7e02e6eb5df901aaedb08514203b096614024098" + integrity sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ== + dependencies: + "@jridgewell/set-array" "^1.0.1" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping" "^0.3.9" + +"@jridgewell/gen-mapping@^0.3.2": + version "0.3.2" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz#c1aedc61e853f2bb9f5dfe6d4442d3b565b253b9" + integrity sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A== + dependencies: + "@jridgewell/set-array" "^1.0.1" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping" "^0.3.9" + +"@jridgewell/resolve-uri@3.1.0", "@jridgewell/resolve-uri@^3.0.3": + version "3.1.0" + resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" + integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== + +"@jridgewell/set-array@^1.0.0", "@jridgewell/set-array@^1.0.1": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" + integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== + +"@jridgewell/sourcemap-codec@1.4.14", "@jridgewell/sourcemap-codec@^1.4.10": + version "1.4.14" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" + integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== + +"@jridgewell/trace-mapping@0.3.9": + version "0.3.9" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz#6534fd5933a53ba7cbf3a17615e273a0d1273ff9" + integrity sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ== + dependencies: + "@jridgewell/resolve-uri" "^3.0.3" + "@jridgewell/sourcemap-codec" "^1.4.10" + +"@jridgewell/trace-mapping@^0.3.12", 
"@jridgewell/trace-mapping@^0.3.13", "@jridgewell/trace-mapping@^0.3.9": + version "0.3.17" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.17.tgz#793041277af9073b0951a7fe0f0d8c4c98c36985" + integrity sha512-MCNzAp77qzKca9+W/+I0+sEpaUnZoeasnghNeVc41VZCEKaCH73Vq3BZZ/SzWIgrqE4H4ceI+p+b6C0mHf9T4g== + dependencies: + "@jridgewell/resolve-uri" "3.1.0" + "@jridgewell/sourcemap-codec" "1.4.14" + +"@jridgewell/trace-mapping@^0.3.17": + version "0.3.18" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.18.tgz#25783b2086daf6ff1dcb53c9249ae480e4dd4cd6" + integrity sha512-w+niJYzMHdd7USdiH2U6869nqhD2nbfZXND5Yp93qIbEmnDNk7PD48o+YchRVpzMU7M6jVCbenTR7PA1FLQ9pA== + dependencies: + "@jridgewell/resolve-uri" "3.1.0" + "@jridgewell/sourcemap-codec" "1.4.14" + +"@jsdevtools/ono@^7.1.3": + version "7.1.3" + resolved "https://registry.yarnpkg.com/@jsdevtools/ono/-/ono-7.1.3.tgz#9df03bbd7c696a5c58885c34aa06da41c8543796" + integrity sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg== + +"@noble/hashes@1.1.3", "@noble/hashes@~1.1.1": + version "1.1.3" + resolved "https://registry.yarnpkg.com/@noble/hashes/-/hashes-1.1.3.tgz#360afc77610e0a61f3417e497dcf36862e4f8111" + integrity sha512-CE0FCR57H2acVI5UOzIGSSIYxZ6v/HOhDR0Ro9VLyhnzLwx0o8W1mmgaqlEUx4049qJDlIBRztv5k+MM8vbO3A== + +"@nodelib/fs.scandir@2.1.5": + version "2.1.5" + resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5" + integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== + dependencies: + "@nodelib/fs.stat" "2.0.5" + run-parallel "^1.1.9" + +"@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": + version "2.0.5" + resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b" + integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== + +"@nodelib/fs.walk@^1.2.3": + version "1.2.8" + resolved "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a" + integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg== + dependencies: + "@nodelib/fs.scandir" "2.1.5" + fastq "^1.6.0" + +"@peculiar/asn1-schema@^2.3.6": + version "2.3.6" + resolved "https://registry.yarnpkg.com/@peculiar/asn1-schema/-/asn1-schema-2.3.6.tgz#3dd3c2ade7f702a9a94dfb395c192f5fa5d6b922" + integrity sha512-izNRxPoaeJeg/AyH8hER6s+H7p4itk+03QCa4sbxI3lNdseQYCuxzgsuNK8bTXChtLTjpJz6NmXKA73qLa3rCA== + dependencies: + asn1js "^3.0.5" + pvtsutils "^1.3.2" + tslib "^2.4.0" + +"@peculiar/json-schema@^1.1.12": + version "1.1.12" + resolved "https://registry.yarnpkg.com/@peculiar/json-schema/-/json-schema-1.1.12.tgz#fe61e85259e3b5ba5ad566cb62ca75b3d3cd5339" + integrity sha512-coUfuoMeIB7B8/NMekxaDzLhaYmp0HZNPEjYRm9goRou8UZIC3z21s0sL9AWoCw4EG876QyO3kYrc61WNF9B/w== + dependencies: + tslib "^2.0.0" + +"@peculiar/webcrypto@^1.4.0": + version "1.4.3" + resolved "https://registry.yarnpkg.com/@peculiar/webcrypto/-/webcrypto-1.4.3.tgz#078b3e8f598e847b78683dc3ba65feb5029b93a7" + integrity sha512-VtaY4spKTdN5LjJ04im/d/joXuvLbQdgy5Z4DXF4MFZhQ+MTrejbNMkfZBp1Bs3O5+bFqnJgyGdPuZQflvIa5A== + dependencies: + "@peculiar/asn1-schema" "^2.3.6" + "@peculiar/json-schema" "^1.1.12" + pvtsutils "^1.3.2" + tslib "^2.5.0" + webcrypto-core "^1.7.7" + +"@repeaterjs/repeater@3.0.4", 
"@repeaterjs/repeater@^3.0.4": + version "3.0.4" + resolved "https://registry.yarnpkg.com/@repeaterjs/repeater/-/repeater-3.0.4.tgz#a04d63f4d1bf5540a41b01a921c9a7fddc3bd1ca" + integrity sha512-AW8PKd6iX3vAZ0vA43nOUOnbq/X5ihgU+mSXXqunMkeQADGiqw/PY0JNeYtD5sr0PAy51YPgAPbDoeapv9r8WA== + +"@scure/base@~1.1.0": + version "1.1.1" + resolved "https://registry.yarnpkg.com/@scure/base/-/base-1.1.1.tgz#ebb651ee52ff84f420097055f4bf46cfba403938" + integrity sha512-ZxOhsSyxYwLJj3pLZCefNitxsj093tb2vq90mp2txoYeBqbcjDjqFhyM8eUjq/uFm6zJ+mUuqxlS2FkuSY1MTA== + +"@scure/bip39@1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@scure/bip39/-/bip39-1.1.0.tgz#92f11d095bae025f166bef3defcc5bf4945d419a" + integrity sha512-pwrPOS16VeTKg98dYXQyIjJEcWfz7/1YJIwxUEPFfQPtc86Ym/1sVgQ2RLoD43AazMk2l/unK4ITySSpW2+82w== + dependencies: + "@noble/hashes" "~1.1.1" + "@scure/base" "~1.1.0" + +"@sinclair/typebox@^0.24.1": + version "0.24.51" + resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.24.51.tgz#645f33fe4e02defe26f2f5c0410e1c094eac7f5f" + integrity sha512-1P1OROm/rdubP5aFDSZQILU0vrLCJ4fvHt6EoqHEM+2D/G5MK3bIaymUKLit8Js9gbns5UyJnkP/TZROLw4tUA== + +"@sinonjs/commons@^1.7.0": + version "1.8.4" + resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-1.8.4.tgz#d1f2d80f1bd0f2520873f161588bd9b7f8567120" + integrity sha512-RpmQdHVo8hCEHDVpO39zToS9jOhR6nw+/lQAzRNq9ErrGV9IeHM71XCn68svVl/euFeVW6BWX4p35gkhbOcSIQ== + dependencies: + type-detect "4.0.8" + +"@sinonjs/fake-timers@^9.1.2": + version "9.1.2" + resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-9.1.2.tgz#4eaab737fab77332ab132d396a3c0d364bd0ea8c" + integrity sha512-BPS4ynJW/o92PUR4wgriz2Ud5gpST5vz6GQfMixEDK0Z8ZCUv2M7SkBLykH56T++Xs+8ln9zTGbOvNGIe02/jw== + dependencies: + "@sinonjs/commons" "^1.7.0" + +"@tootallnate/once@2": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-2.0.0.tgz#f544a148d3ab35801c1f633a7441fd87c2e484bf" + integrity sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A== + +"@tsconfig/node10@^1.0.7": + version "1.0.9" + resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.9.tgz#df4907fc07a886922637b15e02d4cebc4c0021b2" + integrity sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA== + +"@tsconfig/node12@^1.0.7": + version "1.0.11" + resolved "https://registry.yarnpkg.com/@tsconfig/node12/-/node12-1.0.11.tgz#ee3def1f27d9ed66dac6e46a295cffb0152e058d" + integrity sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag== + +"@tsconfig/node14@^1.0.0": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@tsconfig/node14/-/node14-1.0.3.tgz#e4386316284f00b98435bf40f72f75a09dabf6c1" + integrity sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow== + +"@tsconfig/node16@^1.0.2": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@tsconfig/node16/-/node16-1.0.3.tgz#472eaab5f15c1ffdd7f8628bd4c4f753995ec79e" + integrity sha512-yOlFc+7UtL/89t2ZhjPvvB/DeAr3r+Dq58IgzsFkOAvVC6NMJXmCGjbptdXdR9qsX7pKcTL+s87FtYREi2dEEQ== + +"@types/babel__core@^7.1.14": + version "7.1.19" + resolved "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.1.19.tgz#7b497495b7d1b4812bdb9d02804d0576f43ee460" + integrity sha512-WEOTgRsbYkvA/KCsDwVEGkd7WAr1e3g31VHQ8zy5gul/V1qKullU/BU5I68X5v7V3GnB9eotmom4v5a5gjxorw== + dependencies: + "@babel/parser" "^7.1.0" + "@babel/types" "^7.0.0" + 
"@types/babel__generator" "*" + "@types/babel__template" "*" + "@types/babel__traverse" "*" + +"@types/babel__generator@*": + version "7.6.4" + resolved "https://registry.yarnpkg.com/@types/babel__generator/-/babel__generator-7.6.4.tgz#1f20ce4c5b1990b37900b63f050182d28c2439b7" + integrity sha512-tFkciB9j2K755yrTALxD44McOrk+gfpIpvC3sxHjRawj6PfnQxrse4Clq5y/Rq+G3mrBurMax/lG8Qn2t9mSsg== + dependencies: + "@babel/types" "^7.0.0" + +"@types/babel__template@*": + version "7.4.1" + resolved "https://registry.yarnpkg.com/@types/babel__template/-/babel__template-7.4.1.tgz#3d1a48fd9d6c0edfd56f2ff578daed48f36c8969" + integrity sha512-azBFKemX6kMg5Io+/rdGT0dkGreboUVR0Cdm3fz9QJWpaQGJRQXl7C+6hOTCZcMll7KFyEQpgbYI2lHdsS4U7g== + dependencies: + "@babel/parser" "^7.1.0" + "@babel/types" "^7.0.0" + +"@types/babel__traverse@*", "@types/babel__traverse@^7.0.6": + version "7.18.2" + resolved "https://registry.yarnpkg.com/@types/babel__traverse/-/babel__traverse-7.18.2.tgz#235bf339d17185bdec25e024ca19cce257cc7309" + integrity sha512-FcFaxOr2V5KZCviw1TnutEMVUVsGt4D2hP1TAfXZAMKuHYW3xQhe3jTxNPWutgCJ3/X1c5yX8ZoGVEItxKbwBg== + dependencies: + "@babel/types" "^7.3.0" + +"@types/graceful-fs@^4.1.3": + version "4.1.5" + resolved "https://registry.yarnpkg.com/@types/graceful-fs/-/graceful-fs-4.1.5.tgz#21ffba0d98da4350db64891f92a9e5db3cdb4e15" + integrity sha512-anKkLmZZ+xm4p8JWBf4hElkM4XR+EZeA2M9BAkkTldmcyDY4mbdIJnRghDJH3Ov5ooY7/UAoENtmdMSkaAd7Cw== + dependencies: + "@types/node" "*" + +"@types/istanbul-lib-coverage@*", "@types/istanbul-lib-coverage@^2.0.0", "@types/istanbul-lib-coverage@^2.0.1": + version "2.0.4" + resolved "https://registry.yarnpkg.com/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz#8467d4b3c087805d63580480890791277ce35c44" + integrity sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g== + +"@types/istanbul-lib-report@*": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#c14c24f18ea8190c118ee7562b7ff99a36552686" + integrity sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg== + dependencies: + "@types/istanbul-lib-coverage" "*" + +"@types/istanbul-reports@^3.0.0": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@types/istanbul-reports/-/istanbul-reports-3.0.1.tgz#9153fe98bba2bd565a63add9436d6f0d7f8468ff" + integrity sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw== + dependencies: + "@types/istanbul-lib-report" "*" + +"@types/jest@28.1.8": + version "28.1.8" + resolved "https://registry.yarnpkg.com/@types/jest/-/jest-28.1.8.tgz#6936409f3c9724ea431efd412ea0238a0f03b09b" + integrity sha512-8TJkV++s7B6XqnDrzR1m/TT0A0h948Pnl/097veySPN67VRAgQ4gZ7n2KfJo2rVq6njQjdxU3GCCyDvAeuHoiw== + dependencies: + expect "^28.0.0" + pretty-format "^28.0.0" + +"@types/js-yaml@^4.0.0": + version "4.0.5" + resolved "https://registry.yarnpkg.com/@types/js-yaml/-/js-yaml-4.0.5.tgz#738dd390a6ecc5442f35e7f03fa1431353f7e138" + integrity sha512-FhpRzf927MNQdRZP0J5DLIdTXhjLYzeUTmLAu69mnVksLH9CJY3IuSeEgbKUki7GQZm0WqDkGzyxju2EZGD2wA== + +"@types/json-schema@^7.0.6", "@types/json-schema@^7.0.9": + version "7.0.11" + resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.11.tgz#d421b6c527a3037f7c84433fd2c4229e016863d3" + integrity sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ== + +"@types/json-stable-stringify@^1.0.32": + version "1.0.34" + resolved 
"https://registry.yarnpkg.com/@types/json-stable-stringify/-/json-stable-stringify-1.0.34.tgz#c0fb25e4d957e0ee2e497c1f553d7f8bb668fd75" + integrity sha512-s2cfwagOQAS8o06TcwKfr9Wx11dNGbH2E9vJz1cqV+a/LOyhWNLUNd6JSRYNzvB4d29UuJX2M0Dj9vE1T8fRXw== + +"@types/json5@^0.0.29": + version "0.0.29" + resolved "https://registry.yarnpkg.com/@types/json5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee" + integrity sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ== + +"@types/node@*": + version "18.11.9" + resolved "https://registry.yarnpkg.com/@types/node/-/node-18.11.9.tgz#02d013de7058cea16d36168ef2fc653464cfbad4" + integrity sha512-CRpX21/kGdzjOpFsZSkcrXMGIBWMGNIHXXBVFSH+ggkftxg+XYP20TESbh+zFvFj3EQOl5byk0HTRn1IL6hbqg== + +"@types/node@18.6.2": + version "18.6.2" + resolved "https://registry.yarnpkg.com/@types/node/-/node-18.6.2.tgz#ffc5f0f099d27887c8d9067b54e55090fcd54126" + integrity sha512-KcfkBq9H4PI6Vpu5B/KoPeuVDAbmi+2mDBqGPGUgoL7yXQtcWGu2vJWmmRkneWK3Rh0nIAX192Aa87AqKHYChQ== + +"@types/parse-json@^4.0.0": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@types/parse-json/-/parse-json-4.0.0.tgz#2f8bb441434d163b35fb8ffdccd7138927ffb8c0" + integrity sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA== + +"@types/prettier@^2.1.5": + version "2.7.1" + resolved "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.7.1.tgz#dfd20e2dc35f027cdd6c1908e80a5ddc7499670e" + integrity sha512-ri0UmynRRvZiiUJdiz38MmIblKK+oH30MztdBVR95dv/Ubw6neWSb8u1XpRb72L4qsZOhz+L+z9JD40SJmfWow== + +"@types/stack-utils@^2.0.0": + version "2.0.1" + resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-2.0.1.tgz#20f18294f797f2209b5f65c8e3b5c8e8261d127c" + integrity sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw== + +"@types/ws@^8.0.0": + version "8.5.4" + resolved "https://registry.yarnpkg.com/@types/ws/-/ws-8.5.4.tgz#bb10e36116d6e570dd943735f86c933c1587b8a5" + integrity sha512-zdQDHKUgcX/zBc4GrwsE/7dVdAD8JR4EuiAXiiUhhfyIJXXb2+PrGshFyeXWQPMmmZ2XxgaqclgpIC7eTXc1mg== + dependencies: + "@types/node" "*" + +"@types/yargs-parser@*": + version "21.0.0" + resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-21.0.0.tgz#0c60e537fa790f5f9472ed2776c2b71ec117351b" + integrity sha512-iO9ZQHkZxHn4mSakYV0vFHAVDyEOIJQrV2uZ06HxEPcx+mt8swXoZHIbaaJ2crJYFfErySgktuTZ3BeLz+XmFA== + +"@types/yargs@^17.0.8": + version "17.0.13" + resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-17.0.13.tgz#34cced675ca1b1d51fcf4d34c3c6f0fa142a5c76" + integrity sha512-9sWaruZk2JGxIQU+IhI1fhPYRcQ0UuTNuKuCW9bR5fp7qi2Llf7WDzNa17Cy7TKnh3cdxDOiyTu6gaLS0eDatg== + dependencies: + "@types/yargs-parser" "*" + +"@typescript-eslint/eslint-plugin@5.36.2": + version "5.36.2" + resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.36.2.tgz#6df092a20e0f9ec748b27f293a12cb39d0c1fe4d" + integrity sha512-OwwR8LRwSnI98tdc2z7mJYgY60gf7I9ZfGjN5EjCwwns9bdTuQfAXcsjSB2wSQ/TVNYSGKf4kzVXbNGaZvwiXw== + dependencies: + "@typescript-eslint/scope-manager" "5.36.2" + "@typescript-eslint/type-utils" "5.36.2" + "@typescript-eslint/utils" "5.36.2" + debug "^4.3.4" + functional-red-black-tree "^1.0.1" + ignore "^5.2.0" + regexpp "^3.2.0" + semver "^7.3.7" + tsutils "^3.21.0" + +"@typescript-eslint/parser@5.36.2": + version "5.36.2" + resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-5.36.2.tgz#3ddf323d3ac85a25295a55fcb9c7a49ab4680ddd" + 
integrity sha512-qS/Kb0yzy8sR0idFspI9Z6+t7mqk/oRjnAYfewG+VN73opAUvmYL3oPIMmgOX6CnQS6gmVIXGshlb5RY/R22pA== + dependencies: + "@typescript-eslint/scope-manager" "5.36.2" + "@typescript-eslint/types" "5.36.2" + "@typescript-eslint/typescript-estree" "5.36.2" + debug "^4.3.4" + +"@typescript-eslint/scope-manager@5.36.2": + version "5.36.2" + resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-5.36.2.tgz#a75eb588a3879ae659514780831370642505d1cd" + integrity sha512-cNNP51L8SkIFSfce8B1NSUBTJTu2Ts4nWeWbFrdaqjmn9yKrAaJUBHkyTZc0cL06OFHpb+JZq5AUHROS398Orw== + dependencies: + "@typescript-eslint/types" "5.36.2" + "@typescript-eslint/visitor-keys" "5.36.2" + +"@typescript-eslint/type-utils@5.36.2": + version "5.36.2" + resolved "https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-5.36.2.tgz#752373f4babf05e993adf2cd543a763632826391" + integrity sha512-rPQtS5rfijUWLouhy6UmyNquKDPhQjKsaKH0WnY6hl/07lasj8gPaH2UD8xWkePn6SC+jW2i9c2DZVDnL+Dokw== + dependencies: + "@typescript-eslint/typescript-estree" "5.36.2" + "@typescript-eslint/utils" "5.36.2" + debug "^4.3.4" + tsutils "^3.21.0" + +"@typescript-eslint/types@5.36.2": + version "5.36.2" + resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-5.36.2.tgz#a5066e500ebcfcee36694186ccc57b955c05faf9" + integrity sha512-9OJSvvwuF1L5eS2EQgFUbECb99F0mwq501w0H0EkYULkhFa19Qq7WFbycdw1PexAc929asupbZcgjVIe6OK/XQ== + +"@typescript-eslint/typescript-estree@5.36.2": + version "5.36.2" + resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-5.36.2.tgz#0c93418b36c53ba0bc34c61fe9405c4d1d8fe560" + integrity sha512-8fyH+RfbKc0mTspfuEjlfqA4YywcwQK2Amcf6TDOwaRLg7Vwdu4bZzyvBZp4bjt1RRjQ5MDnOZahxMrt2l5v9w== + dependencies: + "@typescript-eslint/types" "5.36.2" + "@typescript-eslint/visitor-keys" "5.36.2" + debug "^4.3.4" + globby "^11.1.0" + is-glob "^4.0.3" + semver "^7.3.7" + tsutils "^3.21.0" + +"@typescript-eslint/utils@5.36.2": + version "5.36.2" + resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-5.36.2.tgz#b01a76f0ab244404c7aefc340c5015d5ce6da74c" + integrity sha512-uNcopWonEITX96v9pefk9DC1bWMdkweeSsewJ6GeC7L6j2t0SJywisgkr9wUTtXk90fi2Eljj90HSHm3OGdGRg== + dependencies: + "@types/json-schema" "^7.0.9" + "@typescript-eslint/scope-manager" "5.36.2" + "@typescript-eslint/types" "5.36.2" + "@typescript-eslint/typescript-estree" "5.36.2" + eslint-scope "^5.1.1" + eslint-utils "^3.0.0" + +"@typescript-eslint/visitor-keys@5.36.2": + version "5.36.2" + resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-5.36.2.tgz#2f8f78da0a3bad3320d2ac24965791ac39dace5a" + integrity sha512-BtRvSR6dEdrNt7Net2/XDjbYKU5Ml6GqJgVfXT0CxTCJlnIqK7rAGreuWKMT2t8cFUT2Msv5oxw0GMRD7T5J7A== + dependencies: + "@typescript-eslint/types" "5.36.2" + eslint-visitor-keys "^3.3.0" + +"@whatwg-node/events@^0.0.2": + version "0.0.2" + resolved "https://registry.yarnpkg.com/@whatwg-node/events/-/events-0.0.2.tgz#7b7107268d2982fc7b7aff5ee6803c64018f84dd" + integrity sha512-WKj/lI4QjnLuPrim0cfO7i+HsDSXHxNv1y0CrJhdntuO3hxWZmnXCwNDnwOvry11OjRin6cgWNF+j/9Pn8TN4w== + +"@whatwg-node/events@^0.0.3": + version "0.0.3" + resolved "https://registry.yarnpkg.com/@whatwg-node/events/-/events-0.0.3.tgz#13a65dd4f5893f55280f766e29ae48074927acad" + integrity sha512-IqnKIDWfXBJkvy/k6tzskWTc2NK3LcqHlb+KHGCrjOCH4jfQckRX0NAiIcC/vIqQkzLYw2r2CTSwAxcrtcD6lA== + +"@whatwg-node/fetch@^0.6.0": + version "0.6.9" + resolved 
"https://registry.yarnpkg.com/@whatwg-node/fetch/-/fetch-0.6.9.tgz#6cc694cc0378e27b8dfed427c5bf633eda6972b9" + integrity sha512-JfrBCJdMu9n9OARc0e/hPHcD98/8Nz1CKSdGYDg6VbObDkV/Ys30xe5i/wPOatYbxuvatj1kfWeHf7iNX3i17w== + dependencies: + "@peculiar/webcrypto" "^1.4.0" + "@whatwg-node/node-fetch" "^0.0.5" + busboy "^1.6.0" + urlpattern-polyfill "^6.0.2" + web-streams-polyfill "^3.2.1" + +"@whatwg-node/fetch@^0.8.0", "@whatwg-node/fetch@^0.8.1", "@whatwg-node/fetch@^0.8.2": + version "0.8.8" + resolved "https://registry.yarnpkg.com/@whatwg-node/fetch/-/fetch-0.8.8.tgz#48c6ad0c6b7951a73e812f09dd22d75e9fa18cae" + integrity sha512-CdcjGC2vdKhc13KKxgsc6/616BQ7ooDIgPeTuAiE8qfCnS0mGzcfCOoZXypQSz73nxI+GWc7ZReIAVhxoE1KCg== + dependencies: + "@peculiar/webcrypto" "^1.4.0" + "@whatwg-node/node-fetch" "^0.3.6" + busboy "^1.6.0" + urlpattern-polyfill "^8.0.0" + web-streams-polyfill "^3.2.1" + +"@whatwg-node/node-fetch@^0.0.5": + version "0.0.5" + resolved "https://registry.yarnpkg.com/@whatwg-node/node-fetch/-/node-fetch-0.0.5.tgz#bebf18891088e5e2fc449dea8d1bc94af5ec38df" + integrity sha512-hbccmaSZaItdsRuBKBEEhLoO+5oXJPxiyd0kG2xXd0Dh3Rt+vZn4pADHxuSiSHLd9CM+S2z4+IxlEGbWUgiz9g== + dependencies: + "@whatwg-node/events" "^0.0.2" + busboy "^1.6.0" + tslib "^2.3.1" + +"@whatwg-node/node-fetch@^0.3.6": + version "0.3.6" + resolved "https://registry.yarnpkg.com/@whatwg-node/node-fetch/-/node-fetch-0.3.6.tgz#e28816955f359916e2d830b68a64493124faa6d0" + integrity sha512-w9wKgDO4C95qnXZRwZTfCmLWqyRnooGjcIwG0wADWjw9/HN0p7dtvtgSvItZtUyNteEvgTrd8QojNEqV6DAGTA== + dependencies: + "@whatwg-node/events" "^0.0.3" + busboy "^1.6.0" + fast-querystring "^1.1.1" + fast-url-parser "^1.1.3" + tslib "^2.3.1" + +acorn-jsx@^5.3.2: + version "5.3.2" + resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" + integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== + +acorn-walk@^8.1.1: + version "8.2.0" + resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.2.0.tgz#741210f2e2426454508853a2f44d0ab83b7f69c1" + integrity sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA== + +acorn@^8.4.1, acorn@^8.8.0: + version "8.8.1" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.8.1.tgz#0a3f9cbecc4ec3bea6f0a80b66ae8dd2da250b73" + integrity sha512-7zFpHzhnqYKrkYdUjF1HI1bzd0VygEGX8lFk4k5zVMqHEoES+P+7TKI+EvLO9WVMJ8eekdO0aDEK044xTXwPPA== + +agent-base@6: + version "6.0.2" + resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77" + integrity sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ== + dependencies: + debug "4" + +aggregate-error@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/aggregate-error/-/aggregate-error-3.1.0.tgz#92670ff50f5359bdb7a3e0d40d0ec30c5737687a" + integrity sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA== + dependencies: + clean-stack "^2.0.0" + indent-string "^4.0.0" + +ajv@^6.10.0, ajv@^6.12.4: + version "6.12.6" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" + integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== + dependencies: + fast-deep-equal "^3.1.1" + fast-json-stable-stringify "^2.0.0" + json-schema-traverse "^0.4.1" + uri-js "^4.2.2" + +ansi-escapes@^4.2.1, ansi-escapes@^4.3.0: 
+ version "4.3.2" + resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" + integrity sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ== + dependencies: + type-fest "^0.21.3" + +ansi-regex@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" + integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== + +ansi-styles@^3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" + integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== + dependencies: + color-convert "^1.9.0" + +ansi-styles@^4.0.0, ansi-styles@^4.1.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" + integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== + dependencies: + color-convert "^2.0.1" + +ansi-styles@^5.0.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-5.2.0.tgz#07449690ad45777d1924ac2abb2fc8895dba836b" + integrity sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA== + +any-promise@^1.0.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/any-promise/-/any-promise-1.3.0.tgz#abc6afeedcea52e809cdc0376aed3ce39635d17f" + integrity sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A== + +anymatch@^3.0.3, anymatch@~3.1.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.2.tgz#c0557c096af32f106198f4f4e2a383537e378716" + integrity sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg== + dependencies: + normalize-path "^3.0.0" + picomatch "^2.0.4" + +arg@^4.1.0: + version "4.1.3" + resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.3.tgz#269fc7ad5b8e42cb63c896d5666017261c144089" + integrity sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA== + +argparse@^1.0.7: + version "1.0.10" + resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" + integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== + dependencies: + sprintf-js "~1.0.2" + +argparse@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" + integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== + +array-includes@^3.1.4: + version "3.1.5" + resolved "https://registry.yarnpkg.com/array-includes/-/array-includes-3.1.5.tgz#2c320010db8d31031fd2a5f6b3bbd4b1aad31bdb" + integrity sha512-iSDYZMMyTPkiFasVqfuAQnWAYcvO/SeBSCGKePoEthjp4LEMTe4uLc7b025o4jAZpHhihh8xPo99TNWUWWkGDQ== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.19.5" + get-intrinsic "^1.1.1" + is-string "^1.0.7" + +array-union@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" + integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== + 
+array.prototype.flat@^1.2.5: + version "1.3.1" + resolved "https://registry.yarnpkg.com/array.prototype.flat/-/array.prototype.flat-1.3.1.tgz#ffc6576a7ca3efc2f46a143b9d1dda9b4b3cf5e2" + integrity sha512-roTU0KWIOmJ4DRLmwKd19Otg0/mT3qPNt0Qb3GWW8iObuZXxrjB/pzn0R3hqpRSWg4HCwqx+0vwOnWnvlOyeIA== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.20.4" + es-shim-unscopables "^1.0.0" + +asap@~2.0.3: + version "2.0.6" + resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46" + integrity sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA== + +asn1js@^3.0.1, asn1js@^3.0.5: + version "3.0.5" + resolved "https://registry.yarnpkg.com/asn1js/-/asn1js-3.0.5.tgz#5ea36820443dbefb51cc7f88a2ebb5b462114f38" + integrity sha512-FVnvrKJwpt9LP2lAMl8qZswRNm3T4q9CON+bxldk2iwk3FFpuwhx2FfinyitizWHsVYyaY+y5JzDR0rCMV5yTQ== + dependencies: + pvtsutils "^1.3.2" + pvutils "^1.1.3" + tslib "^2.4.0" + +astral-regex@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/astral-regex/-/astral-regex-2.0.0.tgz#483143c567aeed4785759c0865786dc77d7d2e31" + integrity sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ== + +asynckit@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" + integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q== + +auto-bind@~4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/auto-bind/-/auto-bind-4.0.0.tgz#e3589fc6c2da8f7ca43ba9f84fa52a744fc997fb" + integrity sha512-Hdw8qdNiqdJ8LqT0iK0sVzkFbzg6fhnQqqfWhBDxcHZvU75+B+ayzTy8x+k5Ix0Y92XOhOUlx74ps+bA6BeYMQ== + +axios@0.27.2: + version "0.27.2" + resolved "https://registry.yarnpkg.com/axios/-/axios-0.27.2.tgz#207658cc8621606e586c85db4b41a750e756d972" + integrity sha512-t+yRIyySRTp/wua5xEr+z1q60QmLq8ABsS5O9Me1AsE5dfKqgnCFzwiCZZ/cGNd1lq4/7akDWMxdhVlucjmnOQ== + dependencies: + follow-redirects "^1.14.9" + form-data "^4.0.0" + +babel-jest@^28.1.3: + version "28.1.3" + resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-28.1.3.tgz#c1187258197c099072156a0a121c11ee1e3917d5" + integrity sha512-epUaPOEWMk3cWX0M/sPvCHHCe9fMFAa/9hXEgKP8nFfNl/jlGkE9ucq9NqkZGXLDduCJYS0UvSlPUwC0S+rH6Q== + dependencies: + "@jest/transform" "^28.1.3" + "@types/babel__core" "^7.1.14" + babel-plugin-istanbul "^6.1.1" + babel-preset-jest "^28.1.3" + chalk "^4.0.0" + graceful-fs "^4.2.9" + slash "^3.0.0" + +babel-plugin-istanbul@^6.1.1: + version "6.1.1" + resolved "https://registry.yarnpkg.com/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz#fa88ec59232fd9b4e36dbbc540a8ec9a9b47da73" + integrity sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@istanbuljs/load-nyc-config" "^1.0.0" + "@istanbuljs/schema" "^0.1.2" + istanbul-lib-instrument "^5.0.4" + test-exclude "^6.0.0" + +babel-plugin-jest-hoist@^28.1.3: + version "28.1.3" + resolved "https://registry.yarnpkg.com/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-28.1.3.tgz#1952c4d0ea50f2d6d794353762278d1d8cca3fbe" + integrity sha512-Ys3tUKAmfnkRUpPdpa98eYrAR0nV+sSFUZZEGuQ2EbFd1y4SOLtD5QDNHAq+bb9a+bbXvYQC4b+ID/THIMcU6Q== + dependencies: + "@babel/template" "^7.3.3" + "@babel/types" "^7.3.3" + "@types/babel__core" "^7.1.14" + "@types/babel__traverse" "^7.0.6" + 
+babel-plugin-syntax-trailing-function-commas@^7.0.0-beta.0: + version "7.0.0-beta.0" + resolved "https://registry.yarnpkg.com/babel-plugin-syntax-trailing-function-commas/-/babel-plugin-syntax-trailing-function-commas-7.0.0-beta.0.tgz#aa213c1435e2bffeb6fca842287ef534ad05d5cf" + integrity sha512-Xj9XuRuz3nTSbaTXWv3itLOcxyF4oPD8douBBmj7U9BBC6nEBYfyOJYQMf/8PJAFotC62UY5dFfIGEPr7WswzQ== + +babel-preset-current-node-syntax@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz#b4399239b89b2a011f9ddbe3e4f401fc40cff73b" + integrity sha512-M7LQ0bxarkxQoN+vz5aJPsLBn77n8QgTFmo8WK0/44auK2xlCXrYcUxHFxgU7qW5Yzw/CjmLRK2uJzaCd7LvqQ== + dependencies: + "@babel/plugin-syntax-async-generators" "^7.8.4" + "@babel/plugin-syntax-bigint" "^7.8.3" + "@babel/plugin-syntax-class-properties" "^7.8.3" + "@babel/plugin-syntax-import-meta" "^7.8.3" + "@babel/plugin-syntax-json-strings" "^7.8.3" + "@babel/plugin-syntax-logical-assignment-operators" "^7.8.3" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + "@babel/plugin-syntax-numeric-separator" "^7.8.3" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + "@babel/plugin-syntax-optional-chaining" "^7.8.3" + "@babel/plugin-syntax-top-level-await" "^7.8.3" + +babel-preset-fbjs@^3.4.0: + version "3.4.0" + resolved "https://registry.yarnpkg.com/babel-preset-fbjs/-/babel-preset-fbjs-3.4.0.tgz#38a14e5a7a3b285a3f3a86552d650dca5cf6111c" + integrity sha512-9ywCsCvo1ojrw0b+XYk7aFvTH6D9064t0RIL1rtMf3nsa02Xw41MS7sZw216Im35xj/UY0PDBQsa1brUDDF1Ow== + dependencies: + "@babel/plugin-proposal-class-properties" "^7.0.0" + "@babel/plugin-proposal-object-rest-spread" "^7.0.0" + "@babel/plugin-syntax-class-properties" "^7.0.0" + "@babel/plugin-syntax-flow" "^7.0.0" + "@babel/plugin-syntax-jsx" "^7.0.0" + "@babel/plugin-syntax-object-rest-spread" "^7.0.0" + "@babel/plugin-transform-arrow-functions" "^7.0.0" + "@babel/plugin-transform-block-scoped-functions" "^7.0.0" + "@babel/plugin-transform-block-scoping" "^7.0.0" + "@babel/plugin-transform-classes" "^7.0.0" + "@babel/plugin-transform-computed-properties" "^7.0.0" + "@babel/plugin-transform-destructuring" "^7.0.0" + "@babel/plugin-transform-flow-strip-types" "^7.0.0" + "@babel/plugin-transform-for-of" "^7.0.0" + "@babel/plugin-transform-function-name" "^7.0.0" + "@babel/plugin-transform-literals" "^7.0.0" + "@babel/plugin-transform-member-expression-literals" "^7.0.0" + "@babel/plugin-transform-modules-commonjs" "^7.0.0" + "@babel/plugin-transform-object-super" "^7.0.0" + "@babel/plugin-transform-parameters" "^7.0.0" + "@babel/plugin-transform-property-literals" "^7.0.0" + "@babel/plugin-transform-react-display-name" "^7.0.0" + "@babel/plugin-transform-react-jsx" "^7.0.0" + "@babel/plugin-transform-shorthand-properties" "^7.0.0" + "@babel/plugin-transform-spread" "^7.0.0" + "@babel/plugin-transform-template-literals" "^7.0.0" + babel-plugin-syntax-trailing-function-commas "^7.0.0-beta.0" + +babel-preset-jest@^28.1.3: + version "28.1.3" + resolved "https://registry.yarnpkg.com/babel-preset-jest/-/babel-preset-jest-28.1.3.tgz#5dfc20b99abed5db994406c2b9ab94c73aaa419d" + integrity sha512-L+fupJvlWAHbQfn74coNX3zf60LXMJsezNvvx8eIh7iOR1luJ1poxYgQk1F8PYtNq/6QODDHCqsSnTFSWC491A== + dependencies: + babel-plugin-jest-hoist "^28.1.3" + babel-preset-current-node-syntax "^1.0.0" + +balanced-match@^1.0.0: + version "1.0.2" + resolved 
"https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" + integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== + +base64-js@^1.3.1: + version "1.5.1" + resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a" + integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== + +binary-extensions@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" + integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== + +bl@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/bl/-/bl-4.1.0.tgz#451535264182bec2fbbc83a62ab98cf11d9f7b3a" + integrity sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w== + dependencies: + buffer "^5.5.0" + inherits "^2.0.4" + readable-stream "^3.4.0" + +brace-expansion@^1.1.7: + version "1.1.11" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" + integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== + dependencies: + balanced-match "^1.0.0" + concat-map "0.0.1" + +brace-expansion@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-2.0.1.tgz#1edc459e0f0c548486ecf9fc99f2221364b9a0ae" + integrity sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA== + dependencies: + balanced-match "^1.0.0" + +braces@^3.0.2, braces@~3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" + integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== + dependencies: + fill-range "^7.0.1" + +browserslist@^4.21.3: + version "4.21.4" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.21.4.tgz#e7496bbc67b9e39dd0f98565feccdcb0d4ff6987" + integrity sha512-CBHJJdDmgjl3daYjN5Cp5kbTf1mUhZoS+beLklHIvkOWscs83YAhLlF3Wsh/lciQYAcbBJgTOD44VtG31ZM4Hw== + dependencies: + caniuse-lite "^1.0.30001400" + electron-to-chromium "^1.4.251" + node-releases "^2.0.6" + update-browserslist-db "^1.0.9" + +bs-logger@0.x: + version "0.2.6" + resolved "https://registry.yarnpkg.com/bs-logger/-/bs-logger-0.2.6.tgz#eb7d365307a72cf974cc6cda76b68354ad336bd8" + integrity sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog== + dependencies: + fast-json-stable-stringify "2.x" + +bser@2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05" + integrity sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ== + dependencies: + node-int64 "^0.4.0" + +buffer-from@^1.0.0: + version "1.1.2" + resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" + integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== + +buffer@^5.5.0: + version "5.7.1" + resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0" + integrity 
sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ== + dependencies: + base64-js "^1.3.1" + ieee754 "^1.1.13" + +bundle-require@^3.1.0: + version "3.1.2" + resolved "https://registry.yarnpkg.com/bundle-require/-/bundle-require-3.1.2.tgz#1374a7bdcb8b330a7ccc862ccbf7c137cc43ad27" + integrity sha512-Of6l6JBAxiyQ5axFxUM6dYeP/W7X2Sozeo/4EYB9sJhL+dqL7TKjg+shwxp6jlu/6ZSERfsYtIpSJ1/x3XkAEA== + dependencies: + load-tsconfig "^0.2.0" + +busboy@^1.6.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/busboy/-/busboy-1.6.0.tgz#966ea36a9502e43cdb9146962523b92f531f6893" + integrity sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA== + dependencies: + streamsearch "^1.1.0" + +cac@^6.7.12: + version "6.7.14" + resolved "https://registry.yarnpkg.com/cac/-/cac-6.7.14.tgz#804e1e6f506ee363cb0e3ccbb09cad5dd9870959" + integrity sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ== + +call-bind@^1.0.0, call-bind@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c" + integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA== + dependencies: + function-bind "^1.1.1" + get-intrinsic "^1.0.2" + +call-me-maybe@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/call-me-maybe/-/call-me-maybe-1.0.2.tgz#03f964f19522ba643b1b0693acb9152fe2074baa" + integrity sha512-HpX65o1Hnr9HH25ojC1YGs7HCQLq0GCOibSaWER0eNpgJ/Z1MZv2mTc7+xh6WOPxbRVcmgbv4hGU+uSQ/2xFZQ== + +callsites@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" + integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== + +camel-case@^4.1.2: + version "4.1.2" + resolved "https://registry.yarnpkg.com/camel-case/-/camel-case-4.1.2.tgz#9728072a954f805228225a6deea6b38461e1bd5a" + integrity sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw== + dependencies: + pascal-case "^3.1.2" + tslib "^2.0.3" + +camelcase@^5.0.0, camelcase@^5.3.1: + version "5.3.1" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" + integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== + +camelcase@^6.2.0, camelcase@^6.3.0: + version "6.3.0" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a" + integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== + +caniuse-lite@^1.0.30001400: + version "1.0.30001430" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001430.tgz#638a8ae00b5a8a97e66ff43733b2701f81b101fa" + integrity sha512-IB1BXTZKPDVPM7cnV4iaKaHxckvdr/3xtctB3f7Hmenx3qYBhGtTZ//7EllK66aKXW98Lx0+7Yr0kxBtIt3tzg== + +capital-case@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/capital-case/-/capital-case-1.0.4.tgz#9d130292353c9249f6b00fa5852bee38a717e669" + integrity sha512-ds37W8CytHgwnhGGTi88pcPyR15qoNkOpYwmMMfnWqqWgESapLqvDx6huFjQ5vqWSn2Z06173XNA7LtMOeUh1A== + dependencies: + no-case "^3.0.4" + tslib "^2.0.3" + upper-case-first "^2.0.2" + +chalk@^2.0.0: + version "2.4.2" + resolved 
"https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" + integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== + dependencies: + ansi-styles "^3.2.1" + escape-string-regexp "^1.0.5" + supports-color "^5.3.0" + +chalk@^4.0.0, chalk@^4.1.0, chalk@^4.1.1: + version "4.1.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" + integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + +change-case-all@1.0.14: + version "1.0.14" + resolved "https://registry.yarnpkg.com/change-case-all/-/change-case-all-1.0.14.tgz#bac04da08ad143278d0ac3dda7eccd39280bfba1" + integrity sha512-CWVm2uT7dmSHdO/z1CXT/n47mWonyypzBbuCy5tN7uMg22BsfkhwT6oHmFCAk+gL1LOOxhdbB9SZz3J1KTY3gA== + dependencies: + change-case "^4.1.2" + is-lower-case "^2.0.2" + is-upper-case "^2.0.2" + lower-case "^2.0.2" + lower-case-first "^2.0.2" + sponge-case "^1.0.1" + swap-case "^2.0.2" + title-case "^3.0.3" + upper-case "^2.0.2" + upper-case-first "^2.0.2" + +change-case-all@1.0.15: + version "1.0.15" + resolved "https://registry.yarnpkg.com/change-case-all/-/change-case-all-1.0.15.tgz#de29393167fc101d646cd76b0ef23e27d09756ad" + integrity sha512-3+GIFhk3sNuvFAJKU46o26OdzudQlPNBCu1ZQi3cMeMHhty1bhDxu2WrEilVNYaGvqUtR1VSigFcJOiS13dRhQ== + dependencies: + change-case "^4.1.2" + is-lower-case "^2.0.2" + is-upper-case "^2.0.2" + lower-case "^2.0.2" + lower-case-first "^2.0.2" + sponge-case "^1.0.1" + swap-case "^2.0.2" + title-case "^3.0.3" + upper-case "^2.0.2" + upper-case-first "^2.0.2" + +change-case@^4.1.2: + version "4.1.2" + resolved "https://registry.yarnpkg.com/change-case/-/change-case-4.1.2.tgz#fedfc5f136045e2398c0410ee441f95704641e12" + integrity sha512-bSxY2ws9OtviILG1EiY5K7NNxkqg/JnRnFxLtKQ96JaviiIxi7djMrSd0ECT9AC+lttClmYwKw53BWpOMblo7A== + dependencies: + camel-case "^4.1.2" + capital-case "^1.0.4" + constant-case "^3.0.4" + dot-case "^3.0.4" + header-case "^2.0.4" + no-case "^3.0.4" + param-case "^3.0.4" + pascal-case "^3.1.2" + path-case "^3.0.4" + sentence-case "^3.0.4" + snake-case "^3.0.4" + tslib "^2.0.3" + +char-regex@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf" + integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw== + +chardet@^0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/chardet/-/chardet-0.7.0.tgz#90094849f0937f2eedc2425d0d28a9e5f0cbad9e" + integrity sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA== + +chokidar@^3.5.1, chokidar@^3.5.2: + version "3.5.3" + resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.3.tgz#1cf37c8707b932bd1af1ae22c0432e2acd1903bd" + integrity sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw== + dependencies: + anymatch "~3.1.2" + braces "~3.0.2" + glob-parent "~5.1.2" + is-binary-path "~2.1.0" + is-glob "~4.0.1" + normalize-path "~3.0.0" + readdirp "~3.6.0" + optionalDependencies: + fsevents "~2.3.2" + +ci-info@^3.2.0: + version "3.5.0" + resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-3.5.0.tgz#bfac2a29263de4c829d806b1ab478e35091e171f" + integrity sha512-yH4RezKOGlOhxkmhbeNuC4eYZKAUsEaGtBuBzDDP1eFUKiccDWzBABxBfOx31IDwDIXMTxWuwAxUGModvkbuVw== + 
+cjs-module-lexer@^1.0.0: + version "1.2.2" + resolved "https://registry.yarnpkg.com/cjs-module-lexer/-/cjs-module-lexer-1.2.2.tgz#9f84ba3244a512f3a54e5277e8eef4c489864e40" + integrity sha512-cOU9usZw8/dXIXKtwa8pM0OTJQuJkxMN6w30csNRUerHfeQ5R6U3kkU/FtJeIf3M202OHfY2U8ccInBG7/xogA== + +clean-stack@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.2.0.tgz#ee8472dbb129e727b31e8a10a427dee9dfe4008b" + integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A== + +cli-cursor@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-3.1.0.tgz#264305a7ae490d1d03bf0c9ba7c925d1753af307" + integrity sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw== + dependencies: + restore-cursor "^3.1.0" + +cli-spinners@^2.5.0: + version "2.8.0" + resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-2.8.0.tgz#e97a3e2bd00e6d85aa0c13d7f9e3ce236f7787fc" + integrity sha512-/eG5sJcvEIwxcdYM86k5tPwn0MUzkX5YY3eImTGpJOZgVe4SdTMY14vQpcxgBzJ0wXwAYrS8E+c3uHeK4JNyzQ== + +cli-truncate@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/cli-truncate/-/cli-truncate-2.1.0.tgz#c39e28bf05edcde5be3b98992a22deed5a2b93c7" + integrity sha512-n8fOixwDD6b/ObinzTrp1ZKFzbgvKZvuz/TvejnLn1aQfC6r52XEx85FmuC+3HI+JM7coBRXUvNqEU2PHVrHpg== + dependencies: + slice-ansi "^3.0.0" + string-width "^4.2.0" + +cli-width@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-3.0.0.tgz#a2f48437a2caa9a22436e794bf071ec9e61cedf6" + integrity sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw== + +cliui@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-6.0.0.tgz#511d702c0c4e41ca156d7d0e96021f23e13225b1" + integrity sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ== + dependencies: + string-width "^4.2.0" + strip-ansi "^6.0.0" + wrap-ansi "^6.2.0" + +cliui@^8.0.1: + version "8.0.1" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-8.0.1.tgz#0c04b075db02cbfe60dc8e6cf2f5486b1a3608aa" + integrity sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ== + dependencies: + string-width "^4.2.0" + strip-ansi "^6.0.1" + wrap-ansi "^7.0.0" + +clone@^1.0.2: + version "1.0.4" + resolved "https://registry.yarnpkg.com/clone/-/clone-1.0.4.tgz#da309cc263df15994c688ca902179ca3c7cd7c7e" + integrity sha512-JQHZ2QMW6l3aH/j6xCqQThY/9OH4D/9ls34cgkUBiEeocRTU04tHfKPBsUK1PqZCUQM7GiA0IIXJSuXHI64Kbg== + +co@^4.6.0: + version "4.6.0" + resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" + integrity sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ== + +collect-v8-coverage@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz#cc2c8e94fc18bbdffe64d6534570c8a673b27f59" + integrity sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg== + +color-convert@^1.9.0: + version "1.9.3" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" + integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== + dependencies: + color-name "1.1.3" + +color-convert@^2.0.1: + version "2.0.1" + resolved 
"https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" + integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== + dependencies: + color-name "~1.1.4" + +color-name@1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" + integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== + +color-name@~1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" + integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== + +colorette@^2.0.16: + version "2.0.20" + resolved "https://registry.yarnpkg.com/colorette/-/colorette-2.0.20.tgz#9eb793e6833067f7235902fcd3b09917a000a95a" + integrity sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w== + +combined-stream@^1.0.8: + version "1.0.8" + resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" + integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== + dependencies: + delayed-stream "~1.0.0" + +commander@^4.0.0: + version "4.1.1" + resolved "https://registry.yarnpkg.com/commander/-/commander-4.1.1.tgz#9fd602bd936294e9e9ef46a3f4d6964044b18068" + integrity sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA== + +commander@^9.3.0: + version "9.4.1" + resolved "https://registry.yarnpkg.com/commander/-/commander-9.4.1.tgz#d1dd8f2ce6faf93147295c0df13c7c21141cfbdd" + integrity sha512-5EEkTNyHNGFPD2H+c/dXXfQZYa/scCKasxWcXJaWnNJ99pnQN9Vnmqow+p+PlFPE63Q6mThaZws1T+HxfpgtPw== + +common-tags@1.8.2: + version "1.8.2" + resolved "https://registry.yarnpkg.com/common-tags/-/common-tags-1.8.2.tgz#94ebb3c076d26032745fd54face7f688ef5ac9c6" + integrity sha512-gk/Z852D2Wtb//0I+kRFNKKE9dIIVirjoqPoA1wJU+XePVXZfGeBpk45+A1rKO4Q43prqWBNY/MiIeRLbPWUaA== + +concat-map@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== + +confusing-browser-globals@^1.0.10: + version "1.0.11" + resolved "https://registry.yarnpkg.com/confusing-browser-globals/-/confusing-browser-globals-1.0.11.tgz#ae40e9b57cdd3915408a2805ebd3a5585608dc81" + integrity sha512-JsPKdmh8ZkmnHxDk55FZ1TqVLvEQTvoByJZRN9jzI0UjxK/QgAmsphz7PGtqgPieQZ/CQcHWXCR7ATDNhGe+YA== + +constant-case@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/constant-case/-/constant-case-3.0.4.tgz#3b84a9aeaf4cf31ec45e6bf5de91bdfb0589faf1" + integrity sha512-I2hSBi7Vvs7BEuJDr5dDHfzb/Ruj3FyvFyh7KLilAjNQw3Be+xgqUBA2W6scVEcL0hL1dwPRtIqEPVUCKkSsyQ== + dependencies: + no-case "^3.0.4" + tslib "^2.0.3" + upper-case "^2.0.2" + +convert-source-map@^1.4.0, convert-source-map@^1.6.0, convert-source-map@^1.7.0: + version "1.9.0" + resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.9.0.tgz#7faae62353fb4213366d0ca98358d22e8368b05f" + integrity sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A== + +cosmiconfig-typescript-loader@^4.3.0: + version "4.3.0" + resolved 
"https://registry.yarnpkg.com/cosmiconfig-typescript-loader/-/cosmiconfig-typescript-loader-4.3.0.tgz#c4259ce474c9df0f32274ed162c0447c951ef073" + integrity sha512-NTxV1MFfZDLPiBMjxbHRwSh5LaLcPMwNdCutmnHJCKoVnlvldPWlllonKwrsRJ5pYZBIBGRWWU2tfvzxgeSW5Q== + +cosmiconfig@8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-8.0.0.tgz#e9feae014eab580f858f8a0288f38997a7bebe97" + integrity sha512-da1EafcpH6b/TD8vDRaWV7xFINlHlF6zKsGwS1TsuVJTZRkquaS5HTMq7uq6h31619QjbsYl21gVDOm32KM1vQ== + dependencies: + import-fresh "^3.2.1" + js-yaml "^4.1.0" + parse-json "^5.0.0" + path-type "^4.0.0" + +cosmiconfig@^7.0.0: + version "7.1.0" + resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-7.1.0.tgz#1443b9afa596b670082ea46cbd8f6a62b84635f6" + integrity sha512-AdmX6xUzdNASswsFtmwSt7Vj8po9IuqXm0UXz7QKPuEUmPB4XyjGfaAr2PSuELMwkRMVH1EpIkX5bTZGRB3eCA== + dependencies: + "@types/parse-json" "^4.0.0" + import-fresh "^3.2.1" + parse-json "^5.0.0" + path-type "^4.0.0" + yaml "^1.10.0" + +create-require@^1.1.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/create-require/-/create-require-1.1.1.tgz#c1d7e8f1e5f6cfc9ff65f9cd352d37348756c333" + integrity sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ== + +cross-fetch@^3.1.5: + version "3.1.5" + resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-3.1.5.tgz#e1389f44d9e7ba767907f7af8454787952ab534f" + integrity sha512-lvb1SBsI0Z7GDwmuid+mU3kWVBwTVUbe7S0H52yaaAdQOXq2YktTCZdlAcNKFzE6QtRz0snpw9bNiPeOIkkQvw== + dependencies: + node-fetch "2.6.7" + +cross-spawn@^7.0.2, cross-spawn@^7.0.3: + version "7.0.3" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" + integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== + dependencies: + path-key "^3.1.0" + shebang-command "^2.0.0" + which "^2.0.1" + +dataloader@^2.2.2: + version "2.2.2" + resolved "https://registry.yarnpkg.com/dataloader/-/dataloader-2.2.2.tgz#216dc509b5abe39d43a9b9d97e6e5e473dfbe3e0" + integrity sha512-8YnDaaf7N3k/q5HnTJVuzSyLETjoZjVmHc4AeKAzOvKHEFQKcn64OKBfzHYtE9zGjctNM7V9I0MfnUVLpi7M5g== + +debounce@^1.2.0: + version "1.2.1" + resolved "https://registry.yarnpkg.com/debounce/-/debounce-1.2.1.tgz#38881d8f4166a5c5848020c11827b834bcb3e0a5" + integrity sha512-XRRe6Glud4rd/ZGQfiV1ruXSfbvfJedlV9Y6zOlP+2K04vBYiJEte6stfFkCP03aMnY5tsipamumUjL14fofug== + +debug@4, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.3.2, debug@^4.3.4: + version "4.3.4" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" + integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== + dependencies: + ms "2.1.2" + +debug@^2.6.9: + version "2.6.9" + resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" + integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== + dependencies: + ms "2.0.0" + +debug@^3.2.7: + version "3.2.7" + resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" + integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== + dependencies: + ms "^2.1.1" + +decamelize@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" 
+ integrity sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA== + +dedent@^0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/dedent/-/dedent-0.7.0.tgz#2495ddbaf6eb874abb0e1be9df22d2e5a544326c" + integrity sha512-Q6fKUPqnAHAyhiUgFU7BUzLiv0kd8saH9al7tnu5Q/okj6dnupxyTgFIBjVzJATdfIAm9NAsvXNzjaKa+bxVyA== + +deep-is@^0.1.3: + version "0.1.4" + resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" + integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== + +deepmerge@^4.2.2: + version "4.2.2" + resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955" + integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg== + +defaults@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/defaults/-/defaults-1.0.4.tgz#b0b02062c1e2aa62ff5d9528f0f98baa90978d7a" + integrity sha512-eFuaLoy/Rxalv2kr+lqMlUnrDWV+3j4pljOIJgLIhI058IQfWJ7vXhyEIHu+HtC738klGALYxOKDO0bQP3tg8A== + dependencies: + clone "^1.0.2" + +define-properties@^1.1.3, define-properties@^1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.4.tgz#0b14d7bd7fbeb2f3572c3a7eda80ea5d57fb05b1" + integrity sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA== + dependencies: + has-property-descriptors "^1.0.0" + object-keys "^1.1.1" + +delayed-stream@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== + +dependency-graph@^0.11.0: + version "0.11.0" + resolved "https://registry.yarnpkg.com/dependency-graph/-/dependency-graph-0.11.0.tgz#ac0ce7ed68a54da22165a85e97a01d53f5eb2e27" + integrity sha512-JeMq7fEshyepOWDfcfHK06N3MhyPhz++vtqWhMT5O9A3K42rdsEDpfdVqjaqaAhsw6a+ZqeDvQVtD0hFHQWrzg== + +detect-indent@^6.0.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/detect-indent/-/detect-indent-6.1.0.tgz#592485ebbbf6b3b1ab2be175c8393d04ca0d57e6" + integrity sha512-reYkTUJAZb9gUuZ2RvVCNhVHdg62RHnJ7WJl8ftMi4diZ6NWlciOzQN88pUhSELEwflJht4oQDv0F0BMlwaYtA== + +detect-newline@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651" + integrity sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA== + +diff-sequences@^28.1.1: + version "28.1.1" + resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-28.1.1.tgz#9989dc731266dc2903457a70e996f3a041913ac6" + integrity sha512-FU0iFaH/E23a+a718l8Qa/19bF9p06kgE0KipMOMadwa3SjnaElKzPaUC0vnibs6/B/9ni97s61mcejk8W1fQw== + +diff@^4.0.1: + version "4.0.2" + resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d" + integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A== + +dir-glob@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" + integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA== + dependencies: + path-type "^4.0.0" + +doctrine@^2.1.0: + version "2.1.0" + resolved 
"https://registry.yarnpkg.com/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d" + integrity sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw== + dependencies: + esutils "^2.0.2" + +doctrine@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" + integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w== + dependencies: + esutils "^2.0.2" + +dot-case@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/dot-case/-/dot-case-3.0.4.tgz#9b2b670d00a431667a8a75ba29cd1b98809ce751" + integrity sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w== + dependencies: + no-case "^3.0.4" + tslib "^2.0.3" + +dotenv@16.0.2: + version "16.0.2" + resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.0.2.tgz#0b0f8652c016a3858ef795024508cddc4bffc5bf" + integrity sha512-JvpYKUmzQhYoIFgK2MOnF3bciIZoItIIoryihy0rIA+H4Jy0FmgyKYAHCTN98P5ybGSJcIFbh6QKeJdtZd1qhA== + +dotenv@^16.0.0: + version "16.0.3" + resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.0.3.tgz#115aec42bac5053db3c456db30cc243a5a836a07" + integrity sha512-7GO6HghkA5fYG9TYnNxi14/7K9f5occMlp3zXAuSxn7CKCxt9xbNWG7yF8hTCSUchlfWSe3uLmlPfigevRItzQ== + +dset@^3.1.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/dset/-/dset-3.1.2.tgz#89c436ca6450398396dc6538ea00abc0c54cd45a" + integrity sha512-g/M9sqy3oHe477Ar4voQxWtaPIFw1jTdKZuomOjhCcBx9nHUNn0pu6NopuFFrTh/TRZIKEj+76vLWFu9BNKk+Q== + +electron-to-chromium@^1.4.251: + version "1.4.284" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.284.tgz#61046d1e4cab3a25238f6bf7413795270f125592" + integrity sha512-M8WEXFuKXMYMVr45fo8mq0wUrrJHheiKZf6BArTKk9ZBYCKJEOU5H8cdWgDT+qCVZf7Na4lVUaZsA+h6uA9+PA== + +emittery@^0.10.2: + version "0.10.2" + resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.10.2.tgz#902eec8aedb8c41938c46e9385e9db7e03182933" + integrity sha512-aITqOwnLanpHLNXZJENbOgjUBeHocD+xsSJmNrjovKBW5HbSpW3d1pEls7GFQPUWXiwG9+0P4GtHfEqC/4M0Iw== + +emoji-regex@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" + integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== + +enhanced-resolve@^5.0.0: + version "5.10.0" + resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.10.0.tgz#0dc579c3bb2a1032e357ac45b8f3a6f3ad4fb1e6" + integrity sha512-T0yTFjdpldGY8PmuXXR0PyQ1ufZpEGiHVrp7zHKB7jdR4qlmZHhONVM5AQOAWXuF/w3dnHbEQVrNptJgt7F+cQ== + dependencies: + graceful-fs "^4.2.4" + tapable "^2.2.0" + +error-ex@^1.3.1: + version "1.3.2" + resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" + integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== + dependencies: + is-arrayish "^0.2.1" + +es-abstract@^1.19.0, es-abstract@^1.19.1, es-abstract@^1.19.5, es-abstract@^1.20.4: + version "1.20.4" + resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.20.4.tgz#1d103f9f8d78d4cf0713edcd6d0ed1a46eed5861" + integrity sha512-0UtvRN79eMe2L+UNEF1BwRe364sj/DXhQ/k5FmivgoSdpM90b8Jc0mDzKMGo7QS0BVbOP/bTwBKNnDc9rNzaPA== + dependencies: + call-bind "^1.0.2" + es-to-primitive "^1.2.1" + function-bind "^1.1.1" + function.prototype.name "^1.1.5" + get-intrinsic 
"^1.1.3" + get-symbol-description "^1.0.0" + has "^1.0.3" + has-property-descriptors "^1.0.0" + has-symbols "^1.0.3" + internal-slot "^1.0.3" + is-callable "^1.2.7" + is-negative-zero "^2.0.2" + is-regex "^1.1.4" + is-shared-array-buffer "^1.0.2" + is-string "^1.0.7" + is-weakref "^1.0.2" + object-inspect "^1.12.2" + object-keys "^1.1.1" + object.assign "^4.1.4" + regexp.prototype.flags "^1.4.3" + safe-regex-test "^1.0.0" + string.prototype.trimend "^1.0.5" + string.prototype.trimstart "^1.0.5" + unbox-primitive "^1.0.2" + +es-shim-unscopables@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz#702e632193201e3edf8713635d083d378e510241" + integrity sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w== + dependencies: + has "^1.0.3" + +es-to-primitive@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" + integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== + dependencies: + is-callable "^1.1.4" + is-date-object "^1.0.1" + is-symbol "^1.0.2" + +esbuild-android-64@0.15.13: + version "0.15.13" + resolved "https://registry.yarnpkg.com/esbuild-android-64/-/esbuild-android-64-0.15.13.tgz#5f25864055dbd62e250f360b38b4c382224063af" + integrity sha512-yRorukXBlokwTip+Sy4MYskLhJsO0Kn0/Fj43s1krVblfwP+hMD37a4Wmg139GEsMLl+vh8WXp2mq/cTA9J97g== + +esbuild-android-arm64@0.15.13: + version "0.15.13" + resolved "https://registry.yarnpkg.com/esbuild-android-arm64/-/esbuild-android-arm64-0.15.13.tgz#d8820f999314efbe8e0f050653a99ff2da632b0f" + integrity sha512-TKzyymLD6PiVeyYa4c5wdPw87BeAiTXNtK6amWUcXZxkV51gOk5u5qzmDaYSwiWeecSNHamFsaFjLoi32QR5/w== + +esbuild-darwin-64@0.15.13: + version "0.15.13" + resolved "https://registry.yarnpkg.com/esbuild-darwin-64/-/esbuild-darwin-64-0.15.13.tgz#99ae7fdaa43947b06cd9d1a1c3c2c9f245d81fd0" + integrity sha512-WAx7c2DaOS6CrRcoYCgXgkXDliLnFv3pQLV6GeW1YcGEZq2Gnl8s9Pg7ahValZkpOa0iE/ojRVQ87sbUhF1Cbg== + +esbuild-darwin-arm64@0.15.13: + version "0.15.13" + resolved "https://registry.yarnpkg.com/esbuild-darwin-arm64/-/esbuild-darwin-arm64-0.15.13.tgz#bafa1814354ad1a47adcad73de416130ef7f55e3" + integrity sha512-U6jFsPfSSxC3V1CLiQqwvDuj3GGrtQNB3P3nNC3+q99EKf94UGpsG9l4CQ83zBs1NHrk1rtCSYT0+KfK5LsD8A== + +esbuild-freebsd-64@0.15.13: + version "0.15.13" + resolved "https://registry.yarnpkg.com/esbuild-freebsd-64/-/esbuild-freebsd-64-0.15.13.tgz#84ef85535c5cc38b627d1c5115623b088d1de161" + integrity sha512-whItJgDiOXaDG/idy75qqevIpZjnReZkMGCgQaBWZuKHoElDJC1rh7MpoUgupMcdfOd+PgdEwNQW9DAE6i8wyA== + +esbuild-freebsd-arm64@0.15.13: + version "0.15.13" + resolved "https://registry.yarnpkg.com/esbuild-freebsd-arm64/-/esbuild-freebsd-arm64-0.15.13.tgz#033f21de434ec8e0c478054b119af8056763c2d8" + integrity sha512-6pCSWt8mLUbPtygv7cufV0sZLeylaMwS5Fznj6Rsx9G2AJJsAjQ9ifA+0rQEIg7DwJmi9it+WjzNTEAzzdoM3Q== + +esbuild-linux-32@0.15.13: + version "0.15.13" + resolved "https://registry.yarnpkg.com/esbuild-linux-32/-/esbuild-linux-32-0.15.13.tgz#54290ea8035cba0faf1791ce9ae6693005512535" + integrity sha512-VbZdWOEdrJiYApm2kkxoTOgsoCO1krBZ3quHdYk3g3ivWaMwNIVPIfEE0f0XQQ0u5pJtBsnk2/7OPiCFIPOe/w== + +esbuild-linux-64@0.15.13: + version "0.15.13" + resolved "https://registry.yarnpkg.com/esbuild-linux-64/-/esbuild-linux-64-0.15.13.tgz#4264249281ea388ead948614b57fb1ddf7779a2c" + integrity 
sha512-rXmnArVNio6yANSqDQlIO4WiP+Cv7+9EuAHNnag7rByAqFVuRusLbGi2697A5dFPNXoO//IiogVwi3AdcfPC6A== + +esbuild-linux-arm64@0.15.13: + version "0.15.13" + resolved "https://registry.yarnpkg.com/esbuild-linux-arm64/-/esbuild-linux-arm64-0.15.13.tgz#9323c333924f97a02bdd2ae8912b36298acb312d" + integrity sha512-alEMGU4Z+d17U7KQQw2IV8tQycO6T+rOrgW8OS22Ua25x6kHxoG6Ngry6Aq6uranC+pNWNMB6aHFPh7aTQdORQ== + +esbuild-linux-arm@0.15.13: + version "0.15.13" + resolved "https://registry.yarnpkg.com/esbuild-linux-arm/-/esbuild-linux-arm-0.15.13.tgz#b407f47b3ae721fe4e00e19e9f19289bef87a111" + integrity sha512-Ac6LpfmJO8WhCMQmO253xX2IU2B3wPDbl4IvR0hnqcPrdfCaUa2j/lLMGTjmQ4W5JsJIdHEdW12dG8lFS0MbxQ== + +esbuild-linux-mips64le@0.15.13: + version "0.15.13" + resolved "https://registry.yarnpkg.com/esbuild-linux-mips64le/-/esbuild-linux-mips64le-0.15.13.tgz#bdf905aae5c0bcaa8f83567fe4c4c1bdc1f14447" + integrity sha512-47PgmyYEu+yN5rD/MbwS6DxP2FSGPo4Uxg5LwIdxTiyGC2XKwHhHyW7YYEDlSuXLQXEdTO7mYe8zQ74czP7W8A== + +esbuild-linux-ppc64le@0.15.13: + version "0.15.13" + resolved "https://registry.yarnpkg.com/esbuild-linux-ppc64le/-/esbuild-linux-ppc64le-0.15.13.tgz#2911eae1c90ff58a3bd3259cb557235df25aa3b4" + integrity sha512-z6n28h2+PC1Ayle9DjKoBRcx/4cxHoOa2e689e2aDJSaKug3jXcQw7mM+GLg+9ydYoNzj8QxNL8ihOv/OnezhA== + +esbuild-linux-riscv64@0.15.13: + version "0.15.13" + resolved "https://registry.yarnpkg.com/esbuild-linux-riscv64/-/esbuild-linux-riscv64-0.15.13.tgz#1837c660be12b1d20d2a29c7189ea703f93e9265" + integrity sha512-+Lu4zuuXuQhgLUGyZloWCqTslcCAjMZH1k3Xc9MSEJEpEFdpsSU0sRDXAnk18FKOfEjhu4YMGaykx9xjtpA6ow== + +esbuild-linux-s390x@0.15.13: + version "0.15.13" + resolved "https://registry.yarnpkg.com/esbuild-linux-s390x/-/esbuild-linux-s390x-0.15.13.tgz#d52880ece229d1bd10b2d936b792914ffb07c7fc" + integrity sha512-BMeXRljruf7J0TMxD5CIXS65y7puiZkAh+s4XFV9qy16SxOuMhxhVIXYLnbdfLrsYGFzx7U9mcdpFWkkvy/Uag== + +esbuild-netbsd-64@0.15.13: + version "0.15.13" + resolved "https://registry.yarnpkg.com/esbuild-netbsd-64/-/esbuild-netbsd-64-0.15.13.tgz#de14da46f1d20352b43e15d97a80a8788275e6ed" + integrity sha512-EHj9QZOTel581JPj7UO3xYbltFTYnHy+SIqJVq6yd3KkCrsHRbapiPb0Lx3EOOtybBEE9EyqbmfW1NlSDsSzvQ== + +esbuild-openbsd-64@0.15.13: + version "0.15.13" + resolved "https://registry.yarnpkg.com/esbuild-openbsd-64/-/esbuild-openbsd-64-0.15.13.tgz#45e8a5fd74d92ad8f732c43582369c7990f5a0ac" + integrity sha512-nkuDlIjF/sfUhfx8SKq0+U+Fgx5K9JcPq1mUodnxI0x4kBdCv46rOGWbuJ6eof2n3wdoCLccOoJAbg9ba/bT2w== + +esbuild-sunos-64@0.15.13: + version "0.15.13" + resolved "https://registry.yarnpkg.com/esbuild-sunos-64/-/esbuild-sunos-64-0.15.13.tgz#f646ac3da7aac521ee0fdbc192750c87da697806" + integrity sha512-jVeu2GfxZQ++6lRdY43CS0Tm/r4WuQQ0Pdsrxbw+aOrHQPHV0+LNOLnvbN28M7BSUGnJnHkHm2HozGgNGyeIRw== + +esbuild-windows-32@0.15.13: + version "0.15.13" + resolved "https://registry.yarnpkg.com/esbuild-windows-32/-/esbuild-windows-32-0.15.13.tgz#fb4fe77c7591418880b3c9b5900adc4c094f2401" + integrity sha512-XoF2iBf0wnqo16SDq+aDGi/+QbaLFpkiRarPVssMh9KYbFNCqPLlGAWwDvxEVz+ywX6Si37J2AKm+AXq1kC0JA== + +esbuild-windows-64@0.15.13: + version "0.15.13" + resolved "https://registry.yarnpkg.com/esbuild-windows-64/-/esbuild-windows-64-0.15.13.tgz#1fca8c654392c0c31bdaaed168becfea80e20660" + integrity sha512-Et6htEfGycjDrtqb2ng6nT+baesZPYQIW+HUEHK4D1ncggNrDNk3yoboYQ5KtiVrw/JaDMNttz8rrPubV/fvPQ== + +esbuild-windows-arm64@0.15.13: + version "0.15.13" + resolved "https://registry.yarnpkg.com/esbuild-windows-arm64/-/esbuild-windows-arm64-0.15.13.tgz#4ffd01b6b2888603f1584a2fe96b1f6a6f2b3dd8" + 
integrity sha512-3bv7tqntThQC9SWLRouMDmZnlOukBhOCTlkzNqzGCmrkCJI7io5LLjwJBOVY6kOUlIvdxbooNZwjtBvj+7uuVg== + +esbuild@^0.15.1: + version "0.15.13" + resolved "https://registry.yarnpkg.com/esbuild/-/esbuild-0.15.13.tgz#7293480038feb2bafa91d3f6a20edab3ba6c108a" + integrity sha512-Cu3SC84oyzzhrK/YyN4iEVy2jZu5t2fz66HEOShHURcjSkOSAVL8C/gfUT+lDJxkVHpg8GZ10DD0rMHRPqMFaQ== + optionalDependencies: + "@esbuild/android-arm" "0.15.13" + "@esbuild/linux-loong64" "0.15.13" + esbuild-android-64 "0.15.13" + esbuild-android-arm64 "0.15.13" + esbuild-darwin-64 "0.15.13" + esbuild-darwin-arm64 "0.15.13" + esbuild-freebsd-64 "0.15.13" + esbuild-freebsd-arm64 "0.15.13" + esbuild-linux-32 "0.15.13" + esbuild-linux-64 "0.15.13" + esbuild-linux-arm "0.15.13" + esbuild-linux-arm64 "0.15.13" + esbuild-linux-mips64le "0.15.13" + esbuild-linux-ppc64le "0.15.13" + esbuild-linux-riscv64 "0.15.13" + esbuild-linux-s390x "0.15.13" + esbuild-netbsd-64 "0.15.13" + esbuild-openbsd-64 "0.15.13" + esbuild-sunos-64 "0.15.13" + esbuild-windows-32 "0.15.13" + esbuild-windows-64 "0.15.13" + esbuild-windows-arm64 "0.15.13" + +escalade@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" + integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== + +escape-string-regexp@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" + integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== + +escape-string-regexp@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344" + integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w== + +escape-string-regexp@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" + integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== + +eslint-config-airbnb-base@15.0.0, eslint-config-airbnb-base@^15.0.0: + version "15.0.0" + resolved "https://registry.yarnpkg.com/eslint-config-airbnb-base/-/eslint-config-airbnb-base-15.0.0.tgz#6b09add90ac79c2f8d723a2580e07f3925afd236" + integrity sha512-xaX3z4ZZIcFLvh2oUNvcX5oEofXda7giYmuplVxoOg5A7EXJMrUyqRgR+mhDhPK8LZ4PttFOBvCYDbX3sUoUig== + dependencies: + confusing-browser-globals "^1.0.10" + object.assign "^4.1.2" + object.entries "^1.1.5" + semver "^6.3.0" + +eslint-config-airbnb-typescript@17.0.0: + version "17.0.0" + resolved "https://registry.yarnpkg.com/eslint-config-airbnb-typescript/-/eslint-config-airbnb-typescript-17.0.0.tgz#360dbcf810b26bbcf2ff716198465775f1c49a07" + integrity sha512-elNiuzD0kPAPTXjFWg+lE24nMdHMtuxgYoD30OyMD6yrW1AhFZPAg27VX7d3tzOErw+dgJTNWfRSDqEcXb4V0g== + dependencies: + eslint-config-airbnb-base "^15.0.0" + +eslint-config-prettier@8.5.0: + version "8.5.0" + resolved "https://registry.yarnpkg.com/eslint-config-prettier/-/eslint-config-prettier-8.5.0.tgz#5a81680ec934beca02c7b1a61cf8ca34b66feab1" + integrity sha512-obmWKLUNCnhtQRKc+tmnYuQl0pFU1ibYJQ5BGhTVB08bHe9wC8qUeG7c08dj9XX+AuPj1YSGSQIHl1pnDHZR0Q== + +eslint-import-resolver-node@^0.3.6: + version "0.3.6" + resolved 
"https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.6.tgz#4048b958395da89668252001dbd9eca6b83bacbd" + integrity sha512-0En0w03NRVMn9Uiyn8YRPDKvWjxCWkslUEhGNTdGx15RvPJYQ+lbOlqrlNI2vEAs4pDYK4f/HN2TbDmk5TP0iw== + dependencies: + debug "^3.2.7" + resolve "^1.20.0" + +eslint-module-utils@^2.7.3: + version "2.7.4" + resolved "https://registry.yarnpkg.com/eslint-module-utils/-/eslint-module-utils-2.7.4.tgz#4f3e41116aaf13a20792261e61d3a2e7e0583974" + integrity sha512-j4GT+rqzCoRKHwURX7pddtIPGySnX9Si/cgMI5ztrcqOPtk5dDEeZ34CQVPphnqkJytlc97Vuk05Um2mJ3gEQA== + dependencies: + debug "^3.2.7" + +eslint-plugin-import@2.26.0: + version "2.26.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.26.0.tgz#f812dc47be4f2b72b478a021605a59fc6fe8b88b" + integrity sha512-hYfi3FXaM8WPLf4S1cikh/r4IxnO6zrhZbEGz2b660EJRbuxgpDS5gkCuYgGWg2xxh2rBuIr4Pvhve/7c31koA== + dependencies: + array-includes "^3.1.4" + array.prototype.flat "^1.2.5" + debug "^2.6.9" + doctrine "^2.1.0" + eslint-import-resolver-node "^0.3.6" + eslint-module-utils "^2.7.3" + has "^1.0.3" + is-core-module "^2.8.1" + is-glob "^4.0.3" + minimatch "^3.1.2" + object.values "^1.1.5" + resolve "^1.22.0" + tsconfig-paths "^3.14.1" + +eslint-scope@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-5.1.1.tgz#e786e59a66cb92b3f6c1fb0d508aab174848f48c" + integrity sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw== + dependencies: + esrecurse "^4.3.0" + estraverse "^4.1.1" + +eslint-scope@^7.1.1: + version "7.1.1" + resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-7.1.1.tgz#fff34894c2f65e5226d3041ac480b4513a163642" + integrity sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw== + dependencies: + esrecurse "^4.3.0" + estraverse "^5.2.0" + +eslint-utils@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-3.0.0.tgz#8aebaface7345bb33559db0a1f13a1d2d48c3672" + integrity sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA== + dependencies: + eslint-visitor-keys "^2.0.0" + +eslint-visitor-keys@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz#f65328259305927392c938ed44eb0a5c9b2bd303" + integrity sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw== + +eslint-visitor-keys@^3.3.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz#f6480fa6b1f30efe2d1968aa8ac745b862469826" + integrity sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA== + +eslint@8.23.0: + version "8.23.0" + resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.23.0.tgz#a184918d288820179c6041bb3ddcc99ce6eea040" + integrity sha512-pBG/XOn0MsJcKcTRLr27S5HpzQo4kLr+HjLQIyK4EiCsijDl/TB+h5uEuJU6bQ8Edvwz1XWOjpaP2qgnXGpTcA== + dependencies: + "@eslint/eslintrc" "^1.3.1" + "@humanwhocodes/config-array" "^0.10.4" + "@humanwhocodes/gitignore-to-minimatch" "^1.0.2" + "@humanwhocodes/module-importer" "^1.0.1" + ajv "^6.10.0" + chalk "^4.0.0" + cross-spawn "^7.0.2" + debug "^4.3.2" + doctrine "^3.0.0" + escape-string-regexp "^4.0.0" + eslint-scope "^7.1.1" + eslint-utils "^3.0.0" + eslint-visitor-keys "^3.3.0" + espree "^9.4.0" + esquery "^1.4.0" + esutils "^2.0.2" + fast-deep-equal "^3.1.3" + 
file-entry-cache "^6.0.1" + find-up "^5.0.0" + functional-red-black-tree "^1.0.1" + glob-parent "^6.0.1" + globals "^13.15.0" + globby "^11.1.0" + grapheme-splitter "^1.0.4" + ignore "^5.2.0" + import-fresh "^3.0.0" + imurmurhash "^0.1.4" + is-glob "^4.0.0" + js-yaml "^4.1.0" + json-stable-stringify-without-jsonify "^1.0.1" + levn "^0.4.1" + lodash.merge "^4.6.2" + minimatch "^3.1.2" + natural-compare "^1.4.0" + optionator "^0.9.1" + regexpp "^3.2.0" + strip-ansi "^6.0.1" + strip-json-comments "^3.1.0" + text-table "^0.2.0" + +espree@^9.4.0: + version "9.4.0" + resolved "https://registry.yarnpkg.com/espree/-/espree-9.4.0.tgz#cd4bc3d6e9336c433265fc0aa016fc1aaf182f8a" + integrity sha512-DQmnRpLj7f6TgN/NYb0MTzJXL+vJF9h3pHy4JhCIs3zwcgez8xmGg3sXHcEO97BrmO2OSvCwMdfdlyl+E9KjOw== + dependencies: + acorn "^8.8.0" + acorn-jsx "^5.3.2" + eslint-visitor-keys "^3.3.0" + +esprima@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" + integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== + +esquery@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.4.0.tgz#2148ffc38b82e8c7057dfed48425b3e61f0f24a5" + integrity sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w== + dependencies: + estraverse "^5.1.0" + +esrecurse@^4.3.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" + integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== + dependencies: + estraverse "^5.2.0" + +estraverse@^4.1.1: + version "4.3.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" + integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== + +estraverse@^5.1.0, estraverse@^5.2.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123" + integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== + +esutils@^2.0.2: + version "2.0.3" + resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" + integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== + +execa@^5.0.0: + version "5.1.1" + resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" + integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg== + dependencies: + cross-spawn "^7.0.3" + get-stream "^6.0.0" + human-signals "^2.1.0" + is-stream "^2.0.0" + merge-stream "^2.0.0" + npm-run-path "^4.0.1" + onetime "^5.1.2" + signal-exit "^3.0.3" + strip-final-newline "^2.0.0" + +exit@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" + integrity sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ== + +expect@^28.0.0, expect@^28.1.3: + version "28.1.3" + resolved "https://registry.yarnpkg.com/expect/-/expect-28.1.3.tgz#90a7c1a124f1824133dd4533cce2d2bdcb6603ec" + integrity sha512-eEh0xn8HlsuOBxFgIss+2mX85VAS4Qy3OSkjV7rlBWljtA4oWH37glVGyOZSZvErDT/yBywZdPGwCXuTvSG85g== + dependencies: + 
"@jest/expect-utils" "^28.1.3" + jest-get-type "^28.0.2" + jest-matcher-utils "^28.1.3" + jest-message-util "^28.1.3" + jest-util "^28.1.3" + +external-editor@^3.0.3: + version "3.1.0" + resolved "https://registry.yarnpkg.com/external-editor/-/external-editor-3.1.0.tgz#cb03f740befae03ea4d283caed2741a83f335495" + integrity sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew== + dependencies: + chardet "^0.7.0" + iconv-lite "^0.4.24" + tmp "^0.0.33" + +extract-files@^11.0.0: + version "11.0.0" + resolved "https://registry.yarnpkg.com/extract-files/-/extract-files-11.0.0.tgz#b72d428712f787eef1f5193aff8ab5351ca8469a" + integrity sha512-FuoE1qtbJ4bBVvv94CC7s0oTnKUGvQs+Rjf1L2SJFfS+HTVVjhPFtehPdQ0JiGPqVNfSSZvL5yzHHQq2Z4WNhQ== + +extract-files@^9.0.0: + version "9.0.0" + resolved "https://registry.yarnpkg.com/extract-files/-/extract-files-9.0.0.tgz#8a7744f2437f81f5ed3250ed9f1550de902fe54a" + integrity sha512-CvdFfHkC95B4bBBk36hcEmvdR2awOdhhVUYH6S/zrVj3477zven/fJMYg7121h4T1xHZC+tetUpubpAhxwI7hQ== + +fast-decode-uri-component@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/fast-decode-uri-component/-/fast-decode-uri-component-1.0.1.tgz#46f8b6c22b30ff7a81357d4f59abfae938202543" + integrity sha512-WKgKWg5eUxvRZGwW8FvfbaH7AXSh2cL+3j5fMGzUMCxWBJ3dV3a7Wz8y2f/uQ0e3B6WmodD3oS54jTQ9HVTIIg== + +fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: + version "3.1.3" + resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" + integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== + +fast-glob@^3.2.9: + version "3.2.12" + resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.12.tgz#7f39ec99c2e6ab030337142da9e0c18f37afae80" + integrity sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w== + dependencies: + "@nodelib/fs.stat" "^2.0.2" + "@nodelib/fs.walk" "^1.2.3" + glob-parent "^5.1.2" + merge2 "^1.3.0" + micromatch "^4.0.4" + +fast-json-stable-stringify@2.x, fast-json-stable-stringify@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" + integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== + +fast-levenshtein@^2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" + integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw== + +fast-querystring@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/fast-querystring/-/fast-querystring-1.1.1.tgz#f4c56ef56b1a954880cfd8c01b83f9e1a3d3fda2" + integrity sha512-qR2r+e3HvhEFmpdHMv//U8FnFlnYjaC6QKDuaXALDkw2kvHO8WDjxH+f/rHGR4Me4pnk8p9JAkRNTjYHAKRn2Q== + dependencies: + fast-decode-uri-component "^1.0.1" + +fast-url-parser@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/fast-url-parser/-/fast-url-parser-1.1.3.tgz#f4af3ea9f34d8a271cf58ad2b3759f431f0b318d" + integrity sha512-5jOCVXADYNuRkKFzNJ0dCCewsZiYo0dz8QNYljkOpFC6r2U4OBmKtvm/Tsuh4w1YYdDqDb31a8TVhBJ2OJKdqQ== + dependencies: + punycode "^1.3.2" + +fastq@^1.6.0: + version "1.13.0" + resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.13.0.tgz#616760f88a7526bdfc596b7cab8c18938c36b98c" + integrity 
sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw== + dependencies: + reusify "^1.0.4" + +fb-watchman@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/fb-watchman/-/fb-watchman-2.0.2.tgz#e9524ee6b5c77e9e5001af0f85f3adbb8623255c" + integrity sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA== + dependencies: + bser "2.1.1" + +fbjs-css-vars@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/fbjs-css-vars/-/fbjs-css-vars-1.0.2.tgz#216551136ae02fe255932c3ec8775f18e2c078b8" + integrity sha512-b2XGFAFdWZWg0phtAWLHCk836A1Xann+I+Dgd3Gk64MHKZO44FfoD1KxyvbSh0qZsIoXQGGlVztIY+oitJPpRQ== + +fbjs@^3.0.0: + version "3.0.4" + resolved "https://registry.yarnpkg.com/fbjs/-/fbjs-3.0.4.tgz#e1871c6bd3083bac71ff2da868ad5067d37716c6" + integrity sha512-ucV0tDODnGV3JCnnkmoszb5lf4bNpzjv80K41wd4k798Etq+UYD0y0TIfalLjZoKgjive6/adkRnszwapiDgBQ== + dependencies: + cross-fetch "^3.1.5" + fbjs-css-vars "^1.0.0" + loose-envify "^1.0.0" + object-assign "^4.1.0" + promise "^7.1.1" + setimmediate "^1.0.5" + ua-parser-js "^0.7.30" + +figures@^3.0.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/figures/-/figures-3.2.0.tgz#625c18bd293c604dc4a8ddb2febf0c88341746af" + integrity sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg== + dependencies: + escape-string-regexp "^1.0.5" + +file-entry-cache@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" + integrity sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg== + dependencies: + flat-cache "^3.0.4" + +fill-range@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" + integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== + dependencies: + to-regex-range "^5.0.1" + +find-up@^4.0.0, find-up@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" + integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== + dependencies: + locate-path "^5.0.0" + path-exists "^4.0.0" + +find-up@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc" + integrity sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng== + dependencies: + locate-path "^6.0.0" + path-exists "^4.0.0" + +flat-cache@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11" + integrity sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg== + dependencies: + flatted "^3.1.0" + rimraf "^3.0.2" + +flatted@^3.1.0: + version "3.2.7" + resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.2.7.tgz#609f39207cb614b89d0765b477cb2d437fbf9787" + integrity sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ== + +follow-redirects@^1.14.9: + version "1.15.2" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.2.tgz#b460864144ba63f2681096f274c4e57026da2c13" + integrity 
sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA== + +form-data@4.0.0, form-data@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.0.tgz#93919daeaf361ee529584b9b31664dc12c9fa452" + integrity sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww== + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.8" + mime-types "^2.1.12" + +form-data@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-3.0.1.tgz#ebd53791b78356a99af9a300d4282c4d5eb9755f" + integrity sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg== + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.8" + mime-types "^2.1.12" + +fs-extra@^10.1.0: + version "10.1.0" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-10.1.0.tgz#02873cfbc4084dde127eaa5f9905eef2325d1abf" + integrity sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ== + dependencies: + graceful-fs "^4.2.0" + jsonfile "^6.0.1" + universalify "^2.0.0" + +fs.realpath@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== + +fsevents@^2.3.2, fsevents@~2.3.2: + version "2.3.2" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" + integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== + +function-bind@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" + integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== + +function.prototype.name@^1.1.5: + version "1.1.5" + resolved "https://registry.yarnpkg.com/function.prototype.name/-/function.prototype.name-1.1.5.tgz#cce0505fe1ffb80503e6f9e46cc64e46a12a9621" + integrity sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.0" + functions-have-names "^1.2.2" + +functional-red-black-tree@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327" + integrity sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g== + +functions-have-names@^1.2.2: + version "1.2.3" + resolved "https://registry.yarnpkg.com/functions-have-names/-/functions-have-names-1.2.3.tgz#0404fe4ee2ba2f607f0e0ec3c80bae994133b834" + integrity sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ== + +gensync@^1.0.0-beta.2: + version "1.0.0-beta.2" + resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" + integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== + +get-caller-file@^2.0.1, get-caller-file@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" + integrity 
sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== + +get-intrinsic@^1.0.2, get-intrinsic@^1.1.0, get-intrinsic@^1.1.1, get-intrinsic@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.1.3.tgz#063c84329ad93e83893c7f4f243ef63ffa351385" + integrity sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A== + dependencies: + function-bind "^1.1.1" + has "^1.0.3" + has-symbols "^1.0.3" + +get-package-type@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" + integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== + +get-stream@^6.0.0: + version "6.0.1" + resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" + integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== + +get-symbol-description@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/get-symbol-description/-/get-symbol-description-1.0.0.tgz#7fdb81c900101fbd564dd5f1a30af5aadc1e58d6" + integrity sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw== + dependencies: + call-bind "^1.0.2" + get-intrinsic "^1.1.1" + +glob-parent@^5.1.2, glob-parent@~5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" + integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== + dependencies: + is-glob "^4.0.1" + +glob-parent@^6.0.1: + version "6.0.2" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3" + integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A== + dependencies: + is-glob "^4.0.3" + +glob@7.1.6: + version "7.1.6" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6" + integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.0.4" + once "^1.3.0" + path-is-absolute "^1.0.0" + +glob@^7.1.1, glob@^7.1.3, glob@^7.1.4: + version "7.2.3" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" + integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.1.1" + once "^1.3.0" + path-is-absolute "^1.0.0" + +globals@^11.1.0: + version "11.12.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" + integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== + +globals@^13.15.0: + version "13.17.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-13.17.0.tgz#902eb1e680a41da93945adbdcb5a9f361ba69bd4" + integrity sha512-1C+6nQRb1GwGMKm2dH/E7enFAMxGTmGI7/dEdhy/DNelv85w9B72t3uc5frtMNXIbzrarJJ/lTCjcaZwbLJmyw== + dependencies: + type-fest "^0.20.2" + +globby@^11.0.3, globby@^11.1.0: + version "11.1.0" + resolved 
"https://registry.yarnpkg.com/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b" + integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== + dependencies: + array-union "^2.1.0" + dir-glob "^3.0.1" + fast-glob "^3.2.9" + ignore "^5.2.0" + merge2 "^1.4.1" + slash "^3.0.0" + +graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4, graceful-fs@^4.2.9: + version "4.2.10" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c" + integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== + +grapheme-splitter@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz#9cf3a665c6247479896834af35cf1dbb4400767e" + integrity sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ== + +graphql-config@^4.4.0: + version "4.5.0" + resolved "https://registry.yarnpkg.com/graphql-config/-/graphql-config-4.5.0.tgz#257c2338950b8dce295a27f75c5f6c39f8f777b2" + integrity sha512-x6D0/cftpLUJ0Ch1e5sj1TZn6Wcxx4oMfmhaG9shM0DKajA9iR+j1z86GSTQ19fShbGvrSSvbIQsHku6aQ6BBw== + dependencies: + "@graphql-tools/graphql-file-loader" "^7.3.7" + "@graphql-tools/json-file-loader" "^7.3.7" + "@graphql-tools/load" "^7.5.5" + "@graphql-tools/merge" "^8.2.6" + "@graphql-tools/url-loader" "^7.9.7" + "@graphql-tools/utils" "^9.0.0" + cosmiconfig "8.0.0" + jiti "1.17.1" + minimatch "4.2.3" + string-env-interpolation "1.0.1" + tslib "^2.4.0" + +graphql-request@^5.1.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/graphql-request/-/graphql-request-5.2.0.tgz#a05fb54a517d91bb2d7aefa17ade4523dc5ebdca" + integrity sha512-pLhKIvnMyBERL0dtFI3medKqWOz/RhHdcgbZ+hMMIb32mEPa5MJSzS4AuXxfI4sRAu6JVVk5tvXuGfCWl9JYWQ== + dependencies: + "@graphql-typed-document-node/core" "^3.1.1" + cross-fetch "^3.1.5" + extract-files "^9.0.0" + form-data "^3.0.0" + +graphql-request@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/graphql-request/-/graphql-request-6.0.0.tgz#9c8b6a0c341f289e049936d03cc9205300faae1c" + integrity sha512-2BmHTuglonjZvmNVw6ZzCfFlW/qkIPds0f+Qdi/Lvjsl3whJg2uvHmSvHnLWhUTEw6zcxPYAHiZoPvSVKOZ7Jw== + dependencies: + "@graphql-typed-document-node/core" "^3.2.0" + cross-fetch "^3.1.5" + +graphql-tag@^2.11.0: + version "2.12.6" + resolved "https://registry.yarnpkg.com/graphql-tag/-/graphql-tag-2.12.6.tgz#d441a569c1d2537ef10ca3d1633b48725329b5f1" + integrity sha512-FdSNcu2QQcWnM2VNvSCCDCVS5PpPqpzgFT8+GXzqJuoDd0CBncxCY278u4mhRO7tMgo2JjgJA5aZ+nWSQ/Z+xg== + dependencies: + tslib "^2.1.0" + +graphql-ws@5.12.1: + version "5.12.1" + resolved "https://registry.yarnpkg.com/graphql-ws/-/graphql-ws-5.12.1.tgz#c62d5ac54dbd409cc6520b0b39de374b3d59d0dd" + integrity sha512-umt4f5NnMK46ChM2coO36PTFhHouBrK9stWWBczERguwYrGnPNxJ9dimU6IyOBfOkC6Izhkg4H8+F51W/8CYDg== + +graphql@^16.5.0: + version "16.6.0" + resolved "https://registry.yarnpkg.com/graphql/-/graphql-16.6.0.tgz#c2dcffa4649db149f6282af726c8c83f1c7c5fdb" + integrity sha512-KPIBPDlW7NxrbT/eh4qPXz5FiFdL5UbaA0XUNz2Rp3Z3hqBSkbj0GVjwFDztsWVauZUWsbKHgMg++sk8UX0bkw== + +handlebars@^4.7.7: + version "4.7.7" + resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.7.7.tgz#9ce33416aad02dbd6c8fafa8240d5d98004945a1" + integrity sha512-aAcXm5OAfE/8IXkcZvCepKU3VzW1/39Fb5ZuqMtgI/hT8X2YgoMvBY5dLhq/cpOvw7Lk1nK/UF71aLG/ZnVYRA== + dependencies: + minimist "^1.2.5" + neo-async "^2.6.0" + source-map "^0.6.1" + wordwrap 
"^1.0.0" + optionalDependencies: + uglify-js "^3.1.4" + +has-bigints@^1.0.1, has-bigints@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.2.tgz#0871bd3e3d51626f6ca0966668ba35d5602d6eaa" + integrity sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ== + +has-flag@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" + integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw== + +has-flag@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" + integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== + +has-property-descriptors@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz#610708600606d36961ed04c196193b6a607fa861" + integrity sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ== + dependencies: + get-intrinsic "^1.1.1" + +has-symbols@^1.0.2, has-symbols@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" + integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== + +has-tostringtag@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-tostringtag/-/has-tostringtag-1.0.0.tgz#7e133818a7d394734f941e73c3d3f9291e658b25" + integrity sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ== + dependencies: + has-symbols "^1.0.2" + +has@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" + integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== + dependencies: + function-bind "^1.1.1" + +header-case@^2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/header-case/-/header-case-2.0.4.tgz#5a42e63b55177349cf405beb8d775acabb92c063" + integrity sha512-H/vuk5TEEVZwrR0lp2zed9OCo1uAILMlx0JEMgC26rzyJJ3N1v6XkwHHXJQdR2doSjcGPM6OKPYoJgf0plJ11Q== + dependencies: + capital-case "^1.0.4" + tslib "^2.0.3" + +html-escaper@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" + integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== + +http-proxy-agent@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz#5129800203520d434f142bc78ff3c170800f2b43" + integrity sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w== + dependencies: + "@tootallnate/once" "2" + agent-base "6" + debug "4" + +https-proxy-agent@^5.0.0: + version "5.0.1" + resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz#c59ef224a04fe8b754f3db0063a25ea30d0005d6" + integrity sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA== + dependencies: + agent-base "6" + debug "4" + +human-signals@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" + 
integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw== + +iconv-lite@^0.4.24: + version "0.4.24" + resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" + integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== + dependencies: + safer-buffer ">= 2.1.2 < 3" + +ieee754@^1.1.13: + version "1.2.1" + resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352" + integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== + +ignore@^5.2.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.0.tgz#6d3bac8fa7fe0d45d9f9be7bac2fc279577e345a" + integrity sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ== + +immutable@~3.7.6: + version "3.7.6" + resolved "https://registry.yarnpkg.com/immutable/-/immutable-3.7.6.tgz#13b4d3cb12befa15482a26fe1b2ebae640071e4b" + integrity sha512-AizQPcaofEtO11RZhPPHBOJRdo/20MKQF9mBLnVkBoyHi1/zXK8fzVdnEpSV9gxqtnh6Qomfp3F0xT5qP/vThw== + +import-fresh@^3.0.0, import-fresh@^3.2.1: + version "3.3.0" + resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" + integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== + dependencies: + parent-module "^1.0.0" + resolve-from "^4.0.0" + +import-from@4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/import-from/-/import-from-4.0.0.tgz#2710b8d66817d232e16f4166e319248d3d5492e2" + integrity sha512-P9J71vT5nLlDeV8FHs5nNxaLbrpfAV5cF5srvbZfpwpcJoM/xZR3hiv+q+SAnuSmuGbXMWud063iIMx/V/EWZQ== + +import-local@^3.0.2: + version "3.1.0" + resolved "https://registry.yarnpkg.com/import-local/-/import-local-3.1.0.tgz#b4479df8a5fd44f6cdce24070675676063c95cb4" + integrity sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg== + dependencies: + pkg-dir "^4.2.0" + resolve-cwd "^3.0.0" + +imurmurhash@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" + integrity sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA== + +indent-string@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251" + integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg== + +inflight@^1.0.4: + version "1.0.6" + resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA== + dependencies: + once "^1.3.0" + wrappy "1" + +inherits@2, inherits@^2.0.3, inherits@^2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== + +inquirer@^8.0.0: + version "8.2.5" + resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-8.2.5.tgz#d8654a7542c35a9b9e069d27e2df4858784d54f8" + integrity sha512-QAgPDQMEgrDssk1XiwwHoOGYF9BAbUcc1+j+FhEvaOt8/cKRqyLn0U5qA6F74fGhTMGxf92pOvPBeh29jQJDTQ== + dependencies: 
+ ansi-escapes "^4.2.1" + chalk "^4.1.1" + cli-cursor "^3.1.0" + cli-width "^3.0.0" + external-editor "^3.0.3" + figures "^3.0.0" + lodash "^4.17.21" + mute-stream "0.0.8" + ora "^5.4.1" + run-async "^2.4.0" + rxjs "^7.5.5" + string-width "^4.1.0" + strip-ansi "^6.0.0" + through "^2.3.6" + wrap-ansi "^7.0.0" + +internal-slot@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/internal-slot/-/internal-slot-1.0.3.tgz#7347e307deeea2faac2ac6205d4bc7d34967f59c" + integrity sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA== + dependencies: + get-intrinsic "^1.1.0" + has "^1.0.3" + side-channel "^1.0.4" + +invariant@^2.2.4: + version "2.2.4" + resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.4.tgz#610f3c92c9359ce1db616e538008d23ff35158e6" + integrity sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA== + dependencies: + loose-envify "^1.0.0" + +is-absolute@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-absolute/-/is-absolute-1.0.0.tgz#395e1ae84b11f26ad1795e73c17378e48a301576" + integrity sha512-dOWoqflvcydARa360Gvv18DZ/gRuHKi2NU/wU5X1ZFzdYfH29nkiNZsF3mp4OJ3H4yo9Mx8A/uAGNzpzPN3yBA== + dependencies: + is-relative "^1.0.0" + is-windows "^1.0.1" + +is-arrayish@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" + integrity sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg== + +is-bigint@^1.0.1: + version "1.0.4" + resolved "https://registry.yarnpkg.com/is-bigint/-/is-bigint-1.0.4.tgz#08147a1875bc2b32005d41ccd8291dffc6691df3" + integrity sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg== + dependencies: + has-bigints "^1.0.1" + +is-binary-path@~2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" + integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== + dependencies: + binary-extensions "^2.0.0" + +is-boolean-object@^1.1.0: + version "1.1.2" + resolved "https://registry.yarnpkg.com/is-boolean-object/-/is-boolean-object-1.1.2.tgz#5c6dc200246dd9321ae4b885a114bb1f75f63719" + integrity sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA== + dependencies: + call-bind "^1.0.2" + has-tostringtag "^1.0.0" + +is-callable@^1.1.4, is-callable@^1.2.7: + version "1.2.7" + resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.7.tgz#3bc2a85ea742d9e36205dcacdd72ca1fdc51b055" + integrity sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA== + +is-core-module@^2.8.1, is-core-module@^2.9.0: + version "2.11.0" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.11.0.tgz#ad4cb3e3863e814523c96f3f58d26cc570ff0144" + integrity sha512-RRjxlvLDkD1YJwDbroBHMb+cukurkDWNyHx7D3oNB5x9rb5ogcksMC5wHCadcXoo67gVr/+3GFySh3134zi6rw== + dependencies: + has "^1.0.3" + +is-date-object@^1.0.1: + version "1.0.5" + resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.5.tgz#0841d5536e724c25597bf6ea62e1bd38298df31f" + integrity sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ== + dependencies: + has-tostringtag "^1.0.0" + +is-extglob@^2.1.1: + version "2.1.1" + resolved 
"https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" + integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ== + +is-fullwidth-code-point@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" + integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== + +is-generator-fn@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-generator-fn/-/is-generator-fn-2.1.0.tgz#7d140adc389aaf3011a8f2a2a4cfa6faadffb118" + integrity sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ== + +is-glob@4.0.3, is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1: + version "4.0.3" + resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" + integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== + dependencies: + is-extglob "^2.1.1" + +is-interactive@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-interactive/-/is-interactive-1.0.0.tgz#cea6e6ae5c870a7b0a0004070b7b587e0252912e" + integrity sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w== + +is-lower-case@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/is-lower-case/-/is-lower-case-2.0.2.tgz#1c0884d3012c841556243483aa5d522f47396d2a" + integrity sha512-bVcMJy4X5Og6VZfdOZstSexlEy20Sr0k/p/b2IlQJlfdKAQuMpiv5w2Ccxb8sKdRUNAG1PnHVHjFSdRDVS6NlQ== + dependencies: + tslib "^2.0.3" + +is-negative-zero@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/is-negative-zero/-/is-negative-zero-2.0.2.tgz#7bf6f03a28003b8b3965de3ac26f664d765f3150" + integrity sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA== + +is-number-object@^1.0.4: + version "1.0.7" + resolved "https://registry.yarnpkg.com/is-number-object/-/is-number-object-1.0.7.tgz#59d50ada4c45251784e9904f5246c742f07a42fc" + integrity sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ== + dependencies: + has-tostringtag "^1.0.0" + +is-number@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" + integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== + +is-regex@^1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.4.tgz#eef5663cd59fa4c0ae339505323df6854bb15958" + integrity sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg== + dependencies: + call-bind "^1.0.2" + has-tostringtag "^1.0.0" + +is-relative@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-relative/-/is-relative-1.0.0.tgz#a1bb6935ce8c5dba1e8b9754b9b2dcc020e2260d" + integrity sha512-Kw/ReK0iqwKeu0MITLFuj0jbPAmEiOsIwyIXvvbfa6QfmN9pkD1M+8pdk7Rl/dTKbH34/XBFMbgD4iMJhLQbGA== + dependencies: + is-unc-path "^1.0.0" + +is-shared-array-buffer@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz#8f259c573b60b6a32d4058a1a07430c0a7344c79" + integrity sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA== + 
dependencies: + call-bind "^1.0.2" + +is-stream@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077" + integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg== + +is-string@^1.0.5, is-string@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/is-string/-/is-string-1.0.7.tgz#0dd12bf2006f255bb58f695110eff7491eebc0fd" + integrity sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg== + dependencies: + has-tostringtag "^1.0.0" + +is-symbol@^1.0.2, is-symbol@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.4.tgz#a6dac93b635b063ca6872236de88910a57af139c" + integrity sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg== + dependencies: + has-symbols "^1.0.2" + +is-unc-path@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-unc-path/-/is-unc-path-1.0.0.tgz#d731e8898ed090a12c352ad2eaed5095ad322c9d" + integrity sha512-mrGpVd0fs7WWLfVsStvgF6iEJnbjDFZh9/emhRDcGWTduTfNHd9CHeUwH3gYIjdbwo4On6hunkztwOaAw0yllQ== + dependencies: + unc-path-regex "^0.1.2" + +is-unicode-supported@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz#3f26c76a809593b52bfa2ecb5710ed2779b522a7" + integrity sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw== + +is-upper-case@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/is-upper-case/-/is-upper-case-2.0.2.tgz#f1105ced1fe4de906a5f39553e7d3803fd804649" + integrity sha512-44pxmxAvnnAOwBg4tHPnkfvgjPwbc5QIsSstNU+YcJ1ovxVzCWpSGosPJOZh/a1tdl81fbgnLc9LLv+x2ywbPQ== + dependencies: + tslib "^2.0.3" + +is-weakref@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-weakref/-/is-weakref-1.0.2.tgz#9529f383a9338205e89765e0392efc2f100f06f2" + integrity sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ== + dependencies: + call-bind "^1.0.2" + +is-windows@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" + integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA== + +isexe@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== + +isomorphic-ws@5.0.0, isomorphic-ws@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/isomorphic-ws/-/isomorphic-ws-5.0.0.tgz#e5529148912ecb9b451b46ed44d53dae1ce04bbf" + integrity sha512-muId7Zzn9ywDsyXgTIafTry2sV3nySZeUDe6YedVd1Hvuuep5AsIlqK+XefWpYTyJG5e503F2xIuT2lcU6rCSw== + +istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz#189e7909d0a39fa5a3dfad5b03f71947770191d3" + integrity sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw== + +istanbul-lib-instrument@^5.0.4, istanbul-lib-instrument@^5.1.0: + version "5.2.1" + resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz#d10c8885c2125574e1c231cacadf955675e1ce3d" + integrity 
sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg== + dependencies: + "@babel/core" "^7.12.3" + "@babel/parser" "^7.14.7" + "@istanbuljs/schema" "^0.1.2" + istanbul-lib-coverage "^3.2.0" + semver "^6.3.0" + +istanbul-lib-report@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#7518fe52ea44de372f460a76b5ecda9ffb73d8a6" + integrity sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw== + dependencies: + istanbul-lib-coverage "^3.0.0" + make-dir "^3.0.0" + supports-color "^7.1.0" + +istanbul-lib-source-maps@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz#895f3a709fcfba34c6de5a42939022f3e4358551" + integrity sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw== + dependencies: + debug "^4.1.1" + istanbul-lib-coverage "^3.0.0" + source-map "^0.6.1" + +istanbul-reports@^3.1.3: + version "3.1.5" + resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-3.1.5.tgz#cc9a6ab25cb25659810e4785ed9d9fb742578bae" + integrity sha512-nUsEMa9pBt/NOHqbcbeJEgqIlY/K7rVWUX6Lql2orY5e9roQOthbR3vtY4zzf2orPELg80fnxxk9zUyPlgwD1w== + dependencies: + html-escaper "^2.0.0" + istanbul-lib-report "^3.0.0" + +jest-changed-files@^28.1.3: + version "28.1.3" + resolved "https://registry.yarnpkg.com/jest-changed-files/-/jest-changed-files-28.1.3.tgz#d9aeee6792be3686c47cb988a8eaf82ff4238831" + integrity sha512-esaOfUWJXk2nfZt9SPyC8gA1kNfdKLkQWyzsMlqq8msYSlNKfmZxfRgZn4Cd4MGVUF+7v6dBs0d5TOAKa7iIiA== + dependencies: + execa "^5.0.0" + p-limit "^3.1.0" + +jest-circus@^28.1.3: + version "28.1.3" + resolved "https://registry.yarnpkg.com/jest-circus/-/jest-circus-28.1.3.tgz#d14bd11cf8ee1a03d69902dc47b6bd4634ee00e4" + integrity sha512-cZ+eS5zc79MBwt+IhQhiEp0OeBddpc1n8MBo1nMB8A7oPMKEO+Sre+wHaLJexQUj9Ya/8NOBY0RESUgYjB6fow== + dependencies: + "@jest/environment" "^28.1.3" + "@jest/expect" "^28.1.3" + "@jest/test-result" "^28.1.3" + "@jest/types" "^28.1.3" + "@types/node" "*" + chalk "^4.0.0" + co "^4.6.0" + dedent "^0.7.0" + is-generator-fn "^2.0.0" + jest-each "^28.1.3" + jest-matcher-utils "^28.1.3" + jest-message-util "^28.1.3" + jest-runtime "^28.1.3" + jest-snapshot "^28.1.3" + jest-util "^28.1.3" + p-limit "^3.1.0" + pretty-format "^28.1.3" + slash "^3.0.0" + stack-utils "^2.0.3" + +jest-cli@^28.1.3: + version "28.1.3" + resolved "https://registry.yarnpkg.com/jest-cli/-/jest-cli-28.1.3.tgz#558b33c577d06de55087b8448d373b9f654e46b2" + integrity sha512-roY3kvrv57Azn1yPgdTebPAXvdR2xfezaKKYzVxZ6It/5NCxzJym6tUI5P1zkdWhfUYkxEI9uZWcQdaFLo8mJQ== + dependencies: + "@jest/core" "^28.1.3" + "@jest/test-result" "^28.1.3" + "@jest/types" "^28.1.3" + chalk "^4.0.0" + exit "^0.1.2" + graceful-fs "^4.2.9" + import-local "^3.0.2" + jest-config "^28.1.3" + jest-util "^28.1.3" + jest-validate "^28.1.3" + prompts "^2.0.1" + yargs "^17.3.1" + +jest-config@^28.1.3: + version "28.1.3" + resolved "https://registry.yarnpkg.com/jest-config/-/jest-config-28.1.3.tgz#e315e1f73df3cac31447eed8b8740a477392ec60" + integrity sha512-MG3INjByJ0J4AsNBm7T3hsuxKQqFIiRo/AUqb1q9LRKI5UU6Aar9JHbr9Ivn1TVwfUD9KirRoM/T6u8XlcQPHQ== + dependencies: + "@babel/core" "^7.11.6" + "@jest/test-sequencer" "^28.1.3" + "@jest/types" "^28.1.3" + babel-jest "^28.1.3" + chalk "^4.0.0" + ci-info "^3.2.0" + deepmerge "^4.2.2" + glob "^7.1.3" + graceful-fs "^4.2.9" + jest-circus "^28.1.3" + 
jest-environment-node "^28.1.3" + jest-get-type "^28.0.2" + jest-regex-util "^28.0.2" + jest-resolve "^28.1.3" + jest-runner "^28.1.3" + jest-util "^28.1.3" + jest-validate "^28.1.3" + micromatch "^4.0.4" + parse-json "^5.2.0" + pretty-format "^28.1.3" + slash "^3.0.0" + strip-json-comments "^3.1.1" + +jest-diff@^28.1.3: + version "28.1.3" + resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-28.1.3.tgz#948a192d86f4e7a64c5264ad4da4877133d8792f" + integrity sha512-8RqP1B/OXzjjTWkqMX67iqgwBVJRgCyKD3L9nq+6ZqJMdvjE8RgHktqZ6jNrkdMT+dJuYNI3rhQpxaz7drJHfw== + dependencies: + chalk "^4.0.0" + diff-sequences "^28.1.1" + jest-get-type "^28.0.2" + pretty-format "^28.1.3" + +jest-docblock@^28.1.1: + version "28.1.1" + resolved "https://registry.yarnpkg.com/jest-docblock/-/jest-docblock-28.1.1.tgz#6f515c3bf841516d82ecd57a62eed9204c2f42a8" + integrity sha512-3wayBVNiOYx0cwAbl9rwm5kKFP8yHH3d/fkEaL02NPTkDojPtheGB7HZSFY4wzX+DxyrvhXz0KSCVksmCknCuA== + dependencies: + detect-newline "^3.0.0" + +jest-each@^28.1.3: + version "28.1.3" + resolved "https://registry.yarnpkg.com/jest-each/-/jest-each-28.1.3.tgz#bdd1516edbe2b1f3569cfdad9acd543040028f81" + integrity sha512-arT1z4sg2yABU5uogObVPvSlSMQlDA48owx07BDPAiasW0yYpYHYOo4HHLz9q0BVzDVU4hILFjzJw0So9aCL/g== + dependencies: + "@jest/types" "^28.1.3" + chalk "^4.0.0" + jest-get-type "^28.0.2" + jest-util "^28.1.3" + pretty-format "^28.1.3" + +jest-environment-node@^28.1.3: + version "28.1.3" + resolved "https://registry.yarnpkg.com/jest-environment-node/-/jest-environment-node-28.1.3.tgz#7e74fe40eb645b9d56c0c4b70ca4357faa349be5" + integrity sha512-ugP6XOhEpjAEhGYvp5Xj989ns5cB1K6ZdjBYuS30umT4CQEETaxSiPcZ/E1kFktX4GkrcM4qu07IIlDYX1gp+A== + dependencies: + "@jest/environment" "^28.1.3" + "@jest/fake-timers" "^28.1.3" + "@jest/types" "^28.1.3" + "@types/node" "*" + jest-mock "^28.1.3" + jest-util "^28.1.3" + +jest-get-type@^28.0.2: + version "28.0.2" + resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-28.0.2.tgz#34622e628e4fdcd793d46db8a242227901fcf203" + integrity sha512-ioj2w9/DxSYHfOm5lJKCdcAmPJzQXmbM/Url3rhlghrPvT3tt+7a/+oXc9azkKmLvoiXjtV83bEWqi+vs5nlPA== + +jest-haste-map@^28.1.3: + version "28.1.3" + resolved "https://registry.yarnpkg.com/jest-haste-map/-/jest-haste-map-28.1.3.tgz#abd5451129a38d9841049644f34b034308944e2b" + integrity sha512-3S+RQWDXccXDKSWnkHa/dPwt+2qwA8CJzR61w3FoYCvoo3Pn8tvGcysmMF0Bj0EX5RYvAI2EIvC57OmotfdtKA== + dependencies: + "@jest/types" "^28.1.3" + "@types/graceful-fs" "^4.1.3" + "@types/node" "*" + anymatch "^3.0.3" + fb-watchman "^2.0.0" + graceful-fs "^4.2.9" + jest-regex-util "^28.0.2" + jest-util "^28.1.3" + jest-worker "^28.1.3" + micromatch "^4.0.4" + walker "^1.0.8" + optionalDependencies: + fsevents "^2.3.2" + +jest-leak-detector@^28.1.3: + version "28.1.3" + resolved "https://registry.yarnpkg.com/jest-leak-detector/-/jest-leak-detector-28.1.3.tgz#a6685d9b074be99e3adee816ce84fd30795e654d" + integrity sha512-WFVJhnQsiKtDEo5lG2mM0v40QWnBM+zMdHHyJs8AWZ7J0QZJS59MsyKeJHWhpBZBH32S48FOVvGyOFT1h0DlqA== + dependencies: + jest-get-type "^28.0.2" + pretty-format "^28.1.3" + +jest-matcher-utils@^28.1.3: + version "28.1.3" + resolved "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-28.1.3.tgz#5a77f1c129dd5ba3b4d7fc20728806c78893146e" + integrity sha512-kQeJ7qHemKfbzKoGjHHrRKH6atgxMk8Enkk2iPQ3XwO6oE/KYD8lMYOziCkeSB9G4adPM4nR1DE8Tf5JeWH6Bw== + dependencies: + chalk "^4.0.0" + jest-diff "^28.1.3" + jest-get-type "^28.0.2" + pretty-format "^28.1.3" + +jest-message-util@^28.1.3: + version "28.1.3" + 
resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-28.1.3.tgz#232def7f2e333f1eecc90649b5b94b0055e7c43d" + integrity sha512-PFdn9Iewbt575zKPf1286Ht9EPoJmYT7P0kY+RibeYZ2XtOr53pDLEFoTWXbd1h4JiGiWpTBC84fc8xMXQMb7g== + dependencies: + "@babel/code-frame" "^7.12.13" + "@jest/types" "^28.1.3" + "@types/stack-utils" "^2.0.0" + chalk "^4.0.0" + graceful-fs "^4.2.9" + micromatch "^4.0.4" + pretty-format "^28.1.3" + slash "^3.0.0" + stack-utils "^2.0.3" + +jest-mock@^28.1.3: + version "28.1.3" + resolved "https://registry.yarnpkg.com/jest-mock/-/jest-mock-28.1.3.tgz#d4e9b1fc838bea595c77ab73672ebf513ab249da" + integrity sha512-o3J2jr6dMMWYVH4Lh/NKmDXdosrsJgi4AviS8oXLujcjpCMBb1FMsblDnOXKZKfSiHLxYub1eS0IHuRXsio9eA== + dependencies: + "@jest/types" "^28.1.3" + "@types/node" "*" + +jest-pnp-resolver@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/jest-pnp-resolver/-/jest-pnp-resolver-1.2.2.tgz#b704ac0ae028a89108a4d040b3f919dfddc8e33c" + integrity sha512-olV41bKSMm8BdnuMsewT4jqlZ8+3TCARAXjZGT9jcoSnrfUnRCqnMoF9XEeoWjbzObpqF9dRhHQj0Xb9QdF6/w== + +jest-regex-util@^28.0.2: + version "28.0.2" + resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-28.0.2.tgz#afdc377a3b25fb6e80825adcf76c854e5bf47ead" + integrity sha512-4s0IgyNIy0y9FK+cjoVYoxamT7Zeo7MhzqRGx7YDYmaQn1wucY9rotiGkBzzcMXTtjrCAP/f7f+E0F7+fxPNdw== + +jest-resolve-dependencies@^28.1.3: + version "28.1.3" + resolved "https://registry.yarnpkg.com/jest-resolve-dependencies/-/jest-resolve-dependencies-28.1.3.tgz#8c65d7583460df7275c6ea2791901fa975c1fe66" + integrity sha512-qa0QO2Q0XzQoNPouMbCc7Bvtsem8eQgVPNkwn9LnS+R2n8DaVDPL/U1gngC0LTl1RYXJU0uJa2BMC2DbTfFrHA== + dependencies: + jest-regex-util "^28.0.2" + jest-snapshot "^28.1.3" + +jest-resolve@^28.1.3: + version "28.1.3" + resolved "https://registry.yarnpkg.com/jest-resolve/-/jest-resolve-28.1.3.tgz#cfb36100341ddbb061ec781426b3c31eb51aa0a8" + integrity sha512-Z1W3tTjE6QaNI90qo/BJpfnvpxtaFTFw5CDgwpyE/Kz8U/06N1Hjf4ia9quUhCh39qIGWF1ZuxFiBiJQwSEYKQ== + dependencies: + chalk "^4.0.0" + graceful-fs "^4.2.9" + jest-haste-map "^28.1.3" + jest-pnp-resolver "^1.2.2" + jest-util "^28.1.3" + jest-validate "^28.1.3" + resolve "^1.20.0" + resolve.exports "^1.1.0" + slash "^3.0.0" + +jest-runner@^28.1.3: + version "28.1.3" + resolved "https://registry.yarnpkg.com/jest-runner/-/jest-runner-28.1.3.tgz#5eee25febd730b4713a2cdfd76bdd5557840f9a1" + integrity sha512-GkMw4D/0USd62OVO0oEgjn23TM+YJa2U2Wu5zz9xsQB1MxWKDOlrnykPxnMsN0tnJllfLPinHTka61u0QhaxBA== + dependencies: + "@jest/console" "^28.1.3" + "@jest/environment" "^28.1.3" + "@jest/test-result" "^28.1.3" + "@jest/transform" "^28.1.3" + "@jest/types" "^28.1.3" + "@types/node" "*" + chalk "^4.0.0" + emittery "^0.10.2" + graceful-fs "^4.2.9" + jest-docblock "^28.1.1" + jest-environment-node "^28.1.3" + jest-haste-map "^28.1.3" + jest-leak-detector "^28.1.3" + jest-message-util "^28.1.3" + jest-resolve "^28.1.3" + jest-runtime "^28.1.3" + jest-util "^28.1.3" + jest-watcher "^28.1.3" + jest-worker "^28.1.3" + p-limit "^3.1.0" + source-map-support "0.5.13" + +jest-runtime@^28.1.3: + version "28.1.3" + resolved "https://registry.yarnpkg.com/jest-runtime/-/jest-runtime-28.1.3.tgz#a57643458235aa53e8ec7821949e728960d0605f" + integrity sha512-NU+881ScBQQLc1JHG5eJGU7Ui3kLKrmwCPPtYsJtBykixrM2OhVQlpMmFWJjMyDfdkGgBMNjXCGB/ebzsgNGQw== + dependencies: + "@jest/environment" "^28.1.3" + "@jest/fake-timers" "^28.1.3" + "@jest/globals" "^28.1.3" + "@jest/source-map" "^28.1.2" + "@jest/test-result" "^28.1.3" + "@jest/transform" 
"^28.1.3" + "@jest/types" "^28.1.3" + chalk "^4.0.0" + cjs-module-lexer "^1.0.0" + collect-v8-coverage "^1.0.0" + execa "^5.0.0" + glob "^7.1.3" + graceful-fs "^4.2.9" + jest-haste-map "^28.1.3" + jest-message-util "^28.1.3" + jest-mock "^28.1.3" + jest-regex-util "^28.0.2" + jest-resolve "^28.1.3" + jest-snapshot "^28.1.3" + jest-util "^28.1.3" + slash "^3.0.0" + strip-bom "^4.0.0" + +jest-snapshot@^28.1.3: + version "28.1.3" + resolved "https://registry.yarnpkg.com/jest-snapshot/-/jest-snapshot-28.1.3.tgz#17467b3ab8ddb81e2f605db05583d69388fc0668" + integrity sha512-4lzMgtiNlc3DU/8lZfmqxN3AYD6GGLbl+72rdBpXvcV+whX7mDrREzkPdp2RnmfIiWBg1YbuFSkXduF2JcafJg== + dependencies: + "@babel/core" "^7.11.6" + "@babel/generator" "^7.7.2" + "@babel/plugin-syntax-typescript" "^7.7.2" + "@babel/traverse" "^7.7.2" + "@babel/types" "^7.3.3" + "@jest/expect-utils" "^28.1.3" + "@jest/transform" "^28.1.3" + "@jest/types" "^28.1.3" + "@types/babel__traverse" "^7.0.6" + "@types/prettier" "^2.1.5" + babel-preset-current-node-syntax "^1.0.0" + chalk "^4.0.0" + expect "^28.1.3" + graceful-fs "^4.2.9" + jest-diff "^28.1.3" + jest-get-type "^28.0.2" + jest-haste-map "^28.1.3" + jest-matcher-utils "^28.1.3" + jest-message-util "^28.1.3" + jest-util "^28.1.3" + natural-compare "^1.4.0" + pretty-format "^28.1.3" + semver "^7.3.5" + +jest-util@^28.0.0, jest-util@^28.1.3: + version "28.1.3" + resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-28.1.3.tgz#f4f932aa0074f0679943220ff9cbba7e497028b0" + integrity sha512-XdqfpHwpcSRko/C35uLYFM2emRAltIIKZiJ9eAmhjsj0CqZMa0p1ib0R5fWIqGhn1a103DebTbpqIaP1qCQ6tQ== + dependencies: + "@jest/types" "^28.1.3" + "@types/node" "*" + chalk "^4.0.0" + ci-info "^3.2.0" + graceful-fs "^4.2.9" + picomatch "^2.2.3" + +jest-validate@^28.1.3: + version "28.1.3" + resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-28.1.3.tgz#e322267fd5e7c64cea4629612c357bbda96229df" + integrity sha512-SZbOGBWEsaTxBGCOpsRWlXlvNkvTkY0XxRfh7zYmvd8uL5Qzyg0CHAXiXKROflh801quA6+/DsT4ODDthOC/OA== + dependencies: + "@jest/types" "^28.1.3" + camelcase "^6.2.0" + chalk "^4.0.0" + jest-get-type "^28.0.2" + leven "^3.1.0" + pretty-format "^28.1.3" + +jest-watcher@^28.1.3: + version "28.1.3" + resolved "https://registry.yarnpkg.com/jest-watcher/-/jest-watcher-28.1.3.tgz#c6023a59ba2255e3b4c57179fc94164b3e73abd4" + integrity sha512-t4qcqj9hze+jviFPUN3YAtAEeFnr/azITXQEMARf5cMwKY2SMBRnCQTXLixTl20OR6mLh9KLMrgVJgJISym+1g== + dependencies: + "@jest/test-result" "^28.1.3" + "@jest/types" "^28.1.3" + "@types/node" "*" + ansi-escapes "^4.2.1" + chalk "^4.0.0" + emittery "^0.10.2" + jest-util "^28.1.3" + string-length "^4.0.1" + +jest-worker@^28.1.3: + version "28.1.3" + resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-28.1.3.tgz#7e3c4ce3fa23d1bb6accb169e7f396f98ed4bb98" + integrity sha512-CqRA220YV/6jCo8VWvAt1KKx6eek1VIHMPeLEbpcfSfkEeWyBNppynM/o6q+Wmw+sOhos2ml34wZbSX3G13//g== + dependencies: + "@types/node" "*" + merge-stream "^2.0.0" + supports-color "^8.0.0" + +jest@28.1.3: + version "28.1.3" + resolved "https://registry.yarnpkg.com/jest/-/jest-28.1.3.tgz#e9c6a7eecdebe3548ca2b18894a50f45b36dfc6b" + integrity sha512-N4GT5on8UkZgH0O5LUavMRV1EDEhNTL0KEfRmDIeZHSV7p2XgLoY9t9VDUgL6o+yfdgYHVxuz81G8oB9VG5uyA== + dependencies: + "@jest/core" "^28.1.3" + "@jest/types" "^28.1.3" + import-local "^3.0.2" + jest-cli "^28.1.3" + +jiti@1.17.1: + version "1.17.1" + resolved "https://registry.yarnpkg.com/jiti/-/jiti-1.17.1.tgz#264daa43ee89a03e8be28c3d712ccc4eb9f1e8ed" + integrity 
sha512-NZIITw8uZQFuzQimqjUxIrIcEdxYDFIe/0xYfIlVXTkiBjjyBEvgasj5bb0/cHtPRD/NziPbT312sFrkI5ALpw== + +jose@^4.11.4: + version "4.14.2" + resolved "https://registry.yarnpkg.com/jose/-/jose-4.14.2.tgz#f126e2805555882f0d86d1f84bcf00481d5ab3bf" + integrity sha512-Fcbi5lskAiSvs8qhdQBusANZWwyATdp7IxgHJTXiaU74sbVjX9uAw+myDPvI8pNo2wXKHECXCR63hqhRkN/SSQ== + +joycon@^3.0.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/joycon/-/joycon-3.1.1.tgz#bce8596d6ae808f8b68168f5fc69280996894f03" + integrity sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw== + +"js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" + integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== + +js-yaml@^3.13.1: + version "3.14.1" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" + integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== + dependencies: + argparse "^1.0.7" + esprima "^4.0.0" + +js-yaml@^4.0.0, js-yaml@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" + integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== + dependencies: + argparse "^2.0.1" + +jsesc@^2.5.1: + version "2.5.2" + resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" + integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== + +json-parse-even-better-errors@^2.3.0: + version "2.3.1" + resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" + integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== + +json-schema-ref-parser@^9.0.9: + version "9.0.9" + resolved "https://registry.yarnpkg.com/json-schema-ref-parser/-/json-schema-ref-parser-9.0.9.tgz#66ea538e7450b12af342fa3d5b8458bc1e1e013f" + integrity sha512-qcP2lmGy+JUoQJ4DOQeLaZDqH9qSkeGCK3suKWxJXS82dg728Mn3j97azDMaOUmJAN4uCq91LdPx4K7E8F1a7Q== + dependencies: + "@apidevtools/json-schema-ref-parser" "9.0.9" + +json-schema-traverse@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" + integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== + +json-stable-stringify-without-jsonify@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" + integrity sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw== + +json-stable-stringify@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/json-stable-stringify/-/json-stable-stringify-1.0.2.tgz#e06f23128e0bbe342dc996ed5a19e28b57b580e0" + integrity sha512-eunSSaEnxV12z+Z73y/j5N37/In40GK4GmsSy+tEHJMxknvqnA7/djeYtAgW0GsWHUfg+847WJjKaEylk2y09g== + dependencies: + jsonify "^0.0.1" + +json-to-pretty-yaml@^1.2.2: + version "1.2.2" + resolved 
"https://registry.yarnpkg.com/json-to-pretty-yaml/-/json-to-pretty-yaml-1.2.2.tgz#f4cd0bd0a5e8fe1df25aaf5ba118b099fd992d5b" + integrity sha512-rvm6hunfCcqegwYaG5T4yKJWxc9FXFgBVrcTZ4XfSVRwa5HA/Xs+vB/Eo9treYYHCeNM0nrSUr82V/M31Urc7A== + dependencies: + remedial "^1.0.7" + remove-trailing-spaces "^1.0.6" + +json5@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.2.tgz#63d98d60f21b313b77c4d6da18bfa69d80e1d593" + integrity sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA== + dependencies: + minimist "^1.2.0" + +json5@^2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.1.tgz#655d50ed1e6f95ad1a3caababd2b0efda10b395c" + integrity sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA== + +json5@^2.2.2: + version "2.2.3" + resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.3.tgz#78cd6f1a19bdc12b73db5ad0c61efd66c1e29283" + integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg== + +jsonc-parser@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/jsonc-parser/-/jsonc-parser-3.2.0.tgz#31ff3f4c2b9793f89c67212627c51c6394f88e76" + integrity sha512-gfFQZrcTc8CnKXp6Y4/CBT3fTc0OVuDofpre4aEeEpSBPV5X5v4+Vmx+8snU7RLPrNHPKSgLxGo9YuQzz20o+w== + +jsonfile@^6.0.1: + version "6.1.0" + resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-6.1.0.tgz#bc55b2634793c679ec6403094eb13698a6ec0aae" + integrity sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ== + dependencies: + universalify "^2.0.0" + optionalDependencies: + graceful-fs "^4.1.6" + +jsonify@^0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/jsonify/-/jsonify-0.0.1.tgz#2aa3111dae3d34a0f151c63f3a45d995d9420978" + integrity sha512-2/Ki0GcmuqSrgFyelQq9M05y7PS0mEwuIzrf3f1fPqkVDVRvZrPZtVSMHxdgo8Aq0sxAOb/cr2aqqA3LeWHVPg== + +kleur@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e" + integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== + +leven@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2" + integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A== + +levn@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade" + integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ== + dependencies: + prelude-ls "^1.2.1" + type-check "~0.4.0" + +lilconfig@^2.0.5: + version "2.0.6" + resolved "https://registry.yarnpkg.com/lilconfig/-/lilconfig-2.0.6.tgz#32a384558bd58af3d4c6e077dd1ad1d397bc69d4" + integrity sha512-9JROoBW7pobfsx+Sq2JsASvCo6Pfo6WWoUW79HuB1BCoBXD4PLWJPqDF6fNj67pqBYTbAHkE57M1kS/+L1neOg== + +lines-and-columns@^1.1.6: + version "1.2.4" + resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632" + integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg== + +listr2@^4.0.5: + version "4.0.5" + resolved "https://registry.yarnpkg.com/listr2/-/listr2-4.0.5.tgz#9dcc50221583e8b4c71c43f9c7dfd0ef546b75d5" + integrity 
sha512-juGHV1doQdpNT3GSTs9IUN43QJb7KHdF9uqg7Vufs/tG9VTzpFphqF4pm/ICdAABGQxsyNn9CiYA3StkI6jpwA== + dependencies: + cli-truncate "^2.1.0" + colorette "^2.0.16" + log-update "^4.0.0" + p-map "^4.0.0" + rfdc "^1.3.0" + rxjs "^7.5.5" + through "^2.3.8" + wrap-ansi "^7.0.0" + +load-tsconfig@^0.2.0: + version "0.2.3" + resolved "https://registry.yarnpkg.com/load-tsconfig/-/load-tsconfig-0.2.3.tgz#08af3e7744943caab0c75f8af7f1703639c3ef1f" + integrity sha512-iyT2MXws+dc2Wi6o3grCFtGXpeMvHmJqS27sMPGtV2eUu4PeFnG+33I8BlFK1t1NWMjOpcx9bridn5yxLDX2gQ== + +locate-path@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0" + integrity sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g== + dependencies: + p-locate "^4.1.0" + +locate-path@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286" + integrity sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw== + dependencies: + p-locate "^5.0.0" + +lodash.memoize@4.x: + version "4.1.2" + resolved "https://registry.yarnpkg.com/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe" + integrity sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag== + +lodash.merge@^4.6.2: + version "4.6.2" + resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" + integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== + +lodash.sortby@^4.7.0: + version "4.7.0" + resolved "https://registry.yarnpkg.com/lodash.sortby/-/lodash.sortby-4.7.0.tgz#edd14c824e2cc9c1e0b0a1b42bb5210516a42438" + integrity sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA== + +lodash@^4.17.20, lodash@^4.17.21, lodash@~4.17.0: + version "4.17.21" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" + integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== + +log-symbols@^4.0.0, log-symbols@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-4.1.0.tgz#3fbdbb95b4683ac9fc785111e792e558d4abd503" + integrity sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg== + dependencies: + chalk "^4.1.0" + is-unicode-supported "^0.1.0" + +log-update@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/log-update/-/log-update-4.0.0.tgz#589ecd352471f2a1c0c570287543a64dfd20e0a1" + integrity sha512-9fkkDevMefjg0mmzWFBW8YkFP91OrizzkW3diF7CpG+S2EYdy4+TVfGwz1zeF8x7hCx1ovSPTOE9Ngib74qqUg== + dependencies: + ansi-escapes "^4.3.0" + cli-cursor "^3.1.0" + slice-ansi "^4.0.0" + wrap-ansi "^6.2.0" + +loose-envify@^1.0.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" + integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== + dependencies: + js-tokens "^3.0.0 || ^4.0.0" + +lower-case-first@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/lower-case-first/-/lower-case-first-2.0.2.tgz#64c2324a2250bf7c37c5901e76a5b5309301160b" + integrity 
sha512-EVm/rR94FJTZi3zefZ82fLWab+GX14LJN4HrWBcuo6Evmsl9hEfnqxgcHCKb9q+mNf6EVdsjx/qucYFIIB84pg== + dependencies: + tslib "^2.0.3" + +lower-case@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/lower-case/-/lower-case-2.0.2.tgz#6fa237c63dbdc4a82ca0fd882e4722dc5e634e28" + integrity sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg== + dependencies: + tslib "^2.0.3" + +lru-cache@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" + integrity sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w== + dependencies: + yallist "^3.0.2" + +lru-cache@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" + integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== + dependencies: + yallist "^4.0.0" + +lunr@^2.3.9: + version "2.3.9" + resolved "https://registry.yarnpkg.com/lunr/-/lunr-2.3.9.tgz#18b123142832337dd6e964df1a5a7707b25d35e1" + integrity sha512-zTU3DaZaF3Rt9rhN3uBMGQD3dD2/vFQqnvZCDv4dl5iOzq2IZQqTxu90r4E5J+nP70J3ilqVCrbho2eWaeW8Ow== + +make-dir@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f" + integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw== + dependencies: + semver "^6.0.0" + +make-error@1.x, make-error@^1.1.1: + version "1.3.6" + resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2" + integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw== + +makeerror@1.0.12: + version "1.0.12" + resolved "https://registry.yarnpkg.com/makeerror/-/makeerror-1.0.12.tgz#3e5dd2079a82e812e983cc6610c4a2cb0eaa801a" + integrity sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg== + dependencies: + tmpl "1.0.5" + +map-cache@^0.2.0: + version "0.2.2" + resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf" + integrity sha512-8y/eV9QQZCiyn1SprXSrCmqJN0yNRATe+PO8ztwqrvrbdRLA3eYJF0yaR0YayLWkMbsQSKWS9N2gPcGEc4UsZg== + +marked@^4.2.5: + version "4.2.5" + resolved "https://registry.yarnpkg.com/marked/-/marked-4.2.5.tgz#979813dfc1252cc123a79b71b095759a32f42a5d" + integrity sha512-jPueVhumq7idETHkb203WDD4fMA3yV9emQ5vLwop58lu8bTclMghBWcYAavlDqIEMaisADinV1TooIFCfqOsYQ== + +merge-stream@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" + integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== + +merge2@^1.3.0, merge2@^1.4.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" + integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== + +meros@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/meros/-/meros-1.2.1.tgz#056f7a76e8571d0aaf3c7afcbe7eb6407ff7329e" + integrity sha512-R2f/jxYqCAGI19KhAvaxSOxALBMkaXWH2a7rOyqQw+ZmizX5bKkEYWLzdhC+U82ZVVPVp6MCXe3EkVligh+12g== + +micromatch@^4.0.0, micromatch@^4.0.4: + version "4.0.5" + resolved 
"https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" + integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA== + dependencies: + braces "^3.0.2" + picomatch "^2.3.1" + +mime-db@1.52.0: + version "1.52.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" + integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== + +mime-types@^2.1.12: + version "2.1.35" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" + integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== + dependencies: + mime-db "1.52.0" + +mimic-fn@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" + integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== + +minimatch@4.2.3: + version "4.2.3" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-4.2.3.tgz#b4dcece1d674dee104bb0fb833ebb85a78cbbca6" + integrity sha512-lIUdtK5hdofgCTu3aT0sOaHsYR37viUuIc0rwnnDXImbwFRcumyLMeZaM0t0I/fgxS6s6JMfu0rLD1Wz9pv1ng== + dependencies: + brace-expansion "^1.1.7" + +minimatch@^3.0.4, minimatch@^3.1.1, minimatch@^3.1.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" + integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== + dependencies: + brace-expansion "^1.1.7" + +minimatch@^5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-5.1.2.tgz#0939d7d6f0898acbd1508abe534d1929368a8fff" + integrity sha512-bNH9mmM9qsJ2X4r2Nat1B//1dJVcn3+iBLa3IgqJ7EbGaDNepL9QSHOxN4ng33s52VMMhhIfgCYDk3C4ZmlDAg== + dependencies: + brace-expansion "^2.0.1" + +minimist@^1.2.0, minimist@^1.2.5, minimist@^1.2.6: + version "1.2.7" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.7.tgz#daa1c4d91f507390437c6a8bc01078e7000c4d18" + integrity sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g== + +ms@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" + integrity sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A== + +ms@2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" + integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== + +ms@^2.1.1: + version "2.1.3" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" + integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== + +mute-stream@0.0.8: + version "0.0.8" + resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.8.tgz#1630c42b2251ff81e2a283de96a5497ea92e5e0d" + integrity sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA== + +mz@^2.7.0: + version "2.7.0" + resolved "https://registry.yarnpkg.com/mz/-/mz-2.7.0.tgz#95008057a56cafadc2bc63dde7f9ff6955948e32" + integrity 
sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q== + dependencies: + any-promise "^1.0.0" + object-assign "^4.0.1" + thenify-all "^1.0.0" + +natural-compare@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" + integrity sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw== + +neo-async@^2.6.0: + version "2.6.2" + resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f" + integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw== + +no-case@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/no-case/-/no-case-3.0.4.tgz#d361fd5c9800f558551a8369fc0dcd4662b6124d" + integrity sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg== + dependencies: + lower-case "^2.0.2" + tslib "^2.0.3" + +node-fetch@2.6.7: + version "2.6.7" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" + integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ== + dependencies: + whatwg-url "^5.0.0" + +node-fetch@^2.6.1: + version "2.6.9" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.9.tgz#7c7f744b5cc6eb5fd404e0c7a9fec630a55657e6" + integrity sha512-DJm/CJkZkRjKKj4Zi4BsKVZh3ValV5IR5s7LVZnW+6YMh0W1BfNA8XSs6DLMGYlId5F3KnA70uu2qepcR08Qqg== + dependencies: + whatwg-url "^5.0.0" + +node-int64@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" + integrity sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw== + +node-releases@^2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.6.tgz#8a7088c63a55e493845683ebf3c828d8c51c5503" + integrity sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg== + +normalize-path@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9" + integrity sha512-3pKJwH184Xo/lnH6oyP1q2pMd7HcypqqmRs91/6/i2CGtWwIKGCkOOMTm/zXbgTEWHw1uNpNi/igc3ePOYHb6w== + dependencies: + remove-trailing-separator "^1.0.1" + +normalize-path@^3.0.0, normalize-path@~3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" + integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== + +npm-run-path@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea" + integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw== + dependencies: + path-key "^3.0.0" + +nullthrows@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/nullthrows/-/nullthrows-1.1.1.tgz#7818258843856ae971eae4208ad7d7eb19a431b1" + integrity sha512-2vPPEi+Z7WqML2jZYddDIfy5Dqb0r2fze2zTxNNknZaFpVHU3mFB3R+DWeJWGVx0ecvttSGlJTI+WG+8Z4cDWw== + +object-assign@^4.0.1, object-assign@^4.1.0: + version "4.1.1" + resolved 
"https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== + +object-inspect@^1.12.2, object-inspect@^1.9.0: + version "1.12.2" + resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.2.tgz#c0641f26394532f28ab8d796ab954e43c009a8ea" + integrity sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ== + +object-keys@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" + integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== + +object.assign@^4.1.2, object.assign@^4.1.4: + version "4.1.4" + resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.4.tgz#9673c7c7c351ab8c4d0b516f4343ebf4dfb7799f" + integrity sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + has-symbols "^1.0.3" + object-keys "^1.1.1" + +object.entries@^1.1.5: + version "1.1.5" + resolved "https://registry.yarnpkg.com/object.entries/-/object.entries-1.1.5.tgz#e1acdd17c4de2cd96d5a08487cfb9db84d881861" + integrity sha512-TyxmjUoZggd4OrrU1W66FMDG6CuqJxsFvymeyXI51+vQLN67zYfZseptRge703kKQdo4uccgAKebXFcRCzk4+g== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + +object.values@^1.1.5: + version "1.1.5" + resolved "https://registry.yarnpkg.com/object.values/-/object.values-1.1.5.tgz#959f63e3ce9ef108720333082131e4a459b716ac" + integrity sha512-QUZRW0ilQ3PnPpbNtgdNV1PDbEqLIiSFB3l+EnGtBQ/8SUTLj1PZwtQHABZtLgwpJZTSZhuGLOGk57Drx2IvYg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + +once@^1.3.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== + dependencies: + wrappy "1" + +onetime@^5.1.0, onetime@^5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" + integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg== + dependencies: + mimic-fn "^2.1.0" + +"openapi-typescript-codegen@https://github.com/aptos-labs/openapi-typescript-codegen/releases/download/v0.24.0-p1/openapi-typescript-codegen-v0.24.0-p1.tgz": + version "0.24.0" + resolved "https://github.com/aptos-labs/openapi-typescript-codegen/releases/download/v0.24.0-p1/openapi-typescript-codegen-v0.24.0-p1.tgz#36a66aa2dc0d02ffc75b87c0b16fac12f273a42a" + dependencies: + camelcase "^6.3.0" + commander "^9.3.0" + fs-extra "^10.1.0" + handlebars "^4.7.7" + json-schema-ref-parser "^9.0.9" + +optionator@^0.9.1: + version "0.9.1" + resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.1.tgz#4f236a6373dae0566a6d43e1326674f50c291499" + integrity sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw== + dependencies: + deep-is "^0.1.3" + fast-levenshtein "^2.0.6" + levn "^0.4.1" + prelude-ls "^1.2.1" + type-check "^0.4.0" + word-wrap "^1.2.3" + +ora@^5.4.1: + version "5.4.1" + resolved 
"https://registry.yarnpkg.com/ora/-/ora-5.4.1.tgz#1b2678426af4ac4a509008e5e4ac9e9959db9e18" + integrity sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ== + dependencies: + bl "^4.1.0" + chalk "^4.1.0" + cli-cursor "^3.1.0" + cli-spinners "^2.5.0" + is-interactive "^1.0.0" + is-unicode-supported "^0.1.0" + log-symbols "^4.1.0" + strip-ansi "^6.0.0" + wcwidth "^1.0.1" + +os-tmpdir@~1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" + integrity sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g== + +p-limit@3.1.0, p-limit@^3.0.2, p-limit@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b" + integrity sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ== + dependencies: + yocto-queue "^0.1.0" + +p-limit@^2.2.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" + integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== + dependencies: + p-try "^2.0.0" + +p-locate@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07" + integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A== + dependencies: + p-limit "^2.2.0" + +p-locate@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-5.0.0.tgz#83c8315c6785005e3bd021839411c9e110e6d834" + integrity sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw== + dependencies: + p-limit "^3.0.2" + +p-map@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/p-map/-/p-map-4.0.0.tgz#bb2f95a5eda2ec168ec9274e06a747c3e2904d2b" + integrity sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ== + dependencies: + aggregate-error "^3.0.0" + +p-try@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" + integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== + +param-case@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/param-case/-/param-case-3.0.4.tgz#7d17fe4aa12bde34d4a77d91acfb6219caad01c5" + integrity sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A== + dependencies: + dot-case "^3.0.4" + tslib "^2.0.3" + +parent-module@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" + integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== + dependencies: + callsites "^3.0.0" + +parse-filepath@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/parse-filepath/-/parse-filepath-1.0.2.tgz#a632127f53aaf3d15876f5872f3ffac763d6c891" + integrity sha512-FwdRXKCohSVeXqwtYonZTXtbGJKrn+HNyWDYVcp5yuJlesTwNH4rsmRZ+GrKAPJ5bLpRxESMeS+Rl0VCHRvB2Q== + dependencies: + is-absolute "^1.0.0" + map-cache "^0.2.0" + path-root "^0.1.1" + +parse-json@^5.0.0, parse-json@^5.2.0: + version "5.2.0" + resolved 
"https://registry.yarnpkg.com/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd" + integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg== + dependencies: + "@babel/code-frame" "^7.0.0" + error-ex "^1.3.1" + json-parse-even-better-errors "^2.3.0" + lines-and-columns "^1.1.6" + +pascal-case@^3.1.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/pascal-case/-/pascal-case-3.1.2.tgz#b48e0ef2b98e205e7c1dae747d0b1508237660eb" + integrity sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g== + dependencies: + no-case "^3.0.4" + tslib "^2.0.3" + +path-case@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/path-case/-/path-case-3.0.4.tgz#9168645334eb942658375c56f80b4c0cb5f82c6f" + integrity sha512-qO4qCFjXqVTrcbPt/hQfhTQ+VhFsqNKOPtytgNKkKxSoEp3XPUQ8ObFuePylOIok5gjn69ry8XiULxCwot3Wfg== + dependencies: + dot-case "^3.0.4" + tslib "^2.0.3" + +path-exists@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" + integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== + +path-is-absolute@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== + +path-key@^3.0.0, path-key@^3.1.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" + integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== + +path-parse@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" + integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== + +path-root-regex@^0.1.0: + version "0.1.2" + resolved "https://registry.yarnpkg.com/path-root-regex/-/path-root-regex-0.1.2.tgz#bfccdc8df5b12dc52c8b43ec38d18d72c04ba96d" + integrity sha512-4GlJ6rZDhQZFE0DPVKh0e9jmZ5egZfxTkp7bcRDuPlJXbAwhxcl2dINPUAsjLdejqaLsCeg8axcLjIbvBjN4pQ== + +path-root@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/path-root/-/path-root-0.1.1.tgz#9a4a6814cac1c0cd73360a95f32083c8ea4745b7" + integrity sha512-QLcPegTHF11axjfojBIoDygmS2E3Lf+8+jI6wOVmNVenrKSo3mFdSGiIgdSHenczw3wPtlVMQaFVwGmM7BJdtg== + dependencies: + path-root-regex "^0.1.0" + +path-type@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" + integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== + +picocolors@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" + integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== + +picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.3, picomatch@^2.3.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" + integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== + +pirates@^4.0.1, pirates@^4.0.4: + version 
"4.0.5" + resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.5.tgz#feec352ea5c3268fb23a37c702ab1699f35a5f3b" + integrity sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ== + +pkg-dir@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3" + integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ== + dependencies: + find-up "^4.0.0" + +postcss-load-config@^3.0.1: + version "3.1.4" + resolved "https://registry.yarnpkg.com/postcss-load-config/-/postcss-load-config-3.1.4.tgz#1ab2571faf84bb078877e1d07905eabe9ebda855" + integrity sha512-6DiM4E7v4coTE4uzA8U//WhtPwyhiim3eyjEMFCnUpzbrkK9wJHgKDT2mR+HbtSrd/NubVaYTOpSpjUl8NQeRg== + dependencies: + lilconfig "^2.0.5" + yaml "^1.10.2" + +prelude-ls@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" + integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== + +prettier@2.6.2: + version "2.6.2" + resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.6.2.tgz#e26d71a18a74c3d0f0597f55f01fb6c06c206032" + integrity sha512-PkUpF+qoXTqhOeWL9fu7As8LXsIUZ1WYaJiY/a7McAQzxjk82OF0tibkFXVCDImZtWxbvojFjerkiLb0/q8mew== + +pretty-format@^28.0.0, pretty-format@^28.1.3: + version "28.1.3" + resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-28.1.3.tgz#c9fba8cedf99ce50963a11b27d982a9ae90970d5" + integrity sha512-8gFb/To0OmxHR9+ZTb14Df2vNxdGCX8g1xWGUTqUw5TiZvcQf5sHKObd5UcPyLLyowNwDAMTF3XWOG1B6mxl1Q== + dependencies: + "@jest/schemas" "^28.1.3" + ansi-regex "^5.0.1" + ansi-styles "^5.0.0" + react-is "^18.0.0" + +promise@^7.1.1: + version "7.3.1" + resolved "https://registry.yarnpkg.com/promise/-/promise-7.3.1.tgz#064b72602b18f90f29192b8b1bc418ffd1ebd3bf" + integrity sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg== + dependencies: + asap "~2.0.3" + +prompts@^2.0.1: + version "2.4.2" + resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069" + integrity sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q== + dependencies: + kleur "^3.0.3" + sisteransi "^1.0.5" + +punycode@^1.3.2: + version "1.4.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" + integrity sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ== + +punycode@^2.1.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" + integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== + +pvtsutils@^1.3.2: + version "1.3.2" + resolved "https://registry.yarnpkg.com/pvtsutils/-/pvtsutils-1.3.2.tgz#9f8570d132cdd3c27ab7d51a2799239bf8d8d5de" + integrity sha512-+Ipe2iNUyrZz+8K/2IOo+kKikdtfhRKzNpQbruF2URmqPtoqAs8g3xS7TJvFF2GcPXjh7DkqMnpVveRFq4PgEQ== + dependencies: + tslib "^2.4.0" + +pvutils@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/pvutils/-/pvutils-1.1.3.tgz#f35fc1d27e7cd3dfbd39c0826d173e806a03f5a3" + integrity sha512-pMpnA0qRdFp32b1sJl1wOJNxZLQ2cbQx+k6tjNtZ8CpvVhNqEPRgivZ2WOUev2YMajecdH7ctUPDvEe87nariQ== + +queue-microtask@^1.2.2: + version "1.2.3" + resolved 
"https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" + integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== + +react-is@^18.0.0: + version "18.2.0" + resolved "https://registry.yarnpkg.com/react-is/-/react-is-18.2.0.tgz#199431eeaaa2e09f86427efbb4f1473edb47609b" + integrity sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w== + +readable-stream@^3.4.0: + version "3.6.2" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967" + integrity sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA== + dependencies: + inherits "^2.0.3" + string_decoder "^1.1.1" + util-deprecate "^1.0.1" + +readdirp@~3.6.0: + version "3.6.0" + resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7" + integrity sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA== + dependencies: + picomatch "^2.2.1" + +regenerator-runtime@^0.13.11: + version "0.13.11" + resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz#f6dca3e7ceec20590d07ada785636a90cdca17f9" + integrity sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg== + +regexp.prototype.flags@^1.4.3: + version "1.4.3" + resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz#87cab30f80f66660181a3bb7bf5981a872b367ac" + integrity sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + functions-have-names "^1.2.2" + +regexpp@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-3.2.0.tgz#0425a2768d8f23bad70ca4b90461fa2f1213e1b2" + integrity sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg== + +relay-runtime@12.0.0: + version "12.0.0" + resolved "https://registry.yarnpkg.com/relay-runtime/-/relay-runtime-12.0.0.tgz#1e039282bdb5e0c1b9a7dc7f6b9a09d4f4ff8237" + integrity sha512-QU6JKr1tMsry22DXNy9Whsq5rmvwr3LSZiiWV/9+DFpuTWvp+WFhobWMc8TC4OjKFfNhEZy7mOiqUAn5atQtug== + dependencies: + "@babel/runtime" "^7.0.0" + fbjs "^3.0.0" + invariant "^2.2.4" + +remedial@^1.0.7: + version "1.0.8" + resolved "https://registry.yarnpkg.com/remedial/-/remedial-1.0.8.tgz#a5e4fd52a0e4956adbaf62da63a5a46a78c578a0" + integrity sha512-/62tYiOe6DzS5BqVsNpH/nkGlX45C/Sp6V+NtiN6JQNS1Viay7cWkazmRkrQrdFj2eshDe96SIQNIoMxqhzBOg== + +remove-trailing-separator@^1.0.1: + version "1.1.0" + resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef" + integrity sha512-/hS+Y0u3aOfIETiaiirUFwDBDzmXPvO+jAfKTitUngIPzdKc6Z0LoFjM/CK5PL4C+eKwHohlHAb6H0VFfmmUsw== + +remove-trailing-spaces@^1.0.6: + version "1.0.8" + resolved "https://registry.yarnpkg.com/remove-trailing-spaces/-/remove-trailing-spaces-1.0.8.tgz#4354d22f3236374702f58ee373168f6d6887ada7" + integrity sha512-O3vsMYfWighyFbTd8hk8VaSj9UAGENxAtX+//ugIst2RMk5e03h6RoIS+0ylsFxY1gvmPuAY/PO4It+gPEeySA== + +require-directory@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" + integrity 
sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q== + +require-main-filename@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b" + integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg== + +resolve-cwd@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-3.0.0.tgz#0f0075f1bb2544766cf73ba6a6e2adfebcb13f2d" + integrity sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg== + dependencies: + resolve-from "^5.0.0" + +resolve-from@5.0.0, resolve-from@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" + integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== + +resolve-from@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" + integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== + +resolve.exports@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/resolve.exports/-/resolve.exports-1.1.0.tgz#5ce842b94b05146c0e03076985d1d0e7e48c90c9" + integrity sha512-J1l+Zxxp4XK3LUDZ9m60LRJF/mAe4z6a4xyabPHk7pvK5t35dACV32iIjJDFeWZFfZlO29w6SZ67knR0tHzJtQ== + +resolve@^1.20.0, resolve@^1.22.0: + version "1.22.1" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.1.tgz#27cb2ebb53f91abb49470a928bba7558066ac177" + integrity sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw== + dependencies: + is-core-module "^2.9.0" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" + +restore-cursor@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-3.1.0.tgz#39f67c54b3a7a58cea5236d95cf0034239631f7e" + integrity sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA== + dependencies: + onetime "^5.1.0" + signal-exit "^3.0.2" + +reusify@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" + integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== + +rfdc@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/rfdc/-/rfdc-1.3.0.tgz#d0b7c441ab2720d05dc4cf26e01c89631d9da08b" + integrity sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA== + +rimraf@^3.0.0, rimraf@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" + integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== + dependencies: + glob "^7.1.3" + +rollup@^2.74.1: + version "2.79.1" + resolved "https://registry.yarnpkg.com/rollup/-/rollup-2.79.1.tgz#bedee8faef7c9f93a2647ac0108748f497f081c7" + integrity sha512-uKxbd0IhMZOhjAiD5oAFp7BqvkA4Dv47qpOCtaNvng4HBwdbWtdOh8f5nZNuk2rp51PMGk3bzfWu5oayNEuYnw== + optionalDependencies: + fsevents "~2.3.2" + +run-async@^2.4.0: + version "2.4.1" + resolved "https://registry.yarnpkg.com/run-async/-/run-async-2.4.1.tgz#8440eccf99ea3e70bd409d49aab88e10c189a455" + integrity 
sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ== + +run-parallel@^1.1.9: + version "1.2.0" + resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" + integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== + dependencies: + queue-microtask "^1.2.2" + +rxjs@^7.5.5: + version "7.8.0" + resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-7.8.0.tgz#90a938862a82888ff4c7359811a595e14e1e09a4" + integrity sha512-F2+gxDshqmIub1KdvZkaEfGDwLNpPvk9Fs6LD/MyQxNgMds/WH9OdDDXOmxUZpME+iSK3rQCctkL0DYyytUqMg== + dependencies: + tslib "^2.1.0" + +safe-buffer@~5.2.0: + version "5.2.1" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" + integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== + +safe-regex-test@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/safe-regex-test/-/safe-regex-test-1.0.0.tgz#793b874d524eb3640d1873aad03596db2d4f2295" + integrity sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA== + dependencies: + call-bind "^1.0.2" + get-intrinsic "^1.1.3" + is-regex "^1.1.4" + +"safer-buffer@>= 2.1.2 < 3": + version "2.1.2" + resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" + integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== + +scuid@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/scuid/-/scuid-1.1.0.tgz#d3f9f920956e737a60f72d0e4ad280bf324d5dab" + integrity sha512-MuCAyrGZcTLfQoH2XoBlQ8C6bzwN88XT/0slOGz0pn8+gIP85BOAfYa44ZXQUTOwRwPU0QvgU+V+OSajl/59Xg== + +semver@7.x, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7: + version "7.3.8" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.8.tgz#07a78feafb3f7b32347d725e33de7e2a2df67798" + integrity sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A== + dependencies: + lru-cache "^6.0.0" + +semver@^6.0.0, semver@^6.3.0: + version "6.3.0" + resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" + integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== + +sentence-case@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/sentence-case/-/sentence-case-3.0.4.tgz#3645a7b8c117c787fde8702056225bb62a45131f" + integrity sha512-8LS0JInaQMCRoQ7YUytAo/xUu5W2XnQxV2HI/6uM6U7CITS1RqPElr30V6uIqyMKM9lJGRVFy5/4CuzcixNYSg== + dependencies: + no-case "^3.0.4" + tslib "^2.0.3" + upper-case-first "^2.0.2" + +set-blocking@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" + integrity sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw== + +setimmediate@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285" + integrity sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA== + +shebang-command@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" + integrity 
sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== + dependencies: + shebang-regex "^3.0.0" + +shebang-regex@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" + integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== + +shell-quote@^1.7.3: + version "1.8.1" + resolved "https://registry.yarnpkg.com/shell-quote/-/shell-quote-1.8.1.tgz#6dbf4db75515ad5bac63b4f1894c3a154c766680" + integrity sha512-6j1W9l1iAs/4xYBI1SYOVZyFcCis9b4KCLQ8fgAGG07QvzaRLVVRQvAy85yNmmZSjYjg4MWh4gNvlPujU/5LpA== + +shiki@^0.12.1: + version "0.12.1" + resolved "https://registry.yarnpkg.com/shiki/-/shiki-0.12.1.tgz#26fce51da12d055f479a091a5307470786f300cd" + integrity sha512-aieaV1m349rZINEBkjxh2QbBvFFQOlgqYTNtCal82hHj4dDZ76oMlQIX+C7ryerBTDiga3e5NfH6smjdJ02BbQ== + dependencies: + jsonc-parser "^3.2.0" + vscode-oniguruma "^1.7.0" + vscode-textmate "^8.0.0" + +side-channel@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" + integrity sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw== + dependencies: + call-bind "^1.0.0" + get-intrinsic "^1.0.2" + object-inspect "^1.9.0" + +signal-exit@^3.0.2, signal-exit@^3.0.3, signal-exit@^3.0.7: + version "3.0.7" + resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" + integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== + +signedsource@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/signedsource/-/signedsource-1.0.0.tgz#1ddace4981798f93bd833973803d80d52e93ad6a" + integrity sha512-6+eerH9fEnNmi/hyM1DXcRK3pWdoMQtlkQ+ns0ntzunjKqp5i3sKCc80ym8Fib3iaYhdJUOPdhlJWj1tvge2Ww== + +sisteransi@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed" + integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg== + +slash@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" + integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== + +slice-ansi@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-3.0.0.tgz#31ddc10930a1b7e0b67b08c96c2f49b77a789787" + integrity sha512-pSyv7bSTC7ig9Dcgbw9AuRNUb5k5V6oDudjZoMBSr13qpLBG7tB+zgCkARjq7xIUgdz5P1Qe8u+rSGdouOOIyQ== + dependencies: + ansi-styles "^4.0.0" + astral-regex "^2.0.0" + is-fullwidth-code-point "^3.0.0" + +slice-ansi@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-4.0.0.tgz#500e8dd0fd55b05815086255b3195adf2a45fe6b" + integrity sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ== + dependencies: + ansi-styles "^4.0.0" + astral-regex "^2.0.0" + is-fullwidth-code-point "^3.0.0" + +snake-case@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/snake-case/-/snake-case-3.0.4.tgz#4f2bbd568e9935abdfd593f34c691dadb49c452c" + integrity sha512-LAOh4z89bGQvl9pFfNF8V146i7o7/CqFPbqzYgP+yYzDIDeS9HaNFtXABamRW+AQzEVODcvE79ljJ+8a9YSdMg== + dependencies: + dot-case "^3.0.4" + tslib "^2.0.3" + +source-map-support@0.5.13: 
+ version "0.5.13" + resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.13.tgz#31b24a9c2e73c2de85066c0feb7d44767ed52932" + integrity sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w== + dependencies: + buffer-from "^1.0.0" + source-map "^0.6.0" + +source-map@0.8.0-beta.0: + version "0.8.0-beta.0" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.8.0-beta.0.tgz#d4c1bb42c3f7ee925f005927ba10709e0d1d1f11" + integrity sha512-2ymg6oRBpebeZi9UUNsgQ89bhx01TcTkmNTGnNO88imTmbSgy4nfujrgVEFKWpMTEGA11EDkTt7mqObTPdigIA== + dependencies: + whatwg-url "^7.0.0" + +source-map@^0.6.0, source-map@^0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" + integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== + +sponge-case@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/sponge-case/-/sponge-case-1.0.1.tgz#260833b86453883d974f84854cdb63aecc5aef4c" + integrity sha512-dblb9Et4DAtiZ5YSUZHLl4XhH4uK80GhAZrVXdN4O2P4gQ40Wa5UIOPUHlA/nFd2PLblBZWUioLMMAVrgpoYcA== + dependencies: + tslib "^2.0.3" + +sprintf-js@~1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" + integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== + +stack-utils@^2.0.3: + version "2.0.5" + resolved "https://registry.yarnpkg.com/stack-utils/-/stack-utils-2.0.5.tgz#d25265fca995154659dbbfba3b49254778d2fdd5" + integrity sha512-xrQcmYhOsn/1kX+Vraq+7j4oE2j/6BFscZ0etmYg81xuM8Gq0022Pxb8+IqgOFUIaxHs0KaSb7T1+OegiNrNFA== + dependencies: + escape-string-regexp "^2.0.0" + +streamsearch@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/streamsearch/-/streamsearch-1.1.0.tgz#404dd1e2247ca94af554e841a8ef0eaa238da764" + integrity sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg== + +string-env-interpolation@1.0.1, string-env-interpolation@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/string-env-interpolation/-/string-env-interpolation-1.0.1.tgz#ad4397ae4ac53fe6c91d1402ad6f6a52862c7152" + integrity sha512-78lwMoCcn0nNu8LszbP1UA7g55OeE4v7rCeWnM5B453rnNr4aq+5it3FEYtZrSEiMvHZOZ9Jlqb0OD0M2VInqg== + +string-length@^4.0.1: + version "4.0.2" + resolved "https://registry.yarnpkg.com/string-length/-/string-length-4.0.2.tgz#a8a8dc7bd5c1a82b9b3c8b87e125f66871b6e57a" + integrity sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ== + dependencies: + char-regex "^1.0.2" + strip-ansi "^6.0.0" + +string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: + version "4.2.3" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + +string.prototype.trimend@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.5.tgz#914a65baaab25fbdd4ee291ca7dde57e869cb8d0" + integrity sha512-I7RGvmjV4pJ7O3kdf+LXFpVfdNOxtCW/2C8f6jNiW4+PQchwxkCDzlk1/7p+Wl4bqFIZeF47qAHXLuHHWKAxog== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract 
"^1.19.5" + +string.prototype.trimstart@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.5.tgz#5466d93ba58cfa2134839f81d7f42437e8c01fef" + integrity sha512-THx16TJCGlsN0o6dl2o6ncWUsdgnLRSA23rRE5pyGBw/mLr3Ej/R2LaqCtgP8VNMGZsvMWnf9ooZPyY2bHvUFg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.19.5" + +string_decoder@^1.1.1: + version "1.3.0" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" + integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== + dependencies: + safe-buffer "~5.2.0" + +strip-ansi@^6.0.0, strip-ansi@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + +strip-bom@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" + integrity sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA== + +strip-bom@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-4.0.0.tgz#9c3505c1db45bcedca3d9cf7a16f5c5aa3901878" + integrity sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w== + +strip-final-newline@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad" + integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== + +strip-json-comments@^3.1.0, strip-json-comments@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" + integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== + +sucrase@^3.20.3: + version "3.28.0" + resolved "https://registry.yarnpkg.com/sucrase/-/sucrase-3.28.0.tgz#7fd8b3118d2155fcdf291088ab77fa6eefd63c4c" + integrity sha512-TK9600YInjuiIhVM3729rH4ZKPOsGeyXUwY+Ugu9eilNbdTFyHr6XcAGYbRVZPDgWj6tgI7bx95aaJjHnbffag== + dependencies: + commander "^4.0.0" + glob "7.1.6" + lines-and-columns "^1.1.6" + mz "^2.7.0" + pirates "^4.0.1" + ts-interface-checker "^0.1.9" + +supports-color@^5.3.0: + version "5.5.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" + integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== + dependencies: + has-flag "^3.0.0" + +supports-color@^7.0.0, supports-color@^7.1.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" + integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== + dependencies: + has-flag "^4.0.0" + +supports-color@^8.0.0: + version "8.1.1" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c" + integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== + dependencies: + has-flag "^4.0.0" 
+ +supports-hyperlinks@^2.0.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/supports-hyperlinks/-/supports-hyperlinks-2.3.0.tgz#3943544347c1ff90b15effb03fc14ae45ec10624" + integrity sha512-RpsAZlpWcDwOPQA22aCH4J0t7L8JmAvsCxfOSEwm7cQs3LshN36QaTkwd70DnBOXDWGssw2eUoc8CaRWT0XunA== + dependencies: + has-flag "^4.0.0" + supports-color "^7.0.0" + +supports-preserve-symlinks-flag@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" + integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== + +swap-case@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/swap-case/-/swap-case-2.0.2.tgz#671aedb3c9c137e2985ef51c51f9e98445bf70d9" + integrity sha512-kc6S2YS/2yXbtkSMunBtKdah4VFETZ8Oh6ONSmSd9bRxhqTrtARUCBUiWXH3xVPpvR7tz2CSnkuXVE42EcGnMw== + dependencies: + tslib "^2.0.3" + +tapable@^2.2.0: + version "2.2.1" + resolved "https://registry.yarnpkg.com/tapable/-/tapable-2.2.1.tgz#1967a73ef4060a82f12ab96af86d52fdb76eeca0" + integrity sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ== + +terminal-link@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/terminal-link/-/terminal-link-2.1.1.tgz#14a64a27ab3c0df933ea546fba55f2d078edc994" + integrity sha512-un0FmiRUQNr5PJqy9kP7c40F5BOfpGlYTrxonDChEZB7pzZxRNp/bt+ymiy9/npwXya9KH99nJ/GXFIiUkYGFQ== + dependencies: + ansi-escapes "^4.2.1" + supports-hyperlinks "^2.0.0" + +test-exclude@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" + integrity sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w== + dependencies: + "@istanbuljs/schema" "^0.1.2" + glob "^7.1.4" + minimatch "^3.0.4" + +text-table@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" + integrity sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw== + +thenify-all@^1.0.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/thenify-all/-/thenify-all-1.6.0.tgz#1a1918d402d8fc3f98fbf234db0bcc8cc10e9726" + integrity sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA== + dependencies: + thenify ">= 3.1.0 < 4" + +"thenify@>= 3.1.0 < 4": + version "3.3.1" + resolved "https://registry.yarnpkg.com/thenify/-/thenify-3.3.1.tgz#8932e686a4066038a016dd9e2ca46add9838a95f" + integrity sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw== + dependencies: + any-promise "^1.0.0" + +through@^2.3.6, through@^2.3.8: + version "2.3.8" + resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" + integrity sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg== + +title-case@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/title-case/-/title-case-3.0.3.tgz#bc689b46f02e411f1d1e1d081f7c3deca0489982" + integrity sha512-e1zGYRvbffpcHIrnuqT0Dh+gEJtDaxDSoG4JAIpq4oDFyooziLBIiYQv0GBT4FUAnUop5uZ1hiIAj7oAF6sOCA== + dependencies: + tslib "^2.0.3" + +tmp@^0.0.33: + version "0.0.33" + resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9" + integrity 
sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw== + dependencies: + os-tmpdir "~1.0.2" + +tmpl@1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc" + integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw== + +to-fast-properties@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" + integrity sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog== + +to-regex-range@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" + integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== + dependencies: + is-number "^7.0.0" + +tr46@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/tr46/-/tr46-1.0.1.tgz#a8b13fd6bfd2489519674ccde55ba3693b706d09" + integrity sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA== + dependencies: + punycode "^2.1.0" + +tr46@~0.0.3: + version "0.0.3" + resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" + integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw== + +tree-kill@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/tree-kill/-/tree-kill-1.2.2.tgz#4ca09a9092c88b73a7cdc5e8a01b507b0790a0cc" + integrity sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A== + +ts-interface-checker@^0.1.9: + version "0.1.13" + resolved "https://registry.yarnpkg.com/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz#784fd3d679722bc103b1b4b8030bcddb5db2a699" + integrity sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA== + +ts-jest@28.0.8: + version "28.0.8" + resolved "https://registry.yarnpkg.com/ts-jest/-/ts-jest-28.0.8.tgz#cd204b8e7a2f78da32cf6c95c9a6165c5b99cc73" + integrity sha512-5FaG0lXmRPzApix8oFG8RKjAz4ehtm8yMKOTy5HX3fY6W8kmvOrmcY0hKDElW52FJov+clhUbrKAqofnj4mXTg== + dependencies: + bs-logger "0.x" + fast-json-stable-stringify "2.x" + jest-util "^28.0.0" + json5 "^2.2.1" + lodash.memoize "4.x" + make-error "1.x" + semver "7.x" + yargs-parser "^21.0.1" + +ts-loader@9.3.1: + version "9.3.1" + resolved "https://registry.yarnpkg.com/ts-loader/-/ts-loader-9.3.1.tgz#fe25cca56e3e71c1087fe48dc67f4df8c59b22d4" + integrity sha512-OkyShkcZTsTwyS3Kt7a4rsT/t2qvEVQuKCTg4LJmpj9fhFR7ukGdZwV6Qq3tRUkqcXtfGpPR7+hFKHCG/0d3Lw== + dependencies: + chalk "^4.1.0" + enhanced-resolve "^5.0.0" + micromatch "^4.0.0" + semver "^7.3.4" + +ts-log@^2.2.3: + version "2.2.5" + resolved "https://registry.yarnpkg.com/ts-log/-/ts-log-2.2.5.tgz#aef3252f1143d11047e2cb6f7cfaac7408d96623" + integrity sha512-PGcnJoTBnVGy6yYNFxWVNkdcAuAMstvutN9MgDJIV6L0oG8fB+ZNNy1T+wJzah8RPGor1mZuPQkVfXNDpy9eHA== + +ts-node@10.9.1, ts-node@^10.9.1: + version "10.9.1" + resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-10.9.1.tgz#e73de9102958af9e1f0b168a6ff320e25adcff4b" + integrity sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw== + dependencies: + "@cspotcode/source-map-support" "^0.8.0" + "@tsconfig/node10" "^1.0.7" + "@tsconfig/node12" "^1.0.7" + "@tsconfig/node14" 
"^1.0.0" + "@tsconfig/node16" "^1.0.2" + acorn "^8.4.1" + acorn-walk "^8.1.1" + arg "^4.1.0" + create-require "^1.1.0" + diff "^4.0.1" + make-error "^1.1.1" + v8-compile-cache-lib "^3.0.1" + yn "3.1.1" + +tsconfig-paths@^3.14.1: + version "3.14.1" + resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.14.1.tgz#ba0734599e8ea36c862798e920bcf163277b137a" + integrity sha512-fxDhWnFSLt3VuTwtvJt5fpwxBHg5AdKWMsgcPOOIilyjymcYVZoCQF8fvFRezCNfblEXmi+PcM1eYHeOAgXCOQ== + dependencies: + "@types/json5" "^0.0.29" + json5 "^1.0.1" + minimist "^1.2.6" + strip-bom "^3.0.0" + +tslib@^1.8.1: + version "1.14.1" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" + integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== + +tslib@^2.0.0, tslib@^2.0.3, tslib@^2.1.0, tslib@^2.3.1, tslib@^2.4.0, tslib@^2.5.0: + version "2.5.0" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.5.0.tgz#42bfed86f5787aeb41d031866c8f402429e0fddf" + integrity sha512-336iVw3rtn2BUK7ORdIAHTyxHGRIHVReokCR3XjbckJMK7ms8FysBfhLR8IXnAgy7T0PTPNBWKiH514FOW/WSg== + +tslib@~2.4.0: + version "2.4.1" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.4.1.tgz#0d0bfbaac2880b91e22df0768e55be9753a5b17e" + integrity sha512-tGyy4dAjRIEwI7BzsB0lynWgOpfqjUdq91XXAlIWD2OwKBH7oCl/GZG/HT4BOHrTlPMOASlMQ7veyTqpmRcrNA== + +tsup@6.2.3: + version "6.2.3" + resolved "https://registry.yarnpkg.com/tsup/-/tsup-6.2.3.tgz#87f57b2e53d49f1c1ab89aba21fed96aaab0ec9f" + integrity sha512-J5Pu2Dx0E1wlpIEsVFv9ryzP1pZ1OYsJ2cBHZ7GrKteytNdzaSz5hmLX7/nAxtypq+jVkVvA79d7S83ETgHQ5w== + dependencies: + bundle-require "^3.1.0" + cac "^6.7.12" + chokidar "^3.5.1" + debug "^4.3.1" + esbuild "^0.15.1" + execa "^5.0.0" + globby "^11.0.3" + joycon "^3.0.1" + postcss-load-config "^3.0.1" + resolve-from "^5.0.0" + rollup "^2.74.1" + source-map "0.8.0-beta.0" + sucrase "^3.20.3" + tree-kill "^1.2.2" + +tsutils@^3.21.0: + version "3.21.0" + resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623" + integrity sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA== + dependencies: + tslib "^1.8.1" + +tweetnacl@1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-1.0.3.tgz#ac0af71680458d8a6378d0d0d050ab1407d35596" + integrity sha512-6rt+RN7aOi1nGMyC4Xa5DdYiukl2UWCbcJft7YhxReBGQD7OAM8Pbxw6YMo4r2diNEA8FEmu32YOn9rhaiE5yw== + +type-check@^0.4.0, type-check@~0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" + integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew== + dependencies: + prelude-ls "^1.2.1" + +type-detect@4.0.8: + version "4.0.8" + resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" + integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== + +type-fest@^0.20.2: + version "0.20.2" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" + integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== + +type-fest@^0.21.3: + version "0.21.3" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.21.3.tgz#d260a24b0198436e133fa26a524a6d65fa3b2e37" + integrity 
sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w== + +typedoc@^0.23.20: + version "0.23.24" + resolved "https://registry.yarnpkg.com/typedoc/-/typedoc-0.23.24.tgz#01cf32c09f2c19362e72a9ce1552d6e5b48c4fef" + integrity sha512-bfmy8lNQh+WrPYcJbtjQ6JEEsVl/ce1ZIXyXhyW+a1vFrjO39t6J8sL/d6FfAGrJTc7McCXgk9AanYBSNvLdIA== + dependencies: + lunr "^2.3.9" + marked "^4.2.5" + minimatch "^5.1.2" + shiki "^0.12.1" + +typescript@4.8.2: + version "4.8.2" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.8.2.tgz#e3b33d5ccfb5914e4eeab6699cf208adee3fd790" + integrity sha512-C0I1UsrrDHo2fYI5oaCGbSejwX4ch+9Y5jTQELvovfmFkK3HHSZJB8MSJcWLmCUBzQBchCrZ9rMRV6GuNrvGtw== + +ua-parser-js@^0.7.30: + version "0.7.35" + resolved "https://registry.yarnpkg.com/ua-parser-js/-/ua-parser-js-0.7.35.tgz#8bda4827be4f0b1dda91699a29499575a1f1d307" + integrity sha512-veRf7dawaj9xaWEu9HoTVn5Pggtc/qj+kqTOFvNiN1l0YdxwC1kvel57UCjThjGa3BHBihE8/UJAHI+uQHmd/g== + +uglify-js@^3.1.4: + version "3.17.4" + resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.17.4.tgz#61678cf5fa3f5b7eb789bb345df29afb8257c22c" + integrity sha512-T9q82TJI9e/C1TAxYvfb16xO120tMVFZrGA3f9/P4424DNu6ypK103y0GPFVa17yotwSyZW5iYXgjYHkGrJW/g== + +unbox-primitive@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/unbox-primitive/-/unbox-primitive-1.0.2.tgz#29032021057d5e6cdbd08c5129c226dff8ed6f9e" + integrity sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw== + dependencies: + call-bind "^1.0.2" + has-bigints "^1.0.2" + has-symbols "^1.0.3" + which-boxed-primitive "^1.0.2" + +unc-path-regex@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/unc-path-regex/-/unc-path-regex-0.1.2.tgz#e73dd3d7b0d7c5ed86fbac6b0ae7d8c6a69d50fa" + integrity sha512-eXL4nmJT7oCpkZsHZUOJo8hcX3GbsiDOa0Qu9F646fi8dT3XuSVopVqAcEiVzSKKH7UoDti23wNX3qGFxcW5Qg== + +universalify@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/universalify/-/universalify-2.0.0.tgz#75a4984efedc4b08975c5aeb73f530d02df25717" + integrity sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ== + +unixify@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/unixify/-/unixify-1.0.0.tgz#3a641c8c2ffbce4da683a5c70f03a462940c2090" + integrity sha512-6bc58dPYhCMHHuwxldQxO3RRNZ4eCogZ/st++0+fcC1nr0jiGUtAdBJ2qzmLQWSxbtz42pWt4QQMiZ9HvZf5cg== + dependencies: + normalize-path "^2.1.1" + +update-browserslist-db@^1.0.9: + version "1.0.10" + resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.0.10.tgz#0f54b876545726f17d00cd9a2561e6dade943ff3" + integrity sha512-OztqDenkfFkbSG+tRxBeAnCVPckDBcvibKd35yDONx6OU8N7sqgwc7rCbkJ/WcYtVRZ4ba68d6byhC21GFh7sQ== + dependencies: + escalade "^3.1.1" + picocolors "^1.0.0" + +upper-case-first@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/upper-case-first/-/upper-case-first-2.0.2.tgz#992c3273f882abd19d1e02894cc147117f844324" + integrity sha512-514ppYHBaKwfJRK/pNC6c/OxfGa0obSnAl106u97Ed0I625Nin96KAjttZF6ZL3e1XLtphxnqrOi9iWgm+u+bg== + dependencies: + tslib "^2.0.3" + +upper-case@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/upper-case/-/upper-case-2.0.2.tgz#d89810823faab1df1549b7d97a76f8662bae6f7a" + integrity sha512-KgdgDGJt2TpuwBUIjgG6lzw2GWFRCW9Qkfkiv0DxqHHLYJHmtmdUIKcZd8rHgFSjopVTlw6ggzCm1b8MFQwikg== + dependencies: + tslib "^2.0.3" + +uri-js@^4.2.2: + version "4.4.1" + resolved 
"https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" + integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== + dependencies: + punycode "^2.1.0" + +urlpattern-polyfill@^6.0.2: + version "6.0.2" + resolved "https://registry.yarnpkg.com/urlpattern-polyfill/-/urlpattern-polyfill-6.0.2.tgz#a193fe773459865a2a5c93b246bb794b13d07256" + integrity sha512-5vZjFlH9ofROmuWmXM9yj2wljYKgWstGwe8YTyiqM7hVum/g9LyCizPZtb3UqsuppVwety9QJmfc42VggLpTgg== + dependencies: + braces "^3.0.2" + +urlpattern-polyfill@^8.0.0: + version "8.0.2" + resolved "https://registry.yarnpkg.com/urlpattern-polyfill/-/urlpattern-polyfill-8.0.2.tgz#99f096e35eff8bf4b5a2aa7d58a1523d6ebc7ce5" + integrity sha512-Qp95D4TPJl1kC9SKigDcqgyM2VDVO4RiJc2d4qe5GrYm+zbIQCWWKAFaJNQ4BhdFeDGwBmAxqJBwWSJDb9T3BQ== + +util-deprecate@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== + +v8-compile-cache-lib@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz#6336e8d71965cb3d35a1bbb7868445a7c05264bf" + integrity sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg== + +v8-to-istanbul@^9.0.1: + version "9.0.1" + resolved "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-9.0.1.tgz#b6f994b0b5d4ef255e17a0d17dc444a9f5132fa4" + integrity sha512-74Y4LqY74kLE6IFyIjPtkSTWzUZmj8tdHT9Ii/26dvQ6K9Dl2NbEfj0XgU2sHCtKgt5VupqhlO/5aWuqS+IY1w== + dependencies: + "@jridgewell/trace-mapping" "^0.3.12" + "@types/istanbul-lib-coverage" "^2.0.1" + convert-source-map "^1.6.0" + +value-or-promise@^1.0.11, value-or-promise@^1.0.12: + version "1.0.12" + resolved "https://registry.yarnpkg.com/value-or-promise/-/value-or-promise-1.0.12.tgz#0e5abfeec70148c78460a849f6b003ea7986f15c" + integrity sha512-Z6Uz+TYwEqE7ZN50gwn+1LCVo9ZVrpxRPOhOLnncYkY1ZzOYtrX8Fwf/rFktZ8R5mJms6EZf5TqNOMeZmnPq9Q== + +vscode-oniguruma@^1.7.0: + version "1.7.0" + resolved "https://registry.yarnpkg.com/vscode-oniguruma/-/vscode-oniguruma-1.7.0.tgz#439bfad8fe71abd7798338d1cd3dc53a8beea94b" + integrity sha512-L9WMGRfrjOhgHSdOYgCt/yRMsXzLDJSL7BPrOZt73gU0iWO4mpqzqQzOz5srxqTvMBaR0XZTSrVWo4j55Rc6cA== + +vscode-textmate@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/vscode-textmate/-/vscode-textmate-8.0.0.tgz#2c7a3b1163ef0441097e0b5d6389cd5504b59e5d" + integrity sha512-AFbieoL7a5LMqcnOF04ji+rpXadgOXnZsxQr//r83kLPr7biP7am3g9zbaZIaBGwBRWeSvoMD4mgPdX3e4NWBg== + +walker@^1.0.8: + version "1.0.8" + resolved "https://registry.yarnpkg.com/walker/-/walker-1.0.8.tgz#bd498db477afe573dc04185f011d3ab8a8d7653f" + integrity sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ== + dependencies: + makeerror "1.0.12" + +wcwidth@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/wcwidth/-/wcwidth-1.0.1.tgz#f0b0dcf915bc5ff1528afadb2c0e17b532da2fe8" + integrity sha512-XHPEwS0q6TaxcvG85+8EYkbiCux2XtWG2mkc47Ng2A77BQu9+DqIOJldST4HgPkuea7dvKSj5VgX3P1d4rW8Tg== + dependencies: + defaults "^1.0.3" + +web-streams-polyfill@^3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/web-streams-polyfill/-/web-streams-polyfill-3.2.1.tgz#71c2718c52b45fd49dbeee88634b3a60ceab42a6" + integrity 
sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q== + +webcrypto-core@^1.7.7: + version "1.7.7" + resolved "https://registry.yarnpkg.com/webcrypto-core/-/webcrypto-core-1.7.7.tgz#06f24b3498463e570fed64d7cab149e5437b162c" + integrity sha512-7FjigXNsBfopEj+5DV2nhNpfic2vumtjjgPmeDKk45z+MJwXKKfhPB7118Pfzrmh4jqOMST6Ch37iPAHoImg5g== + dependencies: + "@peculiar/asn1-schema" "^2.3.6" + "@peculiar/json-schema" "^1.1.12" + asn1js "^3.0.1" + pvtsutils "^1.3.2" + tslib "^2.4.0" + +webidl-conversions@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" + integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ== + +webidl-conversions@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-4.0.2.tgz#a855980b1f0b6b359ba1d5d9fb39ae941faa63ad" + integrity sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg== + +whatwg-url@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d" + integrity sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw== + dependencies: + tr46 "~0.0.3" + webidl-conversions "^3.0.0" + +whatwg-url@^7.0.0: + version "7.1.0" + resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-7.1.0.tgz#c2c492f1eca612988efd3d2266be1b9fc6170d06" + integrity sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg== + dependencies: + lodash.sortby "^4.7.0" + tr46 "^1.0.1" + webidl-conversions "^4.0.2" + +which-boxed-primitive@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz#13757bc89b209b049fe5d86430e21cf40a89a8e6" + integrity sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg== + dependencies: + is-bigint "^1.0.1" + is-boolean-object "^1.1.0" + is-number-object "^1.0.4" + is-string "^1.0.5" + is-symbol "^1.0.3" + +which-module@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.1.tgz#776b1fe35d90aebe99e8ac15eb24093389a4a409" + integrity sha512-iBdZ57RDvnOR9AGBhML2vFZf7h8vmBjhoaZqODJBFWHVtKkDmKuHai3cx5PgVMrX5YDNp27AofYbAwctSS+vhQ== + +which@^2.0.1: + version "2.0.2" + resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" + integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== + dependencies: + isexe "^2.0.0" + +word-wrap@^1.2.3: + version "1.2.3" + resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" + integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== + +wordwrap@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb" + integrity sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q== + +wrap-ansi@^6.2.0: + version "6.2.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-6.2.0.tgz#e9393ba07102e6c91a3b221478f0257cd2856e53" + integrity sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA== + 
dependencies: + ansi-styles "^4.0.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + +wrap-ansi@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" + integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== + dependencies: + ansi-styles "^4.0.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + +wrappy@1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== + +write-file-atomic@^4.0.1: + version "4.0.2" + resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-4.0.2.tgz#a9df01ae5b77858a027fd2e80768ee433555fcfd" + integrity sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg== + dependencies: + imurmurhash "^0.1.4" + signal-exit "^3.0.7" + +ws@8.13.0, ws@^8.12.0: + version "8.13.0" + resolved "https://registry.yarnpkg.com/ws/-/ws-8.13.0.tgz#9a9fb92f93cf41512a0735c8f4dd09b8a1211cd0" + integrity sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA== + +y18n@^4.0.0: + version "4.0.3" + resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.3.tgz#b5f259c82cd6e336921efd7bfd8bf560de9eeedf" + integrity sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ== + +y18n@^5.0.5: + version "5.0.8" + resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55" + integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA== + +yallist@^3.0.2: + version "3.1.1" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" + integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== + +yallist@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" + integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== + +yaml-ast-parser@^0.0.43: + version "0.0.43" + resolved "https://registry.yarnpkg.com/yaml-ast-parser/-/yaml-ast-parser-0.0.43.tgz#e8a23e6fb4c38076ab92995c5dca33f3d3d7c9bb" + integrity sha512-2PTINUwsRqSd+s8XxKaJWQlUuEMHJQyEuh2edBbW8KNJz0SJPwUSD2zRWqezFEdN7IzAgeuYHFUCF7o8zRdZ0A== + +yaml@^1.10.0, yaml@^1.10.2: + version "1.10.2" + resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b" + integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg== + +yargs-parser@^18.1.2: + version "18.1.3" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-18.1.3.tgz#be68c4975c6b2abf469236b0c870362fab09a7b0" + integrity sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ== + dependencies: + camelcase "^5.0.0" + decamelize "^1.2.0" + +yargs-parser@^21.0.1, yargs-parser@^21.1.1: + version "21.1.1" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-21.1.1.tgz#9096bceebf990d21bb31fa9516e0ede294a77d35" + integrity sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw== + +yargs@^15.3.1: + version "15.4.1" + resolved 
"https://registry.yarnpkg.com/yargs/-/yargs-15.4.1.tgz#0d87a16de01aee9d8bec2bfbf74f67851730f4f8" + integrity sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A== + dependencies: + cliui "^6.0.0" + decamelize "^1.2.0" + find-up "^4.1.0" + get-caller-file "^2.0.1" + require-directory "^2.1.1" + require-main-filename "^2.0.0" + set-blocking "^2.0.0" + string-width "^4.2.0" + which-module "^2.0.0" + y18n "^4.0.0" + yargs-parser "^18.1.2" + +yargs@^17.0.0: + version "17.7.1" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.7.1.tgz#34a77645201d1a8fc5213ace787c220eabbd0967" + integrity sha512-cwiTb08Xuv5fqF4AovYacTFNxk62th7LKJ6BL9IGUpTJrWoU7/7WdQGTP2SjKf1dUNBGzDd28p/Yfs/GI6JrLw== + dependencies: + cliui "^8.0.1" + escalade "^3.1.1" + get-caller-file "^2.0.5" + require-directory "^2.1.1" + string-width "^4.2.3" + y18n "^5.0.5" + yargs-parser "^21.1.1" + +yargs@^17.3.1: + version "17.6.2" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.6.2.tgz#2e23f2944e976339a1ee00f18c77fedee8332541" + integrity sha512-1/9UrdHjDZc0eOU0HxOHoS78C69UD3JRMvzlJ7S79S2nTaWRA/whGCTV8o9e/N/1Va9YIV7Q4sOxD8VV4pCWOw== + dependencies: + cliui "^8.0.1" + escalade "^3.1.1" + get-caller-file "^2.0.5" + require-directory "^2.1.1" + string-width "^4.2.3" + y18n "^5.0.5" + yargs-parser "^21.1.1" + +yn@3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50" + integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q== + +yocto-queue@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" + integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== diff --git a/m1/README.md b/m1/README.md new file mode 100644 index 00000000..74be6c91 --- /dev/null +++ b/m1/README.md @@ -0,0 +1,2 @@ +# M1 Subnet +The M1 Subnet is implemented as an Avalanche subnet which runs Move VM execution and persistence that is based on Aptos. \ No newline at end of file diff --git a/m1/movement-benchmark/Cargo.toml b/m1/movement-benchmark/Cargo.toml new file mode 100644 index 00000000..c056378b --- /dev/null +++ b/m1/movement-benchmark/Cargo.toml @@ -0,0 +1,29 @@ +[package] +name = "movement-benchmark" +description = "movement transaction benchmarks" +version = "0.1.0" + +# Workspace inherited keys +authors = { workspace = true } +edition = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +publish = { workspace = true } +repository = { workspace = true } +rust-version = { workspace = true } + +[dependencies] +aptos-bitvec = { workspace = true } +aptos-crypto = { workspace = true } +aptos-gas = { workspace = true, features = ["testing"] } +aptos-language-e2e-tests = { workspace = true } +aptos-types = { workspace = true } +aptos-vm = { workspace = true } +criterion = { workspace = true } +criterion-cpu-time = { workspace = true } +num_cpus = { workspace = true } +proptest = { workspace = true } + +[[bench]] +name = "transaction_benches" +harness = false diff --git a/m1/movement-benchmark/benches/transaction_benches.rs b/m1/movement-benchmark/benches/transaction_benches.rs new file mode 100644 index 00000000..e1394d56 --- /dev/null +++ b/m1/movement-benchmark/benches/transaction_benches.rs @@ -0,0 +1,34 @@ +// Copyright © Aptos Foundation +// Parts of the project are originally copyright © Meta Platforms, Inc. 
+// SPDX-License-Identifier: Apache-2.0 + +use aptos_language_e2e_tests::account_universe::P2PTransferGen; +use aptos_transaction_benchmarks::{ + measurement::wall_time_measurement, transactions::TransactionBencher, +}; +use criterion::{criterion_group, criterion_main, measurement::Measurement, Criterion}; +use proptest::prelude::*; + +// +// Transaction benchmarks +// + +fn peer_to_peer(c: &mut Criterion) { + c.bench_function("peer_to_peer", |b| { + let bencher = TransactionBencher::new(any_with::((1_000, 1_000_000))); + bencher.bench(b) + }); + + c.bench_function("peer_to_peer_parallel", |b| { + let bencher = TransactionBencher::new(any_with::((1_000, 1_000_000))); + bencher.bench_parallel(b) + }); +} + +criterion_group!( + name = txn_benches; + config = wall_time_measurement().sample_size(10); + targets = peer_to_peer +); + +criterion_main!(txn_benches); diff --git a/m1/movement-benchmark/src/lib.rs b/m1/movement-benchmark/src/lib.rs new file mode 100644 index 00000000..02c5ef4a --- /dev/null +++ b/m1/movement-benchmark/src/lib.rs @@ -0,0 +1,8 @@ +// Copyright © Aptos Foundation +// Parts of the project are originally copyright © Meta Platforms, Inc. +// SPDX-License-Identifier: Apache-2.0 + +#![forbid(unsafe_code)] + +pub mod measurement; +pub mod transactions; diff --git a/m1/movement-benchmark/src/main.rs b/m1/movement-benchmark/src/main.rs new file mode 100644 index 00000000..a53f590b --- /dev/null +++ b/m1/movement-benchmark/src/main.rs @@ -0,0 +1,90 @@ +// Copyright © Aptos Foundation +// Parts of the project are originally copyright © Meta Platforms, Inc. +// SPDX-License-Identifier: Apache-2.0 + +use aptos_language_e2e_tests::account_universe::P2PTransferGen; +use proptest::prelude::*; +use std::env; +use movement_benchmark::transactions::TransactionBencher; + +fn main() { + let args: Vec = env::args().collect(); + let (run_par, run_seq) = if args.len() == 4 { + let bool1 = args[2].parse().unwrap(); + let bool2 = args[3].parse().unwrap(); + (bool1, bool2) + } else { + println!("Usage: cargo run --release main "); + println!("Will run both parallel & sequential by default\n"); + (true, true) + }; + + let bencher = TransactionBencher::new(any_with::((1_000, 1_000_000))); + + let acts = [10000]; + let txns = [1000, 10000, 50000]; + let num_warmups = 2; + let num_runs = 10; + + let mut par_measurements: Vec> = Vec::new(); + let mut seq_measurements: Vec> = Vec::new(); + + let concurrency_level = num_cpus::get(); + + for block_size in txns { + for num_accounts in acts { + let (mut par_tps, mut seq_tps) = bencher.blockstm_benchmark( + num_accounts, + block_size, + run_par, + run_seq, + num_warmups, + num_runs, + concurrency_level, + ); + par_tps.sort(); + seq_tps.sort(); + par_measurements.push(par_tps); + seq_measurements.push(seq_tps); + } + } + + println!("Movement Subnet Transaction Stress Test...\n"); + println!("\nconcurrency_level = {}\n", concurrency_level); + + let mut i = 0; + for block_size in txns { + for num_accounts in acts { + println!( + "PARAMS: num_account = {}, block_size = {}", + num_accounts, block_size + ); + + let mut seq_tps = 1; + if run_seq { + println!("Sequential TPS: {:?}", seq_measurements[i]); + let mut seq_sum = 0; + for m in &seq_measurements[i] { + seq_sum += m; + } + seq_tps = seq_sum / seq_measurements[i].len(); + println!("Avg Sequential TPS = {:?}", seq_tps,); + } + + if run_par { + println!("Parallel TPS: {:?}", par_measurements[i]); + let mut par_sum = 0; + for m in &par_measurements[i] { + par_sum += m; + } + let par_tps = par_sum / 
+                println!("Avg Parallel TPS = {:?}", par_tps,);
+                if run_seq {
+                    println!("Speed up {}x over sequential", par_tps / seq_tps);
+                }
+            }
+            i += 1;
+        }
+        println!();
+    }
+}
diff --git a/m1/movement-benchmark/src/measurement.rs b/m1/movement-benchmark/src/measurement.rs
new file mode 100644
index 00000000..a70fe366
--- /dev/null
+++ b/m1/movement-benchmark/src/measurement.rs
@@ -0,0 +1,14 @@
+// Copyright © Aptos Foundation
+// Parts of the project are originally copyright © Meta Platforms, Inc.
+// SPDX-License-Identifier: Apache-2.0
+
+use criterion::Criterion;
+use criterion_cpu_time::PosixTime;
+
+pub fn cpu_time_measurement() -> Criterion<PosixTime> {
+    Criterion::default().with_measurement(PosixTime::UserAndSystemTime)
+}
+
+pub fn wall_time_measurement() -> Criterion {
+    Criterion::default()
+}
diff --git a/m1/movement-benchmark/src/transactions.rs b/m1/movement-benchmark/src/transactions.rs
new file mode 100644
index 00000000..490d2c14
--- /dev/null
+++ b/m1/movement-benchmark/src/transactions.rs
@@ -0,0 +1,268 @@
+// Copyright © Aptos Foundation
+// Parts of the project are originally copyright © Meta Platforms, Inc.
+// SPDX-License-Identifier: Apache-2.0
+
+use aptos_bitvec::BitVec;
+use aptos_crypto::HashValue;
+use aptos_language_e2e_tests::{
+    account_universe::{AUTransactionGen, AccountUniverseGen},
+    executor::FakeExecutor,
+    gas_costs::TXN_RESERVED,
+};
+use aptos_types::{
+    block_metadata::BlockMetadata,
+    on_chain_config::{OnChainConfig, ValidatorSet},
+    transaction::Transaction,
+};
+use aptos_vm::{block_executor::BlockAptosVM, data_cache::AsMoveResolver};
+use criterion::{measurement::Measurement, BatchSize, Bencher};
+use proptest::{
+    collection::vec,
+    strategy::{Strategy, ValueTree},
+    test_runner::TestRunner,
+};
+
+/// Benchmarking support for transactions.
+#[derive(Clone, Debug)]
+pub struct TransactionBencher<S> {
+    num_accounts: usize,
+    num_transactions: usize,
+    strategy: S,
+}
+
+impl<S> TransactionBencher<S>
+where
+    S: Strategy,
+    S::Value: AUTransactionGen,
+{
+    /// The number of accounts created by default.
+    pub const DEFAULT_NUM_ACCOUNTS: usize = 100;
+    /// The number of transactions created by default.
+    pub const DEFAULT_NUM_TRANSACTIONS: usize = 1000;
+
+    /// Creates a new transaction bencher with default settings.
+    pub fn new(strategy: S) -> Self {
+        Self {
+            num_accounts: Self::DEFAULT_NUM_ACCOUNTS,
+            num_transactions: Self::DEFAULT_NUM_TRANSACTIONS,
+            strategy,
+        }
+    }
+
+    /// Sets a custom number of accounts.
+    pub fn num_accounts(&mut self, num_accounts: usize) -> &mut Self {
+        self.num_accounts = num_accounts;
+        self
+    }
+
+    /// Sets a custom number of transactions.
+    pub fn num_transactions(&mut self, num_transactions: usize) -> &mut Self {
+        self.num_transactions = num_transactions;
+        self
+    }
+
+    /// Runs the bencher.
+    pub fn bench<M: Measurement>(&self, b: &mut Bencher<M>) {
+        b.iter_batched(
+            || {
+                TransactionBenchState::with_size(
+                    &self.strategy,
+                    self.num_accounts,
+                    self.num_transactions,
+                )
+            },
+            |state| state.execute(),
+            // The input here is the entire list of signed transactions, so it's pretty large.
+            BatchSize::LargeInput,
+        )
+    }
+
+    /// Runs the bencher.
+    pub fn bench_parallel<M: Measurement>(&self, b: &mut Bencher<M>) {
+        b.iter_batched(
+            || {
+                TransactionBenchState::with_size(
+                    &self.strategy,
+                    self.num_accounts,
+                    self.num_transactions,
+                )
+            },
+            |state| state.execute_parallel(),
+            // The input here is the entire list of signed transactions, so it's pretty large.
+            BatchSize::LargeInput,
+        )
+    }
+
+    /// Runs the bencher.
+    pub fn blockstm_benchmark(
+        &self,
+        num_accounts: usize,
+        num_txn: usize,
+        run_par: bool,
+        run_seq: bool,
+        num_warmups: usize,
+        num_runs: usize,
+        concurrency_level: usize,
+    ) -> (Vec<usize>, Vec<usize>) {
+        let mut par_tps = Vec::new();
+        let mut seq_tps = Vec::new();
+
+        let total_runs = num_warmups + num_runs;
+        for i in 0..total_runs {
+            let state = TransactionBenchState::with_size(&self.strategy, num_accounts, num_txn);
+
+            if i < num_warmups {
+                println!("WARMUP - ignore results");
+                state.execute_blockstm_benchmark(concurrency_level, run_par, run_seq);
+            } else {
+                println!(
+                    "RUN benchmark for: num_threads = {}, \
+                     num_account = {}, \
+                     block_size = {}",
+                    num_cpus::get(),
+                    num_accounts,
+                    num_txn,
+                );
+                let tps = state.execute_blockstm_benchmark(concurrency_level, run_par, run_seq);
+                par_tps.push(tps.0);
+                seq_tps.push(tps.1);
+            }
+        }
+
+        (par_tps, seq_tps)
+    }
+}
+
+struct TransactionBenchState {
+    // Use the fake executor for now.
+    // TODO: Hook up the real executor in the future. Here's what needs to be done:
+    // 1. Provide a way to construct a write set from the genesis write set + initial balances.
+    // 2. Provide a trait for an executor with the functionality required for account_universe.
+    // 3. Implement the trait for the fake executor.
+    // 4. Implement the trait for the real executor, using the genesis write set implemented in 1
+    //    and the helpers in the execution_tests crate.
+    // 5. Add a type parameter that implements the trait here and switch "executor" to use it.
+    // 6. Add an enum to TransactionBencher that lets callers choose between the fake and real
+    //    executors.
+    executor: FakeExecutor,
+    transactions: Vec<Transaction>,
+}
+
+impl TransactionBenchState {
+    /// Creates a new benchmark state with the given number of accounts and transactions.
+    fn with_size<S>(strategy: S, num_accounts: usize, num_transactions: usize) -> Self
+    where
+        S: Strategy,
+        S::Value: AUTransactionGen,
+    {
+        let mut state = Self::with_universe(
+            strategy,
+            universe_strategy(num_accounts, num_transactions),
+            num_transactions,
+        );
+
+        // Insert a blockmetadata transaction at the beginning to better simulate the real life traffic.
+        let validator_set =
+            ValidatorSet::fetch_config(&state.executor.get_state_view().as_move_resolver())
+                .expect("Unable to retrieve the validator set from storage");
+
+        let new_block = BlockMetadata::new(
+            HashValue::zero(),
+            0,
+            0,
+            *validator_set.payload().next().unwrap().account_address(),
+            BitVec::with_num_bits(validator_set.num_validators() as u16).into(),
+            vec![],
+            1,
+        );
+
+        state
+            .transactions
+            .insert(0, Transaction::BlockMetadata(new_block));
+
+        state
+    }
+
+    /// Creates a new benchmark state with the given account universe strategy and number of
+    /// transactions.
+    fn with_universe<S>(
+        strategy: S,
+        universe_strategy: impl Strategy<Value = AccountUniverseGen>,
+        num_transactions: usize,
+    ) -> Self
+    where
+        S: Strategy,
+        S::Value: AUTransactionGen,
+    {
+        let mut runner = TestRunner::default();
+        let universe = universe_strategy
+            .new_tree(&mut runner)
+            .expect("creating a new value should succeed")
+            .current();
+
+        let mut executor = FakeExecutor::from_head_genesis();
+        // Run in gas-cost-stability mode for now -- this ensures that new accounts are ignored.
+        // XXX We may want to include new accounts in case they have interesting performance
+        // characteristics.
+        let mut universe = universe.setup_gas_cost_stability(&mut executor);
+
+        let transaction_gens = vec(strategy, num_transactions)
+            .new_tree(&mut runner)
+            .expect("creating a new value should succeed")
+            .current();
+        let transactions = transaction_gens
+            .into_iter()
+            .map(|txn_gen| Transaction::UserTransaction(txn_gen.apply(&mut universe).0))
+            .collect();
+
+        Self {
+            executor,
+            transactions,
+        }
+    }
+
+    /// Executes this state in a single block.
+    fn execute(self) {
+        // The output is ignored here since we're just testing transaction performance, not trying
+        // to assert correctness.
+        BlockAptosVM::execute_block(self.transactions, self.executor.get_state_view(), 1)
+            .expect("VM should not fail to start");
+    }
+
+    /// Executes this state in a single block via parallel execution.
+    fn execute_parallel(self) {
+        // The output is ignored here since we're just testing transaction performance, not trying
+        // to assert correctness.
+        BlockAptosVM::execute_block(
+            self.transactions,
+            self.executor.get_state_view(),
+            num_cpus::get(),
+        )
+        .expect("VM should not fail to start");
+    }
+
+    fn execute_blockstm_benchmark(
+        self,
+        concurrency_level: usize,
+        run_par: bool,
+        run_seq: bool,
+    ) -> (usize, usize) {
+        BlockAptosVM::execute_block_benchmark(
+            self.transactions,
+            self.executor.get_state_view(),
+            concurrency_level,
+            run_par,
+            run_seq,
+        )
+    }
+}
+
+/// Returns a strategy for the account universe customized for benchmarks, i.e. having
+/// sufficiently large balance for gas.
+fn universe_strategy(
+    num_accounts: usize,
+    num_transactions: usize,
+) -> impl Strategy<Value = AccountUniverseGen> {
+    let balance = TXN_RESERVED * num_transactions as u64 * 5;
+    AccountUniverseGen::strategy(num_accounts, balance..(balance + 1))
+}
diff --git a/m1/movement/CHANGELOG.md b/m1/movement/CHANGELOG.md
new file mode 100644
index 00000000..f1bd4b6a
--- /dev/null
+++ b/m1/movement/CHANGELOG.md
@@ -0,0 +1,51 @@
+# Aptos CLI Changelog
+
+All notable changes to the Aptos CLI will be captured in this file. This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html) and the format set out by [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
+
+## [1.0.13] - 2023/04/27
+### Fixed
+* Previously `--skip-fetch-latest-git-deps` would not actually do anything when used with `aptos move test`. This has been fixed.
+* Fixed an issue in the hello_blockchain example where a feature enable was missing
+
+## [1.0.12] - 2023/04/25
+### Added
+* Support for creating and interacting with multisig accounts v2. More details can be found at [AIP 12](https://github.com/aptos-foundation/AIPs/blob/main/aips/aip-12.md).
+* Added `disassemble` option to the CLI - This can be invoked using `aptos move disassemble` to disassemble the bytecode and save it to a file
+* Fixed handling of `vector<string>` as an entry function argument in `aptos move run`
+
+## [1.0.11] - 2023/04/14
+### Fixed
+* Fixed an issue where creating a new test account with `aptos init` would fail if the account didn't already exist
+
+## [1.0.10] - 2023/04/13
+### Fixed
+* If `aptos init` is run with a faucet URL specified (which happens by default when using the local, devnet, or testnet network options) and funding the account fails, the account creation is considered a failure and nothing is persisted. Previously it would report success despite the account not being created on chain.
+* When specifying a profile where the `AuthenticationKey` has been rotated, the `AccountAddress` is now properly used from the config file
+* Updated `aptos init` to fix an incorrect account address issue when initializing with a rotated private key. It now does an actual account lookup instead of deriving the address from the public key
+
+### Added
+* Updates to prover and framework specs
+
+## [1.0.9] - 2023/03/29
+### Added
+* `aptos move show abi` allows for viewing the ABI of a compiled move package
+* Experimental gas profiler with the `--profile-gas` flag on any transaction submitting CLI command
+* Updates to the prover and framework specs
+
+## [1.0.8] - 2023/03/16
+### Added
+* Added an `aptos account derive-resource-account-address` command to add the ability to derive an address easily
+* Added the ability for different input resource account seeds, to allow matching directly with onchain code
+* Added beta support for coverage via `aptos move coverage` and `aptos move test --coverage`
+* Added beta support for compiling with bytecode dependencies rather than source dependencies
+
+### Fixed
+* All resource account commands can now use `string_seed` which will match the onchain representation of `b"string"` rather than always derive a different address
+* Tests that go over the bytecode size limit can now compile
+* `vector<string>` inputs now work for both `aptos move view` and `aptos move run`
+* Governance proposal listing will now not crash on the latest on-chain format
+* Move compiler will no longer use an environment variable to communicate between compiler and CLI for the bytecode version
+
+## [1.0.7]
+* For logs earlier than 1.0.7, please check out the [releases on GitHub](https://github.com/aptos-labs/aptos-core/releases?q="Aptos+CLI+Release")
+
diff --git a/m1/movement/Cargo.toml b/m1/movement/Cargo.toml
new file mode 100644
index 00000000..3e4879b0
--- /dev/null
+++ b/m1/movement/Cargo.toml
@@ -0,0 +1,99 @@
+[package]
+name = "movement"
+description = "Movement tool for management of nodes and interacting with the blockchain. Based on the Movement CLI."
+version = "1.0.13" + +# Workspace inherited keys +authors = { workspace = true } +edition = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +publish = { workspace = true } +repository = { workspace = true } +rust-version = { workspace = true } + +[dependencies] +anyhow = { workspace = true } +aptos-backup-cli = { workspace = true } +aptos-bitvec = { workspace = true } +aptos-build-info = { workspace = true } +aptos-cached-packages = { workspace = true } +aptos-config = { workspace = true } +aptos-crypto = { workspace = true } +aptos-db-tool = { workspace = true } +aptos-debugger = { workspace = true } +aptos-faucet-core = { workspace = true } +aptos-framework = { workspace = true } +aptos-gas = { workspace = true } +aptos-gas-profiling = { workspace = true } +aptos-genesis = { workspace = true } +aptos-github-client = { workspace = true } +aptos-global-constants = { workspace = true } +aptos-keygen = { workspace = true } +aptos-logger = { workspace = true } +aptos-network-checker = { workspace = true } +aptos-node = { workspace = true } +aptos-rest-client = { workspace = true } +aptos-sdk = { workspace = true } +aptos-storage-interface = { workspace = true } +aptos-telemetry = { workspace = true } +aptos-temppath = { workspace = true } +aptos-transactional-test-harness = { workspace = true } +aptos-types = { workspace = true } +aptos-vm = { workspace = true, features = ["testing"] } +aptos-vm-genesis = { workspace = true } +async-trait = { workspace = true } +base64 = { workspace = true } +bcs = { workspace = true } +chrono = { workspace = true } +clap = { workspace = true } +clap_complete = { workspace = true } +codespan-reporting = { workspace = true } +dirs = { workspace = true } +futures = { workspace = true } +hex = { workspace = true } +itertools = { workspace = true } +move-binary-format = { workspace = true } +move-bytecode-source-map = { workspace = true } +move-cli = { workspace = true } +move-command-line-common = { workspace = true } +move-compiler = { workspace = true } +move-core-types = { workspace = true } +move-coverage = { workspace = true } +move-disassembler = { workspace = true } +move-ir-compiler = { workspace = true } +move-ir-types = { workspace = true } +move-package = { workspace = true } +move-prover = { workspace = true } +move-prover-boogie-backend = { workspace = true } +move-symbol-pool = { workspace = true } +move-unit-test = { workspace = true, features = [ "debugging" ] } +move-vm-runtime = { workspace = true, features = [ "testing" ] } +rand = { workspace = true } +regex = { workspace = true } +reqwest = { workspace = true } +self_update = { version = "0.34.0", features = ["archive-zip", "compression-zip-deflate"] } +serde = { workspace = true } +serde_json = { workspace = true } +serde_yaml = { workspace = true } +shadow-rs = { workspace = true } +tempfile = { workspace = true } +termcolor = { workspace = true } +thiserror = { workspace = true } +tokio = { workspace = true } +tokio-util = { workspace = true } +toml = { workspace = true } +walkdir = { workspace = true } + +[target.'cfg(unix)'.dependencies] +jemallocator = { workspace = true } + +[features] +default = [] +fuzzing = [] +no-upload-proposal = [] +indexer = ["aptos-node/indexer"] +cli-framework-test-move = [] + +[build-dependencies] +shadow-rs = { workspace = true } diff --git a/m1/movement/README.md b/m1/movement/README.md new file mode 100644 index 00000000..2c3e7981 --- /dev/null +++ b/m1/movement/README.md @@ -0,0 +1,5 @@ +# Aptos Command Line 
Interface (CLI) Tool
+
+The `movement` tool is a command line interface (CLI) for debugging, development, and node operation.
+See [Movement CLI Documentation](https://aptos.dev/cli-tools/aptos-cli-tool/install-aptos-cli) for how to install the `movement` CLI tool and how to use it.
+ 
\ No newline at end of file
diff --git a/m1/movement/build.rs b/m1/movement/build.rs
new file mode 100644
index 00000000..6cc52885
--- /dev/null
+++ b/m1/movement/build.rs
@@ -0,0 +1,6 @@
+// Copyright © Aptos Foundation
+// SPDX-License-Identifier: Apache-2.0
+
+fn main() -> shadow_rs::SdResult<()> {
+    shadow_rs::new()
+}
diff --git a/m1/movement/debug-move-example/Move.toml b/m1/movement/debug-move-example/Move.toml
new file mode 100644
index 00000000..80571522
--- /dev/null
+++ b/m1/movement/debug-move-example/Move.toml
@@ -0,0 +1,9 @@
+[package]
+name = "DebugDemo"
+version = "0.0.0"
+
+[addresses]
+DebugDemo = "0x1"
+
+[dependencies]
+AptosFramework = { local = "../../../aptos-move/framework/aptos-framework" }
diff --git a/m1/movement/debug-move-example/sources/DebugDemo.move b/m1/movement/debug-move-example/sources/DebugDemo.move
new file mode 100644
index 00000000..03a42540
--- /dev/null
+++ b/m1/movement/debug-move-example/sources/DebugDemo.move
@@ -0,0 +1,32 @@
+module DebugDemo::Message {
+    use std::string;
+    use std::signer;
+    use aptos_std::debug;
+
+    struct MessageHolder has key {
+        message: string::String,
+    }
+
+
+    public entry fun set_message(account: signer, message_bytes: vector<u8>)
+    acquires MessageHolder {
+        debug::print_stack_trace();
+        let message = string::utf8(message_bytes);
+        let account_addr = signer::address_of(&account);
+        if (!exists<MessageHolder>(account_addr)) {
+            move_to(&account, MessageHolder {
+                message,
+            })
+        } else {
+            let old_message_holder = borrow_global_mut<MessageHolder>(account_addr);
+            old_message_holder.message = message;
+        }
+    }
+
+    #[test(account = @0x1)]
+    public entry fun sender_can_set_message(account: signer) acquires MessageHolder {
+        let addr = signer::address_of(&account);
+        debug::print(&addr);
+        set_message(account, b"Hello, Blockchain");
+    }
+}
diff --git a/m1/movement/e2e/README.md b/m1/movement/e2e/README.md
new file mode 100644
index 00000000..4d9c1234
--- /dev/null
+++ b/m1/movement/e2e/README.md
@@ -0,0 +1,58 @@
+# CLI test suite
+This directory contains Python code to help with running the CLI test suite.
+
+## Requirements
+We use [Poetry](https://python-poetry.org/docs/#installation) for packaging and dependency management:
+
+```
+curl -sSL https://install.python-poetry.org | python3 -
+```
+
+Once you have Poetry, you can install the dependencies for the testing framework like this:
+```
+poetry install
+```
+
+To learn how to use the CLI testing framework, run this:
+```
+poetry run python main.py -h
+```
+
+For example:
+```
+poetry run python main.py --base-network mainnet --test-cli-tag mainnet
+```
+
+## Debugging
+
+If you get an error message similar to this:
+```
+docker: no matching manifest for linux/arm64/v8 in the manifest list entries.
+```
+
+Try running the poetry command with this env var:
+```
+DOCKER_DEFAULT_PLATFORM=linux/amd64 poetry run python main.py --base-network testnet --test-cli-path ~/aptos-core/target/debug/aptos
+```
+This makes the docker commands use the x86_64 images since we don't publish images for ARM.
+
+When running the e2e test using poetry locally, make sure you set your aptos config type to `workspace`, otherwise it won't be able to find the test account after `aptos init`. You can change it back to `global` afterward:
+```
+aptos config set-global-config --config-type workspace
+```
+
+## Writing new test cases
+To write a new test case, follow these steps:
+1. (Optional) Make a new file in [cases/](cases/) if none of the existing files seem appropriate.
+1. Write a new function following these guidelines:
+    1. Follow the naming scheme `test_*`.
+    1. Decorate the function with the `test_case` decorator.
+    1. If you want to assert something, do so by raising an exception (TestError has been provided for this purpose, but any old exception does the trick).
+    1. Use the `RunHelper` to invoke CLI commands. Follow the example of other test cases.
+1. Register the test in the `run_tests` function in [main.py](main.py). Note that the order matters here, later tests are allowed (and encouraged) to depend on the results of earlier tests. This way we can test truly end-to-end, beyond the span of a single invocation.
+
+## Formatting:
+```
+poetry run isort .
+poetry run black .
+```
diff --git a/m1/movement/e2e/cases/__init__.py b/m1/movement/e2e/cases/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/m1/movement/e2e/cases/account.py b/m1/movement/e2e/cases/account.py
new file mode 100644
index 00000000..6bdc0f8e
--- /dev/null
+++ b/m1/movement/e2e/cases/account.py
@@ -0,0 +1,62 @@
+# Copyright © Aptos Foundation
+# SPDX-License-Identifier: Apache-2.0
+
+
+from common import OTHER_ACCOUNT_ONE, TestError
+from test_helpers import RunHelper
+from test_results import test_case
+
+
+@test_case
+def test_account_fund_with_faucet(run_helper: RunHelper, test_name=None):
+    amount_in_octa = 100000000000
+
+    # Fund the account.
+    run_helper.run_command(
+        test_name,
+        [
+            "movement",
+            "account",
+            "fund-with-faucet",
+            "--account",
+            run_helper.get_account_info().account_address,
+            "--amount",
+            str(amount_in_octa),
+        ],
+    )
+
+    # Assert it has the requested balance.
+    balance = int(
+        run_helper.api_client.account_balance(
+            run_helper.get_account_info().account_address
+        )
+    )
+    if balance != amount_in_octa:
+        raise TestError(
+            f"Account {run_helper.get_account_info().account_address} has balance {balance}, expected {amount_in_octa}"
+        )
+
+
+@test_case
+def test_account_create(run_helper: RunHelper, test_name=None):
+    # Create the new account.
+    run_helper.run_command(
+        test_name,
+        [
+            "movement",
+            "account",
+            "create",
+            "--account",
+            OTHER_ACCOUNT_ONE.account_address,
+            "--assume-yes",
+        ],
+    )
+
+    # Assert it exists and has zero balance.
+    balance = int(
+        run_helper.api_client.account_balance(OTHER_ACCOUNT_ONE.account_address)
+    )
+    if balance != 0:
+        raise TestError(
+            f"Account {OTHER_ACCOUNT_ONE.account_address} has balance {balance}, expected 0"
+        )
diff --git a/m1/movement/e2e/cases/init.py b/m1/movement/e2e/cases/init.py
new file mode 100644
index 00000000..0ab32c8b
--- /dev/null
+++ b/m1/movement/e2e/cases/init.py
@@ -0,0 +1,43 @@
+# Copyright © Aptos Foundation
+# SPDX-License-Identifier: Apache-2.0
+
+import os
+
+from common import TestError
+from test_helpers import RunHelper
+from test_results import test_case
+
+
+@test_case
+def test_init(run_helper: RunHelper, test_name=None):
+    # Initialize a profile for the CLI to use. Note that we do not set the
+    # --skip-faucet flag. This means that in addition to creating a profile locally,
+    # it will use the faucet to create the account on chain. This will fund the
+    # account with the default amount of 100000000 OCTA.
+    run_helper.run_command(
+        test_name,
+        ["movement", "init", "--assume-yes", "--network", "local"],
+        input="\n",
+    )
+
+    # Assert that the CLI config is there.
+    config_path = os.path.join(
+        run_helper.host_working_directory, ".movement", "config.yaml"
+    )
+    if not os.path.exists(config_path):
+        raise TestError(
+            f"{config_path} not found (in host working dir) after running aptos init"
+        )
+
+    # Assert that it contains info for the account that was created.
+    account_info = run_helper.get_account_info()
+    if not account_info:
+        raise TestError("Failed to read account info from newly created config file")
+
+    # Confirm with the local testnet that it was created.
+    try:
+        run_helper.api_client.account(account_info.account_address)
+    except Exception as e:
+        raise TestError(
+            f"Failed to query local testnet for account {account_info.account_address}"
+        ) from e
diff --git a/m1/movement/e2e/common.py b/m1/movement/e2e/common.py
new file mode 100644
index 00000000..0240163c
--- /dev/null
+++ b/m1/movement/e2e/common.py
@@ -0,0 +1,49 @@
+# Copyright © Aptos Foundation
+# SPDX-License-Identifier: Apache-2.0
+
+import os
+from dataclasses import dataclass
+from enum import Enum
+
+NODE_PORT = 8080
+FAUCET_PORT = 8081
+
+
+class Network(Enum):
+    DEVNET = "devnet"
+    TESTNET = "testnet"
+    MAINNET = "mainnet"
+
+    def __str__(self):
+        return self.value
+
+
+# Information for some accounts we use for testing.
+@dataclass
+class AccountInfo:
+    private_key: str
+    public_key: str
+    account_address: str
+
+
+# This is an account that we use for testing, for example to create it with the init
+# account, send funds to it, etc. This is not the account created by the `aptos init`
+# test. To get details about that account use get_account_info on the RunHelper.
+OTHER_ACCOUNT_ONE = AccountInfo( + private_key="0x37368b46ce665362562c6d1d4ec01a08c8644c488690df5a17e13ba163e20221", + public_key="0x25caf00522e4d4664ec0a27166a69e8a32b5078959d0fc398da70d40d2893e8f", + account_address="0x585fc9f0f0c54183b039ffc770ca282ebd87307916c215a3e692f2f8e4305e82", +) + + +def build_image_name(image_repo_with_project: str, tag: str): + return f"{image_repo_with_project}/tools:{tag}" + + +# Exception to use when a test fails, for the CLI did something unexpected, an +# expected output was missing, etc. This is just a convenience, the framework +# will still work if a different error is raised. +# +# For errors within the framework itself, use RuntimeError. +class TestError(Exception): + pass diff --git a/m1/movement/e2e/local_testnet.py b/m1/movement/e2e/local_testnet.py new file mode 100644 index 00000000..49de4188 --- /dev/null +++ b/m1/movement/e2e/local_testnet.py @@ -0,0 +1,100 @@ +# Copyright © Aptos Foundation +# SPDX-License-Identifier: Apache-2.0 + +# This file contains functions for running the local testnet. + +import logging +import subprocess +import time + +import requests +from common import FAUCET_PORT, NODE_PORT, Network, build_image_name + +LOG = logging.getLogger(__name__) + +# Run a local testnet in a docker container. We choose to detach here and we'll +# stop running it later using the container name. +def run_node(network: Network, image_repo_with_project: str): + image_name = build_image_name(image_repo_with_project, network) + container_name = f"aptos-tools-{network}" + LOG.info(f"Trying to run movement CLI local testnet from image: {image_name}") + + # Confirm that the Docker daemon is running. + try: + subprocess.run( + ["docker", "container", "ls"], + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + check=True, + ) + except: + LOG.error("Failed to connect to Docker. Is it installed and running?") + raise + + # First delete the existing container if there is one with the same name. + subprocess.run( + ["docker", "rm", "-f", container_name], + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) + + # Run the container. + subprocess.check_output( + [ + "docker", + "run", + "--pull", + "always", + "--detach", + "--name", + container_name, + "-p", + f"{NODE_PORT}:{NODE_PORT}", + "-p", + f"{FAUCET_PORT}:{FAUCET_PORT}", + image_name, + "movement", + "node", + "run-local-testnet", + "--with-faucet", + ], + ) + LOG.info(f"Running movement CLI local testnet from image: {image_name}") + return container_name + + +# Stop running the detached node. +def stop_node(container_name: str): + LOG.info(f"Stopping container: {container_name}") + subprocess.check_output(["docker", "stop", container_name]) + LOG.info(f"Stopped container: {container_name}") + + +# Query the node and faucet APIs until they start up or we timeout. +def wait_for_startup(container_name: str, timeout: int): + LOG.info(f"Waiting for node and faucet APIs for {container_name} to come up") + count = 0 + api_response = None + faucet_response = None + while True: + try: + api_response = requests.get(f"http://127.0.0.1:{NODE_PORT}/v1") + # Try to query the legacy faucet health endpoint first. TODO: Remove this + # once all local testnet images we use have the new faucet in them. + faucet_response = requests.get(f"http://127.0.0.1:{FAUCET_PORT}/health") + if faucet_response.status_code == 404: + # If that fails, try the new faucet health endpoint. 
+ faucet_response = requests.get(f"http://127.0.0.1:{FAUCET_PORT}/") + if api_response.status_code != 200 or faucet_response.status_code != 200: + raise RuntimeError( + f"API or faucet not ready. API response: {api_response}. " + f"Faucet response: {faucet_response}" + ) + break + except Exception: + if count >= timeout: + LOG.error(f"Timeout while waiting for node / faucet to come up") + raise + count += 1 + time.sleep(1) + LOG.info(f"Node and faucet APIs for {container_name} came up") diff --git a/m1/movement/e2e/main.py b/m1/movement/e2e/main.py new file mode 100644 index 00000000..2d8f97d6 --- /dev/null +++ b/m1/movement/e2e/main.py @@ -0,0 +1,161 @@ +#!/usr/bin/env python3 + +# Copyright © Aptos Foundation +# SPDX-License-Identifier: Apache-2.0 + +""" +This script is how we orchestrate running a local testnet and then running CLI tests against it. There are two different CLIs used for this: + +1. Base: For running the local testnet. This is what the --base-network flag and all other flags starting with --base are for. +2. Test: The CLI that we're testing. This is what the --test-cli-tag / --test-cli-path and all other flags starting with --test are for. + +Example (testing CLI in image): + python3 main.py --base-network testnet --test-cli-tag mainnet_0431e2251d0b42920d89a52c63439f7b9eda6ac3 + +Example (testing locally built CLI binary): + python3 main.py --base-network devnet --test-cli-path ~/aptos-core/target/release/aptos + +This means, run the CLI test suite using a CLI built from mainnet_0431e2251d0b42920d89a52c63439f7b9eda6ac3 against a local testnet built from the testnet branch of aptos-core. + +Example (using a different image repo): + See ~/.github/workflows/cli-e2e-tests.yaml + +When the test suite is complete, it will tell you which tests passed and which failed. To further debug a failed test, you can check the output in --working-directory, there will be files for each test containing the command run, stdout, stderr, and any exception. +""" + +import argparse +import logging +import pathlib +import shutil +import sys + +from cases.account import test_account_create, test_account_fund_with_faucet +from cases.init import test_init +from common import Network +from local_testnet import run_node, stop_node, wait_for_startup +from test_helpers import RunHelper +from test_results import test_results + +logging.basicConfig( + stream=sys.stderr, + format="%(asctime)s - %(levelname)s - %(message)s", + level=logging.INFO, +) + +LOG = logging.getLogger(__name__) + + +def parse_args(): + # You'll notice there are two argument "prefixes", base and test. These refer to + # cases 1 and 2 in the top-level comment. + parser = argparse.ArgumentParser( + formatter_class=argparse.RawDescriptionHelpFormatter, + description=__doc__, + ) + parser.add_argument("-d", "--debug", action="store_true") + parser.add_argument( + "--image-repo-with-project", + default="aptoslabs", + help=( + "What docker image repo (+ project) to use for the local testnet. " + "By default we use Docker Hub: %(default)s (so, just aptoslabs for the " + "project since Docker Hub is the implied default repo). 
If you want to " + "specify a different repo, it might look like this: " + "docker.pkg.github.com/aptoslabs/aptos-core" + ), + ) + parser.add_argument( + "--base-network", + required=True, + type=Network, + choices=list(Network), + help="What branch the Movement CLI used for the local testnet should be built from", + ) + parser.add_argument( + "--base-startup-timeout", + type=int, + default=30, + help="Timeout in seconds for waiting for node and faucet to start up", + ) + test_cli_args = parser.add_mutually_exclusive_group(required=True) + test_cli_args.add_argument( + "--test-cli-tag", + help="The image tag for the CLI we want to test, e.g. mainnet_0431e2251d0b42920d89a52c63439f7b9eda6ac3", + ) + test_cli_args.add_argument( + "--test-cli-path", + help="Path to CLI binary we want to test, e.g. /home/dport/aptos-core/target/release/aptos", + ) + parser.add_argument( + "--working-directory", + default="/tmp/aptos-cli-tests", + help="Where we'll run CLI commands from (in the host system). Default: %(default)s", + ) + args = parser.parse_args() + return args + + +def run_tests(run_helper): + # Run init tests. We run these first to set up the CLI. + test_init(run_helper) + + # Run account tests. + test_account_fund_with_faucet(run_helper) + test_account_create(run_helper) + + +def main(): + args = parse_args() + + if args.debug: + logging.getLogger().setLevel(logging.DEBUG) + LOG.debug("Debug logging enabled") + else: + logging.getLogger().setLevel(logging.INFO) + + # Run a node + faucet and wait for them to start up. + container_name = run_node(args.base_network, args.image_repo_with_project) + wait_for_startup(container_name, args.base_startup_timeout) + + # Create the dir the test CLI will run from. + shutil.rmtree(args.working_directory, ignore_errors=True) + pathlib.Path(args.working_directory).mkdir(parents=True, exist_ok=True) + + # Build the RunHelper object. + run_helper = RunHelper( + host_working_directory=args.working_directory, + image_repo_with_project=args.image_repo_with_project, + image_tag=args.test_cli_tag, + cli_path=args.test_cli_path, + ) + + # Prepare the run helper. This ensures in advance that everything needed is there. + run_helper.prepare() + + # Run tests. + run_tests(run_helper) + + # Stop the node + faucet. + stop_node(container_name) + + # Print out the results. 
+ if test_results.passed: + LOG.info("These tests passed:") + for test_name in test_results.passed: + LOG.info(test_name) + + if test_results.failed: + LOG.error("These tests failed:") + for test_name, exception in test_results.failed: + LOG.error(f"{test_name}: {exception}") + return False + + LOG.info("All tests passed!") + return True + + +if __name__ == "__main__": + if main(): + sys.exit(0) + else: + sys.exit(1) diff --git a/m1/movement/e2e/poetry.lock b/m1/movement/e2e/poetry.lock new file mode 100644 index 00000000..2708f4a9 --- /dev/null +++ b/m1/movement/e2e/poetry.lock @@ -0,0 +1,665 @@ +[[package]] +name = "anyio" +version = "3.6.2" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +category = "main" +optional = false +python-versions = ">=3.6.2" + +[package.dependencies] +idna = ">=2.8" +sniffio = ">=1.1" +typing-extensions = {version = "*", markers = "python_version < \"3.8\""} + +[package.extras] +doc = ["packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["contextlib2", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (<0.15)", "uvloop (>=0.15)"] +trio = ["trio (>=0.16,<0.22)"] + +[[package]] +name = "aptos-sdk" +version = "0.5.1" +description = "" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +httpx = ">=0.23.0,<0.24.0" +mypy = ">=0.982,<0.983" +PyNaCl = ">=1.5.0,<2.0.0" + +[[package]] +name = "black" +version = "22.12.0" +description = "The uncompromising code formatter." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} +typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""} +typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "certifi" +version = "2022.12.7" +description = "Python package for providing Mozilla's CA Bundle." +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "cffi" +version = "1.15.1" +description = "Foreign Function Interface for Python calling C code." +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "charset-normalizer" +version = "3.1.0" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "main" +optional = false +python-versions = ">=3.7.0" + +[[package]] +name = "click" +version = "8.1.3" +description = "Composable command line interface toolkit" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" + +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +typing-extensions = {version = "*", markers = "python_version < \"3.8\""} + +[[package]] +name = "httpcore" +version = "0.16.3" +description = "A minimal low-level HTTP client." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +anyio = ">=3.0,<5.0" +certifi = "*" +h11 = ">=0.13,<0.15" +sniffio = ">=1.0.0,<2.0.0" + +[package.extras] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] + +[[package]] +name = "httpx" +version = "0.23.3" +description = "The next generation HTTP client." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +certifi = "*" +httpcore = ">=0.15.0,<0.17.0" +rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]} +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<13)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "importlib-metadata" +version = "6.0.0" +description = "Read metadata from Python packages" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] + +[[package]] +name = "isort" +version = "5.11.5" +description = "A Python utility / library to sort Python imports." +category = "dev" +optional = false +python-versions = ">=3.7.0" + +[package.extras] +colors = ["colorama (>=0.4.3,<0.5.0)"] +pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] +plugins = ["setuptools"] +requirements-deprecated-finder = ["pip-api", "pipreqs"] + +[[package]] +name = "mypy" +version = "0.982" +description = "Optional static typing for Python" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +mypy-extensions = ">=0.4.3" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typed-ast = {version = ">=1.4.0,<2", markers = "python_version < \"3.8\""} +typing-extensions = ">=3.10" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +python2 = ["typed-ast (>=1.4.0,<2)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +category = "main" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "pathspec" +version = "0.11.0" +description = "Utility library for gitignore style pattern matching of file paths." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "platformdirs" +version = "3.1.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +typing-extensions = {version = ">=4.4", markers = "python_version < \"3.8\""} + +[package.extras] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] + +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "pynacl" +version = "1.5.0" +description = "Python binding to the Networking and Cryptography (NaCl) library" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +cffi = ">=1.4.1" + +[package.extras] +docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"] +tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] + +[[package]] +name = "requests" +version = "2.28.2" +description = "Python HTTP for Humans." +category = "main" +optional = false +python-versions = ">=3.7, <4" + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<1.27" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "rfc3986" +version = "1.5.0" +description = "Validating URI References per RFC 3986" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +idna = {version = "*", optional = true, markers = "extra == \"idna2008\""} + +[package.extras] +idna2008 = ["idna"] + +[[package]] +name = "sniffio" +version = "1.3.0" +description = "Sniff out which async library your code is running under" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "typed-ast" +version = "1.5.4" +description = "a fork of Python 2 and 3 ast modules with type comment support" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "typing-extensions" +version = "4.5.0" +description = "Backported and Experimental Type Hints for Python 3.7+" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "urllib3" +version = "1.26.15" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "zipp" +version = "3.15.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-o", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] + +[metadata] +lock-version = "1.1" +python-versions = ">=3.7 <4" +content-hash = "e9e3c9c792c90300ff2f22bcfadc9ad737060eb3142c17a21e687073fa54e877" + +[metadata.files] +anyio = [ + {file = "anyio-3.6.2-py3-none-any.whl", hash = "sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3"}, + {file = "anyio-3.6.2.tar.gz", hash = "sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421"}, +] +aptos-sdk = [ + {file = "aptos_sdk-0.5.1.tar.gz", hash = "sha256:3711ad2bf1120fff463cd5f494162c4658f03dd6bfbf1f523ee9aea01a4cb0f0"}, +] +black = [ + {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"}, + {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"}, + {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"}, + {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"}, + {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"}, + {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"}, + {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"}, + {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"}, + {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"}, + {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"}, + {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"}, + {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"}, +] +certifi = [ + {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, + {file = "certifi-2022.12.7.tar.gz", hash = 
"sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, +] +cffi = [ + {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, + {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, + {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, + {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, + {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, + {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, + {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, + {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, + {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, + {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, + {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, + {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, + {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, + {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, + {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, + {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, + {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, + {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, + {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, +] +charset-normalizer = [ + {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = 
"sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, + {file = 
"charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, + {file = 
"charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, + {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, +] +click = [ + {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, + {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, +] +colorama = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] +h11 = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] +httpcore = [ + {file = "httpcore-0.16.3-py3-none-any.whl", hash = "sha256:da1fb708784a938aa084bde4feb8317056c55037247c787bd7e19eb2c2949dc0"}, + {file = "httpcore-0.16.3.tar.gz", hash = "sha256:c5d6f04e2fc530f39e0c077e6a30caa53f1451096120f1f38b954afd0b17c0cb"}, +] +httpx = [ + {file = "httpx-0.23.3-py3-none-any.whl", hash = "sha256:a211fcce9b1254ea24f0cd6af9869b3d29aba40154e947d2a07bb499b3e310d6"}, + {file = "httpx-0.23.3.tar.gz", hash = "sha256:9818458eb565bb54898ccb9b8b251a28785dd4a55afbc23d0eb410754fe7d0f9"}, +] +idna = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] +importlib-metadata = [ + {file = "importlib_metadata-6.0.0-py3-none-any.whl", hash = "sha256:7efb448ec9a5e313a57655d35aa54cd3e01b7e1fbcf72dce1bf06119420f5bad"}, + {file = "importlib_metadata-6.0.0.tar.gz", hash = "sha256:e354bedeb60efa6affdcc8ae121b73544a7aa74156d047311948f6d711cd378d"}, +] +isort = [ + {file = "isort-5.11.5-py3-none-any.whl", hash = "sha256:ba1d72fb2595a01c7895a5128f9585a5cc4b6d395f1c8d514989b9a7eb2a8746"}, + {file = "isort-5.11.5.tar.gz", hash = "sha256:6be1f76a507cb2ecf16c7cf14a37e41609ca082330be4e3436a18ef74add55db"}, +] +mypy = [ + {file = "mypy-0.982-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5085e6f442003fa915aeb0a46d4da58128da69325d8213b4b35cc7054090aed5"}, + {file = "mypy-0.982-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:41fd1cf9bc0e1c19b9af13a6580ccb66c381a5ee2cf63ee5ebab747a4badeba3"}, + {file = "mypy-0.982-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:f793e3dd95e166b66d50e7b63e69e58e88643d80a3dcc3bcd81368e0478b089c"}, + {file = "mypy-0.982-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86ebe67adf4d021b28c3f547da6aa2cce660b57f0432617af2cca932d4d378a6"}, + {file = "mypy-0.982-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:175f292f649a3af7082fe36620369ffc4661a71005aa9f8297ea473df5772046"}, + {file = "mypy-0.982-cp310-cp310-win_amd64.whl", hash = "sha256:8ee8c2472e96beb1045e9081de8e92f295b89ac10c4109afdf3a23ad6e644f3e"}, + {file = "mypy-0.982-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58f27ebafe726a8e5ccb58d896451dd9a662a511a3188ff6a8a6a919142ecc20"}, + {file = "mypy-0.982-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6af646bd46f10d53834a8e8983e130e47d8ab2d4b7a97363e35b24e1d588947"}, + {file = "mypy-0.982-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e7aeaa763c7ab86d5b66ff27f68493d672e44c8099af636d433a7f3fa5596d40"}, + {file = "mypy-0.982-cp37-cp37m-win_amd64.whl", hash = "sha256:724d36be56444f569c20a629d1d4ee0cb0ad666078d59bb84f8f887952511ca1"}, + {file = "mypy-0.982-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:14d53cdd4cf93765aa747a7399f0961a365bcddf7855d9cef6306fa41de01c24"}, + {file = "mypy-0.982-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:26ae64555d480ad4b32a267d10cab7aec92ff44de35a7cd95b2b7cb8e64ebe3e"}, + {file = "mypy-0.982-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6389af3e204975d6658de4fb8ac16f58c14e1bacc6142fee86d1b5b26aa52bda"}, + {file = "mypy-0.982-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b35ce03a289480d6544aac85fa3674f493f323d80ea7226410ed065cd46f206"}, + {file = "mypy-0.982-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c6e564f035d25c99fd2b863e13049744d96bd1947e3d3d2f16f5828864506763"}, + {file = "mypy-0.982-cp38-cp38-win_amd64.whl", hash = "sha256:cebca7fd333f90b61b3ef7f217ff75ce2e287482206ef4a8b18f32b49927b1a2"}, + {file = "mypy-0.982-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a705a93670c8b74769496280d2fe6cd59961506c64f329bb179970ff1d24f9f8"}, + {file = "mypy-0.982-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:75838c649290d83a2b83a88288c1eb60fe7a05b36d46cbea9d22efc790002146"}, + {file = "mypy-0.982-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:91781eff1f3f2607519c8b0e8518aad8498af1419e8442d5d0afb108059881fc"}, + {file = "mypy-0.982-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaa97b9ddd1dd9901a22a879491dbb951b5dec75c3b90032e2baa7336777363b"}, + {file = "mypy-0.982-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a692a8e7d07abe5f4b2dd32d731812a0175626a90a223d4b58f10f458747dd8a"}, + {file = "mypy-0.982-cp39-cp39-win_amd64.whl", hash = "sha256:eb7a068e503be3543c4bd329c994103874fa543c1727ba5288393c21d912d795"}, + {file = "mypy-0.982-py3-none-any.whl", hash = "sha256:1021c241e8b6e1ca5a47e4d52601274ac078a89845cfde66c6d5f769819ffa1d"}, + {file = "mypy-0.982.tar.gz", hash = "sha256:85f7a343542dc8b1ed0a888cdd34dca56462654ef23aa673907305b260b3d746"}, +] +mypy-extensions = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] +pathspec = [ + {file = "pathspec-0.11.0-py3-none-any.whl", hash = "sha256:3a66eb970cbac598f9e5ccb5b2cf58930cd8e3ed86d393d541eaf2d8b1705229"}, + {file = "pathspec-0.11.0.tar.gz", hash = 
"sha256:64d338d4e0914e91c1792321e6907b5a593f1ab1851de7fc269557a21b30ebbc"}, +] +platformdirs = [ + {file = "platformdirs-3.1.0-py3-none-any.whl", hash = "sha256:13b08a53ed71021350c9e300d4ea8668438fb0046ab3937ac9a29913a1a1350a"}, + {file = "platformdirs-3.1.0.tar.gz", hash = "sha256:accc3665857288317f32c7bebb5a8e482ba717b474f3fc1d18ca7f9214be0cef"}, +] +pycparser = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] +pynacl = [ + {file = "PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858"}, + {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b"}, + {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff"}, + {file = "PyNaCl-1.5.0-cp36-abi3-win32.whl", hash = "sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543"}, + {file = "PyNaCl-1.5.0-cp36-abi3-win_amd64.whl", hash = "sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93"}, + {file = "PyNaCl-1.5.0.tar.gz", hash = "sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba"}, +] +requests = [ + {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"}, + {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"}, +] +rfc3986 = [ + {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"}, + {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"}, +] +sniffio = [ + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, +] +tomli = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] +typed-ast = [ + {file = "typed_ast-1.5.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4"}, + {file = "typed_ast-1.5.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:211260621ab1cd7324e0798d6be953d00b74e0428382991adfddb352252f1d62"}, + {file = 
"typed_ast-1.5.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:267e3f78697a6c00c689c03db4876dd1efdfea2f251a5ad6555e82a26847b4ac"}, + {file = "typed_ast-1.5.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c542eeda69212fa10a7ada75e668876fdec5f856cd3d06829e6aa64ad17c8dfe"}, + {file = "typed_ast-1.5.4-cp310-cp310-win_amd64.whl", hash = "sha256:a9916d2bb8865f973824fb47436fa45e1ebf2efd920f2b9f99342cb7fab93f72"}, + {file = "typed_ast-1.5.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:79b1e0869db7c830ba6a981d58711c88b6677506e648496b1f64ac7d15633aec"}, + {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a94d55d142c9265f4ea46fab70977a1944ecae359ae867397757d836ea5a3f47"}, + {file = "typed_ast-1.5.4-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:183afdf0ec5b1b211724dfef3d2cad2d767cbefac291f24d69b00546c1837fb6"}, + {file = "typed_ast-1.5.4-cp36-cp36m-win_amd64.whl", hash = "sha256:639c5f0b21776605dd6c9dbe592d5228f021404dafd377e2b7ac046b0349b1a1"}, + {file = "typed_ast-1.5.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf4afcfac006ece570e32d6fa90ab74a17245b83dfd6655a6f68568098345ff6"}, + {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed855bbe3eb3715fca349c80174cfcfd699c2f9de574d40527b8429acae23a66"}, + {file = "typed_ast-1.5.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6778e1b2f81dfc7bc58e4b259363b83d2e509a65198e85d5700dfae4c6c8ff1c"}, + {file = "typed_ast-1.5.4-cp37-cp37m-win_amd64.whl", hash = "sha256:0261195c2062caf107831e92a76764c81227dae162c4f75192c0d489faf751a2"}, + {file = "typed_ast-1.5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2efae9db7a8c05ad5547d522e7dbe62c83d838d3906a3716d1478b6c1d61388d"}, + {file = "typed_ast-1.5.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7d5d014b7daa8b0bf2eaef684295acae12b036d79f54178b92a2b6a56f92278f"}, + {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:370788a63915e82fd6f212865a596a0fefcbb7d408bbbb13dea723d971ed8bdc"}, + {file = "typed_ast-1.5.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4e964b4ff86550a7a7d56345c7864b18f403f5bd7380edf44a3c1fb4ee7ac6c6"}, + {file = "typed_ast-1.5.4-cp38-cp38-win_amd64.whl", hash = "sha256:683407d92dc953c8a7347119596f0b0e6c55eb98ebebd9b23437501b28dcbb8e"}, + {file = "typed_ast-1.5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4879da6c9b73443f97e731b617184a596ac1235fe91f98d279a7af36c796da35"}, + {file = "typed_ast-1.5.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3e123d878ba170397916557d31c8f589951e353cc95fb7f24f6bb69adc1a8a97"}, + {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebd9d7f80ccf7a82ac5f88c521115cc55d84e35bf8b446fcd7836eb6b98929a3"}, + {file = "typed_ast-1.5.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98f80dee3c03455e92796b58b98ff6ca0b2a6f652120c263efdba4d6c5e58f72"}, + {file = "typed_ast-1.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:0fdbcf2fef0ca421a3f5912555804296f0b0960f0418c440f5d6d3abb549f3e1"}, + {file = "typed_ast-1.5.4.tar.gz", hash = 
"sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2"}, +] +typing-extensions = [ + {file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"}, + {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"}, +] +urllib3 = [ + {file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"}, + {file = "urllib3-1.26.15.tar.gz", hash = "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"}, +] +zipp = [ + {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, + {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, +] diff --git a/m1/movement/e2e/pyproject.toml b/m1/movement/e2e/pyproject.toml new file mode 100644 index 00000000..b706f7df --- /dev/null +++ b/m1/movement/e2e/pyproject.toml @@ -0,0 +1,19 @@ +[tool.poetry] +name = "movement-cli-e2e-tests" +version = "0.1.0" +description = "Movement CLI E2E tests" +authors = ["Movment Labs "] +license = "Apache-2.0" + +[tool.poetry.dependencies] +python = ">=3.7 <4" +aptos-sdk = "^0.5.1" +requests = "^2.28.2" + +[tool.poetry.dev-dependencies] +black = "^22.6.0" +isort = "^5.10.1" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" diff --git a/m1/movement/e2e/test_helpers.py b/m1/movement/e2e/test_helpers.py new file mode 100644 index 00000000..e3af62d4 --- /dev/null +++ b/m1/movement/e2e/test_helpers.py @@ -0,0 +1,174 @@ +# Copyright © Aptos Foundation +# SPDX-License-Identifier: Apache-2.0 + +import logging +import os +import pathlib +import subprocess +import traceback +from dataclasses import dataclass + +from aptos_sdk.client import RestClient +from common import AccountInfo, build_image_name + +LOG = logging.getLogger(__name__) + +WORKING_DIR_IN_CONTAINER = "/tmp" + +# We pass this class into all test functions to help with calling the CLI, +# collecting output, and accessing common info. +@dataclass +class RunHelper: + host_working_directory: str + image_repo_with_project: str + image_tag: str + cli_path: str + test_count: int + + # This can be used by the tests to query the local testnet. + api_client: RestClient + + def __init__( + self, host_working_directory, image_repo_with_project, image_tag, cli_path + ): + if image_tag and cli_path: + raise RuntimeError("Cannot specify both image_tag and cli_path") + if not (image_tag or cli_path): + raise RuntimeError("Must specify one of image_tag and cli_path") + self.host_working_directory = host_working_directory + self.image_repo_with_project = image_repo_with_project + self.image_tag = image_tag + self.cli_path = os.path.abspath(cli_path) if cli_path else cli_path + self.test_count = 0 + self.api_client = RestClient(f"http://127.0.0.1:8080/v1") + + def build_image_name(self): + return build_image_name(self.image_repo_with_project, self.image_tag) + + # This function lets you pass call the CLI like you would normally, but really it is + # calling the CLI in a docker container and mounting the host working directory such + # that the container will write it results out to that directory. That way the CLI + # state / configuration is preserved between test cases. 
+ def run_command(self, test_name, command, *args, **kwargs): + file_name = f"{self.test_count:03}_{test_name}" + self.test_count += 1 + + # Build command. + if self.image_tag: + full_command = [ + "docker", + "run", + # For why we have to set --user, see here: + # https://github.com/community/community/discussions/44243 + "--user", + f"{os.getuid()}:{os.getgid()}", + "--rm", + "--network", + "host", + "-i", + "-v", + f"{self.host_working_directory}:{WORKING_DIR_IN_CONTAINER}", + "--workdir", + WORKING_DIR_IN_CONTAINER, + self.build_image_name(), + ] + command + else: + full_command = [self.cli_path] + command[1:] + LOG.debug(f"Running command: {full_command}") + + # Create the output directory if necessary. + out_path = os.path.join(self.host_working_directory, "out") + pathlib.Path(out_path).mkdir(exist_ok=True) + + # Write the command we're going to run to file. + with open(os.path.join(out_path, f"{file_name}.command"), "w") as f: + f.write(" ".join(command)) + + # Run command. + try: + # If we're using a local CLI, set the working directory for subprocess.run. + if self.cli_path: + kwargs["cwd"] = self.host_working_directory + result = subprocess.run( + full_command, + *args, + check=True, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + universal_newlines=True, + **kwargs, + ) + LOG.debug(f"Subcommand succeeded: {test_name}") + + write_subprocess_out(out_path, file_name, result) + + return result + except subprocess.CalledProcessError as e: + LOG.warn(f"Subcommand failed: {test_name}") + + # Write the exception to file. + with open(os.path.join(out_path, f"{file_name}.exception"), "w") as f: + f.write( + "".join( + traceback.format_exception( + etype=type(e), value=e, tb=e.__traceback__ + ) + ) + ) + + # Fortunately the result and exception of subprocess.run both have the + # stdout and stderr attributes on them. + write_subprocess_out(out_path, file_name, e) + + raise + + # If image_Tag is set, pull the test CLI image. We don't technically have to do + # this separately but it makes the steps clearer. Otherwise, cli_path must be + # set, in which case we ensure the file is there. + def prepare(self): + if self.image_tag: + image_name = self.build_image_name() + LOG.info(f"Pre-pulling image for CLI we're testing: {image_name}") + command = ["docker", "pull", image_name] + LOG.debug(f"Running command: {command}") + output = subprocess.check_output(command) + LOG.debug(f"Output: {output}") + else: + if not os.path.isfile(self.cli_path): + raise RuntimeError(f"CLI not found at path: {self.cli_path}") + + # Get the account info of the account created by test_init. + def get_account_info(self): + path = os.path.join(self.host_working_directory, ".aptos", "config.yaml") + with open(path) as f: + content = f.read().splitlines() + # To avoid using external deps we parse the file manually. + private_key = None + public_key = None + account_address = None + for line in content: + if "private_key: " in line: + private_key = line.split("private_key: ")[1].replace('"', "") + if "public_key: " in line: + public_key = line.split("public_key: ")[1].replace('"', "") + if "account: " in line: + account_address = line.split("account: ")[1].replace('"', "") + if not private_key or not public_key or not account_address: + raise RuntimeError(f"Failed to parse {path} to get account info") + return AccountInfo( + private_key=private_key, + public_key=public_key, + account_address=account_address, + ) + + +# This function helps with writing the stdout / stderr of a subprocess to files. 
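+# The output lands in <out_path>/<file_name>.stdout and <file_name>.stderr, using
+# the same zero-padded "<NNN>_<test_name>" prefix that run_command builds above.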
+def write_subprocess_out(out_path, file_name, command_output): + LOG.debug(f"Stdout: {command_output.stdout}") + LOG.debug(f"Stderr: {command_output.stderr}") + + # Write stdout and stderr to file. + with open(os.path.join(out_path, f"{file_name}.stdout"), "w") as f: + f.write(command_output.stdout) + with open(os.path.join(out_path, f"{file_name}.stderr"), "w") as f: + f.write(command_output.stderr) diff --git a/m1/movement/e2e/test_results.py b/m1/movement/e2e/test_results.py new file mode 100644 index 00000000..661fe43b --- /dev/null +++ b/m1/movement/e2e/test_results.py @@ -0,0 +1,48 @@ +# Copyright © Aptos Foundation +# SPDX-License-Identifier: Apache-2.0 + +import logging +import typing +from dataclasses import dataclass, field +from functools import wraps + +LOG = logging.getLogger(__name__) + + +# This class holds info about passed / failed tests. +@dataclass(init=True) +class TestResults: + passed: typing.List[str] = field(default_factory=list) + failed: typing.List[typing.Tuple[str, Exception]] = field(default_factory=list) + + +# This is a decorator that you put above every test case. It handles capturing test +# success / failure so it can be reported at the end of the test suite. +def build_test_case_decorator(test_results: TestResults): + def test_case_inner(f): + @wraps(f) + def wrapper(*args, **kwds): + LOG.info(f"Running test: {f.__name__}") + try: + result = f(*args, test_name=f.__name__, **kwds) + test_results.passed.append(f.__name__) + return result + except Exception as e: + test_results.failed.append((f.__name__, e)) + return None + + return wrapper + + return test_case_inner + + +# We now define one TestResults that we'll use for every test case. This is a bit of a +# hack but it is the only way to then be able to provide a decorator that works out of +# the box. The alternative was to use a context manager and wrap every function call in +# it, but not only is that more verbose, but you'd have to provide the name of each test +# case manually to the context manager, whereas with this approach the name can be +# inferred from the function being decorated directly. +test_results = TestResults() + +# Then we define an instance of the decorator that uses that TestResults instance. +test_case = build_test_case_decorator(test_results) diff --git a/m1/movement/homebrew/README.md b/m1/movement/homebrew/README.md new file mode 100644 index 00000000..e25d87d3 --- /dev/null +++ b/m1/movement/homebrew/README.md @@ -0,0 +1,210 @@ +# Homebrew Aptos + +Homebrew is a package manager that works for MacOS Silicon and Intel chips as well as Linux distributions like Debian and Ubuntu. + +The [Aptos command line interface (CLI)](https://aptos.dev/cli-tools/aptos-cli-tool/install-aptos-cli) may be installed via [Homebrew](https://brew.sh/) for simplicity. This is an in-depth overview of Homebrew and the Aptos formula. In this guide, we go over each section of the Homebrew formula and steps to implement changes in the future. + +## Quick guide + +- [Formula in Homebrew GitHub](https://github.com/Homebrew/homebrew-core/blob/master/Formula/aptos.rb) +- [Aptos 1.0.3 New Formula PR for GitHub](https://github.com/Homebrew/homebrew-core/pull/119832) +- [Aptos Formula Fix PR to use build_cli_release.sh](https://github.com/Homebrew/homebrew-core/pull/120051) + +## Getting started + +To begin, first ensure that homebrew is correctly installed on your computer. Visit [brew.sh](https://brew.sh/) to learn how you can set it up! 
+ +To test that it works correctly, try + +```bash +brew help +``` + +Once homebrew is installed, run + +```bash +brew install aptos +``` + +to test that it installed correctly, try + +```bash +movement --help + +# This should return something like + +# movement 1.0.5 +# Movement Labs +# Command Line Interface (CLI) for developing and interacting with the Aptos blockchain +# ... +``` + +## Change guide + +Note: This guide is for developers who are trying to update the Aptos homebrew formula. + +Copy the `aptos.rb` file to your `homebrew` `formula` directory. For example, on macOS with an M1, this will likely be: + +```bash +/opt/homebrew/Library/Taps/homebrew/homebrew-core/Formula +``` + + +### Development + +After you've copied `aptos.rb` to your local `homebrew` `formula` directory, you can modify it and use the commands below for testing. + +```bash +# On Mac M1, homebrew formulas are located locally at +/opt/homebrew/Library/Taps/homebrew/homebrew-core/Formula + +# Before submitting changes run +brew audit --new-formula movement # For new formula +brew audit movement --strict --online +brew install movement +brew test movement + +# For debugging issues during the installation process you can do +brew install movement --interactive # Interactive, gives you access to the shell +brew install movement -d # Debug mode + +# Livecheck +brew livecheck --debug movement +``` + +### Committing changes + +Once you have audited and tested your brew formula using the commands above, make sure you: + +1. Commit your changes to `aptos-core` in `crates/aptos/homebrew`. +2. Fork the Homebrew Core repository per [How to Open a Homebrew Pull Request](https://docs.brew.sh/How-To-Open-a-Homebrew-Pull-Request#formulae-related-pull-request). +3. Create a PR on the [Homebrew Core](https://github.com/Homebrew/homebrew-core/pulls) repo with your changes. + +## Aptos.rb structure overview + +### Header + +```ruby +class Aptos < Formula + desc "Layer 1 blockchain built to support fair access to decentralized assets for all" + homepage "https://aptoslabs.com/" + url "https://github.com/aptos-labs/aptos-core/archive/refs/tags/aptos-cli-v1.0.3.tar.gz" + sha256 "670bb6cb841cb8a65294878af9a4f03d4cba2a598ab4550061fed3a4b1fe4e98" + license "Apache-2.0" + ... +``` + +### Bottles + +[Bottles](https://docs.brew.sh/Bottles#pour-bottle-pour_bottle) are precompiled binaries. This way people don't need to compile from source every time. + +> Bottles for homebrew/core formulae are created by [Brew Test Bot](https://docs.brew.sh/Brew-Test-Bot) when a pull request is submitted. If the formula builds successfully on each supported platform and a maintainer approves the change, [Brew Test Bot](https://docs.brew.sh/Brew-Test-Bot) updates its bottle do block and uploads each bottle to GitHub Packages. + +```ruby + ... 
+ # IMPORTANT: These are automatically generated, you DO NOT need to add these manually, I'm adding them here as an example + bottle do + sha256 cellar: :any_skip_relocation, arm64_ventura: "40434b61e99cf9114a3715851d01c09edaa94b814f89864d57a18d00a8e0c4e9" + sha256 cellar: :any_skip_relocation, arm64_monterey: "edd6dcf9d627746a910d324422085eb4b06cdab654789a03b37133cd4868633c" + sha256 cellar: :any_skip_relocation, arm64_big_sur: "d9568107514168afc41e73bd3fd0fc45a6a9891a289857831f8ee027fb339676" + sha256 cellar: :any_skip_relocation, ventura: "d7289b5efca029aaa95328319ccf1d8a4813c7828f366314e569993eeeaf0003" + sha256 cellar: :any_skip_relocation, monterey: "ba58e1eb3398c725207ce9d6251d29b549cde32644c3d622cd286b86c7896576" + sha256 cellar: :any_skip_relocation, big_sur: "3e2431a6316b8f0ffa4db75758fcdd9dea162fdfb3dbff56f5e405bcbea4fedc" + sha256 cellar: :any_skip_relocation, x86_64_linux: "925113b4967ed9d3da78cd12745b1282198694a7f8c11d75b8c41451f8eff4b5" + end + ... +``` + +### Livecheck + +[Brew livecheck](https://docs.brew.sh/Brew-Livecheck) uses strategies to find the newest version of a formula or cask’s software by checking upstream. The strategy used below checks for all `aptos-cli-v` tags for `aptos-core`. The regex ensures that releases for other, non-CLI builds are not factored into livecheck. + +Livecheck is run on a schedule with BrewTestBot and will update the bottles automatically on a schedule to ensure they're up to date. For more info on how BrewTestBot and brew livecheck works, please see the [How does BrewTestBot work and when does it update formulae?](https://github.com/Homebrew/discussions/discussions/3083) discussion. + +```ruby +... + # This livecheck scans the releases folder and looks for all releases + # with matching regex of href="/tag/aptos-cli-v". This + # is done to automatically check for new release versions of the CLI. + livecheck do + url :stable + regex(/^aptos-cli[._-]v?(\d+(?:\.\d+)+)$/i) + end +... +``` + +To run livecheck for testing, we recommend including the `--debug` argument: + +```bash +brew livecheck --debug aptos +``` + +### Depends on and installation + +- `depends_on` is for specifying other [homebrew formulas as dependencies](https://docs.brew.sh/Formula-Cookbook#specifying-other-formulae-as-dependencies). +- Currently, we use v1.64 of Rust, as specified in the `Cargo.toml` file of the project. If we were to use the latest stable build of Rust +going forward, we would modify the formula slightly. See the comments below for more details. + + +```ruby + # Installs listed homebrew dependencies before Aptos installation + # Dependencies needed: https://aptos.dev/cli-tools/build-aptos-cli + # See scripts/dev_setup.sh in aptos-core for more info + depends_on "cmake" => :build + depends_on "rustup-init" => :build + uses_from_macos "llvm" => :build + + on_linux do + depends_on "pkg-config" => :build + depends_on "zip" => :build + depends_on "openssl@3" + depends_on "systemd" + end + + # Currently must compile with the same rustc version specified in the + # root Cargo.toml file of aptos-core (currently it is pegged to Rust + # v1.64). In the future if it becomes compatible with the latest Rust + # toolchain, we can remove the use of rustup-init, replacing it with a + # depends_on "rust" => :build + # above and build the binary without rustup as a dependency + # + # Uses build_cli_release.sh for creating the compiled binaries. + # This drastically reduces their size (ie. 
2.2 GB on Linux for release + # build becomes 40 MB when run with opt-level = "z", strip, lto, etc). + # See cargo.toml [profile.cli] section for more details + def install + system "#{Formula["rustup-init"].bin}/rustup-init", + "-qy", "--no-modify-path", "--default-toolchain", "1.64" + ENV.prepend_path "PATH", HOMEBREW_CACHE/"cargo_cache/bin" + system "./scripts/cli/build_cli_release.sh", "homebrew" + bin.install "target/cli/aptos" + end +``` + +### Tests + +To conduct tests, run: + +```bash +brew test aptos +``` + +The current test generates a new key via the Movement CLI and ensures the shell output matches the filename(s) for that key. + +```ruby + ... + test do + assert_match(/output.pub/i, shell_output("#{bin}/aptos key generate --output-file output")) + end + ... +``` + +## Supporting resources + +- To view other Homebrew-related FAQs or ask questions yourself, visit the [discussions board](https://github.com/orgs/Homebrew/discussions). +- For similar Rust-related build examples, we recommend: + - [`rustfmt.rb`](https://github.com/Homebrew/homebrew-core/blob/master/Formula/rustfmt.rb) + - [`solana.rb`](https://github.com/Homebrew/homebrew-core/blob/master/Formula/solana.rb) +- Finally, note these key Homebew guides: + - [Homebrew Formula Cookbook](https://docs.brew.sh/Formula-Cookbook) + - [Creating and Running Your Own Homebrew Tap - Rust Runbook](https://publishing-project.rivendellweb.net/creating-and-running-your-own-homebrew-tap/) diff --git a/m1/movement/homebrew/aptos.rb b/m1/movement/homebrew/aptos.rb new file mode 100644 index 00000000..8d99b784 --- /dev/null +++ b/m1/movement/homebrew/aptos.rb @@ -0,0 +1,35 @@ +class Aptos < Formula + desc "Layer 1 blockchain built to support fair access to decentralized assets for all" + homepage "https://aptoslabs.com/" + url "https://github.com/aptos-labs/aptos-core/archive/refs/tags/aptos-cli-v1.0.3.tar.gz" + sha256 "670bb6cb841cb8a65294878af9a4f03d4cba2a598ab4550061fed3a4b1fe4e98" + license "Apache-2.0" + + livecheck do + url :stable + regex(/^aptos-cli[._-]v?(\d+(?:\.\d+)+)$/i) + end + + depends_on "cmake" => :build + depends_on "rustup-init" => :build + uses_from_macos "llvm" => :build + + on_linux do + depends_on "pkg-config" => :build + depends_on "zip" => :build + depends_on "openssl@3" + depends_on "systemd" + end + + def install + system "#{Formula["rustup-init"].bin}/rustup-init", + "-qy", "--no-modify-path", "--default-toolchain", "1.64" + ENV.prepend_path "PATH", HOMEBREW_CACHE/"cargo_cache/bin" + system "./scripts/cli/build_cli_release.sh", "homebrew" + bin.install "target/cli/aptos" + end + + test do + assert_match(/output.pub/i, shell_output("#{bin}/aptos key generate --output-file output")) + end +end \ No newline at end of file diff --git a/m1/movement/src/account/create.rs b/m1/movement/src/account/create.rs new file mode 100644 index 00000000..32070a40 --- /dev/null +++ b/m1/movement/src/account/create.rs @@ -0,0 +1,41 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use crate::common::types::{CliCommand, CliTypedResult, TransactionOptions, TransactionSummary}; +use aptos_cached_packages::aptos_stdlib; +use aptos_types::account_address::AccountAddress; +use async_trait::async_trait; +use clap::Parser; + +// 1 APT +pub const DEFAULT_FUNDED_COINS: u64 = 100_000_000; + +/// Create a new account on-chain +/// +/// An account can be created by transferring coins, or by making an explicit +/// call to create an account. 
This will create an account with no coins, and +/// any coins will have to transferred afterwards. +#[derive(Debug, Parser)] +pub struct CreateAccount { + /// Address of the new account + #[clap(long, parse(try_from_str=crate::common::types::load_account_arg))] + pub(crate) account: AccountAddress, + + #[clap(flatten)] + pub(crate) txn_options: TransactionOptions, +} + +#[async_trait] +impl CliCommand for CreateAccount { + fn command_name(&self) -> &'static str { + "CreateAccount" + } + + async fn execute(self) -> CliTypedResult { + let address = self.account; + self.txn_options + .submit_transaction(aptos_stdlib::aptos_account_create_account(address)) + .await + .map(TransactionSummary::from) + } +} diff --git a/m1/movement/src/account/create_resource_account.rs b/m1/movement/src/account/create_resource_account.rs new file mode 100644 index 00000000..6e79071a --- /dev/null +++ b/m1/movement/src/account/create_resource_account.rs @@ -0,0 +1,91 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use crate::{ + account::derive_resource_account::ResourceAccountSeed, + common::types::{CliCommand, CliTypedResult, TransactionOptions, TransactionSummary}, +}; +use aptos_cached_packages::aptos_stdlib::resource_account_create_resource_account; +use aptos_rest_client::{ + aptos_api_types::{WriteResource, WriteSetChange}, + Transaction, +}; +use aptos_types::{account_address::AccountAddress, transaction::authenticator::AuthenticationKey}; +use async_trait::async_trait; +use clap::Parser; +use serde::Serialize; +use std::str::FromStr; + +/// Create a resource account on-chain +/// +/// This will create a resource account which can be used as an autonomous account +/// not controlled directly by one account. +#[derive(Debug, Parser)] +pub struct CreateResourceAccount { + /// Optional Resource Account authentication key. + #[clap(long, parse(try_from_str = AuthenticationKey::from_str))] + pub(crate) authentication_key: Option, + + #[clap(flatten)] + pub(crate) seed_args: ResourceAccountSeed, + #[clap(flatten)] + pub(crate) txn_options: TransactionOptions, +} + +/// A shortened create resource account output +#[derive(Clone, Debug, Serialize)] +pub struct CreateResourceAccountSummary { + pub resource_account: Option, + #[serde(flatten)] + pub transaction_summary: TransactionSummary, +} + +impl From for CreateResourceAccountSummary { + fn from(transaction: Transaction) -> Self { + let transaction_summary = TransactionSummary::from(&transaction); + + let mut summary = CreateResourceAccountSummary { + transaction_summary, + resource_account: None, + }; + + if let Transaction::UserTransaction(txn) = transaction { + summary.resource_account = txn.info.changes.iter().find_map(|change| match change { + WriteSetChange::WriteResource(WriteResource { address, data, .. }) => { + if data.typ.name.as_str() == "Account" + && *address.inner().to_hex() != *txn.request.sender.inner().to_hex() + { + Some(*address.inner()) + } else { + None + } + }, + _ => None, + }); + } + + summary + } +} + +#[async_trait] +impl CliCommand for CreateResourceAccount { + fn command_name(&self) -> &'static str { + "CreateResourceAccount" + } + + async fn execute(self) -> CliTypedResult { + let authentication_key: Vec = if let Some(key) = self.authentication_key { + bcs::to_bytes(&key)? 
+ } else { + vec![] + }; + self.txn_options + .submit_transaction(resource_account_create_resource_account( + self.seed_args.seed()?, + authentication_key, + )) + .await + .map(CreateResourceAccountSummary::from) + } +} diff --git a/m1/movement/src/account/derive_resource_account.rs b/m1/movement/src/account/derive_resource_account.rs new file mode 100644 index 00000000..2b14449d --- /dev/null +++ b/m1/movement/src/account/derive_resource_account.rs @@ -0,0 +1,114 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use crate::common::types::{CliCommand, CliError, CliTypedResult}; +use aptos_sdk::rest_client::aptos_api_types::HexEncodedBytes; +use aptos_types::account_address::{create_resource_address, AccountAddress}; +use async_trait::async_trait; +use clap::Parser; +use std::{fmt::Formatter, str::FromStr}; + +/// Encoding for the Resource account seed +#[derive(Debug, Clone, Copy)] +pub enum SeedEncoding { + Bcs, + Hex, + Utf8, +} + +const BCS: &str = "bcs"; +const UTF_8: &str = "utf8"; +const HEX: &str = "hex"; + +impl Default for SeedEncoding { + fn default() -> Self { + SeedEncoding::Bcs + } +} + +impl std::fmt::Display for SeedEncoding { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + f.write_str(match self { + SeedEncoding::Bcs => BCS, + SeedEncoding::Hex => HEX, + SeedEncoding::Utf8 => UTF_8, + }) + } +} + +impl FromStr for SeedEncoding { + type Err = CliError; + + fn from_str(s: &str) -> Result { + match s.to_lowercase().as_str() { + BCS => Ok(Self::Bcs), + HEX => Ok(Self::Hex), + UTF_8 | "utf-8" | "utf_8" => Ok(Self::Utf8), + _ => Err(CliError::UnableToParse( + "seed-encoding", + "For --seed-encoding please provide one of ['bcs','hex', 'utf8']".to_string(), + )), + } + } +} + +/// A generic interface for allowing for different types of seed phrase inputs +/// +/// The easiest to use is `string_seed` as it will match directly with the b"string" notation in Move. +#[derive(Debug, Parser)] +pub struct ResourceAccountSeed { + /// Resource account seed + /// + /// Seed used in generation of the AccountId of the resource account + /// The seed will be converted to bytes using the encoding from `--seed-encoding`, defaults to `BCS` + #[clap(long)] + pub(crate) seed: String, + + /// Resource account seed encoding + /// + /// The encoding can be one of `Bcs`, `Utf8`, and `Hex`. + /// + /// - Bcs is the legacy functionality of the CLI, it will BCS encode the string, but can be confusing for users e.g. `"ab" -> vector[0x2, 0x61, 0x62]` + /// - Utf8 will encode the string as raw UTF-8 bytes, similar to in Move `b"string"` e.g. `"ab" -> vector[0x61, 0x62]` + /// - Hex will encode the string as raw hex encoded bytes e.g. `"0x6162" -> vector[0x61, 0x62]` + #[clap(long, default_value_t = SeedEncoding::Bcs)] + pub(crate) seed_encoding: SeedEncoding, +} + +impl ResourceAccountSeed { + pub fn seed(self) -> CliTypedResult> { + match self.seed_encoding { + SeedEncoding::Bcs => Ok(bcs::to_bytes(self.seed.as_str())?), + SeedEncoding::Utf8 => Ok(self.seed.as_bytes().to_vec()), + SeedEncoding::Hex => HexEncodedBytes::from_str(self.seed.as_str()) + .map(|inner| inner.0) + .map_err(|err| CliError::UnableToParse("seed", err.to_string())), + } + } +} + +/// Derive the address for a resource account +/// +/// This will not create a resource account, but instead give the deterministic address given +/// a source address and seed. 
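+///
+/// The address comes from `create_resource_address(address, seed_bytes)` (see
+/// `execute` below), so the same creator address and seed always map to the
+/// same resource account address.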
+#[derive(Debug, Parser)] +pub struct DeriveResourceAccount { + /// Address of the creator's account + #[clap(long, alias = "account", parse(try_from_str=crate::common::types::load_account_arg))] + pub(crate) address: AccountAddress, + + #[clap(flatten)] + pub(crate) seed_args: ResourceAccountSeed, +} + +#[async_trait] +impl CliCommand for DeriveResourceAccount { + fn command_name(&self) -> &'static str { + "DeriveResourceAccountAddress" + } + + async fn execute(self) -> CliTypedResult { + let seed = self.seed_args.seed()?; + Ok(create_resource_address(self.address, &seed)) + } +} diff --git a/m1/movement/src/account/fund.rs b/m1/movement/src/account/fund.rs new file mode 100644 index 00000000..b8167180 --- /dev/null +++ b/m1/movement/src/account/fund.rs @@ -0,0 +1,62 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use crate::{ + account::create::DEFAULT_FUNDED_COINS, + common::{ + types::{CliCommand, CliTypedResult, FaucetOptions, ProfileOptions, RestOptions}, + utils::{fund_account, wait_for_transactions}, + }, +}; +use aptos_types::account_address::AccountAddress; +use async_trait::async_trait; +use clap::Parser; + +/// Fund an account with tokens from a faucet +/// +/// This will create an account if it doesn't exist with the faucet. This is mostly useful +/// for local development and devnet. +#[derive(Debug, Parser)] +pub struct FundWithFaucet { + /// Address to fund + /// + /// If the account wasn't previously created, it will be created when being funded + #[clap(long, parse(try_from_str=crate::common::types::load_account_arg))] + pub(crate) account: AccountAddress, + + /// Number of Octas to fund the account from the faucet + /// + /// The amount added to the account may be limited by the faucet, and may be less + /// than the amount requested. 
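+    ///
+    /// The default is `DEFAULT_FUNDED_COINS` (100_000_000 Octas, i.e. 1 APT, as
+    /// defined in `create.rs`).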
+ #[clap(long, default_value_t = DEFAULT_FUNDED_COINS)] + pub(crate) amount: u64, + + #[clap(flatten)] + pub(crate) faucet_options: FaucetOptions, + #[clap(flatten)] + pub(crate) rest_options: RestOptions, + #[clap(flatten)] + pub(crate) profile_options: ProfileOptions, +} + +#[async_trait] +impl CliCommand for FundWithFaucet { + fn command_name(&self) -> &'static str { + "FundWithFaucet" + } + + async fn execute(self) -> CliTypedResult { + let hashes = fund_account( + self.faucet_options.faucet_url(&self.profile_options)?, + self.amount, + self.account, + ) + .await?; + let client = self.rest_options.client(&self.profile_options)?; + wait_for_transactions(&client, hashes).await?; + return Ok(format!( + "Added {} Octas to account {}", + self.amount, self.account + )); + } +} diff --git a/m1/movement/src/account/key_rotation.rs b/m1/movement/src/account/key_rotation.rs new file mode 100644 index 00000000..da008fb8 --- /dev/null +++ b/m1/movement/src/account/key_rotation.rs @@ -0,0 +1,338 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use crate::common::{ + types::{ + account_address_from_public_key, CliCommand, CliConfig, CliError, CliTypedResult, + ConfigSearchMode, EncodingOptions, EncodingType, ExtractPublicKey, ParsePrivateKey, + ProfileConfig, ProfileOptions, PublicKeyInputOptions, RestOptions, RotationProofChallenge, + TransactionOptions, TransactionSummary, + }, + utils::{prompt_yes, prompt_yes_with_override, read_line}, +}; +use aptos_cached_packages::aptos_stdlib; +use aptos_crypto::{ + ed25519::{Ed25519PrivateKey, Ed25519PublicKey}, + PrivateKey, SigningKey, +}; +use aptos_rest_client::{ + aptos_api_types::{AptosError, AptosErrorCode}, + error::{AptosErrorResponse, RestError}, + Client, +}; +use aptos_types::{account_address::AccountAddress, account_config::CORE_CODE_ADDRESS}; +use async_trait::async_trait; +use clap::Parser; +use serde::{Deserialize, Serialize}; +use std::{collections::BTreeMap, path::PathBuf}; + +/// Rotate an account's authentication key +/// +/// Rotating the account's authentication key allows you to use a new +/// private key. You must provide a new private key. Once it is +/// rotated you will need to use the original account address, with the +/// new private key. There is an interactive prompt to help you add it +/// to a new profile. 
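+///
+/// Internally, `execute` builds a `RotationProofChallenge`, signs it with both
+/// the current and the new private key, and submits
+/// `account_rotate_authentication_key` with both signed proofs.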
+#[derive(Debug, Parser)] +pub struct RotateKey { + #[clap(flatten)] + pub(crate) txn_options: TransactionOptions, + + /// File name that contains the new private key encoded in the type from `--encoding` + #[clap(long, group = "new_private_key", parse(from_os_str))] + pub(crate) new_private_key_file: Option, + + /// New private key encoded in the type from `--encoding` + #[clap(long, group = "new_private_key")] + pub(crate) new_private_key: Option, + + /// Name of the profile to save the new private key + /// + /// If not provided, it will interactively have you save a profile, + /// unless `--skip_saving_profile` is provided + #[clap(long)] + pub(crate) save_to_profile: Option, + + /// Skip saving profile + /// + /// This skips the interactive profile saving after rotating the authentication key + #[clap(long)] + pub(crate) skip_saving_profile: bool, +} + +impl ParsePrivateKey for RotateKey {} + +impl RotateKey { + /// Extract private key from CLI args + pub fn extract_private_key( + &self, + encoding: EncodingType, + ) -> CliTypedResult> { + self.parse_private_key( + encoding, + self.new_private_key_file.clone(), + self.new_private_key.clone(), + ) + } +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct RotateSummary { + message: Option, + transaction: TransactionSummary, +} + +#[async_trait] +impl CliCommand for RotateKey { + fn command_name(&self) -> &'static str { + "RotateKey" + } + + async fn execute(self) -> CliTypedResult { + let new_private_key = self + .extract_private_key(self.txn_options.encoding_options.encoding)? + .ok_or_else(|| { + CliError::CommandArgumentError( + "One of ['--new-private-key', '--new-private-key-file'] must be used" + .to_string(), + ) + })?; + + let (current_private_key, sender_address) = self.txn_options.get_key_and_address()?; + + // Get sequence number for account + let sequence_number = self.txn_options.sequence_number(sender_address).await?; + let auth_key = self.txn_options.auth_key(sender_address).await?; + + let rotation_proof = RotationProofChallenge { + account_address: CORE_CODE_ADDRESS, + module_name: "account".to_string(), + struct_name: "RotationProofChallenge".to_string(), + sequence_number, + originator: sender_address, + current_auth_key: AccountAddress::from_bytes(auth_key) + .map_err(|err| CliError::UnableToParse("auth_key", err.to_string()))?, + new_public_key: new_private_key.public_key().to_bytes().to_vec(), + }; + + let rotation_msg = + bcs::to_bytes(&rotation_proof).map_err(|err| CliError::BCS("rotation_proof", err))?; + + // Signs the struct using both the current private key and the next private key + let rotation_proof_signed_by_current_private_key = + current_private_key.sign_arbitrary_message(&rotation_msg.clone()); + let rotation_proof_signed_by_new_private_key = + new_private_key.sign_arbitrary_message(&rotation_msg); + + let txn_summary = self + .txn_options + .submit_transaction(aptos_stdlib::account_rotate_authentication_key( + 0, + // Existing public key + current_private_key.public_key().to_bytes().to_vec(), + 0, + // New public key + new_private_key.public_key().to_bytes().to_vec(), + rotation_proof_signed_by_current_private_key + .to_bytes() + .to_vec(), + rotation_proof_signed_by_new_private_key.to_bytes().to_vec(), + )) + .await + .map(TransactionSummary::from)?; + + let string = serde_json::to_string_pretty(&txn_summary) + .map_err(|err| CliError::UnableToParse("transaction summary", err.to_string()))?; + + eprintln!("{}", string); + + if let Some(txn_success) = txn_summary.success { + if !txn_success { + 
return Err(CliError::ApiError( + "Transaction was not executed successfully".to_string(), + )); + } + } else { + return Err(CliError::UnexpectedError( + "Malformed transaction response".to_string(), + )); + } + + let mut profile_name: String; + + if self.save_to_profile.is_none() { + if self.skip_saving_profile + || !prompt_yes("Do you want to create a profile for the new key?") + { + return Ok(RotateSummary { + transaction: txn_summary, + message: None, + }); + } + + eprintln!("Enter the name for the profile"); + profile_name = read_line("Profile name")?.trim().to_string(); + } else { + // We can safely unwrap here + profile_name = self.save_to_profile.unwrap(); + } + + // Check if profile name exists + let mut config = CliConfig::load(ConfigSearchMode::CurrentDirAndParents)?; + + if let Some(ref profiles) = config.profiles { + if profiles.contains_key(&profile_name) { + if let Err(cli_err) = prompt_yes_with_override( + format!( + "Profile {} exits. Do you want to provide a new profile name?", + profile_name + ) + .as_str(), + self.txn_options.prompt_options, + ) { + match cli_err { + CliError::AbortedError => { + return Ok(RotateSummary { + transaction: txn_summary, + message: None, + }); + } + _ => { + return Err(cli_err); + } + } + } + + eprintln!("Enter the name for the profile"); + profile_name = read_line("Profile name")?.trim().to_string(); + } + } + + if profile_name.is_empty() { + return Err(CliError::AbortedError); + } + + let mut profile_config = ProfileConfig { + private_key: Some(new_private_key.clone()), + public_key: Some(new_private_key.public_key()), + account: Some(sender_address), + ..self.txn_options.profile_options.profile()? + }; + + if let Some(url) = self.txn_options.rest_options.url { + profile_config.rest_url = Some(url.into()); + } + + if config.profiles.is_none() { + config.profiles = Some(BTreeMap::new()); + } + + config + .profiles + .as_mut() + .unwrap() + .insert(profile_name.clone(), profile_config); + config.save()?; + + eprintln!("Profile {} is saved.", profile_name); + + Ok(RotateSummary { + transaction: txn_summary, + message: Some(format!("Profile {} is saved.", profile_name)), + }) + } +} + +/// Lookup the account address through the on-chain lookup table +/// +/// If the account is rotated, it will provide the address accordingly. If the account was not +/// rotated, it will provide the derived address only if the account exists onchain. +#[derive(Debug, Parser)] +pub struct LookupAddress { + #[clap(flatten)] + pub(crate) encoding_options: EncodingOptions, + + #[clap(flatten)] + pub(crate) public_key_options: PublicKeyInputOptions, + + #[clap(flatten)] + pub(crate) profile_options: ProfileOptions, + + #[clap(flatten)] + pub(crate) rest_options: RestOptions, +} + +impl LookupAddress { + pub(crate) fn public_key(&self) -> CliTypedResult { + self.public_key_options + .extract_public_key(self.encoding_options.encoding, &self.profile_options) + } + + /// Builds a rest client + fn rest_client(&self) -> CliTypedResult { + self.rest_options.client(&self.profile_options) + } +} + +#[async_trait] +impl CliCommand for LookupAddress { + fn command_name(&self) -> &'static str { + "LookupAddress" + } + + async fn execute(self) -> CliTypedResult { + let rest_client = self.rest_client()?; + + // TODO: Support arbitrary auth key to support other types like multie25519 + let address = account_address_from_public_key(&self.public_key()?); + Ok(lookup_address(&rest_client, address, true).await?) 
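+        // Note: `lookup_address` (defined just below) consults the on-chain
+        // 0x1::account::OriginatingAddress table, so an account whose key has been rotated
+        // still resolves to its original account address.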
+ } +} + +pub async fn lookup_address( + rest_client: &Client, + address_key: AccountAddress, + must_exist: bool, +) -> Result { + let originating_resource: OriginatingResource = rest_client + .get_account_resource_bcs(CORE_CODE_ADDRESS, "0x1::account::OriginatingAddress") + .await? + .into_inner(); + + let table_handle = originating_resource.address_map.handle; + + // The derived address that can be used to look up the original address + match rest_client + .get_table_item_bcs( + table_handle, + "address", + "address", + address_key.to_hex_literal(), + ) + .await + { + Ok(inner) => Ok(inner.into_inner()), + Err(RestError::Api(..)) => { + // If the table item wasn't found, we may check if the account exists + if !must_exist { + Ok(address_key) + } else { + rest_client + .get_account_bcs(address_key) + .await + .map(|_| address_key) + } + } + Err(err) => Err(err), + } +} + +#[derive(Deserialize)] +pub struct OriginatingResource { + pub address_map: Table, +} + +#[derive(Deserialize)] +pub struct Table { + pub handle: AccountAddress, +} diff --git a/m1/movement/src/account/list.rs b/m1/movement/src/account/list.rs new file mode 100644 index 00000000..c406b296 --- /dev/null +++ b/m1/movement/src/account/list.rs @@ -0,0 +1,124 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use crate::common::types::{ + CliCommand, CliConfig, CliError, CliTypedResult, ConfigSearchMode, ProfileOptions, RestOptions, +}; +use aptos_types::account_address::AccountAddress; +use async_trait::async_trait; +use clap::{ArgEnum, Parser}; +use serde_json::json; +use std::{ + fmt::{Display, Formatter}, + str::FromStr, +}; + +#[derive(ArgEnum, Clone, Copy, Debug)] +pub enum ListQuery { + Balance, + Modules, + Resources, +} + +impl Display for ListQuery { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + let str = match self { + ListQuery::Balance => "balance", + ListQuery::Modules => "modules", + ListQuery::Resources => "resources", + }; + write!(f, "{}", str) + } +} + +impl FromStr for ListQuery { + type Err = &'static str; + + fn from_str(s: &str) -> Result { + match s.to_lowercase().as_str() { + "balance" => Ok(ListQuery::Balance), + "modules" => Ok(ListQuery::Modules), + "resources" => Ok(ListQuery::Resources), + _ => Err("Invalid query. Valid values are balance, modules, resources"), + } + } +} + +/// List resources, modules, or balance owned by an address +/// +/// This allows you to list the current resources at the time of query. This can change due to +/// any transactions that have occurred after the request. +#[derive(Debug, Parser)] +pub struct ListAccount { + /// Address of the account you want to list resources/modules/balance for + #[clap(long, parse(try_from_str=crate::common::types::load_account_arg))] + pub(crate) account: Option, + + /// Type of items to list: [balance, resources, modules] + #[clap(long, default_value_t = ListQuery::Resources)] + pub(crate) query: ListQuery, + + #[clap(flatten)] + pub(crate) rest_options: RestOptions, + #[clap(flatten)] + pub(crate) profile_options: ProfileOptions, +} + +#[async_trait] +impl CliCommand> for ListAccount { + fn command_name(&self) -> &'static str { + "ListAccount" + } + + async fn execute(self) -> CliTypedResult> { + let account = if let Some(account) = self.account { + account + } else if let Some(Some(account)) = CliConfig::load_profile( + self.profile_options.profile_name(), + ConfigSearchMode::CurrentDirAndParents, + )? 
+ .map(|p| p.account) + { + account + } else { + return Err(CliError::CommandArgumentError( + "Please provide an account using --account or run movement init".to_string(), + )); + }; + + let client = self.rest_options.client(&self.profile_options)?; + let response = match self.query { + ListQuery::Balance => vec![ + client + .get_account_resource( + account, + "0x1::coin::CoinStore<0x1::aptos_coin::AptosCoin>", + ) + .await? + .into_inner() + .unwrap() + .data, + ], + ListQuery::Modules => client + .get_account_modules(account) + .await? + .into_inner() + .into_iter() + .map(|module| json!(module.try_parse_abi().unwrap())) + .collect::>(), + ListQuery::Resources => client + .get_account_resources(account) + .await? + .into_inner() + .into_iter() + .map(|resource| { + let mut map = serde_json::Map::new(); + map.insert(resource.resource_type.to_string(), resource.data); + serde_json::Value::Object(map) + }) + .collect::>(), + }; + + Ok(response) + } +} diff --git a/m1/movement/src/account/mod.rs b/m1/movement/src/account/mod.rs new file mode 100644 index 00000000..988f065a --- /dev/null +++ b/m1/movement/src/account/mod.rs @@ -0,0 +1,69 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use crate::common::types::{CliCommand, CliResult}; +use clap::Subcommand; + +pub mod create; +pub mod create_resource_account; +pub mod derive_resource_account; +pub mod fund; +pub mod key_rotation; +pub mod list; +pub mod multisig_account; +pub mod transfer; + +/// Tool for interacting with accounts +/// +/// This tool is used to create accounts, get information about the +/// account's resources, and transfer resources between accounts. +#[derive(Debug, Subcommand)] +pub enum AccountTool { + Create(create::CreateAccount), + CreateResourceAccount(create_resource_account::CreateResourceAccount), + DeriveResourceAccountAddress(derive_resource_account::DeriveResourceAccount), + FundWithFaucet(fund::FundWithFaucet), + List(list::ListAccount), + LookupAddress(key_rotation::LookupAddress), + RotateKey(key_rotation::RotateKey), + Transfer(transfer::TransferCoins), +} + +impl AccountTool { + pub async fn execute(self) -> CliResult { + match self { + AccountTool::Create(tool) => tool.execute_serialized().await, + AccountTool::CreateResourceAccount(tool) => tool.execute_serialized().await, + AccountTool::DeriveResourceAccountAddress(tool) => tool.execute_serialized().await, + AccountTool::FundWithFaucet(tool) => tool.execute_serialized().await, + AccountTool::List(tool) => tool.execute_serialized().await, + AccountTool::LookupAddress(tool) => tool.execute_serialized().await, + AccountTool::RotateKey(tool) => tool.execute_serialized().await, + AccountTool::Transfer(tool) => tool.execute_serialized().await, + } + } +} + +/// Tool for interacting with multisig accounts +#[derive(Debug, Subcommand)] +pub enum MultisigAccountTool { + Approve(multisig_account::Approve), + Create(multisig_account::Create), + CreateTransaction(multisig_account::CreateTransaction), + Execute(multisig_account::Execute), + ExecuteReject(multisig_account::ExecuteReject), + Reject(multisig_account::Reject), +} + +impl MultisigAccountTool { + pub async fn execute(self) -> CliResult { + match self { + MultisigAccountTool::Approve(tool) => tool.execute_serialized().await, + MultisigAccountTool::Create(tool) => tool.execute_serialized().await, + MultisigAccountTool::CreateTransaction(tool) => tool.execute_serialized().await, + MultisigAccountTool::Execute(tool) => tool.execute_serialized().await, + 
MultisigAccountTool::ExecuteReject(tool) => tool.execute_serialized().await, + MultisigAccountTool::Reject(tool) => tool.execute_serialized().await, + } + } +} diff --git a/m1/movement/src/account/multisig_account.rs b/m1/movement/src/account/multisig_account.rs new file mode 100644 index 00000000..0d60380a --- /dev/null +++ b/m1/movement/src/account/multisig_account.rs @@ -0,0 +1,257 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use crate::common::types::{ + CliCommand, CliTypedResult, EntryFunctionArguments, MultisigAccount, TransactionOptions, + TransactionSummary, +}; +use aptos_cached_packages::aptos_stdlib; +use aptos_rest_client::{ + aptos_api_types::{WriteResource, WriteSetChange}, + Transaction, +}; +use aptos_types::{ + account_address::AccountAddress, + transaction::{Multisig, MultisigTransactionPayload, TransactionPayload}, +}; +use async_trait::async_trait; +use bcs::to_bytes; +use clap::Parser; +use serde::Serialize; + +/// Create a new multisig account (v2) on-chain. +/// +/// This will create a new multisig account and make the sender one of the owners. +#[derive(Debug, Parser)] +pub struct Create { + /// Addresses of additional owners for the new multisig, beside the transaction sender. + #[clap(long, multiple_values = true, parse(try_from_str=crate::common::types::load_account_arg))] + pub(crate) additional_owners: Vec, + /// The number of signatures (approvals or rejections) required to execute or remove a proposed + /// transaction. + #[clap(long)] + pub(crate) num_signatures_required: u64, + #[clap(flatten)] + pub(crate) txn_options: TransactionOptions, +} + +/// A shortened create multisig account output +#[derive(Clone, Debug, Serialize)] +pub struct CreateSummary { + #[serde(flatten)] + pub multisig_account: Option, + #[serde(flatten)] + pub transaction_summary: TransactionSummary, +} + +impl From for CreateSummary { + fn from(transaction: Transaction) -> Self { + let transaction_summary = TransactionSummary::from(&transaction); + + let mut summary = CreateSummary { + transaction_summary, + multisig_account: None, + }; + + if let Transaction::UserTransaction(txn) = transaction { + summary.multisig_account = txn.info.changes.iter().find_map(|change| match change { + WriteSetChange::WriteResource(WriteResource { address, data, .. }) => { + if data.typ.name.as_str() == "Account" + && *address.inner().to_hex() != *txn.request.sender.inner().to_hex() + { + Some(MultisigAccount { + multisig_address: *address.inner(), + }) + } else { + None + } + }, + _ => None, + }); + } + + summary + } +} + +#[async_trait] +impl CliCommand for Create { + fn command_name(&self) -> &'static str { + "CreateMultisig" + } + + async fn execute(self) -> CliTypedResult { + self.txn_options + .submit_transaction(aptos_stdlib::multisig_account_create_with_owners( + self.additional_owners, + self.num_signatures_required, + // TODO: Support passing in custom metadata. + vec![], + vec![], + )) + .await + .map(CreateSummary::from) + } +} + +/// Propose a new multisig transaction. +/// +/// As one of the owners of the multisig, propose a new transaction. This also implicitly approves +/// the created transaction so it has one approval initially. In order for the transaction to be +/// executed, it needs as many approvals as the number of signatures required. 
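+// Illustrative lifecycle for the commands above and below, assuming these tools are mounted as
+// `movement multisig ...` subcommands with clap's default kebab-case names (the exact wiring and
+// the flags contributed by MultisigAccount/EntryFunctionArguments live elsewhere, so treat this
+// as a sketch rather than the definitive CLI surface):
+//
+//   movement multisig create --additional-owners 0xB0B --num-signatures-required 2
+//   movement multisig create-transaction --multisig-address <multisig> --function-id <module>::<function> ...
+//   movement multisig approve --multisig-address <multisig> --sequence-number 1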
+#[derive(Debug, Parser)] +pub struct CreateTransaction { + #[clap(flatten)] + pub(crate) multisig_account: MultisigAccount, + #[clap(flatten)] + pub(crate) txn_options: TransactionOptions, + #[clap(flatten)] + pub(crate) entry_function_args: EntryFunctionArguments, +} + +#[async_trait] +impl CliCommand for CreateTransaction { + fn command_name(&self) -> &'static str { + "CreateTransactionMultisig" + } + + async fn execute(self) -> CliTypedResult { + let payload = MultisigTransactionPayload::EntryFunction( + self.entry_function_args.create_entry_function_payload()?, + ); + self.txn_options + .submit_transaction(aptos_stdlib::multisig_account_create_transaction( + self.multisig_account.multisig_address, + to_bytes(&payload)?, + )) + .await + .map(|inner| inner.into()) + } +} + +/// Approve a multisig transaction. +/// +/// As one of the owners of the multisig, approve a transaction proposed for the multisig. +/// With enough approvals (as many as the number of signatures required), the transaction can be +/// executed (See Execute). +#[derive(Debug, Parser)] +pub struct Approve { + #[clap(flatten)] + pub(crate) multisig_account: MultisigAccount, + /// The sequence number of the multisig transaction to approve. The sequence number increments + /// for every new multisig transaction. + #[clap(long)] + pub(crate) sequence_number: u64, + #[clap(flatten)] + pub(crate) txn_options: TransactionOptions, +} + +#[async_trait] +impl CliCommand for Approve { + fn command_name(&self) -> &'static str { + "ApproveMultisig" + } + + async fn execute(self) -> CliTypedResult { + self.txn_options + .submit_transaction(aptos_stdlib::multisig_account_approve_transaction( + self.multisig_account.multisig_address, + self.sequence_number, + )) + .await + .map(|inner| inner.into()) + } +} + +/// Reject a multisig transaction. +/// +/// As one of the owners of the multisig, reject a transaction proposed for the multisig. +/// With enough rejections (as many as the number of signatures required), the transaction can be +/// completely removed (See ExecuteReject). +#[derive(Debug, Parser)] +pub struct Reject { + #[clap(flatten)] + pub(crate) multisig_account: MultisigAccount, + /// The sequence number of the multisig transaction to reject. The sequence number increments + /// for every new multisig transaction. + #[clap(long)] + pub(crate) sequence_number: u64, + #[clap(flatten)] + pub(crate) txn_options: TransactionOptions, +} + +#[async_trait] +impl CliCommand for Reject { + fn command_name(&self) -> &'static str { + "RejectMultisig" + } + + async fn execute(self) -> CliTypedResult { + self.txn_options + .submit_transaction(aptos_stdlib::multisig_account_reject_transaction( + self.multisig_account.multisig_address, + self.sequence_number, + )) + .await + .map(|inner| inner.into()) + } +} + +/// Execute a proposed multisig transaction. +/// +/// The transaction to be executed needs to have as many approvals as the number of signatures +/// required. 
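+// Sketch of the terminal steps, under the same naming assumptions as the example above:
+//
+//   movement multisig execute --multisig-address <multisig>          # run a sufficiently approved transaction
+//   movement multisig execute-reject --multisig-address <multisig>   # remove a sufficiently rejected one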
+#[derive(Debug, Parser)] +pub struct Execute { + #[clap(flatten)] + pub(crate) multisig_account: MultisigAccount, + #[clap(flatten)] + pub(crate) txn_options: TransactionOptions, +} + +#[async_trait] +impl CliCommand for Execute { + fn command_name(&self) -> &'static str { + "ExecuteMultisig" + } + + async fn execute(self) -> CliTypedResult { + let payload = TransactionPayload::Multisig(Multisig { + multisig_address: self.multisig_account.multisig_address, + // TODO: Support passing an explicit payload + transaction_payload: None, + }); + self.txn_options + .submit_transaction(payload) + .await + .map(|inner| inner.into()) + } +} + +/// Remove a proposed multisig transaction. +/// +/// The transaction to be removed needs to have as many rejections as the number of signatures +/// required. +#[derive(Debug, Parser)] +pub struct ExecuteReject { + #[clap(flatten)] + pub(crate) multisig_account: MultisigAccount, + #[clap(flatten)] + pub(crate) txn_options: TransactionOptions, +} + +#[async_trait] +impl CliCommand for ExecuteReject { + fn command_name(&self) -> &'static str { + "ExecuteRejectMultisig" + } + + async fn execute(self) -> CliTypedResult { + self.txn_options + .submit_transaction(aptos_stdlib::multisig_account_execute_rejected_transaction( + self.multisig_account.multisig_address, + )) + .await + .map(|inner| inner.into()) + } +} diff --git a/m1/movement/src/account/transfer.rs b/m1/movement/src/account/transfer.rs new file mode 100644 index 00000000..ce8b697d --- /dev/null +++ b/m1/movement/src/account/transfer.rs @@ -0,0 +1,115 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use crate::common::types::{CliCommand, CliTypedResult, TransactionOptions}; +use aptos_cached_packages::aptos_stdlib; +use aptos_rest_client::{ + aptos_api_types::{HashValue, WriteResource, WriteSetChange}, + Transaction, +}; +use aptos_types::account_address::AccountAddress; +use async_trait::async_trait; +use clap::Parser; +use serde::Serialize; +use std::collections::BTreeMap; + +// TODO: Add ability to transfer non-APT coins +// TODO: Add ability to not create account by default +/// Transfer APT between accounts +/// +#[derive(Debug, Parser)] +pub struct TransferCoins { + /// Address of account to send APT to + #[clap(long, parse(try_from_str = crate::common::types::load_account_arg))] + pub(crate) account: AccountAddress, + + /// Amount of Octas (10^-8 APT) to transfer + #[clap(long)] + pub(crate) amount: u64, + + #[clap(flatten)] + pub(crate) txn_options: TransactionOptions, +} + +#[async_trait] +impl CliCommand for TransferCoins { + fn command_name(&self) -> &'static str { + "TransferCoins" + } + + async fn execute(self) -> CliTypedResult { + self.txn_options + .submit_transaction(aptos_stdlib::aptos_account_transfer( + self.account, + self.amount, + )) + .await + .map(TransferSummary::from) + } +} + +const SUPPORTED_COINS: [&str; 1] = ["0x1::coin::CoinStore<0x1::aptos_coin::AptosCoin>"]; + +/// A shortened transaction output +#[derive(Clone, Debug, Serialize)] +pub struct TransferSummary { + pub gas_unit_price: u64, + pub gas_used: u64, + pub balance_changes: BTreeMap, + pub sender: AccountAddress, + pub success: bool, + pub version: u64, + pub vm_status: String, + pub transaction_hash: HashValue, +} + +impl TransferSummary { + pub fn octa_spent(&self) -> u64 { + self.gas_unit_price * self.gas_used + } +} + +impl From for TransferSummary { + fn from(transaction: Transaction) -> Self { + if let Transaction::UserTransaction(txn) = transaction { + let vm_status = 
txn.info.vm_status; + let success = txn.info.success; + let sender = *txn.request.sender.inner(); + let gas_unit_price = txn.request.gas_unit_price.0; + let gas_used = txn.info.gas_used.0; + let transaction_hash = txn.info.hash; + let version = txn.info.version.0; + let balance_changes = txn + .info + .changes + .into_iter() + .filter_map(|change| match change { + WriteSetChange::WriteResource(WriteResource { address, data, .. }) => { + if SUPPORTED_COINS.contains(&data.typ.to_string().as_str()) { + Some(( + *address.inner(), + serde_json::to_value(data.data).unwrap_or_default(), + )) + } else { + None + } + }, + _ => None, + }) + .collect(); + + TransferSummary { + gas_unit_price, + gas_used, + balance_changes, + sender, + success, + version, + vm_status, + transaction_hash, + } + } else { + panic!("Can't call From for a non UserTransaction") + } + } +} diff --git a/m1/movement/src/common/init.rs b/m1/movement/src/common/init.rs new file mode 100644 index 00000000..c68ae64d --- /dev/null +++ b/m1/movement/src/common/init.rs @@ -0,0 +1,374 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use crate::{ + account::key_rotation::lookup_address, + common::{ + types::{ + account_address_from_public_key, CliCommand, CliConfig, CliError, CliTypedResult, + ConfigSearchMode, EncodingOptions, PrivateKeyInputOptions, ProfileConfig, + ProfileOptions, PromptOptions, RngArgs, DEFAULT_PROFILE, + }, + utils::{fund_account, prompt_yes_with_override, read_line, wait_for_transactions}, + }, +}; +use aptos_crypto::{ed25519::Ed25519PrivateKey, PrivateKey, ValidCryptoMaterialStringExt}; +use aptos_rest_client::{ + aptos_api_types::{AptosError, AptosErrorCode}, + error::{AptosErrorResponse, RestError}, +}; +use async_trait::async_trait; +use clap::Parser; +use reqwest::Url; +use serde::{Deserialize, Serialize}; +use std::{collections::BTreeMap, str::FromStr}; + +// +const SEED_NODE_1_REST : &str = "https://seed-node1.movementlabs.xyz"; + +/// 1 APT (might not actually get that much, depending on the faucet) +const NUM_DEFAULT_OCTAS: u64 = 100000000; + +/// Tool to initialize current directory for the aptos tool +/// +/// Configuration will be pushed into .aptos/config.yaml +#[derive(Debug, Parser)] +pub struct InitTool { + /// Network to use for default settings + /// + /// If custom `rest_url` and `faucet_url` are wanted, use `custom` + #[clap(long)] + pub network: Option, + + /// URL to a fullnode on the network + #[clap(long)] + pub rest_url: Option, + + /// URL for the Faucet endpoint + #[clap(long)] + pub faucet_url: Option, + + /// Whether to skip the faucet for a non-faucet endpoint + #[clap(long)] + pub skip_faucet: bool, + + #[clap(flatten)] + pub rng_args: RngArgs, + #[clap(flatten)] + pub(crate) private_key_options: PrivateKeyInputOptions, + #[clap(flatten)] + pub(crate) profile_options: ProfileOptions, + #[clap(flatten)] + pub(crate) prompt_options: PromptOptions, + #[clap(flatten)] + pub(crate) encoding_options: EncodingOptions, +} + +#[async_trait] +impl CliCommand<()> for InitTool { + fn command_name(&self) -> &'static str { + "MovementInit" + } + + async fn execute(self) -> CliTypedResult<()> { + let mut config = if CliConfig::config_exists(ConfigSearchMode::CurrentDir) { + CliConfig::load(ConfigSearchMode::CurrentDir)? 
+ } else { + CliConfig::default() + }; + + let profile_name = self + .profile_options + .profile_name() + .unwrap_or(DEFAULT_PROFILE); + + // Select profile we're using + let mut profile_config = if let Some(profile_config) = config.remove_profile(profile_name) { + prompt_yes_with_override(&format!("Movement already initialized for profile {}, do you want to overwrite the existing config?", profile_name), self.prompt_options)?; + profile_config + } else { + ProfileConfig::default() + }; + + eprintln!("Configuring for profile {}", profile_name); + + // Choose a network + let network = if let Some(network) = self.network { + eprintln!("Configuring for network {:?}", network); + network + } else { + eprintln!( + "Choose network from [devnet, testnet, mainnet, local, custom | defaults to devnet]" + ); + let input = read_line("network")?; + let input = input.trim(); + if input.is_empty() { + eprintln!("No network given, using devnet..."); + Network::Devnet + } else { + Network::from_str(input)? + } + }; + + // Ensure that there is at least a REST URL set for the network + match network { + Network::Mainnet => { + profile_config.rest_url = + Some(SEED_NODE_1_REST.to_string()); + profile_config.faucet_url = + Some(SEED_NODE_1_REST.to_string()); + }, + Network::Testnet => { + profile_config.rest_url = + Some(SEED_NODE_1_REST.to_string()); + profile_config.faucet_url = + Some(SEED_NODE_1_REST.to_string()); + }, + Network::Devnet => { + profile_config.rest_url = Some(SEED_NODE_1_REST.to_string()); + profile_config.faucet_url = Some(SEED_NODE_1_REST.to_string()); + }, + Network::Local => { + profile_config.rest_url = Some("http://localhost:8080".to_string()); + profile_config.faucet_url = Some("http://localhost:8081".to_string()); + }, + Network::Custom => self.custom_network(&mut profile_config)?, + } + + // Private key + let private_key = if let Some(private_key) = self + .private_key_options + .extract_private_key_cli(self.encoding_options.encoding)? + { + eprintln!("Using command line argument for private key"); + private_key + } else { + eprintln!("Enter your private key as a hex literal (0x...) [Current: {} | No input: Generate new key (or keep one if present)]", profile_config.private_key.as_ref().map(|_| "Redacted").unwrap_or("None")); + let input = read_line("Private key")?; + let input = input.trim(); + if input.is_empty() { + if let Some(private_key) = profile_config.private_key { + eprintln!("No key given, keeping existing key..."); + private_key + } else { + eprintln!("No key given, generating key..."); + self.rng_args + .key_generator()? + .generate_ed25519_private_key() + } + } else { + Ed25519PrivateKey::from_encoded_string(input) + .map_err(|err| CliError::UnableToParse("Ed25519PrivateKey", err.to_string()))? 
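+                // e.g. an input like 0x<64 hex characters> for an Ed25519 private key,
+                // matching the hex-literal format requested by the prompt above.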
+ } + }; + let public_key = private_key.public_key(); + + let client = aptos_rest_client::Client::new( + Url::parse( + profile_config + .rest_url + .as_ref() + .expect("Must have rest client as created above"), + ) + .map_err(|err| CliError::UnableToParse("rest_url", err.to_string()))?, + ); + + // lookup the address from onchain instead of deriving it + // if this is the rotated key, deriving it will outputs an incorrect address + let derived_address = account_address_from_public_key(&public_key); + let address = lookup_address(&client, derived_address, false).await?; + + profile_config.private_key = Some(private_key); + profile_config.public_key = Some(public_key); + profile_config.account = Some(address); + + // Create account if it doesn't exist (and there's a faucet) + // Check if account exists + let account_exists = match client.get_account(address).await { + Ok(_) => true, + Err(err) => { + if let RestError::Api(AptosErrorResponse { + error: + AptosError { + error_code: AptosErrorCode::ResourceNotFound, + .. + }, + .. + }) + | RestError::Api(AptosErrorResponse { + error: + AptosError { + error_code: AptosErrorCode::AccountNotFound, + .. + }, + .. + }) = err + { + false + } else { + return Err(CliError::UnexpectedError(format!( + "Failed to check if account exists: {:?}", + err + ))); + } + }, + }; + + // If you want to create a private key, but not fund the account, skipping the faucet is still possible + let maybe_faucet_url = if self.skip_faucet { + None + } else { + profile_config.faucet_url.as_ref() + }; + + if let Some(faucet_url) = maybe_faucet_url { + if account_exists { + eprintln!("Account {} has been already found onchain", address); + } else { + eprintln!( + "Account {} doesn't exist, creating it and funding it with {} Octas", + address, NUM_DEFAULT_OCTAS + ); + let hashes = fund_account( + Url::parse(faucet_url) + .map_err(|err| CliError::UnableToParse("rest_url", err.to_string()))?, + NUM_DEFAULT_OCTAS, + address, + ) + .await?; + wait_for_transactions(&client, hashes).await?; + eprintln!("Account {} funded successfully", address); + } + } else if account_exists { + eprintln!("Account {} has been already found onchain", address); + } else if network == Network::Mainnet { + eprintln!("Account {} does not exist, you will need to create and fund the account by transferring funds from another account", address); + } else { + eprintln!("Account {} has been initialized locally, but you must transfer coins to it to create the account onchain", address); + } + + // Ensure the loaded config has profiles setup for a possible empty file + if config.profiles.is_none() { + config.profiles = Some(BTreeMap::new()); + } + config + .profiles + .as_mut() + .expect("Must have profiles, as created above") + .insert(profile_name.to_string(), profile_config); + config.save()?; + eprintln!("\n---\nMovement CLI is now set up for account {} as profile {}! 
Run `movement --help` for more information about commands", address, self.profile_options.profile_name().unwrap_or(DEFAULT_PROFILE)); + Ok(()) + } +} + +impl InitTool { + /// Custom network created, which requires a REST URL + fn custom_network(&self, profile_config: &mut ProfileConfig) -> CliTypedResult<()> { + // Rest Endpoint + let rest_url = if let Some(ref rest_url) = self.rest_url { + eprintln!("Using command line argument for rest URL {}", rest_url); + Some(rest_url.to_string()) + } else { + let current = profile_config.rest_url.as_deref(); + eprintln!( + "Enter your rest endpoint [Current: {} | No input: Exit (or keep the existing if present)]", + current.unwrap_or("None"), + ); + let input = read_line("Rest endpoint")?; + let input = input.trim(); + if input.is_empty() { + if let Some(current) = current { + eprintln!("No rest url given, keeping the existing url..."); + Some(current.to_string()) + } else { + eprintln!("No rest url given, exiting..."); + return Err(CliError::AbortedError); + } + } else { + Some( + reqwest::Url::parse(input) + .map_err(|err| CliError::UnableToParse("Rest Endpoint", err.to_string()))? + .to_string(), + ) + } + }; + profile_config.rest_url = rest_url; + + // Faucet Endpoint + let faucet_url = if self.skip_faucet { + eprintln!("Not configuring a faucet because --skip-faucet was provided"); + None + } else if let Some(ref faucet_url) = self.faucet_url { + eprintln!("Using command line argument for faucet URL {}", faucet_url); + Some(faucet_url.to_string()) + } else { + let current = profile_config.faucet_url.as_deref(); + eprintln!( + "Enter your faucet endpoint [Current: {} | No input: Skip (or keep the existing one if present) | 'skip' to not use a faucet]", + current + .unwrap_or("None"), + ); + let input = read_line("Faucet endpoint")?; + let input = input.trim(); + if input.is_empty() { + if let Some(current) = current { + eprintln!("No faucet url given, keeping the existing url..."); + Some(current.to_string()) + } else { + eprintln!("No faucet url given, skipping faucet..."); + None + } + } else if input.to_lowercase() == "skip" { + eprintln!("Skipping faucet..."); + None + } else { + Some( + reqwest::Url::parse(input) + .map_err(|err| CliError::UnableToParse("Faucet Endpoint", err.to_string()))? + .to_string(), + ) + } + }; + profile_config.faucet_url = faucet_url; + Ok(()) + } +} + +/// A simplified list of all networks supported by the CLI +/// +/// Any command using this, will be simpler to setup as profiles +#[derive(Copy, Clone, Debug, Serialize, Deserialize, Eq, PartialEq)] +pub enum Network { + Mainnet, + Testnet, + Devnet, + Local, + Custom, +} + +impl FromStr for Network { + type Err = CliError; + + fn from_str(s: &str) -> Result { + Ok(match s.to_lowercase().trim() { + "mainnet" => Self::Mainnet, + "testnet" => Self::Testnet, + "devnet" => Self::Devnet, + "local" => Self::Local, + "custom" => Self::Custom, + str => { + return Err(CliError::CommandArgumentError(format!( + "Invalid network {}. 
Must be one of [devnet, testnet, mainnet, local, custom]", + str + ))); + }, + }) + } +} + +impl Default for Network { + fn default() -> Self { + Self::Devnet + } +} diff --git a/m1/movement/src/common/mod.rs b/m1/movement/src/common/mod.rs new file mode 100644 index 00000000..1be94da1 --- /dev/null +++ b/m1/movement/src/common/mod.rs @@ -0,0 +1,6 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +pub mod init; +pub mod types; +pub mod utils; diff --git a/m1/movement/src/common/types.rs b/m1/movement/src/common/types.rs new file mode 100644 index 00000000..bbaf52b0 --- /dev/null +++ b/m1/movement/src/common/types.rs @@ -0,0 +1,1753 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use crate::{ + common::{ + init::Network, + utils::{ + check_if_file_exists, create_dir_if_not_exist, dir_default_to_current, + get_account_with_state, get_auth_key, get_sequence_number, prompt_yes_with_override, + read_from_file, start_logger, to_common_result, to_common_success_result, + write_to_file, write_to_file_with_opts, write_to_user_only_file, + }, + }, + config::GlobalConfig, + genesis::git::from_yaml, + move_tool::{ArgWithType, MemberId}, +}; +use aptos_crypto::{ + ed25519::{Ed25519PrivateKey, Ed25519PublicKey, Ed25519Signature}, + x25519, PrivateKey, ValidCryptoMaterial, ValidCryptoMaterialStringExt, +}; +use aptos_debugger::AptosDebugger; +use aptos_gas_profiling::FrameName; +use aptos_global_constants::adjust_gas_headroom; +use aptos_keygen::KeyGen; +use aptos_rest_client::{ + aptos_api_types::{HashValue, MoveType, ViewRequest}, + error::RestError, + Client, Transaction, +}; +use aptos_sdk::{transaction_builder::TransactionFactory, types::LocalAccount}; +use aptos_types::{ + chain_id::ChainId, + transaction::{ + authenticator::AuthenticationKey, EntryFunction, SignedTransaction, TransactionPayload, + TransactionStatus, + }, +}; +use async_trait::async_trait; +use clap::{ArgEnum, Parser}; +use hex::FromHexError; +use move_core_types::{account_address::AccountAddress, language_storage::TypeTag}; +use serde::{Deserialize, Serialize}; +#[cfg(unix)] +use std::os::unix::fs::OpenOptionsExt; +use std::{ + collections::BTreeMap, + convert::TryFrom, + fmt::{Debug, Display, Formatter}, + fs::OpenOptions, + path::{Path, PathBuf}, + str::FromStr, + time::{Duration, Instant, SystemTime, UNIX_EPOCH}, +}; +use reqwest::Url; +use thiserror::Error; + +pub const USER_AGENT: &str = concat!("movement-cli/", env!("CARGO_PKG_VERSION")); +const US_IN_SECS: u64 = 1_000_000; +const ACCEPTED_CLOCK_SKEW_US: u64 = 5 * US_IN_SECS; +pub const DEFAULT_EXPIRATION_SECS: u64 = 60; +pub const DEFAULT_PROFILE: &str = "default"; + +/// A common result to be returned to users +pub type CliResult = Result; + +/// A common result to remove need for typing `Result` +pub type CliTypedResult = Result; + +/// CLI Errors for reporting through telemetry and outputs +#[derive(Debug, Error)] +pub enum CliError { + #[error("Aborted command")] + AbortedError, + #[error("API error: {0}")] + ApiError(String), + #[error("Error (de)serializing '{0}': {1}")] + BCS(&'static str, #[source] bcs::Error), + #[error("Invalid arguments: {0}")] + CommandArgumentError(String), + #[error("Unable to load config: {0} {1}")] + ConfigLoadError(String, String), + #[error("Unable to find config {0}, have you run `movement init`?")] + ConfigNotFoundError(String), + #[error("Error accessing '{0}': {1}")] + IO(String, #[source] std::io::Error), + #[error("Move compilation failed: {0}")] + 
MoveCompilationError(String), + #[error("Move unit tests failed")] + MoveTestError, + #[error("Move Prover failed: {0}")] + MoveProverError(String), + #[error("Unable to parse '{0}': error: {1}")] + UnableToParse(&'static str, String), + #[error("Unable to read file '{0}', error: {1}")] + UnableToReadFile(String, String), + #[error("Unexpected error: {0}")] + UnexpectedError(String), + #[error("Simulation failed with status: {0}")] + SimulationError(String), + #[error("Coverage failed with status: {0}")] + CoverageError(String), +} + +impl CliError { + pub fn to_str(&self) -> &'static str { + match self { + CliError::AbortedError => "AbortedError", + CliError::ApiError(_) => "ApiError", + CliError::BCS(_, _) => "BCS", + CliError::CommandArgumentError(_) => "CommandArgumentError", + CliError::ConfigLoadError(_, _) => "ConfigLoadError", + CliError::ConfigNotFoundError(_) => "ConfigNotFoundError", + CliError::IO(_, _) => "IO", + CliError::MoveCompilationError(_) => "MoveCompilationError", + CliError::MoveTestError => "MoveTestError", + CliError::MoveProverError(_) => "MoveProverError", + CliError::UnableToParse(_, _) => "UnableToParse", + CliError::UnableToReadFile(_, _) => "UnableToReadFile", + CliError::UnexpectedError(_) => "UnexpectedError", + CliError::SimulationError(_) => "SimulationError", + CliError::CoverageError(_) => "CoverageError", + } + } +} + +impl From for CliError { + fn from(e: RestError) -> Self { + CliError::ApiError(e.to_string()) + } +} + +impl From for CliError { + fn from(e: aptos_config::config::Error) -> Self { + CliError::UnexpectedError(e.to_string()) + } +} + +impl From for CliError { + fn from(e: aptos_github_client::Error) -> Self { + CliError::UnexpectedError(e.to_string()) + } +} + +impl From for CliError { + fn from(e: serde_yaml::Error) -> Self { + CliError::UnexpectedError(e.to_string()) + } +} + +impl From for CliError { + fn from(e: base64::DecodeError) -> Self { + CliError::UnexpectedError(e.to_string()) + } +} + +impl From for CliError { + fn from(e: std::string::FromUtf8Error) -> Self { + CliError::UnexpectedError(e.to_string()) + } +} + +impl From for CliError { + fn from(e: aptos_crypto::CryptoMaterialError) -> Self { + CliError::UnexpectedError(e.to_string()) + } +} + +impl From for CliError { + fn from(e: FromHexError) -> Self { + CliError::UnexpectedError(e.to_string()) + } +} + +impl From for CliError { + fn from(e: anyhow::Error) -> Self { + CliError::UnexpectedError(e.to_string()) + } +} + +impl From for CliError { + fn from(e: bcs::Error) -> Self { + CliError::UnexpectedError(e.to_string()) + } +} + +/// Config saved to `.aptos/config.yaml` +#[derive(Debug, Serialize, Deserialize)] +pub struct CliConfig { + /// Map of profile configs + #[serde(skip_serializing_if = "Option::is_none")] + pub profiles: Option>, +} + +const CONFIG_FILE: &str = "config.yaml"; +const LEGACY_CONFIG_FILE: &str = "config.yml"; +pub const CONFIG_FOLDER: &str = ".movement"; + +/// An individual profile +#[derive(Debug, Default, Serialize, Deserialize)] +pub struct ProfileConfig { + #[serde(skip_serializing_if = "Option::is_none")] + pub network: Option, + /// Private key for commands. 
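+    // A saved profile lands in `.movement/config.yaml` (see CONFIG_FOLDER / CONFIG_FILE above).
+    // Illustrative shape only, derived from the serde field names of this struct; values are fake:
+    //
+    //   profiles:
+    //     default:
+    //       private_key: "0x<hex>"
+    //       public_key: "0x<hex>"
+    //       account: "<hex address>"
+    //       rest_url: "https://seed-node1.movementlabs.xyz"
+    //       faucet_url: "https://seed-node1.movementlabs.xyz"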
+ #[serde(skip_serializing_if = "Option::is_none")] + pub private_key: Option, + /// Public key for commands + #[serde(skip_serializing_if = "Option::is_none")] + pub public_key: Option, + /// Account for commands + #[serde(skip_serializing_if = "Option::is_none")] + pub account: Option, + /// URL for the Aptos rest endpoint + #[serde(skip_serializing_if = "Option::is_none")] + pub rest_url: Option, + /// URL for the Faucet endpoint (if applicable) + #[serde(skip_serializing_if = "Option::is_none")] + pub faucet_url: Option, +} + +/// ProfileConfig but without the private parts +#[derive(Debug, Serialize)] +pub struct ProfileSummary { + pub has_private_key: bool, + #[serde(skip_serializing_if = "Option::is_none")] + pub public_key: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub account: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub rest_url: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub faucet_url: Option, +} + +impl From<&ProfileConfig> for ProfileSummary { + fn from(config: &ProfileConfig) -> Self { + ProfileSummary { + has_private_key: config.private_key.is_some(), + public_key: config.public_key.clone(), + account: config.account, + rest_url: config.rest_url.clone(), + faucet_url: config.faucet_url.clone(), + } + } +} + +impl Default for CliConfig { + fn default() -> Self { + CliConfig { + profiles: Some(BTreeMap::new()), + } + } +} + +#[derive(Clone, Copy, PartialEq, Eq, Ord, PartialOrd)] +pub enum ConfigSearchMode { + CurrentDir, + CurrentDirAndParents, +} + +impl CliConfig { + /// Checks if the config exists in the current working directory + pub fn config_exists(mode: ConfigSearchMode) -> bool { + if let Ok(folder) = Self::aptos_folder(mode) { + let config_file = folder.join(CONFIG_FILE); + let old_config_file = folder.join(LEGACY_CONFIG_FILE); + config_file.exists() || old_config_file.exists() + } else { + false + } + } + + /// Loads the config from the current working directory or one of its parents. + pub fn load(mode: ConfigSearchMode) -> CliTypedResult { + let folder = Self::aptos_folder(mode)?; + + let config_file = folder.join(CONFIG_FILE); + let old_config_file = folder.join(LEGACY_CONFIG_FILE); + if config_file.exists() { + from_yaml( + &String::from_utf8(read_from_file(config_file.as_path())?) + .map_err(CliError::from)?, + ) + } else if old_config_file.exists() { + from_yaml( + &String::from_utf8(read_from_file(old_config_file.as_path())?) 
+ .map_err(CliError::from)?, + ) + } else { + Err(CliError::ConfigNotFoundError(format!( + "{}", + config_file.display() + ))) + } + } + + pub fn load_profile( + profile: Option<&str>, + mode: ConfigSearchMode, + ) -> CliTypedResult> { + let mut config = Self::load(mode)?; + + // If no profile was given, use `default` + if let Some(profile) = profile { + if let Some(account_profile) = config.remove_profile(profile) { + Ok(Some(account_profile)) + } else { + Err(CliError::CommandArgumentError(format!( + "Profile {} not found", + profile + ))) + } + } else { + Ok(config.remove_profile(DEFAULT_PROFILE)) + } + } + + pub fn remove_profile(&mut self, profile: &str) -> Option { + if let Some(ref mut profiles) = self.profiles { + profiles.remove(&profile.to_string()) + } else { + None + } + } + + /// Saves the config to ./.aptos/config.yaml + pub fn save(&self) -> CliTypedResult<()> { + let aptos_folder = Self::aptos_folder(ConfigSearchMode::CurrentDir)?; + + // Create if it doesn't exist + create_dir_if_not_exist(aptos_folder.as_path())?; + + // Save over previous config file + let config_file = aptos_folder.join(CONFIG_FILE); + let config_bytes = serde_yaml::to_string(&self).map_err(|err| { + CliError::UnexpectedError(format!("Failed to serialize config {}", err)) + })?; + write_to_user_only_file(&config_file, CONFIG_FILE, config_bytes.as_bytes())?; + + // As a cleanup, delete the old if it exists + let legacy_config_file = aptos_folder.join(LEGACY_CONFIG_FILE); + if legacy_config_file.exists() { + eprintln!("Removing legacy config file {}", LEGACY_CONFIG_FILE); + let _ = std::fs::remove_file(legacy_config_file); + } + Ok(()) + } + + /// Finds the current directory's .aptos folder + fn aptos_folder(mode: ConfigSearchMode) -> CliTypedResult { + let global_config = GlobalConfig::load()?; + global_config.get_config_location(mode) + } +} + +/// Types of Keys used by the blockchain +#[derive(ArgEnum, Clone, Copy, Debug)] +pub enum KeyType { + /// Ed25519 key used for signing + Ed25519, + /// X25519 key used for network handshakes and identity + X25519, + /// A BLS12381 key for consensus + Bls12381, +} + +impl Display for KeyType { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + let str = match self { + KeyType::Ed25519 => "ed25519", + KeyType::X25519 => "x25519", + KeyType::Bls12381 => "bls12381", + }; + write!(f, "{}", str) + } +} + +impl FromStr for KeyType { + type Err = &'static str; + + fn from_str(s: &str) -> Result { + match s.to_lowercase().as_str() { + "ed25519" => Ok(KeyType::Ed25519), + "x25519" => Ok(KeyType::X25519), + "bls12381" => Ok(KeyType::Bls12381), + _ => Err("Invalid key type: Must be one of [ed25519, x25519]"), + } + } +} + +#[derive(Debug, Default, Parser)] +pub struct ProfileOptions { + /// Profile to use from the CLI config + /// + /// This will be used to override associated settings such as + /// the REST URL, the Faucet URL, and the private key arguments. 
+ /// + /// Defaults to "default" + #[clap(long)] + pub profile: Option, +} + +impl ProfileOptions { + pub fn account_address(&self) -> CliTypedResult { + let profile = self.profile()?; + if let Some(account) = profile.account { + return Ok(account); + } + + Err(CliError::ConfigNotFoundError( + self.profile + .clone() + .unwrap_or_else(|| DEFAULT_PROFILE.to_string()), + )) + } + + pub fn public_key(&self) -> CliTypedResult { + let profile = self.profile()?; + if let Some(public_key) = profile.public_key { + return Ok(public_key); + } + + Err(CliError::ConfigNotFoundError( + self.profile + .clone() + .unwrap_or_else(|| DEFAULT_PROFILE.to_string()), + )) + } + + pub fn profile_name(&self) -> Option<&str> { + self.profile.as_ref().map(|inner| inner.trim()) + } + + pub fn profile(&self) -> CliTypedResult { + if let Some(profile) = + CliConfig::load_profile(self.profile_name(), ConfigSearchMode::CurrentDirAndParents)? + { + return Ok(profile); + } + + Err(CliError::ConfigNotFoundError( + self.profile + .clone() + .unwrap_or_else(|| DEFAULT_PROFILE.to_string()), + )) + } +} + +/// Types of encodings used by the blockchain +#[derive(ArgEnum, Clone, Copy, Debug)] +pub enum EncodingType { + /// Binary Canonical Serialization + BCS, + /// Hex encoded e.g. 0xABCDE12345 + Hex, + /// Base 64 encoded + Base64, +} + +impl EncodingType { + /// Encodes `Key` into one of the `EncodingType`s + pub fn encode_key( + &self, + name: &'static str, + key: &Key, + ) -> CliTypedResult> { + Ok(match self { + EncodingType::Hex => hex::encode_upper(key.to_bytes()).into_bytes(), + EncodingType::BCS => bcs::to_bytes(key).map_err(|err| CliError::BCS(name, err))?, + EncodingType::Base64 => base64::encode(key.to_bytes()).into_bytes(), + }) + } + + /// Loads a key from a file + pub fn load_key( + &self, + name: &'static str, + path: &Path, + ) -> CliTypedResult { + self.decode_key(name, read_from_file(path)?) + } + + /// Decodes an encoded key given the known encoding + pub fn decode_key( + &self, + name: &'static str, + data: Vec, + ) -> CliTypedResult { + match self { + EncodingType::BCS => bcs::from_bytes(&data).map_err(|err| CliError::BCS(name, err)), + EncodingType::Hex => { + let hex_string = String::from_utf8(data)?; + Key::from_encoded_string(hex_string.trim()) + .map_err(|err| CliError::UnableToParse(name, err.to_string())) + } + EncodingType::Base64 => { + let string = String::from_utf8(data)?; + let bytes = base64::decode(string.trim()) + .map_err(|err| CliError::UnableToParse(name, err.to_string()))?; + Key::try_from(bytes.as_slice()).map_err(|err| { + CliError::UnableToParse(name, format!("Failed to parse key {:?}", err)) + }) + } + } + } +} + +#[derive(Clone, Debug, Parser)] +pub struct RngArgs { + /// The seed used for key generation, should be a 64 character hex string and only used for testing + /// + /// If a predictable random seed is used, the key that is produced will be insecure and easy + /// to reproduce. Please do not use this unless sufficient randomness is put into the random + /// seed. 
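+    // Illustrative only: the seed is 64 hex characters (32 bytes), optionally 0x-prefixed, e.g.
+    //   --random-seed 0x0000000000000000000000000000000000000000000000000000000000000001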
+ #[clap(long)] + random_seed: Option, +} + +impl RngArgs { + pub fn from_seed(seed: [u8; 32]) -> RngArgs { + RngArgs { + random_seed: Some(hex::encode(seed)), + } + } + + pub fn from_string_seed(str: &str) -> RngArgs { + assert!(str.len() < 32); + + let mut seed = [0u8; 32]; + for (i, byte) in str.bytes().enumerate() { + seed[i] = byte; + } + + RngArgs { + random_seed: Some(hex::encode(seed)), + } + } + + /// Returns a key generator with the seed if given + pub fn key_generator(&self) -> CliTypedResult { + if let Some(ref seed) = self.random_seed { + // Strip 0x + let seed = seed.strip_prefix("0x").unwrap_or(seed); + let mut seed_slice = [0u8; 32]; + + hex::decode_to_slice(seed, &mut seed_slice)?; + Ok(KeyGen::from_seed(seed_slice)) + } else { + Ok(KeyGen::from_os_rng()) + } + } +} + +impl Default for EncodingType { + fn default() -> Self { + EncodingType::Hex + } +} + +impl Display for EncodingType { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + let str = match self { + EncodingType::BCS => "bcs", + EncodingType::Hex => "hex", + EncodingType::Base64 => "base64", + }; + write!(f, "{}", str) + } +} + +impl FromStr for EncodingType { + type Err = &'static str; + + fn from_str(s: &str) -> Result { + match s.to_lowercase().as_str() { + "hex" => Ok(EncodingType::Hex), + "bcs" => Ok(EncodingType::BCS), + "base64" => Ok(EncodingType::Base64), + _ => Err("Invalid encoding type"), + } + } +} + +/// An insertable option for use with prompts. +#[derive(Clone, Copy, Debug, Default, Parser, PartialEq, Eq)] +pub struct PromptOptions { + /// Assume yes for all yes/no prompts + #[clap(long, group = "prompt_options")] + pub assume_yes: bool, + /// Assume no for all yes/no prompts + #[clap(long, group = "prompt_options")] + pub assume_no: bool, +} + +impl PromptOptions { + pub fn yes() -> Self { + Self { + assume_yes: true, + assume_no: false, + } + } + + pub fn no() -> Self { + Self { + assume_yes: false, + assume_no: true, + } + } +} + +/// An insertable option for use with encodings. +#[derive(Debug, Default, Parser)] +pub struct EncodingOptions { + /// Encoding of data as one of [base64, bcs, hex] + #[clap(long, default_value_t = EncodingType::Hex)] + pub encoding: EncodingType, +} + +#[derive(Debug, Parser)] +pub struct PublicKeyInputOptions { + /// Ed25519 Public key input file name + /// + /// Mutually exclusive with `--public-key` + #[clap(long, group = "public_key_input", parse(from_os_str))] + public_key_file: Option, + /// Ed25519 Public key encoded in a type as shown in `encoding` + /// + /// Mutually exclusive with `--public-key-file` + #[clap(long, group = "public_key_input")] + public_key: Option, +} + +impl PublicKeyInputOptions { + pub fn from_key(key: &Ed25519PublicKey) -> PublicKeyInputOptions { + PublicKeyInputOptions { + public_key: Some(key.to_encoded_string().unwrap()), + public_key_file: None, + } + } +} + +impl ExtractPublicKey for PublicKeyInputOptions { + fn extract_public_key( + &self, + encoding: EncodingType, + profile: &ProfileOptions, + ) -> CliTypedResult { + if let Some(ref file) = self.public_key_file { + encoding.load_key("--public-key-file", file.as_path()) + } else if let Some(ref key) = self.public_key { + let key = key.as_bytes().to_vec(); + encoding.decode_key("--public-key", key) + } else if let Some(Some(public_key)) = CliConfig::load_profile( + profile.profile_name(), + ConfigSearchMode::CurrentDirAndParents, + )? 
+ .map(|p| p.public_key) + { + Ok(public_key) + } else { + Err(CliError::CommandArgumentError( + "One of ['--public-key', '--public-key-file', '--profile'] must be used" + .to_string(), + )) + } + } +} + +pub trait ParsePrivateKey { + fn parse_private_key( + &self, + encoding: EncodingType, + private_key_file: Option, + private_key: Option, + ) -> CliTypedResult> { + if let Some(ref file) = private_key_file { + Ok(Some( + encoding.load_key("--private-key-file", file.as_path())?, + )) + } else if let Some(ref key) = private_key { + let key = key.as_bytes().to_vec(); + Ok(Some(encoding.decode_key("--private-key", key)?)) + } else { + Ok(None) + } + } +} + +#[derive(Debug, Default, Parser)] +pub struct PrivateKeyInputOptions { + /// Signing Ed25519 private key file path + /// + /// Encoded with type from `--encoding` + /// Mutually exclusive with `--private-key` + #[clap(long, group = "private_key_input", parse(from_os_str))] + private_key_file: Option, + /// Signing Ed25519 private key + /// + /// Encoded with type from `--encoding` + /// Mutually exclusive with `--private-key-file` + #[clap(long, group = "private_key_input")] + private_key: Option, +} + +impl ParsePrivateKey for PrivateKeyInputOptions {} + +impl PrivateKeyInputOptions { + pub fn from_private_key(private_key: &Ed25519PrivateKey) -> CliTypedResult { + Ok(PrivateKeyInputOptions { + private_key: Some( + private_key + .to_encoded_string() + .map_err(|err| CliError::UnexpectedError(err.to_string()))?, + ), + private_key_file: None, + }) + } + + pub fn from_x25519_private_key(private_key: &x25519::PrivateKey) -> CliTypedResult { + Ok(PrivateKeyInputOptions { + private_key: Some( + private_key + .to_encoded_string() + .map_err(|err| CliError::UnexpectedError(err.to_string()))?, + ), + private_key_file: None, + }) + } + + pub fn from_file(file: PathBuf) -> Self { + PrivateKeyInputOptions { + private_key: None, + private_key_file: Some(file), + } + } + + /// Extract private key from CLI args with fallback to config + pub fn extract_private_key_and_address( + &self, + encoding: EncodingType, + profile: &ProfileOptions, + maybe_address: Option, + ) -> CliTypedResult<(Ed25519PrivateKey, AccountAddress)> { + // Order of operations + // 1. CLI inputs + // 2. Profile + // 3. Derived + if let Some(key) = self.extract_private_key_cli(encoding)? { + // If we use the CLI inputs, then we should derive or use the address from the input + if let Some(address) = maybe_address { + Ok((key, address)) + } else { + let address = account_address_from_public_key(&key.public_key()); + Ok((key, address)) + } + } else if let Some((Some(key), maybe_config_address)) = CliConfig::load_profile( + profile.profile_name(), + ConfigSearchMode::CurrentDirAndParents, + )? + .map(|p| (p.private_key, p.account)) + { + match (maybe_address, maybe_config_address) { + (Some(address), _) => Ok((key, address)), + (_, Some(address)) => Ok((key, address)), + (None, None) => { + let address = account_address_from_public_key(&key.public_key()); + Ok((key, address)) + } + } + } else { + Err(CliError::CommandArgumentError( + "One of ['--private-key', '--private-key-file'] must be used".to_string(), + )) + } + } + + /// Extract private key from CLI args with fallback to config + pub fn extract_private_key( + &self, + encoding: EncodingType, + profile: &ProfileOptions, + ) -> CliTypedResult { + if let Some(key) = self.extract_private_key_cli(encoding)? 
{ + Ok(key) + } else if let Some(Some(private_key)) = CliConfig::load_profile( + profile.profile_name(), + ConfigSearchMode::CurrentDirAndParents, + )? + .map(|p| p.private_key) + { + Ok(private_key) + } else { + Err(CliError::CommandArgumentError( + "One of ['--private-key', '--private-key-file'] must be used".to_string(), + )) + } + } + + /// Extract private key from CLI args + pub fn extract_private_key_cli( + &self, + encoding: EncodingType, + ) -> CliTypedResult> { + self.parse_private_key( + encoding, + self.private_key_file.clone(), + self.private_key.clone(), + ) + } +} + +impl ExtractPublicKey for PrivateKeyInputOptions { + fn extract_public_key( + &self, + encoding: EncodingType, + profile: &ProfileOptions, + ) -> CliTypedResult { + self.extract_private_key(encoding, profile) + .map(|private_key| private_key.public_key()) + } +} + +pub trait ExtractPublicKey { + fn extract_public_key( + &self, + encoding: EncodingType, + profile: &ProfileOptions, + ) -> CliTypedResult; +} + +pub fn account_address_from_public_key(public_key: &Ed25519PublicKey) -> AccountAddress { + let auth_key = AuthenticationKey::ed25519(public_key); + AccountAddress::new(*auth_key.derived_address()) +} + +#[derive(Debug, Parser)] +pub struct SaveFile { + /// Output file path + #[clap(long, parse(from_os_str))] + pub output_file: PathBuf, + + #[clap(flatten)] + pub prompt_options: PromptOptions, +} + +impl SaveFile { + /// Check if the key file exists already + pub fn check_file(&self) -> CliTypedResult<()> { + check_if_file_exists(self.output_file.as_path(), self.prompt_options) + } + + /// Save to the `output_file` + pub fn save_to_file(&self, name: &str, bytes: &[u8]) -> CliTypedResult<()> { + write_to_file(self.output_file.as_path(), name, bytes) + } + + /// Save to the `output_file` with restricted permissions (mode 0600) + pub fn save_to_file_confidential(&self, name: &str, bytes: &[u8]) -> CliTypedResult<()> { + let mut opts = OpenOptions::new(); + #[cfg(unix)] + opts.mode(0o600); + write_to_file_with_opts(self.output_file.as_path(), name, bytes, &mut opts) + } +} + +/// Options specific to using the Rest endpoint +#[derive(Debug, Default, Parser)] +pub struct RestOptions { + /// URL to a fullnode on the network + /// + /// Defaults to the URL in the `default` profile + #[clap(long)] + pub(crate) url: Option, + + /// Connection timeout in seconds, used for the REST endpoint of the fullnode + #[clap(long, default_value_t = DEFAULT_EXPIRATION_SECS, alias = "connection-timeout-s")] + pub connection_timeout_secs: u64, +} + +impl RestOptions { + pub fn new(url: Option, connection_timeout_secs: Option) -> Self { + RestOptions { + url, + connection_timeout_secs: connection_timeout_secs.unwrap_or(DEFAULT_EXPIRATION_SECS), + } + } + + /// Retrieve the URL from the profile or the command line + pub fn url(&self, profile: &ProfileOptions) -> CliTypedResult { + if let Some(ref url) = self.url { + Ok(url.clone()) + } else if let Some(Some(url)) = CliConfig::load_profile( + profile.profile_name(), + ConfigSearchMode::CurrentDirAndParents, + )? + .map(|p| p.rest_url) + { + reqwest::Url::parse(&url) + .map_err(|err| CliError::UnableToParse("Rest URL", err.to_string())) + } else { + Err(CliError::CommandArgumentError("No rest url given. 
Please add --url or add a rest_url to the .movement/config.yaml for the current profile".to_string())) + } + } + + pub fn client(&self, profile: &ProfileOptions) -> CliTypedResult { + Ok(Client::new_with_timeout_and_user_agent( + self.url(profile)?, + Duration::from_secs(self.connection_timeout_secs), + USER_AGENT, + )) + } + pub fn client_raw(&self, url: Url) -> CliTypedResult { + Ok(Client::new_with_timeout_and_user_agent( + url, + Duration::from_secs(self.connection_timeout_secs), + USER_AGENT, + )) + } +} + +/// Options for compiling a move package dir +#[derive(Debug, Clone, Parser)] +pub struct MovePackageDir { + /// Path to a move package (the folder with a Move.toml file) + #[clap(long, parse(from_os_str))] + pub package_dir: Option, + /// Path to save the compiled move package + /// + /// Defaults to `/build` + #[clap(long, parse(from_os_str))] + pub output_dir: Option, + /// Named addresses for the move binary + /// + /// Example: alice=0x1234, bob=0x5678 + /// + /// Note: This will fail if there are duplicates in the Move.toml file remove those first. + #[clap(long, parse(try_from_str = crate::common::utils::parse_map), default_value = "")] + pub(crate) named_addresses: BTreeMap, + + /// Skip pulling the latest git dependencies + /// + /// If you don't have a network connection, the compiler may fail due + /// to no ability to pull git dependencies. This will allow overriding + /// this for local development. + #[clap(long)] + pub(crate) skip_fetch_latest_git_deps: bool, + + /// Specify the version of the bytecode the compiler is going to emit. + #[clap(long)] + pub bytecode_version: Option, +} + +impl MovePackageDir { + pub fn new(package_dir: PathBuf) -> Self { + Self { + package_dir: Some(package_dir), + output_dir: None, + named_addresses: Default::default(), + skip_fetch_latest_git_deps: true, + bytecode_version: None, + } + } + + pub fn get_package_path(&self) -> CliTypedResult { + dir_default_to_current(self.package_dir.clone()) + } + + /// Retrieve the NamedAddresses, resolving all the account addresses accordingly + pub fn named_addresses(&self) -> BTreeMap { + self.named_addresses + .clone() + .into_iter() + .map(|(key, value)| (key, value.account_address)) + .collect() + } + + pub fn add_named_address(&mut self, key: String, value: String) { + self.named_addresses + .insert(key, AccountAddressWrapper::from_str(&value).unwrap()); + } +} + +/// A wrapper around `AccountAddress` to be more flexible from strings than AccountAddress +#[derive(Clone, Copy, Debug)] +pub struct AccountAddressWrapper { + pub account_address: AccountAddress, +} + +impl FromStr for AccountAddressWrapper { + type Err = CliError; + + fn from_str(s: &str) -> Result { + Ok(AccountAddressWrapper { + account_address: load_account_arg(s)?, + }) + } +} + +/// Loads an account arg and allows for naming based on profiles +pub fn load_account_arg(str: &str) -> Result { + if str.starts_with("0x") { + AccountAddress::from_hex_literal(str).map_err(|err| { + CliError::CommandArgumentError(format!("Failed to parse AccountAddress {}", err)) + }) + } else if let Ok(account_address) = AccountAddress::from_str(str) { + Ok(account_address) + } else if let Some(Some(account_address)) = + CliConfig::load_profile(Some(str), ConfigSearchMode::CurrentDirAndParents)? + .map(|p| p.account) + { + Ok(account_address) + } else if let Some(Some(private_key)) = + CliConfig::load_profile(Some(str), ConfigSearchMode::CurrentDirAndParents)? 
+ .map(|p| p.private_key) + { + let public_key = private_key.public_key(); + Ok(account_address_from_public_key(&public_key)) + } else { + Err(CliError::CommandArgumentError( + "'--account' or '--profile' after using movement init must be provided".to_string(), + )) + } +} + +/// A wrapper around `AccountAddress` to allow for "_" +#[derive(Clone, Copy, Debug)] +pub struct MoveManifestAccountWrapper { + pub account_address: Option, +} + +impl FromStr for MoveManifestAccountWrapper { + type Err = CliError; + + fn from_str(s: &str) -> Result { + Ok(MoveManifestAccountWrapper { + account_address: load_manifest_account_arg(s)?, + }) + } +} + +/// Loads an account arg and allows for naming based on profiles and "_" +pub fn load_manifest_account_arg(str: &str) -> Result, CliError> { + if str == "_" { + Ok(None) + } else if str.starts_with("0x") { + AccountAddress::from_hex_literal(str) + .map(Some) + .map_err(|err| { + CliError::CommandArgumentError(format!("Failed to parse AccountAddress {}", err)) + }) + } else if let Ok(account_address) = AccountAddress::from_str(str) { + Ok(Some(account_address)) + } else if let Some(Some(private_key)) = + CliConfig::load_profile(Some(str), ConfigSearchMode::CurrentDirAndParents)? + .map(|p| p.private_key) + { + let public_key = private_key.public_key(); + Ok(Some(account_address_from_public_key(&public_key))) + } else { + Err(CliError::CommandArgumentError( + "Invalid Move manifest account address".to_string(), + )) + } +} + +/// A common trait for all CLI commands to have consistent outputs +#[async_trait] +pub trait CliCommand: Sized + Send { + /// Returns a name for logging purposes + fn command_name(&self) -> &'static str; + + /// Executes the command, returning a command specific type + async fn execute(self) -> CliTypedResult; + + /// Executes the command, and serializes it to the common JSON output type + async fn execute_serialized(self) -> CliResult { + let command_name = self.command_name(); + start_logger(); + let start_time = Instant::now(); + to_common_result(command_name, start_time, self.execute().await).await + } + + /// Same as execute serialized without setting up logging + async fn execute_serialized_without_logger(self) -> CliResult { + let command_name = self.command_name(); + let start_time = Instant::now(); + to_common_result(command_name, start_time, self.execute().await).await + } + + /// Executes the command, and throws away Ok(result) for the string Success + async fn execute_serialized_success(self) -> CliResult { + start_logger(); + let command_name = self.command_name(); + let start_time = Instant::now(); + to_common_success_result(command_name, start_time, self.execute().await).await + } +} + +/// A shortened transaction output +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct TransactionSummary { + pub transaction_hash: HashValue, + #[serde(skip_serializing_if = "Option::is_none")] + pub gas_used: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub gas_unit_price: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub pending: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub sender: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub sequence_number: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub success: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub timestamp_us: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub version: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub vm_status: Option, 
+} + +impl From for TransactionSummary { + fn from(transaction: Transaction) -> Self { + TransactionSummary::from(&transaction) + } +} + +impl From<&Transaction> for TransactionSummary { + fn from(transaction: &Transaction) -> Self { + match transaction { + Transaction::PendingTransaction(txn) => TransactionSummary { + transaction_hash: txn.hash, + pending: Some(true), + sender: Some(*txn.request.sender.inner()), + sequence_number: Some(txn.request.sequence_number.0), + gas_used: None, + gas_unit_price: None, + success: None, + version: None, + vm_status: None, + timestamp_us: None, + }, + Transaction::UserTransaction(txn) => TransactionSummary { + transaction_hash: txn.info.hash, + sender: Some(*txn.request.sender.inner()), + gas_used: Some(txn.info.gas_used.0), + gas_unit_price: Some(txn.request.gas_unit_price.0), + success: Some(txn.info.success), + version: Some(txn.info.version.0), + vm_status: Some(txn.info.vm_status.clone()), + sequence_number: Some(txn.request.sequence_number.0), + timestamp_us: Some(txn.timestamp.0), + pending: None, + }, + Transaction::GenesisTransaction(txn) => TransactionSummary { + transaction_hash: txn.info.hash, + success: Some(txn.info.success), + version: Some(txn.info.version.0), + vm_status: Some(txn.info.vm_status.clone()), + sender: None, + gas_used: None, + gas_unit_price: None, + pending: None, + sequence_number: None, + timestamp_us: None, + }, + Transaction::BlockMetadataTransaction(txn) => TransactionSummary { + transaction_hash: txn.info.hash, + success: Some(txn.info.success), + version: Some(txn.info.version.0), + vm_status: Some(txn.info.vm_status.clone()), + timestamp_us: Some(txn.timestamp.0), + sender: None, + gas_used: None, + gas_unit_price: None, + pending: None, + sequence_number: None, + }, + Transaction::StateCheckpointTransaction(txn) => TransactionSummary { + transaction_hash: txn.info.hash, + success: Some(txn.info.success), + version: Some(txn.info.version.0), + vm_status: Some(txn.info.vm_status.clone()), + timestamp_us: Some(txn.timestamp.0), + sender: None, + gas_used: None, + gas_unit_price: None, + pending: None, + sequence_number: None, + }, + } + } +} + +/// A summary of a `WriteSetChange` for easy printing +#[derive(Clone, Debug, Default, Serialize)] +pub struct ChangeSummary { + #[serde(skip_serializing_if = "Option::is_none")] + address: Option, + #[serde(skip_serializing_if = "Option::is_none")] + data: Option, + event: &'static str, + #[serde(skip_serializing_if = "Option::is_none")] + handle: Option, + #[serde(skip_serializing_if = "Option::is_none")] + key: Option, + #[serde(skip_serializing_if = "Option::is_none")] + module: Option, + #[serde(skip_serializing_if = "Option::is_none")] + resource: Option, + #[serde(skip_serializing_if = "Option::is_none")] + value: Option, +} + +#[derive(Debug, Default, Parser)] +pub struct FaucetOptions { + /// URL for the faucet endpoint e.g. `https://faucet.devnet.aptoslabs.com` + #[clap(long)] + faucet_url: Option, +} + +impl FaucetOptions { + pub fn new(faucet_url: Option) -> Self { + FaucetOptions { faucet_url } + } + + pub fn faucet_url(&self, profile: &ProfileOptions) -> CliTypedResult { + if let Some(ref faucet_url) = self.faucet_url { + Ok(faucet_url.clone()) + } else if let Some(Some(url)) = CliConfig::load_profile( + profile.profile_name(), + ConfigSearchMode::CurrentDirAndParents, + )? 
+ .map(|profile| profile.faucet_url) + { + reqwest::Url::parse(&url) + .map_err(|err| CliError::UnableToParse("config faucet_url", err.to_string())) + } else { + Err(CliError::CommandArgumentError("No faucet given. Please add --faucet-url or add a faucet URL to the .movement/config.yaml for the current profile".to_string())) + } + } +} + +/// Gas price options for manipulating how to prioritize transactions +#[derive(Debug, Eq, Parser, PartialEq)] +pub struct GasOptions { + /// Gas multiplier per unit of gas + /// + /// The amount of Octas (10^-8 APT) used for a transaction is equal + /// to (gas unit price * gas used). The gas_unit_price can + /// be used as a multiplier for the amount of Octas willing + /// to be paid for a transaction. This will prioritize the + /// transaction with a higher gas unit price. + /// + /// Without a value, it will determine the price based on the current estimated price + #[clap(long)] + pub gas_unit_price: Option, + /// Maximum amount of gas units to be used to send this transaction + /// + /// The maximum amount of gas units willing to pay for the transaction. + /// This is the (max gas in Octas / gas unit price). + /// + /// For example if I wanted to pay a maximum of 100 Octas, I may have the + /// max gas set to 100 if the gas unit price is 1. If I want it to have a + /// gas unit price of 2, the max gas would need to be 50 to still only have + /// a maximum price of 100 Octas. + /// + /// Without a value, it will determine the price based on simulating the current transaction + #[clap(long)] + pub max_gas: Option, + /// Number of seconds to expire the transaction + /// + /// This is the number of seconds from the current local computer time. + #[clap(long, default_value_t = DEFAULT_EXPIRATION_SECS)] + pub expiration_secs: u64, +} + +impl Default for GasOptions { + fn default() -> Self { + GasOptions { + gas_unit_price: None, + max_gas: None, + expiration_secs: DEFAULT_EXPIRATION_SECS, + } + } +} + +/// Common options for interacting with an account for a validator +#[derive(Debug, Default, Parser)] +pub struct TransactionOptions { + /// Sender account address + /// + /// This allows you to override the account address from the derived account address + /// in the event that the authentication key was rotated or for a resource account + #[clap(long, parse(try_from_str = crate::common::types::load_account_arg))] + pub(crate) sender_account: Option, + + #[clap(flatten)] + pub(crate) private_key_options: PrivateKeyInputOptions, + #[clap(flatten)] + pub(crate) encoding_options: EncodingOptions, + #[clap(flatten)] + pub(crate) profile_options: ProfileOptions, + #[clap(flatten)] + pub(crate) rest_options: RestOptions, + #[clap(flatten)] + pub(crate) gas_options: GasOptions, + #[clap(flatten)] + pub(crate) prompt_options: PromptOptions, + + /// If this option is set, simulate the transaction locally using the debugger and generate + /// flamegraphs that reflect the gas usage. 
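+    ///
+    /// Nothing is submitted on-chain in this mode, so no state changes are committed and no gas is charged.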
+    #[clap(long)]
+    pub(crate) profile_gas: bool,
+}
+
+impl TransactionOptions {
+    /// Builds a rest client
+    fn rest_client(&self) -> CliTypedResult {
+        self.rest_options.client(&self.profile_options)
+    }
+
+    /// Retrieves the private key and the associated address
+    /// TODO: Cache this information
+    pub fn get_key_and_address(&self) -> CliTypedResult<(Ed25519PrivateKey, AccountAddress)> {
+        self.private_key_options.extract_private_key_and_address(
+            self.encoding_options.encoding,
+            &self.profile_options,
+            self.sender_account,
+        )
+    }
+
+    pub fn sender_address(&self) -> CliTypedResult {
+        Ok(self.get_key_and_address()?.1)
+    }
+
+    /// Gets the auth key by account address. We need to fetch the auth key from the REST API rather than creating an
+    /// auth key out of the public key.
+    pub(crate) async fn auth_key(
+        &self,
+        sender_address: AccountAddress,
+    ) -> CliTypedResult {
+        let client = self.rest_client()?;
+        get_auth_key(&client, sender_address).await
+    }
+
+    pub async fn sequence_number(&self, sender_address: AccountAddress) -> CliTypedResult {
+        let client = self.rest_client()?;
+        get_sequence_number(&client, sender_address).await
+    }
+
+    pub async fn view(&self, payload: ViewRequest) -> CliTypedResult> {
+        let client = self.rest_client()?;
+        Ok(client.view(&payload, None).await?.into_inner())
+    }
+
+    /// Submit a transaction
+    pub async fn submit_transaction(
+        &self,
+        payload: TransactionPayload,
+    ) -> CliTypedResult {
+        let client = self.rest_client()?;
+        let (sender_key, sender_address) = self.get_key_and_address()?;
+
+        // Ask to confirm price if the gas unit price is estimated above the lowest value when
+        // it is automatically estimated
+        let ask_to_confirm_price;
+        let gas_unit_price = if let Some(gas_unit_price) = self.gas_options.gas_unit_price {
+            ask_to_confirm_price = false;
+            gas_unit_price
+        } else {
+            let gas_unit_price = client.estimate_gas_price().await?.into_inner().gas_estimate;
+
+            ask_to_confirm_price = true;
+            gas_unit_price
+        };
+
+        // Get sequence number for account
+        let (account, state) = get_account_with_state(&client, sender_address).await?;
+        let sequence_number = account.sequence_number;
+
+        // Retrieve local time, and ensure it's within an expected skew of the blockchain
+        let now = SystemTime::now()
+            .duration_since(UNIX_EPOCH)
+            .map_err(|err| CliError::UnexpectedError(err.to_string()))?
+            .as_secs();
+        let now_usecs = now * US_IN_SECS;
+
+        // Warn local user that clock is skewed behind the blockchain.
+        // There will always be a little lag from real time to blockchain time
+        if now_usecs < state.timestamp_usecs - ACCEPTED_CLOCK_SKEW_US {
+            eprintln!("Local clock is skewed from blockchain clock. Clock is more than {} seconds behind the blockchain {}", ACCEPTED_CLOCK_SKEW_US, state.timestamp_usecs / US_IN_SECS);
+        }
+        let expiration_time_secs = now + self.gas_options.expiration_secs;
+
+        let chain_id = ChainId::new(state.chain_id);
+        // TODO: Check auth key against current private key and provide a better message
+
+        let max_gas = if let Some(max_gas) = self.gas_options.max_gas {
+            // If the gas unit price was estimated, confirm the price; otherwise you've already chosen how much you want to spend
+            if ask_to_confirm_price {
+                let message = format!("Do you want to submit a transaction for a maximum of {} Octas at a gas unit price of {} Octas?", max_gas * gas_unit_price, gas_unit_price);
+                prompt_yes_with_override(&message, self.prompt_options)?;
+            }
+            max_gas
+        } else {
+            let transaction_factory =
+                TransactionFactory::new(chain_id).with_gas_unit_price(gas_unit_price);
+
+            let unsigned_transaction = transaction_factory
+                .payload(payload.clone())
+                .sender(sender_address)
+                .sequence_number(sequence_number)
+                .expiration_timestamp_secs(expiration_time_secs)
+                .build();
+
+            let signed_transaction = SignedTransaction::new(
+                unsigned_transaction,
+                sender_key.public_key(),
+                Ed25519Signature::try_from([0u8; 64].as_ref()).unwrap(),
+            );
+
+            let txns = client
+                .simulate_with_gas_estimation(&signed_transaction, true, false)
+                .await?
+                .into_inner();
+            let simulated_txn = txns.first().unwrap();
+
+            // Check if the transaction will pass; if it doesn't, then fail
+            if !simulated_txn.info.success {
+                return Err(CliError::SimulationError(
+                    simulated_txn.info.vm_status.clone(),
+                ));
+            }
+
+            // Take the gas used and use a headroom factor on it
+            let gas_used = simulated_txn.info.gas_used.0;
+            let adjusted_max_gas =
+                adjust_gas_headroom(gas_used, simulated_txn.request.max_gas_amount.0);
+
+            // Ask if you want to accept the estimated amount
+            let upper_cost_bound = adjusted_max_gas * gas_unit_price;
+            let lower_cost_bound = gas_used * gas_unit_price;
+            let message = format!(
+                "Do you want to submit a transaction for a range of [{} - {}] Octas at a gas unit price of {} Octas?",
+                lower_cost_bound,
+                upper_cost_bound,
+                gas_unit_price);
+            prompt_yes_with_override(&message, self.prompt_options)?;
+            adjusted_max_gas
+        };
+
+        // Sign and submit transaction
+        let transaction_factory = TransactionFactory::new(chain_id)
+            .with_gas_unit_price(gas_unit_price)
+            .with_max_gas_amount(max_gas)
+            .with_transaction_expiration_time(self.gas_options.expiration_secs);
+        let sender_account = &mut LocalAccount::new(sender_address, sender_key, sequence_number);
+        let transaction =
+            sender_account.sign_with_transaction_builder(transaction_factory.payload(payload));
+        let response = client
+            .submit_and_wait(&transaction)
+            .await
+            .map_err(|err| CliError::ApiError(err.to_string()))?;
+
+        Ok(response.into_inner())
+    }
+
+    /// Simulate the transaction locally using the debugger, with the gas profiler enabled.
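+    ///
+    /// Writes the resulting flamegraph SVGs (execution & IO, and storage fee) into a local
+    /// `gas-profiling/` directory and returns a `TransactionSummary` for the simulated transaction.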
+ pub async fn profile_gas( + &self, + payload: TransactionPayload, + ) -> CliTypedResult { + println!(); + println!("Simulating transaction locally with the gas profiler..."); + println!("This is still experimental so results may be inaccurate."); + + let client = self.rest_client()?; + + // Fetch the chain states required for the simulation + // TODO(Gas): get the following from the chain + const DEFAULT_GAS_UNIT_PRICE: u64 = 100; + const DEFAULT_MAX_GAS: u64 = 2_000_000; + + let (sender_key, sender_address) = self.get_key_and_address()?; + let gas_unit_price = self + .gas_options + .gas_unit_price + .unwrap_or(DEFAULT_GAS_UNIT_PRICE); + let (account, state) = get_account_with_state(&client, sender_address).await?; + let version = state.version; + let chain_id = ChainId::new(state.chain_id); + let sequence_number = account.sequence_number; + + let balance = client + .get_account_balance_at_version(sender_address, version) + .await + .map_err(|err| CliError::ApiError(err.to_string()))? + .into_inner(); + + let max_gas = self.gas_options.max_gas.unwrap_or_else(|| { + if gas_unit_price == 0 { + DEFAULT_MAX_GAS + } else { + std::cmp::min(balance.coin.value.0 / gas_unit_price, DEFAULT_MAX_GAS) + } + }); + + // Create and sign the transaction + let transaction_factory = TransactionFactory::new(chain_id) + .with_gas_unit_price(gas_unit_price) + .with_max_gas_amount(max_gas) + .with_transaction_expiration_time(self.gas_options.expiration_secs); + let sender_account = &mut LocalAccount::new(sender_address, sender_key, sequence_number); + let transaction = + sender_account.sign_with_transaction_builder(transaction_factory.payload(payload)); + let hash = transaction.clone().committed_hash(); + + // Execute the transaction using the debugger + let debugger = AptosDebugger::rest_client(client).unwrap(); + let res = debugger.execute_transaction_at_version_with_gas_profiler(version, transaction); + let (vm_status, output, gas_log) = res.map_err(|err| { + CliError::UnexpectedError(format!("failed to simulate txn with gas profiler: {}", err)) + })?; + + // Generate the file name for the flamegraphs + let entry_point = gas_log.entry_point(); + + let human_readable_name = match entry_point { + FrameName::Script => "script".to_string(), + FrameName::Function { + module_id, name, .. + } => { + let addr_short = module_id.address().short_str_lossless(); + let addr_truncated = if addr_short.len() > 4 { + &addr_short[..4] + } else { + addr_short.as_str() + }; + format!("0x{}-{}-{}", addr_truncated, module_id.name(), name) + } + }; + let raw_file_name = format!("txn-{}-{}", hash, human_readable_name); + + // Create the directory if it does not exist yet. + let dir: &Path = Path::new("gas-profiling"); + + macro_rules! create_dir { + () => { + if let Err(err) = std::fs::create_dir(dir) { + if err.kind() != std::io::ErrorKind::AlreadyExists { + return Err(CliError::UnexpectedError(format!( + "failed to create directory {}", + dir.display() + ))); + } + } + }; + } + + // Generate the execution & IO flamegraph. + println!(); + match gas_log.to_flamegraph(format!("Transaction {} -- Execution & IO", hash))? 
{
+            Some(graph_bytes) => {
+                create_dir!();
+                let graph_file_path = Path::join(dir, format!("{}.exec_io.svg", raw_file_name));
+                std::fs::write(&graph_file_path, graph_bytes).map_err(|err| {
+                    CliError::UnexpectedError(format!(
+                        "Failed to write flamegraph to file {} : {:?}",
+                        graph_file_path.display(),
+                        err
+                    ))
+                })?;
+                println!(
+                    "Execution & IO Gas flamegraph saved to {}",
+                    graph_file_path.display()
+                );
+            }
+            None => {
+                println!("Skipped generating execution & IO flamegraph");
+            }
+        }
+
+        // Generate the storage fee flamegraph.
+        match gas_log
+            .storage
+            .to_flamegraph(format!("Transaction {} -- Storage Fee", hash))?
+        {
+            Some(graph_bytes) => {
+                create_dir!();
+                let graph_file_path = Path::join(dir, format!("{}.storage.svg", raw_file_name));
+                std::fs::write(&graph_file_path, graph_bytes).map_err(|err| {
+                    CliError::UnexpectedError(format!(
+                        "Failed to write flamegraph to file {} : {:?}",
+                        graph_file_path.display(),
+                        err
+                    ))
+                })?;
+                println!(
+                    "Storage fee flamegraph saved to {}",
+                    graph_file_path.display()
+                );
+            }
+            None => {
+                println!("Skipped generating storage fee flamegraph");
+            }
+        }
+
+        println!();
+
+        // Generate the transaction summary
+
+        // TODO(Gas): double check if this is correct.
+        let success = match output.status() {
+            TransactionStatus::Keep(exec_status) => Some(exec_status.is_success()),
+            TransactionStatus::Discard(_) | TransactionStatus::Retry => None,
+        };
+
+        Ok(TransactionSummary {
+            transaction_hash: hash.into(),
+            gas_used: Some(output.gas_used()),
+            gas_unit_price: Some(gas_unit_price),
+            pending: None,
+            sender: Some(sender_address),
+            sequence_number: None, // The transaction is not committed so there is no new sequence number.
+            success,
+            timestamp_us: None,
+            version: Some(version), // The transaction is not committed so there is no new version.
+ vm_status: Some(vm_status.to_string()), + }) + } + + pub async fn estimate_gas_price(&self) -> CliTypedResult { + let client = self.rest_client()?; + client + .estimate_gas_price() + .await + .map(|inner| inner.into_inner().gas_estimate) + .map_err(|err| { + CliError::UnexpectedError(format!( + "Failed to retrieve gas price estimate {:?}", + err + )) + }) + } +} + +#[derive(Parser)] +pub struct OptionalPoolAddressArgs { + /// Address of the Staking pool + /// + /// Defaults to the profile's `AccountAddress` + #[clap(long, parse(try_from_str = crate::common::types::load_account_arg))] + pub(crate) pool_address: Option, +} + +#[derive(Parser)] +pub struct PoolAddressArgs { + /// Address of the Staking pool + #[clap(long, parse(try_from_str = crate::common::types::load_account_arg))] + pub(crate) pool_address: AccountAddress, +} + +// This struct includes TypeInfo (account_address, module_name, and struct_name) +// and RotationProofChallenge-specific information (sequence_number, originator, current_auth_key, and new_public_key) +// Since the struct RotationProofChallenge is defined in "0x1::account::RotationProofChallenge", +// we will be passing in "0x1" to `account_address`, "account" to `module_name`, and "RotationProofChallenge" to `struct_name` +// Originator refers to the user's address +#[derive(Serialize, Deserialize)] +pub struct RotationProofChallenge { + // Should be `CORE_CODE_ADDRESS` + pub account_address: AccountAddress, + // Should be `account` + pub module_name: String, + // Should be `RotationProofChallenge` + pub struct_name: String, + pub sequence_number: u64, + pub originator: AccountAddress, + pub current_auth_key: AccountAddress, + pub new_public_key: Vec, +} + +#[derive(Debug, Parser)] +/// This is used for both entry functions and scripts. +pub struct ArgWithTypeVec { + /// Arguments combined with their type separated by spaces. + /// + /// Supported types [address, bool, hex, string, u8, u16, u32, u64, u128, u256, raw] + /// + /// Vectors may be specified using JSON array literal syntax (you may need to escape this with + /// quotes based on your shell interpreter) + /// + /// Example: `address:0x1 bool:true u8:0 u256:1234 "bool:[true, false]" 'address:[["0xace", "0xbee"], []]'` + /// + /// Vector is wrapped in a reusable struct for uniform CLI documentation. + #[clap(long, multiple_values = true)] + pub(crate) args: Vec, +} + +/// Common options for constructing an entry function transaction payload. +#[derive(Debug, Parser)] +pub struct EntryFunctionArguments { + /// Function name as `
<ADDRESS>::<MODULE_ID>::<FUNCTION_NAME>`
+    ///
+    /// Example: `0x842ed41fad9640a2ad08fdd7d3e4f7f505319aac7d67e1c0dd6a7cce8732c7e3::message::set_message`
+    #[clap(long)]
+    pub function_id: MemberId,
+
+    #[clap(flatten)]
+    pub(crate) arg_vec: ArgWithTypeVec,
+
+    /// TypeTag arguments separated by spaces.
+    ///
+    /// Example: `u8 u16 u32 u64 u128 u256 bool address vector signer`
+    #[clap(long, multiple_values = true)]
+    pub type_args: Vec,
+}
+
+impl EntryFunctionArguments {
+    /// Construct and return an entry function payload from function_id, args, and type_args.
+    pub fn create_entry_function_payload(self) -> CliTypedResult {
+        let args: Vec> = self
+            .arg_vec
+            .args
+            .into_iter()
+            .map(|arg_with_type| arg_with_type.arg)
+            .collect();
+
+        let mut parsed_type_args: Vec = Vec::new();
+        // These TypeArgs are used for generics
+        for type_arg in self.type_args.into_iter() {
+            let type_tag = TypeTag::try_from(type_arg.clone())
+                .map_err(|err| CliError::UnableToParse("--type-args", err.to_string()))?;
+            parsed_type_args.push(type_tag)
+        }
+
+        Ok(EntryFunction::new(
+            self.function_id.module_id,
+            self.function_id.member_id,
+            parsed_type_args,
+            args,
+        ))
+    }
+}
+
+/// Common options for interactions with a multisig account.
+#[derive(Clone, Debug, Parser, Serialize)]
+pub struct MultisigAccount {
+    /// The address of the multisig account to interact with.
+    #[clap(long, parse(try_from_str = crate::common::types::load_account_arg))]
+    pub(crate) multisig_address: AccountAddress,
+}
diff --git a/m1/movement/src/common/utils.rs b/m1/movement/src/common/utils.rs
new file mode 100644
index 00000000..601ae684
--- /dev/null
+++ b/m1/movement/src/common/utils.rs
@@ -0,0 +1,507 @@
+// Copyright © Aptos Foundation
+// SPDX-License-Identifier: Apache-2.0
+
+use crate::{
+    common::types::{
+        account_address_from_public_key, CliError, CliTypedResult, PromptOptions,
+        TransactionOptions, TransactionSummary,
+    },
+    config::GlobalConfig,
+    CliResult,
+};
+use aptos_build_info::build_information;
+use aptos_crypto::ed25519::{Ed25519PrivateKey, Ed25519PublicKey};
+use aptos_keygen::KeyGen;
+use aptos_logger::{debug, Level};
+use aptos_rest_client::{aptos_api_types::HashValue, Account, Client, State};
+use aptos_telemetry::service::telemetry_is_disabled;
+use aptos_types::{
+    account_address::create_multisig_account_address,
+    chain_id::ChainId,
+    transaction::{authenticator::AuthenticationKey, TransactionPayload},
+};
+use itertools::Itertools;
+use move_core_types::account_address::AccountAddress;
+use reqwest::Url;
+use serde::Serialize;
+#[cfg(unix)]
+use std::os::unix::fs::OpenOptionsExt;
+use std::{
+    collections::BTreeMap,
+    env,
+    fs::OpenOptions,
+    io::Write,
+    path::{Path, PathBuf},
+    str::FromStr,
+    time::{Duration, Instant, SystemTime},
+};
+
+/// Prompts for confirmation until a yes or no is given explicitly
+pub fn prompt_yes(prompt: &str) -> bool {
+    let mut result: Result = Err(());
+
+    // Read input until a yes or a no is given
+    while result.is_err() {
+        println!("{} [yes/no] >", prompt);
+        let mut input = String::new();
+        if std::io::stdin().read_line(&mut input).is_err() {
+            continue;
+        }
+        result = match input.trim().to_lowercase().as_str() {
+            "yes" | "y" => Ok(true),
+            "no" | "n" => Ok(false),
+            _ => Err(()),
+        };
+    }
+    result.unwrap()
+}
+
+/// Convert any successful response to Success
+pub async fn to_common_success_result(
+    command: &str,
+    start_time: Instant,
+    result: CliTypedResult,
+) -> CliResult {
+    to_common_result(command, start_time, result.map(|_| "Success")).await
+}
+
+/// For pretty printing outputs in
JSON +pub async fn to_common_result( + command: &str, + start_time: Instant, + result: CliTypedResult, +) -> CliResult { + let latency = start_time.elapsed(); + let is_err = result.is_err(); + + if !telemetry_is_disabled() { + let error = if let Err(ref error) = result { + // Only print the error type + Some(error.to_str()) + } else { + None + }; + send_telemetry_event(command, latency, !is_err, error).await; + } + + let result: ResultWrapper = result.into(); + let string = serde_json::to_string_pretty(&result).unwrap(); + if is_err { + Err(string) + } else { + Ok(string) + } +} + +pub fn cli_build_information() -> BTreeMap { + build_information!() +} + +/// Sends a telemetry event about the CLI build, command and result +async fn send_telemetry_event( + command: &str, + latency: Duration, + success: bool, + error: Option<&str>, +) { + // Collect the build information + let build_information = cli_build_information(); + + // Send the event + aptos_telemetry::cli_metrics::send_cli_telemetry_event( + build_information, + command.into(), + latency, + success, + error, + ) + .await; +} + +/// A result wrapper for displaying either a correct execution result or an error. +/// +/// The purpose of this is to have a pretty easy to recognize JSON output format e.g. +/// +/// { +/// "Result":{ +/// "encoded":{ ... } +/// } +/// } +/// +/// { +/// "Error":"Failed to run command" +/// } +/// +#[derive(Debug, Serialize)] +enum ResultWrapper { + Result(T), + Error(String), +} + +impl From> for ResultWrapper { + fn from(result: CliTypedResult) -> Self { + match result { + Ok(inner) => ResultWrapper::Result(inner), + Err(inner) => ResultWrapper::Error(inner.to_string()), + } + } +} + +/// Checks if a file exists, being overridden by `PromptOptions` +pub fn check_if_file_exists(file: &Path, prompt_options: PromptOptions) -> CliTypedResult<()> { + if file.exists() { + prompt_yes_with_override( + &format!( + "{:?} already exists, are you sure you want to overwrite it?", + file.as_os_str(), + ), + prompt_options, + )? 
+ } + + Ok(()) +} + +pub fn prompt_yes_with_override(prompt: &str, prompt_options: PromptOptions) -> CliTypedResult<()> { + if prompt_options.assume_no { + return Err(CliError::AbortedError); + } else if prompt_options.assume_yes { + return Ok(()); + } + + let is_yes = if let Some(response) = GlobalConfig::load()?.get_default_prompt_response() { + response + } else { + prompt_yes(prompt) + }; + + if is_yes { + Ok(()) + } else { + Err(CliError::AbortedError) + } +} + +pub fn read_from_file(path: &Path) -> CliTypedResult> { + std::fs::read(path) + .map_err(|e| CliError::UnableToReadFile(format!("{}", path.display()), e.to_string())) +} + +/// Write a `&[u8]` to a file +pub fn write_to_file(path: &Path, name: &str, bytes: &[u8]) -> CliTypedResult<()> { + write_to_file_with_opts(path, name, bytes, &mut OpenOptions::new()) +} + +/// Write a User only read / write file +pub fn write_to_user_only_file(path: &Path, name: &str, bytes: &[u8]) -> CliTypedResult<()> { + let mut opts = OpenOptions::new(); + #[cfg(unix)] + opts.mode(0o600); + write_to_file_with_opts(path, name, bytes, &mut opts) +} + +/// Write a `&[u8]` to a file with the given options +pub fn write_to_file_with_opts( + path: &Path, + name: &str, + bytes: &[u8], + opts: &mut OpenOptions, +) -> CliTypedResult<()> { + let mut file = opts + .write(true) + .create(true) + .truncate(true) + .open(path) + .map_err(|e| CliError::IO(name.to_string(), e))?; + file.write_all(bytes) + .map_err(|e| CliError::IO(name.to_string(), e)) +} + +/// Appends a file extension to a `Path` without overwriting the original extension. +pub fn append_file_extension( + file: &Path, + appended_extension: &'static str, +) -> CliTypedResult { + let extension = file + .extension() + .map(|extension| extension.to_str().unwrap_or_default()); + if let Some(extension) = extension { + Ok(file.with_extension(extension.to_owned() + "." + appended_extension)) + } else { + Ok(file.with_extension(appended_extension)) + } +} + +/// Retrieves account resource from the rest client +pub async fn get_account( + client: &aptos_rest_client::Client, + address: AccountAddress, +) -> CliTypedResult { + let account_response = client + .get_account(address) + .await + .map_err(|err| CliError::ApiError(err.to_string()))?; + Ok(account_response.into_inner()) +} + +/// Retrieves account resource from the rest client +pub async fn get_account_with_state( + client: &aptos_rest_client::Client, + address: AccountAddress, +) -> CliTypedResult<(Account, State)> { + let account_response = client + .get_account(address) + .await + .map_err(|err| CliError::ApiError(err.to_string()))?; + Ok(account_response.into_parts()) +} + +/// Retrieves sequence number from the rest client +pub async fn get_sequence_number( + client: &aptos_rest_client::Client, + address: AccountAddress, +) -> CliTypedResult { + Ok(get_account(client, address).await?.sequence_number) +} + +/// Retrieves the auth key from the rest client +pub async fn get_auth_key( + client: &aptos_rest_client::Client, + address: AccountAddress, +) -> CliTypedResult { + Ok(get_account(client, address).await?.authentication_key) +} + +/// Retrieves the chain id from the rest client +pub async fn chain_id(rest_client: &Client) -> CliTypedResult { + let state = rest_client + .get_ledger_information() + .await + .map_err(|err| CliError::ApiError(err.to_string()))? + .into_inner(); + Ok(ChainId::new(state.chain_id)) +} + +/// Error message for parsing a map +const PARSE_MAP_SYNTAX_MSG: &str = "Invalid syntax for map. 
Example: Name=Value,Name2=Value"; + +/// Parses an inline map of values +/// +/// Example: Name=Value,Name2=Value +pub fn parse_map(str: &str) -> anyhow::Result> +where + K::Err: 'static + std::error::Error + Send + Sync, + V::Err: 'static + std::error::Error + Send + Sync, +{ + let mut map = BTreeMap::new(); + + // Split pairs by commas + for pair in str.split_terminator(',') { + // Split pairs by = then trim off any spacing + let (first, second): (&str, &str) = pair + .split_terminator('=') + .collect_tuple() + .ok_or_else(|| anyhow::Error::msg(PARSE_MAP_SYNTAX_MSG))?; + let first = first.trim(); + let second = second.trim(); + if first.is_empty() || second.is_empty() { + return Err(anyhow::Error::msg(PARSE_MAP_SYNTAX_MSG)); + } + + // At this point, we just give error messages appropriate to parsing + let key: K = K::from_str(first)?; + let value: V = V::from_str(second)?; + map.insert(key, value); + } + Ok(map) +} + +/// Generate a vanity account for Ed25519 single signer scheme, either standard or multisig. +/// +/// The default authentication key for an Ed25519 account is the same as the account address. Hence +/// for a standard account, this function generates Ed25519 private keys until finding one that has +/// an authentication key (account address) that begins with the given vanity prefix. +/// +/// For a multisig account, this function generates private keys until finding one that can create +/// a multisig account with the given vanity prefix as its first transaction (sequence number 0). +/// +/// Note that while a valid hex string must have an even number of characters, a vanity prefix can +/// have an odd number of characters since account addresses are human-readable. +/// +/// `vanity_prefix_ref` is a reference to a hex string vanity prefix, optionally prefixed with "0x". +/// For example "0xaceface" or "d00d". +pub fn generate_vanity_account_ed25519( + vanity_prefix_ref: &str, + multisig: bool, +) -> CliTypedResult { + let vanity_prefix_ref = vanity_prefix_ref + .strip_prefix("0x") + .unwrap_or(vanity_prefix_ref); // Optionally strip leading 0x from input string. + let mut to_check_if_is_hex = String::from(vanity_prefix_ref); + // If an odd number of characters append a 0 for verifying that prefix contains valid hex. + if to_check_if_is_hex.len() % 2 != 0 { + to_check_if_is_hex += "0" + }; + hex::decode(to_check_if_is_hex). // Check that the vanity prefix can be decoded into hex. + map_err(|error| CliError::CommandArgumentError(format!( + "The vanity prefix could not be decoded to hex: {}", error)))?; + let mut key_generator = KeyGen::from_os_rng(); // Get random key generator. + loop { + // Generate new keys until finding a match against the vanity prefix. 
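+        // Note: matching is brute force, so each additional hex character in the prefix
+        // multiplies the expected number of attempts by roughly 16.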
+ let private_key = key_generator.generate_ed25519_private_key(); + let mut account_address = + account_address_from_public_key(&Ed25519PublicKey::from(&private_key)); + if multisig { + account_address = create_multisig_account_address(account_address, 0) + }; + if account_address + .short_str_lossless() + .starts_with(vanity_prefix_ref) + { + return Ok(private_key); + }; + } +} + +pub fn current_dir() -> CliTypedResult { + env::current_dir().map_err(|err| { + CliError::UnexpectedError(format!("Failed to get current directory {}", err)) + }) +} + +pub fn dir_default_to_current(maybe_dir: Option) -> CliTypedResult { + if let Some(dir) = maybe_dir { + Ok(dir) + } else { + current_dir() + } +} + +pub fn create_dir_if_not_exist(dir: &Path) -> CliTypedResult<()> { + // Check if the directory exists, if it's not a dir, it will also fail here + if !dir.exists() || !dir.is_dir() { + std::fs::create_dir_all(dir).map_err(|e| CliError::IO(dir.display().to_string(), e))?; + debug!("Created {} folder", dir.display()); + } else { + debug!("{} folder already exists", dir.display()); + } + Ok(()) +} + +/// Reads a line from input +pub fn read_line(input_name: &'static str) -> CliTypedResult { + let mut input_buf = String::new(); + let _ = std::io::stdin() + .read_line(&mut input_buf) + .map_err(|err| CliError::IO(input_name.to_string(), err))?; + + Ok(input_buf) +} + +/// Fund account (and possibly create it) from a faucet +pub async fn fund_account( + faucet_url: Url, + num_octas: u64, + address: AccountAddress, +) -> CliTypedResult> { + let response = reqwest::Client::new() + .post(format!( + "{}mint?amount={}&auth_key={}", + faucet_url, num_octas, address + )) + .body("{}") + .send() + .await + .map_err(|err| { + CliError::ApiError(format!("Failed to fund account with faucet: {:#}", err)) + })?; + if response.status() == 200 { + let hashes: Vec = response + .json() + .await + .map_err(|err| CliError::UnexpectedError(err.to_string()))?; + Ok(hashes) + } else { + Err(CliError::ApiError(format!( + "Faucet issue: {}", + response.status() + ))) + } +} + +/// Fund by public key (and possibly create it) from a faucet +pub async fn fund_pub_key( + faucet_url: Url, + pub_key: String, +) -> CliTypedResult> { + let url = format!( + "{}v1/mint?&pub_key={}", + faucet_url, pub_key + ); + let response = reqwest::Client::new() + .post(url) + .body("{}") + .send() + .await + .map_err(|err| { + CliError::ApiError(format!("Failed to fund account with faucet: {:#}", err)) + })?; + if response.status() == 200 { + let hashes: Vec = response + .json() + .await + .map_err(|err| CliError::UnexpectedError(err.to_string()))?; + Ok(hashes) + } else { + Err(CliError::ApiError(format!( + "Faucet issue: {}", + response.status() + ))) + } +} + +/// Wait for transactions, returning an error if any of them fail. +pub async fn wait_for_transactions( + client: &aptos_rest_client::Client, + hashes: Vec, +) -> CliTypedResult<()> { + let sys_time = SystemTime::now() + .duration_since(SystemTime::UNIX_EPOCH) + .map_err(|e| CliError::UnexpectedError(e.to_string()))? + .as_secs() + + 30; + for hash in hashes { + client + .wait_for_transaction_by_hash( + hash.into(), + sys_time, + Some(Duration::from_secs(60)), + None, + ) + .await?; + } + Ok(()) +} + +pub fn start_logger() { + let mut logger = aptos_logger::Logger::new(); + logger.channel_size(1000).is_async(false).level(Level::Warn); + logger.build(); +} + +/// For transaction payload and options, either get gas profile or submit for execution. 
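+///
+/// Returns the gas profiler's summary when the `profile_gas` flag is set; otherwise the
+/// transaction is submitted and its on-chain summary is returned.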
+pub async fn profile_or_submit( + payload: TransactionPayload, + txn_options_ref: &TransactionOptions, +) -> CliTypedResult { + // Profile gas if needed. + if txn_options_ref.profile_gas { + txn_options_ref.profile_gas(payload).await + } else { + // Otherwise submit the transaction. + txn_options_ref + .submit_transaction(payload) + .await + .map(TransactionSummary::from) + } +} diff --git a/m1/movement/src/config/mod.rs b/m1/movement/src/config/mod.rs new file mode 100644 index 00000000..af4da540 --- /dev/null +++ b/m1/movement/src/config/mod.rs @@ -0,0 +1,360 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use crate::{ + common::{ + types::{ + CliCommand, CliConfig, CliError, CliResult, CliTypedResult, ConfigSearchMode, + ProfileSummary, CONFIG_FOLDER, + }, + utils::{create_dir_if_not_exist, current_dir, read_from_file, write_to_user_only_file}, + }, + genesis::git::{from_yaml, to_yaml}, + Tool, +}; +use async_trait::async_trait; +use clap::{ArgEnum, CommandFactory, Parser}; +use clap_complete::{generate, Shell}; +use serde::{Deserialize, Serialize}; +use std::{collections::BTreeMap, fmt::Formatter, path::PathBuf, str::FromStr}; + +/// Tool for interacting with configuration of the Movement CLI tool +/// +/// This tool handles the global configuration of the CLI tool for +/// default configuration, and user specific settings. +#[derive(Parser)] +pub enum ConfigTool { + Init(crate::common::init::InitTool), + GenerateShellCompletions(GenerateShellCompletions), + SetGlobalConfig(SetGlobalConfig), + ShowGlobalConfig(ShowGlobalConfig), + ShowProfiles(ShowProfiles), +} + +impl ConfigTool { + pub async fn execute(self) -> CliResult { + match self { + ConfigTool::Init(tool) => tool.execute_serialized_success().await, + ConfigTool::GenerateShellCompletions(tool) => tool.execute_serialized_success().await, + ConfigTool::SetGlobalConfig(tool) => tool.execute_serialized().await, + ConfigTool::ShowGlobalConfig(tool) => tool.execute_serialized().await, + ConfigTool::ShowProfiles(tool) => tool.execute_serialized().await, + } + } +} + +/// Generate shell completion files +/// +/// First generate the completion file, then follow the shell specific directions on how +/// to install the completion file. 
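+///
+/// Example (the subcommand path assumes clap's default kebab-case naming):
+/// `movement config generate-shell-completions --shell bash --output-file movement.bash`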
+#[derive(Parser)] +pub struct GenerateShellCompletions { + /// Shell to generate completions for one of [bash, elvish, powershell, zsh] + #[clap(long)] + shell: Shell, + + /// File to output shell completions to + #[clap(long, parse(from_os_str))] + output_file: PathBuf, +} + +#[async_trait] +impl CliCommand<()> for GenerateShellCompletions { + fn command_name(&self) -> &'static str { + "GenerateShellCompletions" + } + + async fn execute(self) -> CliTypedResult<()> { + let mut command = Tool::command(); + let mut file = std::fs::File::create(self.output_file.as_path()) + .map_err(|err| CliError::IO(self.output_file.display().to_string(), err))?; + generate(self.shell, &mut command, "movement".to_string(), &mut file); + Ok(()) + } +} + +/// Set global configuration settings +/// +/// Any configuration flags that are not provided will not be changed +#[derive(Parser, Debug)] +pub struct SetGlobalConfig { + /// A configuration for where to place and use the config + /// + /// `Workspace` will put the `.aptos/` folder in the current directory, where + /// `Global` will put the `.aptos/` folder in your home directory + #[clap(long)] + config_type: Option, + /// A configuration for how to expect the prompt response + /// + /// Option can be one of ["yes", "no", "prompt"], "yes" runs cli with "--assume-yes", where + /// "no" runs cli with "--assume-no", default: "prompt" + #[clap(long)] + default_prompt_response: Option, +} + +#[async_trait] +impl CliCommand for SetGlobalConfig { + fn command_name(&self) -> &'static str { + "SetGlobalConfig" + } + + async fn execute(self) -> CliTypedResult { + // Load the global config + let mut config = GlobalConfig::load()?; + + // Enable all features that are actually listed + if let Some(config_type) = self.config_type { + config.config_type = Some(config_type); + } + + if let Some(default_prompt_response) = self.default_prompt_response { + config.default_prompt_response = default_prompt_response; + } + + config.save()?; + config.display() + } +} + +/// Shows the current profiles available +/// +/// This will only show public information and will not show +/// private information +#[derive(Parser, Debug)] +pub struct ShowProfiles { + /// Which profile to show + /// + /// If provided, show only this profile + #[clap(long)] + profile: Option, +} + +#[async_trait] +impl CliCommand> for ShowProfiles { + fn command_name(&self) -> &'static str { + "ShowProfiles" + } + + async fn execute(self) -> CliTypedResult> { + // Load the profile config + let config = CliConfig::load(ConfigSearchMode::CurrentDir)?; + Ok(config + .profiles + .unwrap_or_default() + .into_iter() + .filter(|(key, _)| { + if let Some(ref profile) = self.profile { + profile == key + } else { + true + } + }) + .map(|(key, profile)| (key, ProfileSummary::from(&profile))) + .collect()) + } +} + +/// Shows the properties in the global config +#[derive(Parser, Debug)] +pub struct ShowGlobalConfig {} + +#[async_trait] +impl CliCommand for ShowGlobalConfig { + fn command_name(&self) -> &'static str { + "ShowGlobalConfig" + } + + async fn execute(self) -> CliTypedResult { + // Load the global config + let config = GlobalConfig::load()?; + + config.display() + } +} + +const GLOBAL_CONFIG_FILE: &str = "global_config.yaml"; + +/// A global configuration for global settings related to a user +#[derive(Serialize, Deserialize, Debug, Default)] +pub struct GlobalConfig { + /// Whether to be using Global or Workspace mode + #[serde(skip_serializing_if = "Option::is_none")] + pub config_type: Option, + /// Prompt 
response type + #[serde(default)] + pub default_prompt_response: PromptResponseType, +} + +impl GlobalConfig { + /// Fill in defaults for display via the CLI + pub fn display(mut self) -> CliTypedResult { + if self.config_type.is_none() { + self.config_type = Some(ConfigType::default()); + } + + Ok(self) + } + + pub fn load() -> CliTypedResult { + let path = global_folder()?.join(GLOBAL_CONFIG_FILE); + if path.exists() { + from_yaml(&String::from_utf8(read_from_file(path.as_path())?)?) + } else { + // If we don't have a config, let's load the default + Ok(GlobalConfig::default()) + } + } + + /// Get the config location based on the type + pub fn get_config_location(&self, mode: ConfigSearchMode) -> CliTypedResult { + match self.config_type.unwrap_or_default() { + ConfigType::Global => global_folder(), + ConfigType::Workspace => find_workspace_config(current_dir()?, mode), + } + } + + /// Get the prompt options from global config + pub fn get_default_prompt_response(&self) -> Option { + match self.default_prompt_response { + PromptResponseType::Prompt => None, // prompt + PromptResponseType::Yes => Some(true), // assume_yes + PromptResponseType::No => Some(false), // assume_no + } + } + + fn save(&self) -> CliTypedResult<()> { + let global_folder = global_folder()?; + create_dir_if_not_exist(global_folder.as_path())?; + + write_to_user_only_file( + global_folder.join(GLOBAL_CONFIG_FILE).as_path(), + "Global Config", + &to_yaml(&self)?.into_bytes(), + ) + } +} + +fn global_folder() -> CliTypedResult { + if let Some(dir) = dirs::home_dir() { + Ok(dir.join(CONFIG_FOLDER)) + } else { + Err(CliError::UnexpectedError( + "Unable to retrieve home directory".to_string(), + )) + } +} + +fn find_workspace_config( + starting_path: PathBuf, + mode: ConfigSearchMode, +) -> CliTypedResult { + match mode { + ConfigSearchMode::CurrentDir => Ok(starting_path.join(CONFIG_FOLDER)), + ConfigSearchMode::CurrentDirAndParents => { + let mut current_path = starting_path.clone(); + loop { + current_path.push(CONFIG_FOLDER); + if current_path.is_dir() { + break Ok(current_path); + } else if !(current_path.pop() && current_path.pop()) { + // If we aren't able to find the folder, we'll create a new one right here + break Ok(starting_path.join(CONFIG_FOLDER)); + } + } + }, + } +} + +const GLOBAL: &str = "global"; +const WORKSPACE: &str = "workspace"; + +/// A configuration for where to place and use the config +/// +/// Workspace allows for multiple configs based on location, where +/// Global allows for one config for every part of the code +#[derive(Debug, Copy, Clone, Serialize, Deserialize, ArgEnum)] +pub enum ConfigType { + /// Per system user configuration put in `/.aptos` + Global, + /// Per directory configuration put in `/.aptos` + Workspace, +} + +impl Default for ConfigType { + fn default() -> Self { + // TODO: When we version up, we can change this to global + Self::Workspace + } +} + +impl std::fmt::Display for ConfigType { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + f.write_str(match self { + ConfigType::Global => GLOBAL, + ConfigType::Workspace => WORKSPACE, + }) + } +} + +impl FromStr for ConfigType { + type Err = CliError; + + fn from_str(s: &str) -> Result { + match s.to_lowercase().trim() { + GLOBAL => Ok(Self::Global), + WORKSPACE => Ok(Self::Workspace), + _ => Err(CliError::CommandArgumentError( + "Invalid config type, must be one of [global, workspace]".to_string(), + )), + } + } +} + +const PROMPT: &str = "prompt"; +const ASSUME_YES: &str = "yes"; +const ASSUME_NO: &str = "no"; 
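As a side note on the workspace lookup above: `find_workspace_config` walks from the starting directory up through its parents until an existing config folder is found, and otherwise falls back to creating one under the starting directory. Below is a minimal standalone sketch of that same walk; the `.movement` folder name is only an illustrative assumption, since the CLI itself uses its `CONFIG_FOLDER` constant.

```rust
use std::path::{Path, PathBuf};

/// Walk from `start` up through its parents looking for an existing `folder`
/// directory; fall back to `start/folder` if no ancestor has one.
fn find_config_folder(start: &Path, folder: &str) -> PathBuf {
    let mut current = start.to_path_buf();
    loop {
        let candidate = current.join(folder);
        if candidate.is_dir() {
            return candidate;
        }
        // No parent left to climb to: default to the starting directory.
        if !current.pop() {
            return start.join(folder);
        }
    }
}

fn main() {
    // Prints "/tmp/project/.movement" if that directory already exists,
    // otherwise "/tmp/project/sub/.movement".
    println!(
        "{}",
        find_config_folder(Path::new("/tmp/project/sub"), ".movement").display()
    );
}
```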
+ +/// A configuration for how to expect the prompt response +/// +/// Option can be one of ["yes", "no", "prompt"], "yes" runs cli with "--assume-yes", where +/// "no" runs cli with "--assume-no", default: "prompt" +#[derive(Debug, Copy, Clone, Serialize, Deserialize, ArgEnum)] +pub enum PromptResponseType { + /// normal prompt + Prompt, + /// `--assume-yes` + Yes, + /// `--assume-no` + No, +} + +impl Default for PromptResponseType { + fn default() -> Self { + Self::Prompt + } +} + +impl std::fmt::Display for PromptResponseType { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + f.write_str(match self { + PromptResponseType::Prompt => PROMPT, + PromptResponseType::Yes => ASSUME_YES, + PromptResponseType::No => ASSUME_NO, + }) + } +} + +impl FromStr for PromptResponseType { + type Err = CliError; + + fn from_str(s: &str) -> Result { + match s.to_lowercase().trim() { + PROMPT => Ok(Self::Prompt), + ASSUME_YES => Ok(Self::Yes), + ASSUME_NO => Ok(Self::No), + _ => Err(CliError::CommandArgumentError( + "Invalid prompt response type, must be one of [yes, no, prompt]".to_string(), + )), + } + } +} diff --git a/m1/movement/src/faucet/mod.rs b/m1/movement/src/faucet/mod.rs new file mode 100644 index 00000000..9564fb9c --- /dev/null +++ b/m1/movement/src/faucet/mod.rs @@ -0,0 +1,41 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use crate::common::{ + types::{CliCommand, CliTypedResult} +}; +use async_trait::async_trait; +use crate::common::types::{FaucetOptions, ProfileOptions, RestOptions}; +use crate::common::utils::{fund_pub_key, wait_for_transactions}; +use clap::Parser; + +#[derive(Debug, Parser)] +pub struct FaucetTool { + #[clap(long)] + pub_key: String, + #[clap(flatten)] + pub(crate) faucet_options: FaucetOptions, + #[clap(flatten)] + pub(crate) rest_options: RestOptions, +} + +#[async_trait] +impl CliCommand for FaucetTool { + fn command_name(&self) -> &'static str { + "Faucet" + } + + async fn execute(self) -> CliTypedResult { + let profile = ProfileOptions::default(); + let hashes = fund_pub_key( + self.faucet_options.faucet_url(&profile)?, + self.pub_key.clone(), + ).await?; + let client = self.rest_options.client_raw(self.faucet_options.faucet_url(&profile)?)?; + wait_for_transactions(&client, hashes).await?; + return Ok(format!( + "Added 1000_000_000 Octas to account {}", self.pub_key + )); + } +} + diff --git a/m1/movement/src/ffi.rs b/m1/movement/src/ffi.rs new file mode 100644 index 00000000..af9ba937 --- /dev/null +++ b/m1/movement/src/ffi.rs @@ -0,0 +1,85 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +#![allow(unsafe_code)] + +use crate::Tool; +use clap::Parser; +use std::{ + ffi::{c_char, CStr, CString}, + thread, +}; +use tokio::runtime::Runtime; + +/// # Safety +/// +/// Run the movement CLI synchronously +/// Note: This function should only be called from other SDK (i.g Typescript) +/// +/// Return: the pointer to CLIResult c string +#[no_mangle] +pub unsafe extern "C" fn run_aptos_sync(s: *const c_char) -> *const c_char { + let c_str = unsafe { + assert!(!s.is_null()); + CStr::from_ptr(s) + }; + + // split string by spaces + let input_string = c_str.to_str().unwrap().split_whitespace(); + + // Create a new Tokio runtime and block on the execution of `cli.execute()` + let result_string = Runtime::new().unwrap().block_on(async move { + let cli = Tool::parse_from(input_string); + let result = cli.execute().await; + result + }); + + let res_cstr = CString::new(result_string.unwrap()).unwrap(); + + // 
Return a pointer to the C string + res_cstr.into_raw() +} + +/// # Safety +/// +/// Run the movement CLI async; Use this function if you are expecting the movement CLI command +/// to run in the background, or different thread +/// Note: This function should only be called from other SDK (i.g Typescript) +/// +/// Return: the pointer to c string: 'true' +#[no_mangle] +pub unsafe extern "C" fn run_aptos_async(s: *mut c_char) -> *mut c_char { + println!("Running movement..."); + let c_str = unsafe { + assert!(!s.is_null()); + CStr::from_ptr(s) + }; + + // Spawn a new thread to run the CLI + thread::spawn(move || { + let rt = Runtime::new().unwrap(); + let input_string = c_str.to_str().unwrap().split_whitespace(); + let cli = Tool::parse_from(input_string); + + // Run the CLI once + rt.block_on(async { cli.execute().await }) + .expect("Failed to run CLI"); + }); + + // Return pointer + CString::new("true").unwrap().into_raw() +} + +/// # Safety +/// +/// After running the movement CLI using FFI. Make sure to invoke this method to free up or +/// deallocate the memory +#[no_mangle] +pub unsafe extern "C" fn free_cstring(s: *mut c_char) { + unsafe { + if s.is_null() { + return; + } + CString::from_raw(s) + }; +} diff --git a/m1/movement/src/genesis/git.rs b/m1/movement/src/genesis/git.rs new file mode 100644 index 00000000..38368915 --- /dev/null +++ b/m1/movement/src/genesis/git.rs @@ -0,0 +1,264 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use crate::{ + common::{ + types::{CliError, CliTypedResult}, + utils::{create_dir_if_not_exist, write_to_file}, + }, + CliCommand, +}; +use aptos_config::config::Token; +use aptos_framework::ReleaseBundle; +use aptos_genesis::config::Layout; +use aptos_github_client::Client as GithubClient; +use async_trait::async_trait; +use clap::Parser; +use serde::{de::DeserializeOwned, Serialize}; +use std::{ + fmt::Debug, + io::Read, + path::{Path, PathBuf}, + str::FromStr, +}; + +pub const LAYOUT_FILE: &str = "layout.yaml"; +pub const OPERATOR_FILE: &str = "operator.yaml"; +pub const OWNER_FILE: &str = "owner.yaml"; +pub const FRAMEWORK_NAME: &str = "framework.mrb"; +pub const BALANCES_FILE: &str = "balances.yaml"; +pub const EMPLOYEE_VESTING_ACCOUNTS_FILE: &str = "employee_vesting_accounts.yaml"; + +/// Setup a shared Git repository for Genesis +/// +/// This will setup a folder or an online Github repository to be used +/// for Genesis. If it's the local, it will create the folders but not +/// set up a Git repository. +#[derive(Parser)] +pub struct SetupGit { + #[clap(flatten)] + pub(crate) git_options: GitOptions, + + /// Path to the `Layout` file which defines where all the files are + #[clap(long, parse(from_os_str))] + pub(crate) layout_file: PathBuf, +} + +#[async_trait] +impl CliCommand<()> for SetupGit { + fn command_name(&self) -> &'static str { + "SetupGit" + } + + async fn execute(self) -> CliTypedResult<()> { + let layout = Layout::from_disk(&self.layout_file)?; + + // Upload layout file to ensure we can read later + let client = self.git_options.get_client()?; + client.put(Path::new(LAYOUT_FILE), &layout)?; + + Ok(()) + } +} + +#[derive(Clone, Debug, Default)] +pub struct GithubRepo { + owner: String, + repository: String, +} + +impl FromStr for GithubRepo { + type Err = CliError; + + fn from_str(s: &str) -> Result { + let parts: Vec<_> = s.split('/').collect(); + if parts.len() != 2 { + Err(CliError::CommandArgumentError("Invalid repository must be of the form 'owner/repository` e.g. 
diff --git a/m1/movement/src/genesis/git.rs b/m1/movement/src/genesis/git.rs
new file mode 100644
index 00000000..38368915
--- /dev/null
+++ b/m1/movement/src/genesis/git.rs
@@ -0,0 +1,264 @@
+// Copyright © Aptos Foundation
+// SPDX-License-Identifier: Apache-2.0
+
+use crate::{
+    common::{
+        types::{CliError, CliTypedResult},
+        utils::{create_dir_if_not_exist, write_to_file},
+    },
+    CliCommand,
+};
+use aptos_config::config::Token;
+use aptos_framework::ReleaseBundle;
+use aptos_genesis::config::Layout;
+use aptos_github_client::Client as GithubClient;
+use async_trait::async_trait;
+use clap::Parser;
+use serde::{de::DeserializeOwned, Serialize};
+use std::{
+    fmt::Debug,
+    io::Read,
+    path::{Path, PathBuf},
+    str::FromStr,
+};
+
+pub const LAYOUT_FILE: &str = "layout.yaml";
+pub const OPERATOR_FILE: &str = "operator.yaml";
+pub const OWNER_FILE: &str = "owner.yaml";
+pub const FRAMEWORK_NAME: &str = "framework.mrb";
+pub const BALANCES_FILE: &str = "balances.yaml";
+pub const EMPLOYEE_VESTING_ACCOUNTS_FILE: &str = "employee_vesting_accounts.yaml";
+
+/// Set up a shared Git repository for Genesis
+///
+/// This will set up a folder or an online Github repository to be used
+/// for Genesis. If it is local, it will create the folders but will not
+/// set up a Git repository.
+#[derive(Parser)]
+pub struct SetupGit {
+    #[clap(flatten)]
+    pub(crate) git_options: GitOptions,
+
+    /// Path to the `Layout` file which defines where all the files are
+    #[clap(long, parse(from_os_str))]
+    pub(crate) layout_file: PathBuf,
+}
+
+#[async_trait]
+impl CliCommand<()> for SetupGit {
+    fn command_name(&self) -> &'static str {
+        "SetupGit"
+    }
+
+    async fn execute(self) -> CliTypedResult<()> {
+        let layout = Layout::from_disk(&self.layout_file)?;
+
+        // Upload the layout file to ensure we can read it later
+        let client = self.git_options.get_client()?;
+        client.put(Path::new(LAYOUT_FILE), &layout)?;
+
+        Ok(())
+    }
+}
+
+#[derive(Clone, Debug, Default)]
+pub struct GithubRepo {
+    owner: String,
+    repository: String,
+}
+
+impl FromStr for GithubRepo {
+    type Err = CliError;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        let parts: Vec<_> = s.split('/').collect();
+        if parts.len() != 2 {
+            Err(CliError::CommandArgumentError("Invalid repository, must be of the form 'owner/repository', e.g. 'aptos-labs/aptos-core'".to_string()))
+        } else {
+            Ok(GithubRepo {
+                owner: parts.first().unwrap().to_string(),
+                repository: parts.get(1).unwrap().to_string(),
+            })
+        }
+    }
+}
+
+#[derive(Clone, Default, Parser)]
+pub struct GitOptions {
+    /// Github repository e.g. 'aptos-labs/aptos-core'
+    ///
+    /// Mutually exclusive with `--local-repository-dir`
+    #[clap(long)]
+    pub(crate) github_repository: Option<GithubRepo>,
+
+    /// Github repository branch e.g. main
+    #[clap(long, default_value = "main")]
+    pub(crate) github_branch: String,
+
+    /// Path to the Github API token. The token must have repo:* permissions
+    #[clap(long, parse(from_os_str))]
+    pub(crate) github_token_file: Option<PathBuf>,
+
+    /// Path to the local git repository
+    ///
+    /// Mutually exclusive with `--github-repository`
+    #[clap(long, parse(from_os_str))]
+    pub(crate) local_repository_dir: Option<PathBuf>,
+}
+
+impl GitOptions {
+    pub fn get_client(self) -> CliTypedResult<Client> {
+        if self.github_repository.is_none()
+            && self.github_token_file.is_none()
+            && self.local_repository_dir.is_some()
+        {
+            Ok(Client::local(self.local_repository_dir.unwrap()))
+        } else if self.github_repository.is_some()
+            && self.github_token_file.is_some()
+            && self.local_repository_dir.is_none()
+        {
+            Client::github(
+                self.github_repository.unwrap(),
+                self.github_branch,
+                self.github_token_file.unwrap(),
+            )
+        } else {
+            Err(CliError::CommandArgumentError("Must provide either only --local-repository-dir or both --github-repository and --github-token-file".to_string()))
+        }
+    }
+}
+
+/// A client for abstracting away local vs Github storage
+///
+/// Note: Writes do not commit locally
+pub enum Client {
+    Local(PathBuf),
+    Github(GithubClient),
+}
+
+impl Client {
+    pub fn local(path: PathBuf) -> Client {
+        Client::Local(path)
+    }
+
+    pub fn github(
+        repository: GithubRepo,
+        branch: String,
+        token_path: PathBuf,
+    ) -> CliTypedResult<Client> {
+        let token = Token::FromDisk(token_path).read_token()?;
+        Ok(Client::Github(GithubClient::new(
+            repository.owner,
+            repository.repository,
+            branch,
+            token,
+        )))
+    }
+
+    /// Retrieves an object as a YAML encoded file from the appropriate storage
+    pub fn get<T: DeserializeOwned + Debug>(&self, path: &Path) -> CliTypedResult<T> {
+        match self {
+            Client::Local(local_repository_path) => {
+                let path = local_repository_path.join(path);
+
+                if !path.exists() {
+                    return Err(CliError::UnableToReadFile(
+                        path.display().to_string(),
+                        "File not found".to_string(),
+                    ));
+                }
+
+                eprintln!("Reading {}", path.display());
+                let mut file = std::fs::File::open(path.as_path())
+                    .map_err(|e| CliError::IO(path.display().to_string(), e))?;
+
+                let mut contents = String::new();
+                file.read_to_string(&mut contents)
+                    .map_err(|e| CliError::IO(path.display().to_string(), e))?;
+                from_yaml(&contents)
+            },
+            Client::Github(client) => {
+                from_base64_encoded_yaml(&client.get_file(&path.display().to_string())?)
+            },
+        }
+    }
+
+    /// Puts an object as a YAML encoded file to the appropriate storage
+    pub fn put<T: Serialize + ?Sized>(&self, name: &Path, input: &T) -> CliTypedResult<()> {
+        match self {
+            Client::Local(local_repository_path) => {
+                let path = local_repository_path.join(name);
+
+                // Create the repository path and any sub-directories
+                if let Some(dir) = path.parent() {
+                    self.create_dir(dir)?;
+                } else {
+                    return Err(CliError::UnexpectedError(format!(
+                        "Path should always have a parent {}",
+                        path.display()
+                    )));
+                }
+                write_to_file(
+                    path.as_path(),
+                    &path.display().to_string(),
+                    to_yaml(input)?.as_bytes(),
+                )?;
+            },
+            Client::Github(client) => {
+                client.put(&name.display().to_string(), &to_base64_encoded_yaml(input)?)?;
+            },
+        }
+
+        Ok(())
+    }
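A minimal round-trip sketch of how `get` and `put` compose over the local backend, assuming it sits in this module so the imports above and the `to_yaml` helper defined further below are in scope; the repository directory argument and the use of `Layout::default()` are illustrative assumptions.

fn layout_round_trip(repo_dir: PathBuf) -> CliTypedResult<()> {
    // Write a Layout as YAML into the local repository folder, then read it back.
    let client = Client::local(repo_dir);
    let layout = Layout::default();
    client.put(Path::new(LAYOUT_FILE), &layout)?;
    let read_back: Layout = client.get(Path::new(LAYOUT_FILE))?;
    assert_eq!(to_yaml(&layout)?, to_yaml(&read_back)?);
    Ok(())
}

On the Github backend the same two calls transparently base64-encode and decode the YAML, which is why the base64 helpers below mirror the plain YAML helpers.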
+
+    pub fn create_dir(&self, dir: &Path) -> CliTypedResult<()> {
+        match self {
+            Client::Local(local_repository_path) => {
+                let path = local_repository_path.join(dir);
+                create_dir_if_not_exist(path.as_path())?;
+            },
+            Client::Github(_) => {
+                // There's no such thing as an empty directory in Git, so do nothing
+            },
+        }
+
+        Ok(())
+    }
+
+    /// Retrieve the framework release bundle.
+    pub fn get_framework(&self) -> CliTypedResult<ReleaseBundle> {
+        match self {
+            Client::Local(local_repository_path) => {
+                let path = local_repository_path.join(FRAMEWORK_NAME);
+                if !path.exists() {
+                    return Err(CliError::UnableToReadFile(
+                        path.display().to_string(),
+                        "File not found".to_string(),
+                    ));
+                }
+                Ok(ReleaseBundle::read(path)?)
+            },
+            Client::Github(client) => {
+                let bytes = base64::decode(client.get_file(FRAMEWORK_NAME)?)?;
+                Ok(bcs::from_bytes::<ReleaseBundle>(&bytes)?)
+            },
+        }
+    }
+}
+
+pub fn to_yaml<T: Serialize + ?Sized>(input: &T) -> CliTypedResult<String> {
+    Ok(serde_yaml::to_string(input)?)
+}
+
+pub fn from_yaml<T: DeserializeOwned>(input: &str) -> CliTypedResult<T> {
+    Ok(serde_yaml::from_str(input)?)
+}
+
+pub fn to_base64_encoded_yaml<T: Serialize + ?Sized>(input: &T) -> CliTypedResult<String> {
+    Ok(base64::encode(to_yaml(input)?))
+}
+
+pub fn from_base64_encoded_yaml<T: DeserializeOwned>(input: &str) -> CliTypedResult<T> {
+    from_yaml(&String::from_utf8(base64::decode(input)?)?)
+} diff --git a/m1/movement/src/genesis/keys.rs b/m1/movement/src/genesis/keys.rs new file mode 100644 index 00000000..c283ad7f --- /dev/null +++ b/m1/movement/src/genesis/keys.rs @@ -0,0 +1,344 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use crate::{ + common::{ + types::{CliError, CliTypedResult, OptionalPoolAddressArgs, PromptOptions, RngArgs}, + utils::{ + check_if_file_exists, create_dir_if_not_exist, current_dir, dir_default_to_current, + read_from_file, write_to_user_only_file, + }, + }, + genesis::git::{from_yaml, to_yaml, GitOptions, LAYOUT_FILE, OPERATOR_FILE, OWNER_FILE}, + governance::CompileScriptFunction, + CliCommand, +}; +use aptos_genesis::{ + config::{HostAndPort, Layout, OperatorConfiguration, OwnerConfiguration}, + keys::{generate_key_objects, PublicIdentity}, +}; +use aptos_types::{ + account_address::AccountAddress, + transaction::{Script, Transaction, WriteSetPayload}, +}; +use async_trait::async_trait; +use clap::Parser; +use std::path::{Path, PathBuf}; + +const PRIVATE_KEYS_FILE: &str = "private-keys.yaml"; +pub const PUBLIC_KEYS_FILE: &str = "public-keys.yaml"; +const VALIDATOR_FILE: &str = "validator-identity.yaml"; +const VFN_FILE: &str = "validator-full-node-identity.yaml"; + +/// Generate keys for a new validator +/// +/// Generates account key, consensus key, and network key for a validator +/// These keys are used for running a validator or operator in a network +#[derive(Parser)] +pub struct GenerateKeys { + /// Output directory for the key files + #[clap(long, parse(from_os_str))] + pub(crate) output_dir: Option, + + #[clap(flatten)] + pub(crate) pool_address_args: OptionalPoolAddressArgs, + #[clap(flatten)] + pub(crate) prompt_options: PromptOptions, + #[clap(flatten)] + pub rng_args: RngArgs, +} + +#[async_trait] +impl CliCommand> for GenerateKeys { + fn command_name(&self) -> &'static str { + "GenerateKeys" + } + + async fn execute(self) -> CliTypedResult> { + let output_dir = dir_default_to_current(self.output_dir.clone())?; + + let private_keys_file = output_dir.join(PRIVATE_KEYS_FILE); + let public_keys_file = output_dir.join(PUBLIC_KEYS_FILE); + let validator_file = output_dir.join(VALIDATOR_FILE); + let vfn_file = output_dir.join(VFN_FILE); + check_if_file_exists(private_keys_file.as_path(), self.prompt_options)?; + check_if_file_exists(public_keys_file.as_path(), self.prompt_options)?; + check_if_file_exists(validator_file.as_path(), self.prompt_options)?; + check_if_file_exists(vfn_file.as_path(), self.prompt_options)?; + + let mut key_generator = self.rng_args.key_generator()?; + let (mut validator_blob, mut vfn_blob, private_identity, public_identity) = + generate_key_objects(&mut key_generator)?; + + // Allow for the owner to be different than the operator + if let Some(pool_address) = self.pool_address_args.pool_address { + validator_blob.account_address = Some(pool_address); + vfn_blob.account_address = Some(pool_address); + } + + // Create the directory if it doesn't exist + create_dir_if_not_exist(output_dir.as_path())?; + + write_to_user_only_file( + private_keys_file.as_path(), + PRIVATE_KEYS_FILE, + to_yaml(&private_identity)?.as_bytes(), + )?; + write_to_user_only_file( + public_keys_file.as_path(), + PUBLIC_KEYS_FILE, + to_yaml(&public_identity)?.as_bytes(), + )?; + write_to_user_only_file( + validator_file.as_path(), + VALIDATOR_FILE, + to_yaml(&validator_blob)?.as_bytes(), + )?; + write_to_user_only_file(vfn_file.as_path(), VFN_FILE, to_yaml(&vfn_blob)?.as_bytes())?; + Ok(vec![ + public_keys_file, 
+ private_keys_file, + validator_file, + vfn_file, + ]) + } +} + +/// Set validator configuration for a single validator +/// +/// This will set the validator configuration for a single validator in the git repository. +/// It will have to be run for each validator expected at genesis. +#[derive(Parser)] +pub struct SetValidatorConfiguration { + /// Name of the validator + #[clap(long)] + pub(crate) username: String, + + /// Host and port pair for the validator e.g. 127.0.0.1:6180 or aptoslabs.com:6180 + #[clap(long)] + pub(crate) validator_host: HostAndPort, + + /// Host and port pair for the fullnode e.g. 127.0.0.1:6180 or aptoslabs.com:6180 + #[clap(long)] + pub(crate) full_node_host: Option, + + /// Stake amount for stake distribution + #[clap(long, default_value_t = 1)] + pub(crate) stake_amount: u64, + + /// Commission rate to pay operator + /// + /// This is a percentage between 0% and 100% + #[clap(long, default_value_t = 0)] + pub(crate) commission_percentage: u64, + + /// Whether the validator will be joining the genesis validator set + /// + /// If set this validator will already be in the validator set at genesis + #[clap(long)] + pub(crate) join_during_genesis: bool, + + /// Path to private identity generated from GenerateKeys + #[clap(long, parse(from_os_str))] + pub(crate) owner_public_identity_file: Option, + + /// Path to operator public identity, defaults to owner identity + #[clap(long, parse(from_os_str))] + pub(crate) operator_public_identity_file: Option, + + /// Path to voter public identity, defaults to owner identity + #[clap(long, parse(from_os_str))] + pub(crate) voter_public_identity_file: Option, + + #[clap(flatten)] + pub(crate) git_options: GitOptions, +} + +#[async_trait] +impl CliCommand<()> for SetValidatorConfiguration { + fn command_name(&self) -> &'static str { + "SetValidatorConfiguration" + } + + async fn execute(self) -> CliTypedResult<()> { + // Load owner + let owner_keys_file = if let Some(owner_keys_file) = self.owner_public_identity_file { + owner_keys_file + } else { + current_dir()?.join(PUBLIC_KEYS_FILE) + }; + let owner_identity = read_public_identity_file(owner_keys_file.as_path())?; + + // Load voter + let voter_identity = if let Some(voter_keys_file) = self.voter_public_identity_file { + read_public_identity_file(voter_keys_file.as_path())? 
+ } else { + owner_identity.clone() + }; + + // Load operator + let (operator_identity, operator_keys_file) = + if let Some(operator_keys_file) = self.operator_public_identity_file { + ( + read_public_identity_file(operator_keys_file.as_path())?, + operator_keys_file, + ) + } else { + (owner_identity.clone(), owner_keys_file) + }; + + // Extract the possible optional fields + let consensus_public_key = + if let Some(consensus_public_key) = operator_identity.consensus_public_key { + consensus_public_key + } else { + return Err(CliError::CommandArgumentError(format!( + "Failed to read consensus public key from public identity file {}", + operator_keys_file.display() + ))); + }; + + let validator_network_public_key = if let Some(validator_network_public_key) = + operator_identity.validator_network_public_key + { + validator_network_public_key + } else { + return Err(CliError::CommandArgumentError(format!( + "Failed to read validator network public key from public identity file {}", + operator_keys_file.display() + ))); + }; + + let consensus_proof_of_possession = if let Some(consensus_proof_of_possession) = + operator_identity.consensus_proof_of_possession + { + consensus_proof_of_possession + } else { + return Err(CliError::CommandArgumentError(format!( + "Failed to read consensus proof of possession from public identity file {}", + operator_keys_file.display() + ))); + }; + + // Only add the public key if there is a full node + let full_node_network_public_key = if self.full_node_host.is_some() { + operator_identity.full_node_network_public_key + } else { + None + }; + + // Build operator configuration file + let operator_config = OperatorConfiguration { + operator_account_address: operator_identity.account_address.into(), + operator_account_public_key: operator_identity.account_public_key.clone(), + consensus_public_key, + consensus_proof_of_possession, + validator_network_public_key, + validator_host: self.validator_host, + full_node_network_public_key, + full_node_host: self.full_node_host, + }; + + let owner_config = OwnerConfiguration { + owner_account_address: owner_identity.account_address.into(), + owner_account_public_key: owner_identity.account_public_key, + voter_account_address: voter_identity.account_address.into(), + voter_account_public_key: voter_identity.account_public_key, + operator_account_address: operator_identity.account_address.into(), + operator_account_public_key: operator_identity.account_public_key, + stake_amount: self.stake_amount, + commission_percentage: self.commission_percentage, + join_during_genesis: self.join_during_genesis, + }; + + let directory = PathBuf::from(&self.username); + let operator_file = directory.join(OPERATOR_FILE); + let owner_file = directory.join(OWNER_FILE); + + let git_client = self.git_options.get_client()?; + git_client.put(operator_file.as_path(), &operator_config)?; + git_client.put(owner_file.as_path(), &owner_config) + } +} + +pub fn read_public_identity_file(public_identity_file: &Path) -> CliTypedResult { + let bytes = read_from_file(public_identity_file)?; + from_yaml(&String::from_utf8(bytes).map_err(CliError::from)?) +} + +/// Generate a Layout template file +/// +/// This will generate a layout template file for genesis with some default values. To start a +/// new chain, these defaults should be carefully thought through and chosen. 
+#[derive(Parser)] +pub struct GenerateLayoutTemplate { + /// Path of the output layout template + #[clap(long, parse(from_os_str), default_value = LAYOUT_FILE)] + pub(crate) output_file: PathBuf, + + #[clap(flatten)] + pub(crate) prompt_options: PromptOptions, +} + +#[async_trait] +impl CliCommand<()> for GenerateLayoutTemplate { + fn command_name(&self) -> &'static str { + "GenerateLayoutTemplate" + } + + async fn execute(self) -> CliTypedResult<()> { + check_if_file_exists(self.output_file.as_path(), self.prompt_options)?; + let layout = Layout::default(); + + write_to_user_only_file( + self.output_file.as_path(), + &self.output_file.display().to_string(), + to_yaml(&layout)?.as_bytes(), + ) + } +} + +/// Generate a WriteSet genesis +/// +/// This will compile a Move script and generate a writeset from that script. +#[derive(Parser)] +pub struct GenerateAdminWriteSet { + /// Path of the output genesis file + #[clap(long, parse(from_os_str))] + pub(crate) output_file: PathBuf, + + /// Address of the account which execute this script. + #[clap(long, parse(try_from_str=crate::common::types::load_account_arg))] + pub(crate) execute_as: AccountAddress, + + #[clap(flatten)] + pub(crate) compile_proposal_args: CompileScriptFunction, + + #[clap(flatten)] + pub(crate) prompt_options: PromptOptions, +} + +#[async_trait] +impl CliCommand<()> for GenerateAdminWriteSet { + fn command_name(&self) -> &'static str { + "GenerateAdminWriteSet" + } + + async fn execute(self) -> CliTypedResult<()> { + check_if_file_exists(self.output_file.as_path(), self.prompt_options)?; + let (bytecode, _script_hash) = self + .compile_proposal_args + .compile("GenerateAdminWriteSet", self.prompt_options)?; + + let txn = Transaction::GenesisTransaction(WriteSetPayload::Script { + execute_as: self.execute_as, + script: Script::new(bytecode, vec![], vec![]), + }); + + write_to_user_only_file( + self.output_file.as_path(), + &self.output_file.display().to_string(), + &bcs::to_bytes(&txn).map_err(CliError::from)?, + ) + } +} diff --git a/m1/movement/src/genesis/mod.rs b/m1/movement/src/genesis/mod.rs new file mode 100644 index 00000000..66f1a5b6 --- /dev/null +++ b/m1/movement/src/genesis/mod.rs @@ -0,0 +1,926 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +pub mod git; +pub mod keys; +#[cfg(test)] +mod tests; +pub mod tools; + +use crate::{ + common::{ + types::{CliError, CliTypedResult, PromptOptions}, + utils::{check_if_file_exists, dir_default_to_current, write_to_file}, + }, + genesis::git::{ + Client, GitOptions, BALANCES_FILE, EMPLOYEE_VESTING_ACCOUNTS_FILE, LAYOUT_FILE, + OPERATOR_FILE, OWNER_FILE, + }, + CliCommand, CliResult, +}; +use aptos_crypto::{ + bls12381, ed25519::ED25519_PUBLIC_KEY_LENGTH, x25519, ValidCryptoMaterial, + ValidCryptoMaterialStringExt, +}; +use aptos_genesis::{ + builder::GenesisConfiguration, + config::{ + AccountBalanceMap, EmployeePoolMap, HostAndPort, Layout, StringOperatorConfiguration, + StringOwnerConfiguration, ValidatorConfiguration, + }, + mainnet::MainnetGenesisInfo, + GenesisInfo, +}; +use aptos_logger::info; +use aptos_types::{ + account_address::{AccountAddress, AccountAddressWithChecks}, + on_chain_config::{OnChainConsensusConfig, OnChainExecutionConfig}, +}; +use aptos_vm_genesis::{default_gas_schedule, AccountBalance, EmployeePool}; +use async_trait::async_trait; +use clap::Parser; +use std::{ + cmp::Ordering, + collections::{BTreeMap, BTreeSet, HashSet}, + path::{Path, PathBuf}, + str::FromStr, +}; + +const WAYPOINT_FILE: &str = "waypoint.txt"; 
+const GENESIS_FILE: &str = "genesis.blob"; + +/// Tool for setting up an Movement chain Genesis transaction +/// +/// This tool sets up a space for multiple initial "validator" +/// accounts to build a genesis transaction for a new chain. +#[derive(Parser)] +pub enum GenesisTool { + GenerateAdminWriteSet(keys::GenerateAdminWriteSet), + GenerateGenesis(GenerateGenesis), + GetPoolAddresses(tools::PoolAddresses), + GenerateKeys(keys::GenerateKeys), + GenerateLayoutTemplate(keys::GenerateLayoutTemplate), + SetupGit(git::SetupGit), + SetValidatorConfiguration(keys::SetValidatorConfiguration), +} + +impl GenesisTool { + pub async fn execute(self) -> CliResult { + match self { + GenesisTool::GenerateAdminWriteSet(tool) => tool.execute_serialized_success().await, + GenesisTool::GenerateGenesis(tool) => tool.execute_serialized().await, + GenesisTool::GetPoolAddresses(tool) => tool.execute_serialized().await, + GenesisTool::GenerateKeys(tool) => tool.execute_serialized().await, + GenesisTool::GenerateLayoutTemplate(tool) => tool.execute_serialized_success().await, + GenesisTool::SetupGit(tool) => tool.execute_serialized_success().await, + GenesisTool::SetValidatorConfiguration(tool) => tool.execute_serialized_success().await, + } + } +} + +/// Generate genesis from a git repository +/// +/// This will create a genesis.blob and a waypoint.txt to be used for +/// running a network +#[derive(Parser)] +pub struct GenerateGenesis { + /// Output directory for Genesis file and waypoint + #[clap(long, parse(from_os_str))] + output_dir: Option, + /// Whether this is mainnet genesis. + /// + /// Default is false + #[clap(long)] + mainnet: bool, + + #[clap(flatten)] + prompt_options: PromptOptions, + #[clap(flatten)] + git_options: GitOptions, +} + +#[async_trait] +impl CliCommand> for GenerateGenesis { + fn command_name(&self) -> &'static str { + "GenerateGenesis" + } + + async fn execute(self) -> CliTypedResult> { + let output_dir = dir_default_to_current(self.output_dir.clone())?; + let genesis_file = output_dir.join(GENESIS_FILE); + let waypoint_file = output_dir.join(WAYPOINT_FILE); + check_if_file_exists(genesis_file.as_path(), self.prompt_options)?; + check_if_file_exists(waypoint_file.as_path(), self.prompt_options)?; + + // Generate genesis and waypoint files + let (genesis_bytes, waypoint) = if self.mainnet { + let mut mainnet_genesis = fetch_mainnet_genesis_info(self.git_options)?; + let genesis_bytes = bcs::to_bytes(mainnet_genesis.clone().get_genesis()) + .map_err(|e| CliError::BCS(GENESIS_FILE, e))?; + (genesis_bytes, mainnet_genesis.generate_waypoint()?) + } else { + let mut test_genesis = fetch_genesis_info(self.git_options)?; + let genesis_bytes = bcs::to_bytes(test_genesis.clone().get_genesis()) + .map_err(|e| CliError::BCS(GENESIS_FILE, e))?; + (genesis_bytes, test_genesis.generate_waypoint()?) 
+ }; + write_to_file(genesis_file.as_path(), GENESIS_FILE, &genesis_bytes)?; + write_to_file( + waypoint_file.as_path(), + WAYPOINT_FILE, + waypoint.to_string().as_bytes(), + )?; + Ok(vec![genesis_file, waypoint_file]) + } +} + +/// Retrieves all information for mainnet genesis from the Git repository +pub fn fetch_mainnet_genesis_info(git_options: GitOptions) -> CliTypedResult { + let client = git_options.get_client()?; + let layout: Layout = client.get(Path::new(LAYOUT_FILE))?; + + if layout.root_key.is_some() { + return Err(CliError::UnexpectedError( + "Root key must not be set for mainnet.".to_string(), + )); + } + + let total_supply = layout.total_supply.ok_or_else(|| { + CliError::UnexpectedError("Layout file does not have `total_supply`".to_string()) + })?; + + let account_balance_map: AccountBalanceMap = client.get(Path::new(BALANCES_FILE))?; + let accounts: Vec = account_balance_map.try_into()?; + + // Check that the supply matches the total + let total_balance_supply: u64 = accounts.iter().map(|inner| inner.balance).sum(); + if total_supply != total_balance_supply { + return Err(CliError::UnexpectedError(format!( + "Total supply seen {} doesn't match expected total supply {}", + total_balance_supply, total_supply + ))); + } + + // Check that the user has a reasonable amount of APT, since below the minimum gas amount is + // not useful 1 APT minimally + const MIN_USEFUL_AMOUNT: u64 = 200000000; + let ten_percent_of_total = total_supply / 10; + for account in accounts.iter() { + if account.balance != 0 && account.balance < MIN_USEFUL_AMOUNT { + return Err(CliError::UnexpectedError(format!( + "Account {} has an initial supply below expected amount {} < {}", + account.account_address, account.balance, MIN_USEFUL_AMOUNT + ))); + } else if account.balance > ten_percent_of_total { + return Err(CliError::UnexpectedError(format!( + "Account {} has an more than 10% of the total balance {} > {}", + account.account_address, account.balance, ten_percent_of_total + ))); + } + } + + // Keep track of accounts for later lookup of balances + let initialized_accounts: BTreeMap = accounts + .iter() + .map(|inner| (inner.account_address, inner.balance)) + .collect(); + + let employee_vesting_accounts: EmployeePoolMap = + client.get(Path::new(EMPLOYEE_VESTING_ACCOUNTS_FILE))?; + + let employee_validators: Vec<_> = employee_vesting_accounts + .inner + .iter() + .map(|inner| inner.validator.clone()) + .collect(); + let employee_vesting_accounts: Vec = employee_vesting_accounts.try_into()?; + let validators = get_validator_configs(&client, &layout, true).map_err(parse_error)?; + let mut unique_accounts = BTreeSet::new(); + let mut unique_network_keys = HashSet::new(); + let mut unique_consensus_keys = HashSet::new(); + let mut unique_consensus_pop = HashSet::new(); + let mut unique_hosts = HashSet::new(); + + validate_employee_accounts( + &employee_vesting_accounts, + &initialized_accounts, + &mut unique_accounts, + )?; + + let mut seen_owners = BTreeMap::new(); + validate_validators( + &layout, + &employee_validators, + &initialized_accounts, + &mut unique_accounts, + &mut unique_network_keys, + &mut unique_consensus_keys, + &mut unique_consensus_pop, + &mut unique_hosts, + &mut seen_owners, + true, + )?; + validate_validators( + &layout, + &validators, + &initialized_accounts, + &mut unique_accounts, + &mut unique_network_keys, + &mut unique_consensus_keys, + &mut unique_consensus_pop, + &mut unique_hosts, + &mut seen_owners, + false, + )?; + + let framework = client.get_framework()?; + 
Ok(MainnetGenesisInfo::new( + layout.chain_id, + accounts, + employee_vesting_accounts, + validators, + framework, + &GenesisConfiguration { + allow_new_validators: true, + epoch_duration_secs: layout.epoch_duration_secs, + is_test: false, + min_stake: layout.min_stake, + min_voting_threshold: layout.min_voting_threshold, + max_stake: layout.max_stake, + recurring_lockup_duration_secs: layout.recurring_lockup_duration_secs, + required_proposer_stake: layout.required_proposer_stake, + rewards_apy_percentage: layout.rewards_apy_percentage, + voting_duration_secs: layout.voting_duration_secs, + voting_power_increase_limit: layout.voting_power_increase_limit, + employee_vesting_start: layout.employee_vesting_start, + employee_vesting_period_duration: layout.employee_vesting_period_duration, + consensus_config: OnChainConsensusConfig::default(), + execution_config: OnChainExecutionConfig::default(), + gas_schedule: default_gas_schedule(), + }, + )?) +} + +/// Retrieves all information for genesis from the Git repository +pub fn fetch_genesis_info(git_options: GitOptions) -> CliTypedResult { + let client = git_options.get_client()?; + let layout: Layout = client.get(Path::new(LAYOUT_FILE))?; + + if layout.root_key.is_none() { + return Err(CliError::UnexpectedError( + "Layout field root_key was not set. Please provide a hex encoded Ed25519PublicKey." + .to_string(), + )); + } + + let validators = get_validator_configs(&client, &layout, false).map_err(parse_error)?; + let framework = client.get_framework()?; + Ok(GenesisInfo::new( + layout.chain_id, + layout.root_key.unwrap(), + validators, + framework, + &GenesisConfiguration { + allow_new_validators: layout.allow_new_validators, + epoch_duration_secs: layout.epoch_duration_secs, + is_test: layout.is_test, + min_stake: layout.min_stake, + min_voting_threshold: layout.min_voting_threshold, + max_stake: layout.max_stake, + recurring_lockup_duration_secs: layout.recurring_lockup_duration_secs, + required_proposer_stake: layout.required_proposer_stake, + rewards_apy_percentage: layout.rewards_apy_percentage, + voting_duration_secs: layout.voting_duration_secs, + voting_power_increase_limit: layout.voting_power_increase_limit, + employee_vesting_start: layout.employee_vesting_start, + employee_vesting_period_duration: layout.employee_vesting_period_duration, + consensus_config: OnChainConsensusConfig::default(), + execution_config: OnChainExecutionConfig::default(), + gas_schedule: default_gas_schedule(), + }, + )?) 
+} + +fn parse_error(errors: Vec) -> CliError { + eprintln!( + "Failed to parse genesis inputs:\n{}", + serde_yaml::to_string(&errors).unwrap() + ); + CliError::UnexpectedError("Failed to parse genesis inputs".to_string()) +} + +fn get_validator_configs( + client: &Client, + layout: &Layout, + is_mainnet: bool, +) -> Result, Vec> { + let mut validators = Vec::new(); + let mut errors = Vec::new(); + for user in &layout.users { + match get_config(client, user, is_mainnet) { + Ok(validator) => { + validators.push(validator); + }, + Err(failure) => { + if let CliError::UnexpectedError(failure) = failure { + errors.push(format!("{}: {}", user, failure)); + } else { + errors.push(format!("{}: {:?}", user, failure)); + } + }, + } + } + + if errors.is_empty() { + Ok(validators) + } else { + Err(errors) + } +} + +/// Do proper parsing so more information is known about failures +fn get_config( + client: &Client, + user: &str, + is_mainnet: bool, +) -> CliTypedResult { + // Load a user's configuration files + let dir = PathBuf::from(user); + let owner_file = dir.join(OWNER_FILE); + let owner_file = owner_file.as_path(); + let owner_config = client.get::(owner_file)?; + + // Check and convert fields in owner file + let owner_account_address: AccountAddress = parse_required_option( + &owner_config.owner_account_address, + owner_file, + "owner_account_address", + AccountAddressWithChecks::from_str, + )? + .into(); + let owner_account_public_key = parse_required_option( + &owner_config.owner_account_public_key, + owner_file, + "owner_account_public_key", + |str| parse_key(ED25519_PUBLIC_KEY_LENGTH, str), + )?; + + let operator_account_address: AccountAddress = parse_required_option( + &owner_config.operator_account_address, + owner_file, + "operator_account_address", + AccountAddressWithChecks::from_str, + )? + .into(); + let operator_account_public_key = parse_required_option( + &owner_config.operator_account_public_key, + owner_file, + "operator_account_public_key", + |str| parse_key(ED25519_PUBLIC_KEY_LENGTH, str), + )?; + + let voter_account_address: AccountAddress = parse_required_option( + &owner_config.voter_account_address, + owner_file, + "voter_account_address", + AccountAddressWithChecks::from_str, + )? + .into(); + let voter_account_public_key = parse_required_option( + &owner_config.voter_account_public_key, + owner_file, + "voter_account_public_key", + |str| parse_key(ED25519_PUBLIC_KEY_LENGTH, str), + )?; + + let stake_amount = parse_required_option( + &owner_config.stake_amount, + owner_file, + "stake_amount", + u64::from_str, + )?; + + // Default to 0 for commission percentage if missing. + let commission_percentage = parse_optional_option( + &owner_config.commission_percentage, + owner_file, + "commission_percentage", + u64::from_str, + )? + .unwrap_or(0); + + // Default to true for whether the validator should be joining during genesis. + let join_during_genesis = parse_optional_option( + &owner_config.join_during_genesis, + owner_file, + "join_during_genesis", + bool::from_str, + )? + .unwrap_or(true); + + // We don't require the operator file if the validator is not joining during genesis. 
+ if is_mainnet && !join_during_genesis { + return Ok(ValidatorConfiguration { + owner_account_address: owner_account_address.into(), + owner_account_public_key, + operator_account_address: operator_account_address.into(), + operator_account_public_key, + voter_account_address: voter_account_address.into(), + voter_account_public_key, + consensus_public_key: None, + proof_of_possession: None, + validator_network_public_key: None, + validator_host: None, + full_node_network_public_key: None, + full_node_host: None, + stake_amount, + commission_percentage, + join_during_genesis, + }); + }; + + let operator_file = dir.join(OPERATOR_FILE); + let operator_file = operator_file.as_path(); + let operator_config = client.get::(operator_file)?; + + // Check and convert fields in operator file + let operator_account_address_from_file: AccountAddress = parse_required_option( + &operator_config.operator_account_address, + operator_file, + "operator_account_address", + AccountAddressWithChecks::from_str, + )? + .into(); + let operator_account_public_key_from_file = parse_required_option( + &operator_config.operator_account_public_key, + operator_file, + "operator_account_public_key", + |str| parse_key(ED25519_PUBLIC_KEY_LENGTH, str), + )?; + let consensus_public_key = parse_required_option( + &operator_config.consensus_public_key, + operator_file, + "consensus_public_key", + |str| parse_key(bls12381::PublicKey::LENGTH, str), + )?; + let consensus_proof_of_possession = parse_required_option( + &operator_config.consensus_proof_of_possession, + operator_file, + "consensus_proof_of_possession", + |str| parse_key(bls12381::ProofOfPossession::LENGTH, str), + )?; + let validator_network_public_key = parse_required_option( + &operator_config.validator_network_public_key, + operator_file, + "validator_network_public_key", + |str| parse_key(ED25519_PUBLIC_KEY_LENGTH, str), + )?; + let full_node_network_public_key = parse_optional_option( + &operator_config.full_node_network_public_key, + operator_file, + "full_node_network_public_key", + |str| parse_key(ED25519_PUBLIC_KEY_LENGTH, str), + )?; + + // Verify owner & operator agree on operator + if operator_account_address != operator_account_address_from_file { + return Err( + CliError::CommandArgumentError( + format!("Operator account {} in owner file {} does not match operator account {} in operator file {}", + operator_account_address, + owner_file.display(), + operator_account_address_from_file, + operator_file.display() + ))); + } + if operator_account_public_key != operator_account_public_key_from_file { + return Err( + CliError::CommandArgumentError( + format!("Operator public key {} in owner file {} does not match operator public key {} in operator file {}", + operator_account_public_key, + owner_file.display(), + operator_account_public_key_from_file, + operator_file.display() + ))); + } + + // Build Validator configuration + Ok(ValidatorConfiguration { + owner_account_address: owner_account_address.into(), + owner_account_public_key, + operator_account_address: operator_account_address.into(), + operator_account_public_key, + voter_account_address: voter_account_address.into(), + voter_account_public_key, + consensus_public_key: Some(consensus_public_key), + proof_of_possession: Some(consensus_proof_of_possession), + validator_network_public_key: Some(validator_network_public_key), + validator_host: Some(operator_config.validator_host), + full_node_network_public_key, + full_node_host: operator_config.full_node_host, + stake_amount, + 
commission_percentage, + join_during_genesis, + }) +} + +// TODO: Move into the Crypto libraries +fn parse_key(num_bytes: usize, str: &str) -> anyhow::Result { + let num_chars: usize = num_bytes * 2; + let mut working = str.trim(); + + // Checks if it has a 0x at the beginning, which is okay + if working.starts_with("0x") { + working = &working[2..]; + } + + match working.len().cmp(&num_chars) { + Ordering::Less => { + anyhow::bail!( + "Key {} is too short {} must be {} hex characters", + str, + working.len(), + num_chars + ) + }, + Ordering::Greater => { + anyhow::bail!( + "Key {} is too long {} must be {} hex characters with or without a 0x in front", + str, + working.len(), + num_chars + ) + }, + Ordering::Equal => {}, + } + + if !working.chars().all(|c| char::is_ascii_hexdigit(&c)) { + anyhow::bail!("Key {} contains a non-hex character", str) + } + + Ok(T::from_encoded_string(str.trim())?) +} + +fn parse_required_option Result, T, E: std::fmt::Display>( + option: &Option, + file: &Path, + field_name: &'static str, + parse: F, +) -> Result { + if let Some(ref field) = option { + parse(field).map_err(|err| { + CliError::CommandArgumentError(format!( + "Field {} is invalid in file {}. Err: {}", + field_name, + file.display(), + err + )) + }) + } else { + Err(CliError::CommandArgumentError(format!( + "File {} is missing {}", + file.display(), + field_name + ))) + } +} + +fn parse_optional_option Result, T, E: std::fmt::Display>( + option: &Option, + file: &Path, + field_name: &'static str, + parse: F, +) -> Result, CliError> { + if let Some(ref field) = option { + parse(field) + .map_err(|err| { + CliError::CommandArgumentError(format!( + "Field {} is invalid in file {}. Err: {}", + field_name, + file.display(), + err + )) + }) + .map(Some) + } else { + Ok(None) + } +} + +fn validate_validators( + layout: &Layout, + validators: &[ValidatorConfiguration], + initialized_accounts: &BTreeMap, + unique_accounts: &mut BTreeSet, + unique_network_keys: &mut HashSet, + unique_consensus_keys: &mut HashSet, + unique_consensus_pops: &mut HashSet, + unique_hosts: &mut HashSet, + seen_owners: &mut BTreeMap, + is_pooled_validator: bool, +) -> CliTypedResult<()> { + // check accounts for validators + let mut errors = vec![]; + + for (i, validator) in validators.iter().enumerate() { + let name = if is_pooled_validator { + format!("Employee Pool #{}", i) + } else { + layout.users.get(i).unwrap().to_string() + }; + + if !initialized_accounts.contains_key(&validator.owner_account_address.into()) { + errors.push(CliError::UnexpectedError(format!( + "Owner {} in validator {} is is not in the balances.yaml file", + validator.owner_account_address, name + ))); + } + if !initialized_accounts.contains_key(&validator.operator_account_address.into()) { + errors.push(CliError::UnexpectedError(format!( + "Operator {} in validator {} is is not in the balances.yaml file", + validator.operator_account_address, name + ))); + } + if !initialized_accounts.contains_key(&validator.voter_account_address.into()) { + errors.push(CliError::UnexpectedError(format!( + "Voter {} in validator {} is is not in the balances.yaml file", + validator.voter_account_address, name + ))); + } + + let owner_balance = initialized_accounts + .get(&validator.owner_account_address.into()) + .unwrap(); + + if seen_owners.contains_key(&validator.owner_account_address.into()) { + errors.push(CliError::UnexpectedError(format!( + "Owner {} in validator {} has been seen before as an owner of validator {}", + validator.owner_account_address, + name, + 
seen_owners + .get(&validator.owner_account_address.into()) + .unwrap() + ))); + } + seen_owners.insert(validator.owner_account_address.into(), i); + + if unique_accounts.contains(&validator.owner_account_address.into()) { + errors.push(CliError::UnexpectedError(format!( + "Owner '{}' in validator {} has already been seen elsewhere", + validator.owner_account_address, name + ))); + } + unique_accounts.insert(validator.owner_account_address.into()); + + if unique_accounts.contains(&validator.operator_account_address.into()) { + errors.push(CliError::UnexpectedError(format!( + "Operator '{}' in validator {} has already been seen elsewhere", + validator.operator_account_address, name + ))); + } + unique_accounts.insert(validator.operator_account_address.into()); + + // Pooled validators have a combined balance + // TODO: Make this field optional but checked + if !is_pooled_validator && *owner_balance < validator.stake_amount { + errors.push(CliError::UnexpectedError(format!( + "Owner {} in validator {} has less in it's balance {} than the stake amount for the validator {}", + validator.owner_account_address, name, owner_balance, validator.stake_amount + ))); + } + if validator.stake_amount < layout.min_stake { + errors.push(CliError::UnexpectedError(format!( + "Validator {} has stake {} under the min stake {}", + name, validator.stake_amount, layout.min_stake + ))); + } + if validator.stake_amount > layout.max_stake { + errors.push(CliError::UnexpectedError(format!( + "Validator {} has stake {} over the max stake {}", + name, validator.stake_amount, layout.max_stake + ))); + } + + // Ensure that the validator is setup correctly if it's joining in genesis + if validator.join_during_genesis { + if validator.validator_network_public_key.is_none() { + errors.push(CliError::UnexpectedError(format!( + "Validator {} does not have a validator network public key, though it's joining during genesis", + name + ))); + } + if !unique_network_keys.insert(validator.validator_network_public_key.unwrap()) { + errors.push(CliError::UnexpectedError(format!( + "Validator {} has a repeated validator network key{}", + name, + validator.validator_network_public_key.unwrap() + ))); + } + + if validator.validator_host.is_none() { + errors.push(CliError::UnexpectedError(format!( + "Validator {} does not have a validator host, though it's joining during genesis", + name + ))); + } + if !unique_hosts.insert(validator.validator_host.as_ref().unwrap().clone()) { + errors.push(CliError::UnexpectedError(format!( + "Validator {} has a repeated validator host {:?}", + name, + validator.validator_host.as_ref().unwrap() + ))); + } + + if validator.consensus_public_key.is_none() { + errors.push(CliError::UnexpectedError(format!( + "Validator {} does not have a consensus public key, though it's joining during genesis", + name + ))); + } + if !unique_consensus_keys + .insert(validator.consensus_public_key.as_ref().unwrap().clone()) + { + errors.push(CliError::UnexpectedError(format!( + "Validator {} has a repeated a consensus public key {}", + name, + validator.consensus_public_key.as_ref().unwrap() + ))); + } + + if validator.proof_of_possession.is_none() { + errors.push(CliError::UnexpectedError(format!( + "Validator {} does not have a consensus proof of possession, though it's joining during genesis", + name + ))); + } + if !unique_consensus_pops + .insert(validator.proof_of_possession.as_ref().unwrap().clone()) + { + errors.push(CliError::UnexpectedError(format!( + "Validator {} has a repeated a consensus proof of possessions 
{}", + name, + validator.proof_of_possession.as_ref().unwrap() + ))); + } + + match ( + validator.full_node_host.as_ref(), + validator.full_node_network_public_key.as_ref(), + ) { + (None, None) => { + info!("Validator {} does not have a full node setup", name); + }, + (Some(_), None) | (None, Some(_)) => { + errors.push(CliError::UnexpectedError(format!( + "Validator {} has a full node host or public key but not both", + name + ))); + }, + (Some(full_node_host), Some(full_node_network_public_key)) => { + // Ensure that the validator and the full node aren't the same + let validator_host = validator.validator_host.as_ref().unwrap(); + let validator_network_public_key = + validator.validator_network_public_key.as_ref().unwrap(); + if validator_host == full_node_host { + errors.push(CliError::UnexpectedError(format!( + "Validator {} has a validator and a full node host that are the same {:?}", + name, + validator_host + ))); + } + if !unique_hosts.insert(validator.full_node_host.as_ref().unwrap().clone()) { + errors.push(CliError::UnexpectedError(format!( + "Validator {} has a repeated full node host {:?}", + name, + validator.full_node_host.as_ref().unwrap() + ))); + } + + if validator_network_public_key == full_node_network_public_key { + errors.push(CliError::UnexpectedError(format!( + "Validator {} has a validator and a full node network public key that are the same {}", + name, + validator_network_public_key + ))); + } + if !unique_network_keys.insert(validator.full_node_network_public_key.unwrap()) + { + errors.push(CliError::UnexpectedError(format!( + "Validator {} has a repeated full node network key {}", + name, + validator.full_node_network_public_key.unwrap() + ))); + } + }, + } + } else { + if validator.validator_network_public_key.is_some() { + errors.push(CliError::UnexpectedError(format!( + "Validator {} has a validator network public key, but it is *NOT* joining during genesis", + name + ))); + } + if validator.validator_host.is_some() { + errors.push(CliError::UnexpectedError(format!( + "Validator {} has a validator host, but it is *NOT* joining during genesis", + name + ))); + } + if validator.consensus_public_key.is_some() { + errors.push(CliError::UnexpectedError(format!( + "Validator {} has a consensus public key, but it is *NOT* joining during genesis", + name + ))); + } + if validator.proof_of_possession.is_some() { + errors.push(CliError::UnexpectedError(format!( + "Validator {} has a consensus proof of possession, but it is *NOT* joining during genesis", + name + ))); + } + if validator.full_node_network_public_key.is_some() { + errors.push(CliError::UnexpectedError(format!( + "Validator {} has a full node public key, but it is *NOT* joining during genesis", + name + ))); + } + if validator.full_node_host.is_some() { + errors.push(CliError::UnexpectedError(format!( + "Validator {} has a full node host, but it is *NOT* joining during genesis", + name + ))); + } + } + } + + if errors.is_empty() { + Ok(()) + } else { + eprintln!("{:#?}", errors); + + Err(CliError::UnexpectedError( + "Failed to validate validators".to_string(), + )) + } +} + +fn validate_employee_accounts( + employee_vesting_accounts: &[EmployeePool], + initialized_accounts: &BTreeMap, + unique_accounts: &mut BTreeSet, +) -> CliTypedResult<()> { + // Check accounts for employee accounts + for (i, pool) in employee_vesting_accounts.iter().enumerate() { + let mut total_stake_pool_amount = 0; + for (j, account) in pool.accounts.iter().enumerate() { + if !initialized_accounts.contains_key(account) { + return 
Err(CliError::UnexpectedError(format!( + "Account #{} '{}' in employee pool #{} is not in the balances.yaml file", + j, account, i + ))); + } + if unique_accounts.contains(account) { + return Err(CliError::UnexpectedError(format!( + "Account #{} '{}' in employee pool #{} has already been seen elsewhere", + j, account, i + ))); + } + unique_accounts.insert(*account); + + total_stake_pool_amount += initialized_accounts.get(account).unwrap(); + } + + if total_stake_pool_amount != pool.validator.validator.stake_amount { + return Err(CliError::UnexpectedError(format!( + "Stake amount {} in employee pool #{} does not match combined of accounts {}", + pool.validator.validator.stake_amount, i, total_stake_pool_amount + ))); + } + + if !initialized_accounts.contains_key(&pool.validator.validator.owner_address) { + return Err(CliError::UnexpectedError(format!( + "Owner address {} in employee pool #{} is is not in the balances.yaml file", + pool.validator.validator.owner_address, i + ))); + } + if !initialized_accounts.contains_key(&pool.validator.validator.operator_address) { + return Err(CliError::UnexpectedError(format!( + "Operator address {} in employee pool #{} is is not in the balances.yaml file", + pool.validator.validator.operator_address, i + ))); + } + if !initialized_accounts.contains_key(&pool.validator.validator.voter_address) { + return Err(CliError::UnexpectedError(format!( + "Voter address {} in employee pool #{} is is not in the balances.yaml file", + pool.validator.validator.voter_address, i + ))); + } + if !initialized_accounts.contains_key(&pool.beneficiary_resetter) { + return Err(CliError::UnexpectedError(format!( + "Beneficiary resetter {} in employee pool #{} is is not in the balances.yaml file", + pool.beneficiary_resetter, i + ))); + } + } + Ok(()) +} diff --git a/m1/movement/src/genesis/tests.rs b/m1/movement/src/genesis/tests.rs new file mode 100644 index 00000000..bb270e6e --- /dev/null +++ b/m1/movement/src/genesis/tests.rs @@ -0,0 +1,434 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use crate::{ + common::{ + types::{OptionalPoolAddressArgs, PromptOptions, RngArgs}, + utils::{read_from_file, write_to_file}, + }, + genesis::{ + git::{ + from_yaml, GitOptions, SetupGit, BALANCES_FILE, EMPLOYEE_VESTING_ACCOUNTS_FILE, + FRAMEWORK_NAME, + }, + keys::{GenerateKeys, GenerateLayoutTemplate, SetValidatorConfiguration, PUBLIC_KEYS_FILE}, + GenerateGenesis, + }, + CliCommand, +}; +use aptos_crypto::{ + ed25519::{Ed25519PrivateKey, Ed25519PublicKey}, + PrivateKey, +}; +use aptos_genesis::{ + config::{ + AccountBalanceMap, EmployeePoolConfig, EmployeePoolMap, HostAndPort, Layout, + ValidatorConfiguration, + }, + keys::PublicIdentity, +}; +use aptos_keygen::KeyGen; +use aptos_temppath::TempPath; +use aptos_types::{account_address::AccountAddress, chain_id::ChainId}; +use aptos_vm_genesis::{AccountBalance, TestValidator}; +use std::{ + collections::HashMap, + path::{Path, PathBuf}, + str::FromStr, +}; + +const INITIAL_BALANCE: u64 = 100_000_000_000_000; + +/// Test the E2E genesis flow since it doesn't require a node to run +#[tokio::test] +async fn test_genesis_e2e_flow() { + let is_mainnet = false; + let dir = TempPath::new(); + dir.create_as_dir().unwrap(); + let git_options = create_users(2, 0, &dir, &mut vec![], is_mainnet).await; + + // Now generate genesis + let output_dir = TempPath::new(); + output_dir.create_as_dir().unwrap(); + let output_dir = PathBuf::from(output_dir.path()); + generate_genesis(git_options, output_dir.clone(), 
is_mainnet).await; + + // TODO: Verify that these are good + let waypoint_file = output_dir.join("waypoint.txt"); + assert!(waypoint_file.exists()); + let genesis_file = output_dir.join("genesis.blob"); + assert!(genesis_file.exists()); +} + +#[tokio::test] +async fn test_mainnet_genesis_e2e_flow() { + let is_mainnet = true; + let dir = TempPath::new(); + dir.create_as_dir().unwrap(); + let git_options = create_users(2, 4, &dir, &mut vec![10, 1], is_mainnet).await; + let account_1 = AccountAddress::from_hex_literal("0x101").unwrap(); + let account_2 = AccountAddress::from_hex_literal("0x102").unwrap(); + let employee_1 = AccountAddress::from_hex_literal("0x201").unwrap(); + let employee_2 = AccountAddress::from_hex_literal("0x202").unwrap(); + let employee_3 = AccountAddress::from_hex_literal("0x203").unwrap(); + let employee_4 = AccountAddress::from_hex_literal("0x204").unwrap(); + + let owner_identity1 = load_identity(dir.path(), "owner-0"); + let owner_identity2 = load_identity(dir.path(), "owner-1"); + let operator_identity1 = load_identity(dir.path(), "operator-0"); + let operator_identity2 = load_identity(dir.path(), "operator-1"); + let voter_identity1 = load_identity(dir.path(), "voter-0"); + let voter_identity2 = load_identity(dir.path(), "voter-1"); + let admin_identity1 = load_identity(dir.path(), "other-0"); + let admin_identity2 = load_identity(dir.path(), "other-1"); + let employee_operator_identity1 = load_identity(dir.path(), "other-2"); + let employee_operator_identity2 = load_identity(dir.path(), "other-3"); + + // Create initial balances and employee vesting account files. + let git_dir = git_options.local_repository_dir.as_ref().unwrap().as_path(); + + create_account_balances_file(PathBuf::from(git_dir), vec![ + owner_identity1.account_address, + owner_identity2.account_address, + operator_identity1.account_address, + operator_identity2.account_address, + voter_identity1.account_address, + voter_identity2.account_address, + account_1, + account_2, + employee_1, + employee_2, + employee_3, + employee_4, + admin_identity1.account_address, + admin_identity2.account_address, + employee_operator_identity1.account_address, + employee_operator_identity2.account_address, + ]) + .await; + create_employee_vesting_accounts_file( + PathBuf::from(git_dir), + &[admin_identity1, admin_identity2], + &[employee_operator_identity1, employee_operator_identity2], + &[vec![employee_1, employee_2], vec![employee_3, employee_4]], + &[true, false], + ) + .await; + + // Now generate genesis + let output_dir = TempPath::new(); + output_dir.create_as_dir().unwrap(); + let output_dir = PathBuf::from(output_dir.path()); + generate_genesis(git_options, output_dir.clone(), is_mainnet).await; + + // TODO: Verify that these are good + let waypoint_file = output_dir.join("waypoint.txt"); + assert!(waypoint_file.exists()); + let genesis_file = output_dir.join("genesis.blob"); + assert!(genesis_file.exists()); +} + +pub fn load_identity(base_dir: &Path, name: &str) -> PublicIdentity { + let path = base_dir.join(name).join(PUBLIC_KEYS_FILE); + from_yaml(&String::from_utf8(read_from_file(path.as_path()).unwrap()).unwrap()).unwrap() +} + +async fn create_users( + num_validators: u8, + num_other_users: u8, + dir: &TempPath, + commission_rates: &mut Vec, + is_mainnet: bool, +) -> GitOptions { + let mut users: HashMap = HashMap::new(); + for i in 0..num_validators { + let name = format!("owner-{}", i); + let output_dir = generate_keys(dir.path(), &name).await; + users.insert(name, output_dir); + + let name = 
format!("operator-{}", i); + let output_dir = generate_keys(dir.path(), &name).await; + users.insert(name, output_dir); + + let name = format!("voter-{}", i); + let output_dir = generate_keys(dir.path(), &name).await; + users.insert(name, output_dir); + } + for i in 0..num_other_users { + let name = format!("other-{}", i); + let output_dir = generate_keys(dir.path(), &name).await; + users.insert(name, output_dir); + } + + // Get the validator's names + let validator_names = users + .keys() + .map(|key| key.to_string()) + .filter(|name| name.starts_with("owner")) + .collect(); + let mut key_gen = KeyGen::from_seed([num_validators.saturating_add(1); 32]); + + // First step is setup the local git repo + let root_private_key = if !is_mainnet { + Some(key_gen.generate_ed25519_private_key()) + } else { + None + }; + let git_options = + setup_git_dir(root_private_key.as_ref(), validator_names, ChainId::test()).await; + + // Only write validators to folders + for i in 0..num_validators { + let owner_name = format!("owner-{}", i); + let owner_identity = users.get(&owner_name).unwrap().join(PUBLIC_KEYS_FILE); + let operator_identity = users + .get(&format!("operator-{}", i)) + .unwrap() + .join(PUBLIC_KEYS_FILE); + let voter_identity = users + .get(&format!("voter-{}", i)) + .unwrap() + .join(PUBLIC_KEYS_FILE); + let commission_rate = if commission_rates.is_empty() { + 0 + } else { + commission_rates.remove(0) + }; + set_validator_config( + owner_name, + git_options.clone(), + owner_identity.as_path(), + operator_identity.as_path(), + voter_identity.as_path(), + commission_rate, + i as u16, + ) + .await; + } + git_options +} + +/// Generate genesis and waypoint +async fn generate_genesis(git_options: GitOptions, output_dir: PathBuf, mainnet: bool) { + let command = GenerateGenesis { + prompt_options: PromptOptions::yes(), + git_options, + output_dir: Some(output_dir), + mainnet, + }; + let _ = command.execute().await.unwrap(); +} + +/// Setup a temporary repo location and add all required pieces +async fn setup_git_dir( + root_private_key: Option<&Ed25519PrivateKey>, + users: Vec, + chain_id: ChainId, +) -> GitOptions { + let git_options = git_options(); + let layout_file = TempPath::new(); + layout_file.create_as_file().unwrap(); + let layout_file = layout_file.path(); + + create_layout_file( + layout_file, + root_private_key.map(|inner| inner.public_key()), + users, + chain_id, + ) + .await; + let setup_command = SetupGit { + git_options: git_options.clone(), + layout_file: PathBuf::from(layout_file), + }; + + setup_command + .execute() + .await + .expect("Should not fail creating repo folder"); + + // Add framework + add_framework_to_dir(git_options.local_repository_dir.as_ref().unwrap().as_path()); + git_options +} + +/// Add framework to git directory +fn add_framework_to_dir(git_dir: &Path) { + aptos_cached_packages::head_release_bundle() + .write(git_dir.join(FRAMEWORK_NAME)) + .unwrap() +} + +/// Local git options for testing +fn git_options() -> GitOptions { + let temp_path = TempPath::new(); + let path = PathBuf::from(temp_path.path()); + GitOptions { + local_repository_dir: Some(path), + ..Default::default() + } +} + +/// Create a layout file for the repo +async fn create_layout_file( + file: &Path, + root_public_key: Option, + users: Vec, + chain_id: ChainId, +) { + GenerateLayoutTemplate { + output_file: PathBuf::from(file), + prompt_options: PromptOptions::yes(), + } + .execute() + .await + .expect("Expected to create layout template"); + + // Update layout file + let mut layout: 
Layout = + from_yaml(&String::from_utf8(read_from_file(file).unwrap()).unwrap()).unwrap(); + layout.root_key = root_public_key; + layout.users = users; + layout.chain_id = chain_id; + layout.is_test = true; + layout.total_supply = Some(INITIAL_BALANCE * 16); + + write_to_file( + file, + "Layout file", + serde_yaml::to_string(&layout).unwrap().as_bytes(), + ) + .unwrap(); +} + +/// Generate keys for a "user" +async fn generate_keys(dir: &Path, name: &str) -> PathBuf { + let output_dir = dir.join(name); + let command = GenerateKeys { + pool_address_args: OptionalPoolAddressArgs { pool_address: None }, + rng_args: RngArgs::from_string_seed(name), + prompt_options: PromptOptions::yes(), + output_dir: Some(output_dir.clone()), + }; + let _ = command.execute().await.unwrap(); + output_dir +} + +/// Set validator configuration for a user +async fn set_validator_config( + username: String, + git_options: GitOptions, + owner_identity_file: &Path, + operator_identity_file: &Path, + voter_identity_file: &Path, + commission_percentage: u64, + port: u16, +) { + let command = SetValidatorConfiguration { + username, + git_options, + owner_public_identity_file: Some(owner_identity_file.to_path_buf()), + validator_host: HostAndPort::from_str(&format!("localhost:{}", port)).unwrap(), + stake_amount: 100_000_000_000_000, + full_node_host: None, + operator_public_identity_file: Some(operator_identity_file.to_path_buf()), + voter_public_identity_file: Some(voter_identity_file.to_path_buf()), + commission_percentage, + join_during_genesis: true, + }; + + command.execute().await.unwrap() +} + +async fn create_account_balances_file(path: PathBuf, addresses: Vec) { + let account_balances: Vec = addresses + .iter() + .map(|account_address| AccountBalance { + account_address: *account_address, + balance: INITIAL_BALANCE, + }) + .collect(); + + let balance_map = AccountBalanceMap::try_from(account_balances).unwrap(); + + write_to_file( + &path.join(BALANCES_FILE), + BALANCES_FILE, + serde_yaml::to_string(&balance_map).unwrap().as_bytes(), + ) + .unwrap(); +} + +async fn create_employee_vesting_accounts_file( + path: PathBuf, + admin_identities: &[PublicIdentity], + operator_identities: &[PublicIdentity], + employee_groups: &[Vec], + join_during_genesis: &[bool], +) { + TestValidator::new_test_set(Some(employee_groups.len()), Some(INITIAL_BALANCE)); + let employee_vesting_accounts: Vec<_> = employee_groups + .iter() + .enumerate() + .map(|(index, accounts)| { + let admin_identity = admin_identities[index].clone(); + let operator_identity = operator_identities[index].clone(); + let validator_config = if *join_during_genesis.get(index).unwrap() { + ValidatorConfiguration { + owner_account_address: admin_identity.account_address.into(), + owner_account_public_key: admin_identity.account_public_key.clone(), + operator_account_address: operator_identity.account_address.into(), + operator_account_public_key: operator_identity.account_public_key.clone(), + voter_account_address: admin_identity.account_address.into(), + voter_account_public_key: admin_identity.account_public_key, + consensus_public_key: operator_identity.consensus_public_key, + proof_of_possession: operator_identity.consensus_proof_of_possession, + validator_network_public_key: operator_identity.validator_network_public_key, + validator_host: Some(HostAndPort::from_str("localhost:8080").unwrap()), + full_node_network_public_key: operator_identity.full_node_network_public_key, + full_node_host: Some(HostAndPort::from_str("localhost:8081").unwrap()), + 
stake_amount: 2 * INITIAL_BALANCE, + commission_percentage: 0, + join_during_genesis: true, + } + } else { + ValidatorConfiguration { + owner_account_address: admin_identity.account_address.into(), + owner_account_public_key: admin_identity.account_public_key.clone(), + operator_account_address: operator_identity.account_address.into(), + operator_account_public_key: operator_identity.account_public_key, + voter_account_address: admin_identity.account_address.into(), + voter_account_public_key: admin_identity.account_public_key, + consensus_public_key: None, + proof_of_possession: None, + validator_network_public_key: None, + validator_host: None, + full_node_network_public_key: None, + full_node_host: None, + stake_amount: 2 * INITIAL_BALANCE, + commission_percentage: 0, + join_during_genesis: false, + } + }; + + EmployeePoolConfig { + accounts: accounts.iter().map(|addr| addr.into()).collect(), + validator: validator_config, + vesting_schedule_numerators: vec![3, 3, 3, 3, 1], + vesting_schedule_denominator: 48, + beneficiary_resetter: AccountAddress::from_hex_literal("0x101").unwrap().into(), + } + }) + .collect(); + let employee_vesting_map = EmployeePoolMap { + inner: employee_vesting_accounts, + }; + write_to_file( + &path.join(EMPLOYEE_VESTING_ACCOUNTS_FILE), + EMPLOYEE_VESTING_ACCOUNTS_FILE, + serde_yaml::to_string(&employee_vesting_map) + .unwrap() + .as_bytes(), + ) + .unwrap(); +} diff --git a/m1/movement/src/genesis/tools.rs b/m1/movement/src/genesis/tools.rs new file mode 100644 index 00000000..5f3f8ed5 --- /dev/null +++ b/m1/movement/src/genesis/tools.rs @@ -0,0 +1,100 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use crate::{ + common::{ + types::PromptOptions, + utils::{dir_default_to_current, write_to_file}, + }, + genesis::{ + get_validator_configs, + git::{GitOptions, EMPLOYEE_VESTING_ACCOUNTS_FILE, LAYOUT_FILE}, + parse_error, + }, + CliCommand, CliTypedResult, +}; +use aptos_genesis::config::{EmployeePoolMap, Layout}; +use aptos_sdk::move_types::account_address::AccountAddress; +use aptos_types::account_address::{create_vesting_pool_address, default_stake_pool_address}; +use async_trait::async_trait; +use clap::Parser; +use std::{ + collections::BTreeMap, + path::{Path, PathBuf}, +}; + +const POOL_ADDRESSES: &str = "pool-addresses.yaml"; +const EMPLOYEE_POOL_ADDRESSES: &str = "employee-pool-addresses.yaml"; + +/// Get pool addresses from a mainnet genesis setup +/// +/// Outputs all pool addresses to a file from the genesis files +#[derive(Parser)] +pub struct PoolAddresses { + /// Output directory for pool addresses + #[clap(long, parse(from_os_str))] + output_dir: Option, + + #[clap(flatten)] + prompt_options: PromptOptions, + #[clap(flatten)] + git_options: GitOptions, +} + +#[async_trait] +impl CliCommand> for PoolAddresses { + fn command_name(&self) -> &'static str { + "GetPoolAddresses" + } + + async fn execute(self) -> CliTypedResult> { + let output_dir = dir_default_to_current(self.output_dir.clone())?; + let client = self.git_options.get_client()?; + let layout: Layout = client.get(Path::new(LAYOUT_FILE))?; + let employee_vesting_accounts: EmployeePoolMap = + client.get(Path::new(EMPLOYEE_VESTING_ACCOUNTS_FILE))?; + let validators = get_validator_configs(&client, &layout, true).map_err(parse_error)?; + + let mut address_to_pool = BTreeMap::::new(); + + for validator in validators { + let stake_pool_address = default_stake_pool_address( + validator.owner_account_address.into(), + validator.operator_account_address.into(), + ); + 
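The two derivation helpers used in this loop are worth seeing in isolation: given only owner and operator addresses, they compute the pool accounts this command reports. A minimal sketch, assuming the workspace's `aptos_types` crate; the literal addresses are placeholders for illustration only.

```rust
// Minimal sketch: derive the pool addresses this command writes out,
// assuming `aptos_types` from this workspace. The addresses are placeholders.
use aptos_types::account_address::{
    create_vesting_pool_address, default_stake_pool_address, AccountAddress,
};

fn main() {
    let owner = AccountAddress::from_hex_literal("0x123").unwrap();
    let operator = AccountAddress::from_hex_literal("0x456").unwrap();

    // Regular validator: the stake pool address is derived from owner + operator.
    let stake_pool = default_stake_pool_address(owner, operator);

    // Employee vesting pool: the same inputs plus an index and a seed
    // (here 0 and empty, matching the arguments this command passes).
    let vesting_pool = create_vesting_pool_address(owner, operator, 0, &[]);

    println!("stake pool:   {}", stake_pool.to_hex_literal());
    println!("vesting pool: {}", vesting_pool.to_hex_literal());
}
```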
address_to_pool.insert(validator.owner_account_address.into(), stake_pool_address); + } + + let mut employee_address_to_pool = BTreeMap::::new(); + + for employee_pool in employee_vesting_accounts.inner.iter() { + let stake_pool_address = create_vesting_pool_address( + employee_pool.validator.owner_account_address.into(), + employee_pool.validator.operator_account_address.into(), + 0, + &[], + ); + + employee_address_to_pool.insert( + employee_pool.validator.owner_account_address.into(), + stake_pool_address, + ); + } + + let pool_addresses_file = output_dir.join(POOL_ADDRESSES); + let employee_pool_addresses_file = output_dir.join(EMPLOYEE_POOL_ADDRESSES); + + write_to_file( + pool_addresses_file.as_path(), + POOL_ADDRESSES, + serde_yaml::to_string(&address_to_pool)?.as_bytes(), + )?; + write_to_file( + employee_pool_addresses_file.as_path(), + EMPLOYEE_POOL_ADDRESSES, + serde_yaml::to_string(&employee_address_to_pool)?.as_bytes(), + )?; + + Ok(vec![pool_addresses_file, employee_pool_addresses_file]) + } +} diff --git a/m1/movement/src/governance/mod.rs b/m1/movement/src/governance/mod.rs new file mode 100644 index 00000000..b399696a --- /dev/null +++ b/m1/movement/src/governance/mod.rs @@ -0,0 +1,1061 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +#[cfg(feature = "no-upload-proposal")] +use crate::common::utils::read_from_file; +use crate::{ + common::{ + types::{ + CliError, CliTypedResult, MovePackageDir, PoolAddressArgs, ProfileOptions, + PromptOptions, RestOptions, TransactionOptions, TransactionSummary, + }, + utils::prompt_yes_with_override, + }, + move_tool::{FrameworkPackageArgs, IncludedArtifacts}, + CliCommand, CliResult, +}; +use aptos_cached_packages::aptos_stdlib; +use aptos_crypto::HashValue; +use aptos_framework::{BuildOptions, BuiltPackage, ReleasePackage}; +use aptos_logger::warn; +use aptos_rest_client::{ + aptos_api_types::{Address, HexEncodedBytes, U128, U64}, + Client, Transaction, +}; +use aptos_sdk::move_types::language_storage::CORE_CODE_ADDRESS; +use aptos_types::{ + account_address::AccountAddress, + event::EventHandle, + governance::VotingRecords, + stake_pool::StakePool, + state_store::table::TableHandle, + transaction::{Script, TransactionPayload}, +}; +use async_trait::async_trait; +use clap::Parser; +use move_core_types::transaction_argument::TransactionArgument; +use reqwest::Url; +use serde::{Deserialize, Serialize}; +use std::{ + collections::BTreeMap, + fmt::Formatter, + fs, + path::{Path, PathBuf}, +}; +use tempfile::TempDir; + +/// Tool for on-chain governance +/// +/// This tool allows voters that have stake to vote the ability to +/// propose changes to the chain, as well as vote and execute these +/// proposals. 
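Both `ShowProposal` and `VerifyProposal` below resolve a proposal the same way: read the `VotingForum` resource at `0x1`, then fetch a table item keyed by the proposal id. A condensed sketch of that lookup, assuming the workspace's `aptos_rest_client` and `aptos_types` crates and `anyhow` for brevity; the mirror structs repeat the `VotingForum`/`VotingEvents` definitions that appear later in this file, and the `Client` is supplied by the caller (in the CLI it comes from `RestOptions`).

```rust
use anyhow::Result;
use aptos_rest_client::Client;
use aptos_types::{
    account_address::AccountAddress, event::EventHandle, state_store::table::TableHandle,
};
use serde::Deserialize;

// Mirrors of the on-chain structs, matching the VotingForum / VotingEvents
// definitions further down in this file (BCS needs the full field layout).
#[derive(Deserialize)]
#[allow(dead_code)]
struct VotingForum {
    table_handle: TableHandle,
    events: VotingEvents,
    next_proposal_id: u64,
}

#[derive(Deserialize)]
#[allow(dead_code)]
struct VotingEvents {
    create_proposal_events: EventHandle,
    register_forum_events: EventHandle,
    resolve_proposal_events: EventHandle,
    vote_events: EventHandle,
}

/// Fetch the raw JSON for one governance proposal.
async fn fetch_proposal_json(client: &Client, proposal_id: u64) -> Result<serde_json::Value> {
    // The forum resource at 0x1 holds the handle of the table that stores proposals.
    let forum = client
        .get_account_resource_bcs::<VotingForum>(
            AccountAddress::ONE,
            "0x1::voting::VotingForum<0x1::governance_proposal::GovernanceProposal>",
        )
        .await?
        .into_inner();

    // Each proposal is a table item keyed by its u64 id.
    Ok(client
        .get_table_item(
            forum.table_handle.0,
            "u64",
            "0x1::voting::Proposal<0x1::governance_proposal::GovernanceProposal>",
            format!("{}", proposal_id),
        )
        .await?
        .into_inner())
}
```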
+#[derive(Parser)] +pub enum GovernanceTool { + Propose(SubmitProposal), + Vote(SubmitVote), + ShowProposal(ViewProposal), + ListProposals(ListProposals), + VerifyProposal(VerifyProposal), + ExecuteProposal(ExecuteProposal), + GenerateUpgradeProposal(GenerateUpgradeProposal), + ApproveExecutionHash(ApproveExecutionHash), +} + +impl GovernanceTool { + pub async fn execute(self) -> CliResult { + use GovernanceTool::*; + match self { + Propose(tool) => tool.execute_serialized().await, + Vote(tool) => tool.execute_serialized().await, + ExecuteProposal(tool) => tool.execute_serialized().await, + GenerateUpgradeProposal(tool) => tool.execute_serialized_success().await, + ShowProposal(tool) => tool.execute_serialized().await, + ListProposals(tool) => tool.execute_serialized().await, + VerifyProposal(tool) => tool.execute_serialized().await, + ApproveExecutionHash(tool) => tool.execute_serialized().await, + } + } +} + +/// View a known on-chain governance proposal +/// +/// This command will return the proposal requested as well as compute +/// the hash of the metadata to determine whether it was verified or not. +#[derive(Parser)] +pub struct ViewProposal { + /// The identifier of the onchain governance proposal + #[clap(long)] + proposal_id: u64, + + #[clap(flatten)] + rest_options: RestOptions, + #[clap(flatten)] + profile: ProfileOptions, +} + +#[async_trait] +impl CliCommand for ViewProposal { + fn command_name(&self) -> &'static str { + "ViewProposal" + } + + async fn execute(mut self) -> CliTypedResult { + // Get proposal + let client = self.rest_options.client(&self.profile)?; + let forum = client + .get_account_resource_bcs::( + AccountAddress::ONE, + "0x1::voting::VotingForum<0x1::governance_proposal::GovernanceProposal>", + ) + .await? + .into_inner(); + let voting_table = forum.table_handle.0; + + let proposal: Proposal = get_proposal(&client, voting_table, self.proposal_id) + .await? + .into(); + + let metadata_hash = proposal.metadata.get("metadata_hash").unwrap(); + let metadata_url = proposal.metadata.get("metadata_location").unwrap(); + + // Compute the hash and verify accordingly + let mut metadata_verified = false; + let mut actual_metadata_hash = "Unable to fetch metadata url".to_string(); + let mut actual_metadata = None; + if let Ok(url) = Url::parse(metadata_url) { + if let Ok(bytes) = get_metadata_from_url(&url).await { + let hash = HashValue::sha3_256_of(&bytes); + metadata_verified = metadata_hash == &hash.to_hex(); + actual_metadata_hash = hash.to_hex(); + if let Ok(metadata) = String::from_utf8(bytes) { + actual_metadata = Some(metadata); + } + } + } + + Ok(VerifiedProposal { + metadata_verified, + actual_metadata_hash, + actual_metadata, + proposal, + }) + } +} + +/// List the last 100 visible onchain proposals +/// +/// Note, if the full node you are talking to is pruning data, it may not have some of the +/// proposals show here +#[derive(Parser)] +pub struct ListProposals { + #[clap(flatten)] + rest_options: RestOptions, + #[clap(flatten)] + profile: ProfileOptions, +} + +#[async_trait] +impl CliCommand> for ListProposals { + fn command_name(&self) -> &'static str { + "ListProposals" + } + + async fn execute(mut self) -> CliTypedResult> { + // List out known proposals based on events + let client = self.rest_options.client(&self.profile)?; + + let events = client + .get_account_events_bcs( + AccountAddress::ONE, + "0x1::aptos_governance::GovernanceEvents", + "create_proposal_events", + None, + Some(100), + ) + .await? 
+ .into_inner(); + let mut proposals = vec![]; + + for event in &events { + match bcs::from_bytes::(event.event.event_data()) { + Ok(valid_event) => proposals.push(valid_event.into()), + Err(err) => { + eprintln!( + "Event: {:?} cannot be parsed as a proposal: {:?}", + event, err + ) + }, + } + } + + // TODO: Show more information about proposal? + Ok(proposals) + } +} + +/// Verify a proposal given the source code of the script +/// +/// The script's bytecode or source can be provided and it will +/// verify whether the hash matches the onchain hash +#[derive(Parser)] +pub struct VerifyProposal { + /// The id of the onchain proposal + #[clap(long)] + pub(crate) proposal_id: u64, + + #[clap(flatten)] + pub(crate) compile_proposal_args: CompileScriptFunction, + #[clap(flatten)] + pub(crate) rest_options: RestOptions, + #[clap(flatten)] + pub(crate) profile: ProfileOptions, + #[clap(flatten)] + pub(crate) prompt_options: PromptOptions, +} + +#[async_trait] +impl CliCommand for VerifyProposal { + fn command_name(&self) -> &'static str { + "VerifyProposal" + } + + async fn execute(mut self) -> CliTypedResult { + // Compile local first to get the hash + let (_, hash) = self + .compile_proposal_args + .compile("SubmitProposal", self.prompt_options)?; + + // Retrieve the onchain proposal + let client = self.rest_options.client(&self.profile)?; + let forum = client + .get_account_resource_bcs::( + AccountAddress::ONE, + "0x1::voting::VotingForum<0x1::governance_proposal::GovernanceProposal>", + ) + .await? + .into_inner(); + let voting_table = forum.table_handle.0; + + let proposal: Proposal = get_proposal(&client, voting_table, self.proposal_id) + .await? + .into(); + + // Compare the hashes + let computed_hash = hash.to_hex(); + let onchain_hash = proposal.execution_hash; + + Ok(VerifyProposalResponse { + verified: computed_hash == onchain_hash, + computed_hash, + onchain_hash, + }) + } +} + +async fn get_proposal( + client: &aptos_rest_client::Client, + voting_table: AccountAddress, + proposal_id: u64, +) -> CliTypedResult { + let json = client + .get_table_item( + voting_table, + "u64", + "0x1::voting::Proposal<0x1::governance_proposal::GovernanceProposal>", + format!("{}", proposal_id), + ) + .await? + .into_inner(); + serde_json::from_value(json) + .map_err(|err| CliError::CommandArgumentError(format!("Failed to parse proposal {}", err))) +} + +/// Submit a governance proposal +#[derive(Parser)] +pub struct SubmitProposal { + /// Location of the JSON metadata of the proposal + /// + /// If this location does not keep the metadata in the exact format, it will be less likely + /// that voters will approve this proposal, as they won't be able to verify it. + #[clap(long)] + pub(crate) metadata_url: Url, + + #[cfg(feature = "no-upload-proposal")] + /// A JSON file to be uploaded later at the metadata URL + /// + /// If this does not match properly, voters may choose to vote no. For real proposals, + /// it is better to already have it uploaded at the URL. 
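Since voters can only approve a proposal whose hosted metadata hashes to the value recorded on chain, it can help to sanity-check the hosted document before calling `SubmitProposal`. A minimal pre-flight sketch, assuming `reqwest`, `tokio`, `serde_json`, and `anyhow` plus the workspace's `aptos_crypto` crate; the URL is a placeholder and the `ProposalMetadata` mirror repeats the struct defined later in this file.

```rust
// Pre-flight check of hosted proposal metadata: fetch it, make sure it parses,
// and print the sha3-256 hex that should end up on chain as `metadata_hash`.
// The URL is a placeholder; `aptos_crypto` comes from this workspace.
use aptos_crypto::HashValue;
use serde::Deserialize;

#[derive(Deserialize)]
#[allow(dead_code)]
struct ProposalMetadata {
    title: String,
    description: String,
    source_code_url: String,
    discussion_url: String,
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let url = "https://example.com/proposal.json"; // placeholder location
    let bytes = reqwest::get(url).await?.bytes().await?.to_vec();

    // The CLI refuses metadata that is not valid JSON in this shape.
    let metadata: ProposalMetadata = serde_json::from_slice(&bytes)?;
    println!("title: {}", metadata.title);

    // This is the hash SubmitProposal sends along with the metadata URL.
    println!("metadata_hash: {}", HashValue::sha3_256_of(&bytes).to_hex());
    Ok(())
}
```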
+ #[clap(long)] + pub(crate) metadata_path: Option, + + #[clap(long)] + pub(crate) is_multi_step: bool, + + #[clap(flatten)] + pub(crate) txn_options: TransactionOptions, + #[clap(flatten)] + pub(crate) pool_address_args: PoolAddressArgs, + #[clap(flatten)] + pub(crate) compile_proposal_args: CompileScriptFunction, +} + +#[async_trait] +impl CliCommand for SubmitProposal { + fn command_name(&self) -> &'static str { + "SubmitProposal" + } + + async fn execute(mut self) -> CliTypedResult { + let (_bytecode, script_hash) = self + .compile_proposal_args + .compile("SubmitProposal", self.txn_options.prompt_options)?; + + // Validate the proposal metadata + let (metadata, metadata_hash) = self.get_metadata().await?; + + println!( + "{}\n\tMetadata Hash: {}\n\tScript Hash: {}", + metadata, metadata_hash, script_hash + ); + prompt_yes_with_override( + "Do you want to submit this proposal?", + self.txn_options.prompt_options, + )?; + + let txn: Transaction = if self.is_multi_step { + self.txn_options + .submit_transaction(aptos_stdlib::aptos_governance_create_proposal_v2( + self.pool_address_args.pool_address, + script_hash.to_vec(), + self.metadata_url.to_string().as_bytes().to_vec(), + metadata_hash.to_hex().as_bytes().to_vec(), + true, + )) + .await? + } else { + self.txn_options + .submit_transaction(aptos_stdlib::aptos_governance_create_proposal( + self.pool_address_args.pool_address, + script_hash.to_vec(), + self.metadata_url.to_string().as_bytes().to_vec(), + metadata_hash.to_hex().as_bytes().to_vec(), + )) + .await? + }; + let txn_summary = TransactionSummary::from(&txn); + if let Transaction::UserTransaction(inner) = txn { + // Find event with proposal id + let proposal_id = if let Some(event) = inner.events.into_iter().find(|event| { + event.typ.to_string().as_str() == "0x1::aptos_governance::CreateProposalEvent" + }) { + let data: CreateProposalEvent = + serde_json::from_value(event.data).map_err(|_| { + CliError::UnexpectedError( + "Failed to parse Proposal event to get ProposalId".to_string(), + ) + })?; + Some(data.proposal_id.0) + } else { + warn!("No proposal event found to find proposal id"); + None + }; + + return Ok(ProposalSubmissionSummary { + proposal_id, + transaction: txn_summary, + }); + } + Err(CliError::UnexpectedError( + "Unable to find parse proposal transaction output".to_string(), + )) + } +} + +impl SubmitProposal { + /// Retrieve metadata and validate it + async fn get_metadata(&self) -> CliTypedResult<(ProposalMetadata, HashValue)> { + #[cfg(feature = "no-upload-proposal")] + let bytes = if let Some(ref path) = self.metadata_path { + read_from_file(path)? + } else { + get_metadata_from_url(&self.metadata_url).await? 
+ }; + #[cfg(not(feature = "no-upload-proposal"))] + let bytes = get_metadata_from_url(&self.metadata_url).await?; + + let metadata: ProposalMetadata = serde_json::from_slice(&bytes).map_err(|err| { + CliError::CommandArgumentError(format!( + "Metadata is not in a proper JSON format: {}", + err + )) + })?; + Url::parse(&metadata.source_code_url).map_err(|err| { + CliError::CommandArgumentError(format!( + "Source code URL {} is invalid {}", + metadata.source_code_url, err + )) + })?; + Url::parse(&metadata.discussion_url).map_err(|err| { + CliError::CommandArgumentError(format!( + "Discussion URL {} is invalid {}", + metadata.discussion_url, err + )) + })?; + let metadata_hash = HashValue::sha3_256_of(&bytes); + Ok((metadata, metadata_hash)) + } +} + +/// Retrieve the Metadata from the given URL +async fn get_metadata_from_url(metadata_url: &Url) -> CliTypedResult> { + let client = reqwest::ClientBuilder::default() + .tls_built_in_root_certs(true) + .build() + .map_err(|err| CliError::UnexpectedError(format!("Failed to build HTTP client {}", err)))?; + client + .get(metadata_url.clone()) + .send() + .await + .map_err(|err| { + CliError::CommandArgumentError(format!( + "Failed to fetch metadata url {}: {}", + metadata_url, err + )) + })? + .bytes() + .await + .map(|b| b.to_vec()) + .map_err(|err| { + CliError::CommandArgumentError(format!( + "Failed to fetch metadata url {}: {}", + metadata_url, err + )) + }) +} + +#[derive(Debug, Deserialize, Serialize)] +struct CreateProposalEvent { + proposal_id: U64, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct ProposalSubmissionSummary { + proposal_id: Option, + #[serde(flatten)] + transaction: TransactionSummary, +} + +/// Submit a vote on a proposal +/// +/// Votes can only be given on proposals that are currently open for voting. You can vote +/// with `--yes` for a yes vote, and `--no` for a no vote. +#[derive(Parser)] +pub struct SubmitVote { + /// Id of the proposal to vote on + #[clap(long)] + pub(crate) proposal_id: u64, + + /// Vote to accept the proposal + #[clap(long, group = "vote")] + pub(crate) yes: bool, + + /// Vote to reject the proposal + #[clap(long, group = "vote")] + pub(crate) no: bool, + + /// Space separated list of pool addresses. 
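For reference, the `execute` implementation below performs a per-pool "already voted" check before submitting each vote. A condensed standalone version of that check, assuming the workspace's `aptos_rest_client`, `aptos_sdk`, and `aptos_types` crates plus `anyhow`, with `RecordKey` mirroring the `VotingRecord` key type defined later in this file.

```rust
use aptos_rest_client::Client;
use aptos_sdk::move_types::language_storage::CORE_CODE_ADDRESS;
use aptos_types::{account_address::AccountAddress, governance::VotingRecords};
use serde::Serialize;

// Mirrors the VotingRecord key type defined later in this file.
#[derive(Serialize)]
struct RecordKey {
    proposal_id: String,
    stake_pool: AccountAddress,
}

async fn already_voted(
    client: &Client,
    proposal_id: u64,
    stake_pool: AccountAddress,
) -> anyhow::Result<bool> {
    // The VotingRecords resource at 0x1 holds the handle of the votes table.
    let votes_table = client
        .get_account_resource_bcs::<VotingRecords>(
            CORE_CODE_ADDRESS,
            "0x1::aptos_governance::VotingRecords",
        )
        .await?
        .into_inner()
        .votes;

    // The stored bool says whether this pool has voted; a missing entry
    // (or a failed lookup) counts as not voted, as in `execute` below.
    Ok(client
        .get_table_item(
            votes_table,
            "0x1::aptos_governance::RecordKey",
            "bool",
            RecordKey {
                proposal_id: proposal_id.to_string(),
                stake_pool,
            },
        )
        .await
        .ok()
        .and_then(|resp| resp.into_inner().as_bool())
        .unwrap_or(false))
}
```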
+ #[clap(long, multiple_values = true, parse(try_from_str=crate::common::types::load_account_arg))] + pub(crate) pool_addresses: Vec, + + #[clap(flatten)] + pub(crate) txn_options: TransactionOptions, +} + +#[async_trait] +impl CliCommand> for SubmitVote { + fn command_name(&self) -> &'static str { + "SubmitVote" + } + + async fn execute(mut self) -> CliTypedResult> { + let (vote_str, vote) = match (self.yes, self.no) { + (true, false) => ("Yes", true), + (false, true) => ("No", false), + (_, _) => { + return Err(CliError::CommandArgumentError( + "Must choose only either --yes or --no".to_string(), + )); + }, + }; + + let client: &Client = &self + .txn_options + .rest_options + .client(&self.txn_options.profile_options)?; + let proposal_id = self.proposal_id; + let voting_records = client + .get_account_resource_bcs::( + CORE_CODE_ADDRESS, + "0x1::aptos_governance::VotingRecords", + ) + .await + .unwrap() + .into_inner() + .votes; + + let mut summaries: Vec = vec![]; + for pool_address in self.pool_addresses { + let voting_record = client + .get_table_item( + voting_records, + "0x1::aptos_governance::RecordKey", + "bool", + VotingRecord { + proposal_id: proposal_id.to_string(), + stake_pool: pool_address, + }, + ) + .await; + let voted = if let Ok(voting_record) = voting_record { + voting_record.into_inner().as_bool().unwrap() + } else { + false + }; + if voted { + println!("Stake pool {} already voted", pool_address); + continue; + } + + let stake_pool = client + .get_account_resource_bcs::(pool_address, "0x1::stake::StakePool") + .await? + .into_inner(); + let voting_power = stake_pool.get_governance_voting_power(); + + prompt_yes_with_override( + &format!( + "Vote {} with voting power = {} from stake pool {}?", + vote_str, voting_power, pool_address + ), + self.txn_options.prompt_options, + )?; + + summaries.push( + self.txn_options + .submit_transaction(aptos_stdlib::aptos_governance_vote( + pool_address, + proposal_id, + vote, + )) + .await + .map(TransactionSummary::from)?, + ); + } + Ok(summaries) + } +} + +/// Submit a transaction to approve a proposal's script hash to bypass the transaction size limit. +/// This is needed for upgrading large packages such as aptos-framework. +#[derive(Parser)] +pub struct ApproveExecutionHash { + /// Id of the proposal to vote on + #[clap(long)] + pub(crate) proposal_id: u64, + + #[clap(flatten)] + pub(crate) txn_options: TransactionOptions, +} + +#[async_trait] +impl CliCommand for ApproveExecutionHash { + fn command_name(&self) -> &'static str { + "ApproveExecutionHash" + } + + async fn execute(mut self) -> CliTypedResult { + Ok(self + .txn_options + .submit_transaction( + aptos_stdlib::aptos_governance_add_approved_script_hash_script(self.proposal_id), + ) + .await + .map(TransactionSummary::from)?) 
+ } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct VotingRecord { + proposal_id: String, + stake_pool: AccountAddress, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct ProposalMetadata { + title: String, + description: String, + source_code_url: String, + discussion_url: String, +} + +impl std::fmt::Display for ProposalMetadata { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!( + f, + "Proposal:\n\tTitle:{}\n\tDescription:{}\n\tSource code URL:{}\n\tDiscussion URL:{}", + self.title, self.description, self.source_code_url, self.discussion_url + ) + } +} + +fn compile_in_temp_dir( + script_name: &str, + script_path: &Path, + framework_package_args: &FrameworkPackageArgs, + prompt_options: PromptOptions, + bytecode_version: Option, +) -> CliTypedResult<(Vec, HashValue)> { + // Make a temporary directory for compilation + let temp_dir = TempDir::new().map_err(|err| { + CliError::UnexpectedError(format!("Failed to create temporary directory {}", err)) + })?; + + // Initialize a move directory + let package_dir = temp_dir.path(); + framework_package_args.init_move_dir( + package_dir, + script_name, + BTreeMap::new(), + prompt_options, + )?; + + // Insert the new script + let sources_dir = package_dir.join("sources"); + let new_script_path = if let Some(file_name) = script_path.file_name() { + sources_dir.join(file_name) + } else { + // If for some reason we can't get the move file + sources_dir.join("script.move") + }; + fs::copy(script_path, new_script_path.as_path()).map_err(|err| { + CliError::IO( + format!( + "Failed to copy {} to {}", + script_path.display(), + new_script_path.display() + ), + err, + ) + })?; + + // Compile the script + compile_script( + framework_package_args.skip_fetch_latest_git_deps, + package_dir, + bytecode_version, + ) +} + +fn compile_script( + skip_fetch_latest_git_deps: bool, + package_dir: &Path, + bytecode_version: Option, +) -> CliTypedResult<(Vec, HashValue)> { + let build_options = BuildOptions { + with_srcs: false, + with_abis: false, + with_source_maps: false, + with_error_map: false, + skip_fetch_latest_git_deps, + bytecode_version, + ..BuildOptions::default() + }; + + let pack = BuiltPackage::build(package_dir.to_path_buf(), build_options) + .map_err(|e| CliError::MoveCompilationError(format!("{:#}", e)))?; + + let scripts_count = pack.script_count(); + + if scripts_count != 1 { + return Err(CliError::UnexpectedError(format!( + "Only one script can be prepared a time. Make sure one and only one script file \ + is included in the Move package. Found {} scripts.", + scripts_count + ))); + } + + let bytes = pack.extract_script_code().pop().unwrap(); + let hash = HashValue::sha3_256_of(bytes.as_slice()); + Ok((bytes, hash)) +} + +/// Execute a proposal that has passed voting requirements +#[derive(Parser)] +pub struct ExecuteProposal { + /// Proposal Id being executed + #[clap(long)] + pub(crate) proposal_id: u64, + #[clap(flatten)] + pub(crate) txn_options: TransactionOptions, + #[clap(flatten)] + pub(crate) compile_proposal_args: CompileScriptFunction, +} + +#[async_trait] +impl CliCommand for ExecuteProposal { + fn command_name(&self) -> &'static str { + "ExecuteProposal" + } + + async fn execute(mut self) -> CliTypedResult { + let (bytecode, _script_hash) = self + .compile_proposal_args + .compile("ExecuteProposal", self.txn_options.prompt_options)?; + // TODO: Check hash so we don't do a failed roundtrip? 
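The TODO above could be resolved with a check along these lines: the execution hash recorded for a proposal is the sha3-256 of the script bytecode, so comparing hex digests locally avoids a doomed round trip. A small sketch, assuming the workspace's `aptos_crypto` crate and that the bytecode and the on-chain hash (as hex) are already in hand.

```rust
// Sketch of the missing pre-submission check: the execution hash stored on
// chain is the sha3-256 of the script bytecode, so a mismatch here means the
// transaction would fail on chain anyway. Inputs are supplied by the caller.
use aptos_crypto::HashValue;

fn script_matches_proposal(bytecode: &[u8], onchain_execution_hash_hex: &str) -> bool {
    HashValue::sha3_256_of(bytecode).to_hex() == onchain_execution_hash_hex
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn detects_mismatch() {
        // Any byte change produces a different execution hash.
        let original = HashValue::sha3_256_of(b"script").to_hex();
        assert!(script_matches_proposal(b"script", &original));
        assert!(!script_matches_proposal(b"script-v2", &original));
    }
}
```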
+ + let args = vec![TransactionArgument::U64(self.proposal_id)]; + let txn = TransactionPayload::Script(Script::new(bytecode, vec![], args)); + + self.txn_options + .submit_transaction(txn) + .await + .map(TransactionSummary::from) + } +} + +/// Compile a specified script. +#[derive(Parser)] +pub struct CompileScriptFunction { + /// Path to the Move script for the proposal + #[clap(long, group = "script", parse(from_os_str))] + pub script_path: Option, + + /// Path to the Move script for the proposal + #[clap(long, group = "script", parse(from_os_str))] + pub compiled_script_path: Option, + + #[clap(flatten)] + pub(crate) framework_package_args: FrameworkPackageArgs, + + #[clap(long)] + pub(crate) bytecode_version: Option, +} + +impl CompileScriptFunction { + pub(crate) fn compile( + &self, + script_name: &str, + prompt_options: PromptOptions, + ) -> CliTypedResult<(Vec, HashValue)> { + if let Some(compiled_script_path) = &self.compiled_script_path { + let bytes = std::fs::read(compiled_script_path).map_err(|e| { + CliError::IO(format!("Unable to read {:?}", self.compiled_script_path), e) + })?; + let hash = HashValue::sha3_256_of(bytes.as_slice()); + return Ok((bytes, hash)); + } + + // Check script file + let script_path = self + .script_path + .as_ref() + .ok_or_else(|| { + CliError::CommandArgumentError( + "Must choose either --compiled-script-path or --script-path".to_string(), + ) + })? + .as_path(); + if !script_path.exists() { + return Err(CliError::CommandArgumentError(format!( + "{} does not exist", + script_path.display() + ))); + } else if script_path.is_dir() { + return Err(CliError::CommandArgumentError(format!( + "{} is a directory", + script_path.display() + ))); + } + + // Compile script + compile_in_temp_dir( + script_name, + script_path, + &self.framework_package_args, + prompt_options, + self.bytecode_version, + ) + } +} + +/// Generates a package upgrade proposal script. +#[derive(Parser)] +pub struct GenerateUpgradeProposal { + /// Address of the account which the proposal addresses. + #[clap(long, parse(try_from_str = crate::common::types::load_account_arg))] + pub(crate) account: AccountAddress, + + /// Where to store the generated proposal + #[clap(long, parse(from_os_str), default_value = "proposal.move")] + pub(crate) output: PathBuf, + + /// What artifacts to include in the package. This can be one of `none`, `sparse`, and + /// `all`. `none` is the most compact form and does not allow to reconstruct a source + /// package from chain; `sparse` is the minimal set of artifacts needed to reconstruct + /// a source package; `all` includes all available artifacts. The choice of included + /// artifacts heavily influences the size and therefore gas cost of publishing: `none` + /// is the size of bytecode alone; `sparse` is roughly 2 times as much; and `all` 3-4 + /// as much. + #[clap(long, default_value_t = IncludedArtifacts::Sparse)] + pub(crate) included_artifacts: IncludedArtifacts, + + /// Generate the script for mainnet governance proposal by default or generate the upgrade script for testnet. 
+ #[clap(long)] + pub(crate) testnet: bool, + + #[clap(long, default_value = "")] + pub(crate) next_execution_hash: String, + + #[clap(flatten)] + pub(crate) move_options: MovePackageDir, +} + +#[async_trait] +impl CliCommand<()> for GenerateUpgradeProposal { + fn command_name(&self) -> &'static str { + "GenerateUpgradeProposal" + } + + async fn execute(self) -> CliTypedResult<()> { + let GenerateUpgradeProposal { + move_options, + account, + included_artifacts, + output, + testnet, + next_execution_hash, + } = self; + let package_path = move_options.get_package_path()?; + let options = included_artifacts.build_options( + move_options.skip_fetch_latest_git_deps, + move_options.named_addresses(), + move_options.bytecode_version, + ); + let package = BuiltPackage::build(package_path, options)?; + let release = ReleasePackage::new(package)?; + + // If we're generating a single-step proposal on testnet + if testnet && next_execution_hash.is_empty() { + release.generate_script_proposal_testnet(account, output)?; + // If we're generating a single-step proposal on mainnet + } else if next_execution_hash.is_empty() { + release.generate_script_proposal(account, output)?; + // If we're generating a multi-step proposal + } else { + let next_execution_hash_bytes = hex::decode(next_execution_hash)?; + release.generate_script_proposal_multi_step( + account, + output, + next_execution_hash_bytes, + )?; + }; + Ok(()) + } +} + +/// Generate execution hash for a specified script. +#[derive(Parser)] +pub struct GenerateExecutionHash { + #[clap(long)] + pub script_path: Option, +} + +impl GenerateExecutionHash { + pub fn generate_hash(&self) -> CliTypedResult<(Vec, HashValue)> { + CompileScriptFunction { + script_path: self.script_path.clone(), + compiled_script_path: None, + framework_package_args: FrameworkPackageArgs { + framework_git_rev: None, + framework_local_dir: Option::from({ + let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR")); + path.pop(); + path.pop(); + path.join("aptos-move") + .join("framework") + .join("aptos-framework") + .canonicalize() + .map_err(|err| { + CliError::IO( + format!("Failed to canonicalize aptos framework path: {:?}", path), + err, + ) + })? 
+ }), + skip_fetch_latest_git_deps: false, + }, + bytecode_version: None, + } + .compile("execution_hash", PromptOptions::yes()) + } +} + +/// Response for `verify proposal` +#[derive(Serialize, Deserialize, Debug)] +pub struct VerifyProposalResponse { + pub verified: bool, + pub computed_hash: String, + pub onchain_hash: String, +} + +/// Voting forum onchain type +/// +/// TODO: Move to a shared location +#[derive(Serialize, Deserialize, Debug)] +pub struct VotingForum { + table_handle: TableHandle, + events: VotingEvents, + next_proposal_id: u64, +} + +#[derive(Serialize, Deserialize, Debug)] +pub struct VotingEvents { + create_proposal_events: EventHandle, + register_forum_events: EventHandle, + resolve_proposal_events: EventHandle, + vote_events: EventHandle, +} + +/// Summary of proposal from the listing events for `ListProposals` +#[derive(Serialize, Deserialize, Debug)] +struct ProposalSummary { + proposer: AccountAddress, + stake_pool: AccountAddress, + proposal_id: u64, + execution_hash: String, + proposal_metadata: BTreeMap, +} + +impl From for ProposalSummary { + fn from(event: CreateProposalFullEvent) -> Self { + let proposal_metadata = event + .proposal_metadata + .into_iter() + .map(|(key, value)| (key, String::from_utf8(value).unwrap())) + .collect(); + ProposalSummary { + proposer: event.proposer, + stake_pool: event.stake_pool, + proposal_id: event.proposal_id, + execution_hash: hex::encode(event.execution_hash), + proposal_metadata, + } + } +} + +#[derive(Deserialize)] +struct CreateProposalFullEvent { + proposer: AccountAddress, + stake_pool: AccountAddress, + proposal_id: u64, + execution_hash: Vec, + proposal_metadata: Vec<(String, Vec)>, +} + +/// A proposal and the verified information about it +#[derive(Serialize, Deserialize, Debug)] +pub struct VerifiedProposal { + metadata_verified: bool, + actual_metadata_hash: String, + actual_metadata: Option, + proposal: Proposal, +} + +/// A reformatted type that has human readable version of the proposal onchain +#[derive(Serialize, Deserialize, Debug)] +pub struct Proposal { + proposer: AccountAddress, + metadata: BTreeMap, + creation_time_secs: u64, + execution_hash: String, + min_vote_threshold: u128, + expiration_secs: u64, + early_resolution_vote_threshold: Option, + yes_votes: u128, + no_votes: u128, + is_resolved: bool, + resolution_time_secs: u64, +} + +impl From for Proposal { + fn from(proposal: JsonProposal) -> Self { + let metadata = proposal + .metadata + .data + .into_iter() + .map(|pair| { + let value = match pair.key.as_str() { + "metadata_hash" => String::from_utf8(pair.value.0) + .unwrap_or_else(|_| "Failed to parse utf8".to_string()), + "metadata_location" => String::from_utf8(pair.value.0) + .unwrap_or_else(|_| "Failed to parse utf8".to_string()), + "RESOLVABLE_TIME_METADATA_KEY" => bcs::from_bytes::(pair.value.inner()) + .map(|inner| inner.to_string()) + .unwrap_or_else(|_| "Failed to parse u64".to_string()), + _ => pair.value.to_string(), + }; + (pair.key, value) + }) + .collect(); + + Proposal { + proposer: proposal.proposer.into(), + metadata, + creation_time_secs: proposal.creation_time_secs.into(), + execution_hash: format!("{:x}", proposal.execution_hash), + min_vote_threshold: proposal.min_vote_threshold.into(), + expiration_secs: proposal.expiration_secs.into(), + early_resolution_vote_threshold: proposal + .early_resolution_vote_threshold + .vec + .first() + .map(|inner| inner.0), + yes_votes: proposal.yes_votes.into(), + no_votes: proposal.no_votes.into(), + is_resolved: proposal.is_resolved, 
+ resolution_time_secs: proposal.resolution_time_secs.into(), + } + } +} + +/// An ugly JSON parsing version for from the JSON API +#[derive(Serialize, Deserialize, Debug)] +struct JsonProposal { + creation_time_secs: U64, + early_resolution_vote_threshold: JsonEarlyResolutionThreshold, + execution_hash: aptos_rest_client::aptos_api_types::HashValue, + expiration_secs: U64, + is_resolved: bool, + min_vote_threshold: U128, + no_votes: U128, + resolution_time_secs: U64, + yes_votes: U128, + proposer: Address, + metadata: JsonMetadata, +} + +#[derive(Serialize, Deserialize, Debug)] +struct JsonEarlyResolutionThreshold { + vec: Vec, +} + +#[derive(Serialize, Deserialize, Debug)] +struct JsonMetadata { + data: Vec, +} + +#[derive(Serialize, Deserialize, Debug)] +struct JsonMetadataPair { + key: String, + value: HexEncodedBytes, +} diff --git a/m1/movement/src/lib.rs b/m1/movement/src/lib.rs new file mode 100644 index 00000000..ee7e8a28 --- /dev/null +++ b/m1/movement/src/lib.rs @@ -0,0 +1,94 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +#![deny(unsafe_code)] + +pub mod account; +pub mod common; +pub mod config; +pub mod ffi; +pub mod genesis; +pub mod governance; +pub mod move_tool; +pub mod node; +pub mod op; +pub mod stake; +#[cfg(any(test, feature = "fuzzing"))] +pub mod test; +pub mod update; +pub mod faucet; + +use crate::common::{ + types::{CliCommand, CliResult, CliTypedResult}, + utils::cli_build_information, +}; +use async_trait::async_trait; +use clap::Parser; +use std::collections::BTreeMap; + +/// Command Line Interface (CLI) for developing and interacting with the Aptos blockchain +#[derive(Parser)] +#[clap(name = "movement", author, version, propagate_version = true)] +pub enum Tool { + #[clap(subcommand)] + Account(account::AccountTool), + #[clap(subcommand)] + Config(config::ConfigTool), + #[clap(subcommand)] + Genesis(genesis::GenesisTool), + #[clap(subcommand)] + Governance(governance::GovernanceTool), + Info(InfoTool), + Init(common::init::InitTool), + #[clap(subcommand)] + Key(op::key::KeyTool), + #[clap(subcommand)] + Move(move_tool::MoveTool), + #[clap(subcommand)] + Multisig(account::MultisigAccountTool), + #[clap(subcommand)] + Node(node::NodeTool), + #[clap(subcommand)] + Stake(stake::StakeTool), + Update(update::UpdateTool), + Faucet(faucet::FaucetTool), +} + +impl Tool { + pub async fn execute(self) -> CliResult { + use Tool::*; + match self { + Account(tool) => tool.execute().await, + Config(tool) => tool.execute().await, + Genesis(tool) => tool.execute().await, + Governance(tool) => tool.execute().await, + Info(tool) => tool.execute_serialized().await, + // TODO: Replace entirely with config init + Init(tool) => tool.execute_serialized_success().await, + Key(tool) => tool.execute().await, + Move(tool) => tool.execute().await, + Multisig(tool) => tool.execute().await, + Node(tool) => tool.execute().await, + Stake(tool) => tool.execute().await, + Update(tool) => tool.execute_serialized().await, + Faucet(tool) => tool.execute_serialized().await, + } + } +} + +/// Show build information about the CLI +/// +/// This is useful for debugging as well as determining what versions are compatible with the CLI +#[derive(Parser)] +pub struct InfoTool {} + +#[async_trait] +impl CliCommand> for InfoTool { + fn command_name(&self) -> &'static str { + "GetCLIInfo" + } + + async fn execute(self) -> CliTypedResult> { + Ok(cli_build_information()) + } +} diff --git a/m1/movement/src/main.rs b/m1/movement/src/main.rs new file mode 100644 index 
00000000..faf840e2 --- /dev/null +++ b/m1/movement/src/main.rs @@ -0,0 +1,31 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +//! Aptos is a one stop tool for operations, debugging, and other operations with the blockchain + +#![forbid(unsafe_code)] + +#[cfg(unix)] +#[global_allocator] +static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc; + +use movement::{move_tool, Tool}; +use clap::Parser; +use std::process::exit; + +#[tokio::main] +async fn main() { + // Register hooks + move_tool::register_package_hooks(); + // Run the corresponding tools + let result = Tool::parse().execute().await; + + // At this point, we'll want to print and determine whether to exit for an error code + match result { + Ok(inner) => println!("{}", inner), + Err(inner) => { + println!("{}", inner); + exit(1); + }, + } +} diff --git a/m1/movement/src/move_tool/aptos_debug_natives.rs b/m1/movement/src/move_tool/aptos_debug_natives.rs new file mode 100644 index 00000000..7abcb46f --- /dev/null +++ b/m1/movement/src/move_tool/aptos_debug_natives.rs @@ -0,0 +1,26 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use aptos_gas::{AbstractValueSizeGasParameters, NativeGasParameters, LATEST_GAS_FEATURE_VERSION}; +use aptos_types::on_chain_config::{Features, TimedFeatures}; +use aptos_vm::natives; +use move_vm_runtime::native_functions::NativeFunctionTable; +use std::sync::Arc; + +// move_stdlib has the testing feature enabled to include debug native functions +pub fn aptos_debug_natives( + gas_parameters: NativeGasParameters, + abs_val_size_gas_params: AbstractValueSizeGasParameters, +) -> NativeFunctionTable { + // As a side effect, also configure for unit testing + natives::configure_for_unit_test(); + // Return all natives -- build with the 'testing' feature, therefore containing + // debug related functions. + natives::aptos_natives( + gas_parameters, + abs_val_size_gas_params, + LATEST_GAS_FEATURE_VERSION, + TimedFeatures::enable_all(), + Arc::new(Features::default()), + ) +} diff --git a/m1/movement/src/move_tool/aptos_dep_example/README.md b/m1/movement/src/move_tool/aptos_dep_example/README.md new file mode 100644 index 00000000..abfa0f99 --- /dev/null +++ b/m1/movement/src/move_tool/aptos_dep_example/README.md @@ -0,0 +1,24 @@ +This is a small example of using the new `aptos` dependency. This shall be removed once we have +documentation/tests. 
+ +`pack2` contains a package which is used by `pack1` as follows: + +``` +[dependencies] +Pack2 = { aptos = "http://localhost:8080", address = "default" } +``` + +To see it working: + +```shell +# Start a node with an account +aptos node run-local-testnet --with-faucet & +aptos account create --account default --use-faucet +# Compile and publish pack2 +cd pack2 +aptos move compile --named-addresses project=default +aptos move publish --named-addresses project=default +# Compile pack1 agains the published pack2 +cd ../pack1 +aptos move compile --named-addresses project=default +``` \ No newline at end of file diff --git a/m1/movement/src/move_tool/aptos_dep_example/pack1/Move.toml b/m1/movement/src/move_tool/aptos_dep_example/pack1/Move.toml new file mode 100644 index 00000000..dab5d3fe --- /dev/null +++ b/m1/movement/src/move_tool/aptos_dep_example/pack1/Move.toml @@ -0,0 +1,6 @@ +[package] +name = "Pack1" +version = "0.0.0" + +[dependencies] +Pack2 = { aptos = "http://localhost:8080", address = "default" } diff --git a/m1/movement/src/move_tool/aptos_dep_example/pack1/sources/hello.move b/m1/movement/src/move_tool/aptos_dep_example/pack1/sources/hello.move new file mode 100644 index 00000000..fe091952 --- /dev/null +++ b/m1/movement/src/move_tool/aptos_dep_example/pack1/sources/hello.move @@ -0,0 +1,7 @@ +module project::test { + use project::m; + + public entry fun test(_sender: &signer) { + assert!(m::add(1, 2) == 1 + 2, 1); + } +} diff --git a/m1/movement/src/move_tool/aptos_dep_example/pack2/Move.toml b/m1/movement/src/move_tool/aptos_dep_example/pack2/Move.toml new file mode 100644 index 00000000..c5e6c7d0 --- /dev/null +++ b/m1/movement/src/move_tool/aptos_dep_example/pack2/Move.toml @@ -0,0 +1,3 @@ +[package] +name = "Pack2" +version = "0.0.0" diff --git a/m1/movement/src/move_tool/aptos_dep_example/pack2/sources/m.move b/m1/movement/src/move_tool/aptos_dep_example/pack2/sources/m.move new file mode 100644 index 00000000..ade22fad --- /dev/null +++ b/m1/movement/src/move_tool/aptos_dep_example/pack2/sources/m.move @@ -0,0 +1,3 @@ +module project::m { + public fun add(x: u64, y: u64): u64 { x + y } +} diff --git a/m1/movement/src/move_tool/coverage.rs b/m1/movement/src/move_tool/coverage.rs new file mode 100644 index 00000000..78ffdb02 --- /dev/null +++ b/m1/movement/src/move_tool/coverage.rs @@ -0,0 +1,184 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use crate::common::types::{CliCommand, CliError, CliResult, CliTypedResult, MovePackageDir}; +use async_trait::async_trait; +use clap::{Parser, Subcommand}; +use move_compiler::compiled_unit::{CompiledUnit, NamedCompiledModule}; +use move_coverage::{ + coverage_map::CoverageMap, format_csv_summary, format_human_summary, + source_coverage::SourceCoverageBuilder, summary::summarize_inst_cov, +}; +use move_disassembler::disassembler::Disassembler; +use move_package::{compilation::compiled_package::CompiledPackage, BuildConfig}; + +/// Display a coverage summary for all modules in a package +/// +#[derive(Debug, Parser)] +pub struct SummaryCoverage { + /// Display function coverage summaries + /// + /// When provided, it will include coverage on a function level + #[clap(long)] + pub summarize_functions: bool, + /// Output CSV data of coverage + #[clap(long = "csv")] + pub output_csv: bool, + /// A filter string to determine which unit tests to compute coverage on + #[clap(long, short)] + pub filter: Option, + #[clap(flatten)] + pub move_options: MovePackageDir, +} + +impl SummaryCoverage { + pub fn 
coverage(self) -> CliTypedResult<()> { + let (coverage_map, package) = compile_coverage(self.move_options)?; + let modules: Vec<_> = package + .root_modules() + .filter_map(|unit| { + let mut retain = true; + if let Some(filter_str) = &self.filter { + if !&unit.unit.name().as_str().contains(filter_str.as_str()) { + retain = false; + } + } + match &unit.unit { + CompiledUnit::Module(NamedCompiledModule { module, .. }) if retain => { + Some(module.clone()) + }, + _ => None, + } + }) + .collect(); + let coverage_map = coverage_map.to_unified_exec_map(); + if self.output_csv { + format_csv_summary( + modules.as_slice(), + &coverage_map, + summarize_inst_cov, + &mut std::io::stdout(), + ) + } else { + format_human_summary( + modules.as_slice(), + &coverage_map, + summarize_inst_cov, + &mut std::io::stdout(), + self.summarize_functions, + ) + } + Ok(()) + } +} + +#[async_trait] +impl CliCommand<()> for SummaryCoverage { + fn command_name(&self) -> &'static str { + "SummaryCoverage" + } + + async fn execute(self) -> CliTypedResult<()> { + self.coverage() + } +} + +/// Display coverage information about the module against source code +#[derive(Debug, Parser)] +pub struct SourceCoverage { + #[clap(long = "module")] + pub module_name: String, + #[clap(flatten)] + pub move_options: MovePackageDir, +} + +#[async_trait] +impl CliCommand<()> for SourceCoverage { + fn command_name(&self) -> &'static str { + "SourceCoverage" + } + + async fn execute(self) -> CliTypedResult<()> { + let (coverage_map, package) = compile_coverage(self.move_options)?; + let unit = package.get_module_by_name_from_root(&self.module_name)?; + let source_path = &unit.source_path; + let (module, source_map) = match &unit.unit { + CompiledUnit::Module(NamedCompiledModule { + module, source_map, .. 
+ }) => (module, source_map), + _ => panic!("Should all be modules"), + }; + let source_coverage = SourceCoverageBuilder::new(module, &coverage_map, source_map); + source_coverage + .compute_source_coverage(source_path) + .output_source_coverage(&mut std::io::stdout()) + .map_err(|err| CliError::UnexpectedError(format!("Failed to get coverage {}", err))) + } +} + +/// Display coverage information about the module against disassembled bytecode +#[derive(Debug, Parser)] +pub struct BytecodeCoverage { + #[clap(long = "module")] + pub module_name: String, + #[clap(flatten)] + pub move_options: MovePackageDir, +} + +#[async_trait] +impl CliCommand<()> for BytecodeCoverage { + fn command_name(&self) -> &'static str { + "BytecodeCoverage" + } + + async fn execute(self) -> CliTypedResult<()> { + let (coverage_map, package) = compile_coverage(self.move_options)?; + let unit = package.get_module_by_name_from_root(&self.module_name)?; + let mut disassembler = Disassembler::from_unit(&unit.unit); + disassembler.add_coverage_map(coverage_map.to_unified_exec_map()); + println!("{}", disassembler.disassemble()?); + Ok(()) + } +} + +fn compile_coverage( + move_options: MovePackageDir, +) -> CliTypedResult<(CoverageMap, CompiledPackage)> { + let config = BuildConfig { + additional_named_addresses: move_options.named_addresses(), + test_mode: false, + install_dir: move_options.output_dir.clone(), + ..Default::default() + }; + let path = move_options.get_package_path()?; + let coverage_map = + CoverageMap::from_binary_file(path.join(".coverage_map.mvcov")).map_err(|err| { + CliError::UnexpectedError(format!("Failed to retrieve coverage map {}", err)) + })?; + let package = config + .compile_package(path.as_path(), &mut Vec::new()) + .map_err(|err| CliError::MoveCompilationError(err.to_string()))?; + + Ok((coverage_map, package)) +} + +/// Computes coverage for a package +/// +/// Computes coverage on a previous unit test run for a package. 
Coverage input must +/// first be built with `aptos move test --coverage` +#[derive(Subcommand)] +pub enum CoveragePackage { + Summary(SummaryCoverage), + Source(SourceCoverage), + Bytecode(BytecodeCoverage), +} + +impl CoveragePackage { + pub async fn execute(self) -> CliResult { + match self { + Self::Summary(tool) => tool.execute_serialized_success().await, + Self::Source(tool) => tool.execute_serialized_success().await, + Self::Bytecode(tool) => tool.execute_serialized_success().await, + } + } +} diff --git a/m1/movement/src/move_tool/disassembler.rs b/m1/movement/src/move_tool/disassembler.rs new file mode 100644 index 00000000..12e7fa62 --- /dev/null +++ b/m1/movement/src/move_tool/disassembler.rs @@ -0,0 +1,148 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use crate::common::{ + types::{CliCommand, CliError, CliTypedResult, PromptOptions}, + utils::{ + check_if_file_exists, create_dir_if_not_exist, dir_default_to_current, read_from_file, + write_to_user_only_file, + }, +}; +use anyhow::Context; +use async_trait::async_trait; +use clap::Parser; +use move_binary_format::{ + binary_views::BinaryIndexedView, file_format::CompiledScript, CompiledModule, +}; +use move_bytecode_source_map::{mapping::SourceMapping, utils::source_map_from_file}; +use move_command_line_common::files::{ + MOVE_COMPILED_EXTENSION, MOVE_EXTENSION, SOURCE_MAP_EXTENSION, +}; +use move_coverage::coverage_map::CoverageMap; +use move_disassembler::disassembler::{Disassembler, DisassemblerOptions}; +use move_ir_types::location::Spanned; +use std::{fs, path::PathBuf}; + +const DISASSEMBLED_CODE_FILE: &str = "disassembled-code.yaml"; + +/// Disassemble the Move bytecode pointed to +/// +/// For example, if you want to disassemble on chain module: +/// 1. Download the package - aptos move download +/// 2. Compile the package - aptos move compile +/// 3. Cd to package and disassemble - aptos move disassemble --bytecode-path ./test.mv +#[derive(Debug, Parser)] +pub struct Disassemble { + /// Treat input file as a script (default is to treat file as a module) + #[clap(long)] + pub is_script: bool, + + /// The path to the bytecode file to disassemble; + /// + /// let's call it file.mv. We assume that two other files reside under the same directory: + /// a source map file.mvsm (possibly) and the Move source code file.move. + #[clap(long)] + pub bytecode_path: PathBuf, + + /// (Optional) Path to a coverage file for the VM in order to print trace information in the + /// disassembled output. 
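Outside the CLI, the disassembly this flag feeds into reduces to a handful of calls from the move crates. A condensed sketch for the module case (scripts differ only in the deserializer), assuming the `move-*` crates this package already depends on plus `anyhow`; the coverage path is optional, mirroring `--code-coverage-path`.

```rust
// Condensed, programmatic version of what this subcommand does for a module.
use anyhow::Context;
use move_binary_format::{binary_views::BinaryIndexedView, CompiledModule};
use move_bytecode_source_map::mapping::SourceMapping;
use move_coverage::coverage_map::CoverageMap;
use move_disassembler::disassembler::{Disassembler, DisassemblerOptions};
use move_ir_types::location::Spanned;
use std::path::Path;

fn disassemble_module(bytecode: &[u8], coverage_path: Option<&Path>) -> anyhow::Result<String> {
    let module = CompiledModule::deserialize(bytecode)
        .context("Module blob can't be deserialized")?;
    let view = BinaryIndexedView::Module(&module);

    // Without a real source map, fall back to a dummy location mapping.
    let no_loc = Spanned::unsafe_no_loc(()).loc;
    let mapping = SourceMapping::new_from_view(view, no_loc)
        .context("Unable to build dummy source mapping")?;

    let mut disassembler = Disassembler::new(mapping, DisassemblerOptions {
        print_code: true,
        only_externally_visible: false,
        print_basic_blocks: true,
        print_locals: true,
    });

    // Optionally overlay an execution coverage map, as --code-coverage-path does.
    if let Some(path) = coverage_path {
        let map = CoverageMap::from_binary_file(path)
            .map_err(|err| anyhow::anyhow!("Unable to read coverage map: {}", err))?;
        disassembler.add_coverage_map(map.to_unified_exec_map());
    }

    disassembler
        .disassemble()
        .map_err(|err| anyhow::anyhow!("Unable to disassemble: {:#}", err))
}
```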
+ #[clap(long)] + pub code_coverage_path: Option, + + /// Output directory for the key files + #[clap(long, parse(from_os_str))] + pub(crate) output_dir: Option, + + #[clap(flatten)] + pub(crate) prompt_options: PromptOptions, +} + +#[async_trait] +impl CliCommand for Disassemble { + fn command_name(&self) -> &'static str { + "Disassemble" + } + + async fn execute(self) -> CliTypedResult { + let bytecode_path = self.bytecode_path.as_path(); + let extension = bytecode_path + .extension() + .context("Missing file extension for bytecode file")?; + if extension != MOVE_COMPILED_EXTENSION { + return Err(CliError::UnexpectedError(format!( + "Bad source file extension {:?}; expected {}", + extension, MOVE_COMPILED_EXTENSION + ))); + } + + let bytecode_bytes = read_from_file(bytecode_path)?; + let move_path = bytecode_path.with_extension(MOVE_EXTENSION); + let source_map_path = bytecode_path.with_extension(SOURCE_MAP_EXTENSION); + + let source = fs::read_to_string(move_path).ok(); + let source_map = source_map_from_file(&source_map_path); + + let disassembler_options = DisassemblerOptions { + print_code: true, + only_externally_visible: false, + print_basic_blocks: true, + print_locals: true, + }; + + let no_loc = Spanned::unsafe_no_loc(()).loc; + let module: CompiledModule; + let script: CompiledScript; + let bytecode = if self.is_script { + script = CompiledScript::deserialize(&bytecode_bytes) + .context("Script blob can't be deserialized")?; + BinaryIndexedView::Script(&script) + } else { + module = CompiledModule::deserialize(&bytecode_bytes) + .context("Module blob can't be deserialized")?; + BinaryIndexedView::Module(&module) + }; + + let mut source_mapping = if let Ok(s) = source_map { + SourceMapping::new(s, bytecode) + } else { + SourceMapping::new_from_view(bytecode, no_loc) + .context("Unable to build dummy source mapping")? + }; + + if let Some(source_code) = source { + source_mapping.with_source_code((bytecode_path.display().to_string(), source_code)); + } + + let mut disassembler = Disassembler::new(source_mapping, disassembler_options); + + if let Some(file_path) = &self.code_coverage_path { + disassembler.add_coverage_map( + CoverageMap::from_binary_file(file_path) + .map_err(|_err| { + CliError::UnexpectedError("Unable to read from file_path".to_string()) + })? 
+ .to_unified_exec_map(), + ); + } + + let disassemble_string = disassembler + .disassemble() + .map_err(|_err| CliError::UnexpectedError("Unable to disassemble".to_string()))?; + + let output_dir = dir_default_to_current(self.output_dir.clone())?; + let disassemble_file = output_dir.join(DISASSEMBLED_CODE_FILE); + check_if_file_exists(disassemble_file.as_path(), self.prompt_options)?; + + // Create the directory if it doesn't exist + create_dir_if_not_exist(output_dir.as_path())?; + + // write to file + write_to_user_only_file( + disassemble_file.as_path(), + DISASSEMBLED_CODE_FILE, + disassemble_string.as_bytes(), + )?; + + Ok(disassemble_file.as_path().display().to_string()) + } +} diff --git a/m1/movement/src/move_tool/manifest.rs b/m1/movement/src/move_tool/manifest.rs new file mode 100644 index 00000000..dbdcbc3e --- /dev/null +++ b/m1/movement/src/move_tool/manifest.rs @@ -0,0 +1,90 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use crate::common::types::load_manifest_account_arg; +use aptos_types::account_address::AccountAddress; +use serde::{Deserialize, Deserializer, Serialize, Serializer}; +use std::collections::BTreeMap; + +/// A Rust representation of the Move package manifest +/// +/// Note: The original Move package manifest object used by the package system +/// can't be serialized because it uses a symbol mapping +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct MovePackageManifest { + pub package: PackageInfo, + #[serde(skip_serializing_if = "BTreeMap::is_empty")] + pub addresses: BTreeMap, + #[serde(skip_serializing_if = "BTreeMap::is_empty")] + pub dependencies: BTreeMap, +} + +/// Representation of an option address so we can print it as "_" +#[derive(Debug, Clone)] +pub struct ManifestNamedAddress { + pub address: Option, +} + +impl From> for ManifestNamedAddress { + fn from(opt: Option) -> Self { + ManifestNamedAddress { address: opt } + } +} + +impl From for Option { + fn from(addr: ManifestNamedAddress) -> Self { + addr.address + } +} + +impl Serialize for ManifestNamedAddress { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + if let Some(address) = self.address { + serializer.serialize_str(&address.to_hex_literal()) + } else { + serializer.serialize_str("_") + } + } +} + +impl<'de> Deserialize<'de> for ManifestNamedAddress { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let str = ::deserialize(deserializer)?; + Ok(ManifestNamedAddress { + // TODO: Cleanup unwrap + address: load_manifest_account_arg(&str).unwrap(), + }) + } +} + +/// A Rust representation of a move dependency +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Dependency { + #[serde(skip_serializing_if = "Option::is_none")] + pub local: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub git: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub rev: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub subdir: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub aptos: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub address: Option, +} + +/// A Rust representation of the package info +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct PackageInfo { + pub name: String, + pub version: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub author: Option, +} diff --git a/m1/movement/src/move_tool/mod.rs b/m1/movement/src/move_tool/mod.rs new file mode 100644 index 00000000..560d0f18 --- /dev/null 
+++ b/m1/movement/src/move_tool/mod.rs @@ -0,0 +1,1606 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +mod aptos_debug_natives; +pub mod coverage; +mod disassembler; +mod manifest; +pub mod package_hooks; +mod show; +pub mod stored_package; +mod transactional_tests_runner; + +use crate::{ + account::derive_resource_account::ResourceAccountSeed, + common::{ + types::{ + load_account_arg, ArgWithTypeVec, CliConfig, CliError, CliTypedResult, + ConfigSearchMode, EntryFunctionArguments, MoveManifestAccountWrapper, MovePackageDir, + ProfileOptions, PromptOptions, RestOptions, TransactionOptions, TransactionSummary, + }, + utils::{ + check_if_file_exists, create_dir_if_not_exist, dir_default_to_current, + profile_or_submit, prompt_yes_with_override, write_to_file, + }, + }, + governance::CompileScriptFunction, + move_tool::{ + coverage::SummaryCoverage, + disassembler::Disassemble, + manifest::{Dependency, ManifestNamedAddress, MovePackageManifest, PackageInfo}, + }, + CliCommand, CliResult, +}; +use aptos_crypto::HashValue; +use aptos_framework::{ + build_model, docgen::DocgenOptions, extended_checks, natives::code::UpgradePolicy, + prover::ProverOptions, BuildOptions, BuiltPackage, +}; +use aptos_gas::{AbstractValueSizeGasParameters, NativeGasParameters}; +use aptos_rest_client::aptos_api_types::{EntryFunctionId, MoveType, ViewRequest}; +use aptos_transactional_test_harness::run_aptos_test; +use aptos_types::{ + account_address::{create_resource_address, AccountAddress}, + transaction::{Script, TransactionArgument, TransactionPayload}, +}; +use async_trait::async_trait; +use clap::{ArgEnum, Parser, Subcommand}; +use codespan_reporting::{ + diagnostic::Severity, + term::termcolor::{ColorChoice, StandardStream}, +}; +use itertools::Itertools; +use move_cli::{self, base::test::UnitTestResult}; +use move_command_line_common::env::MOVE_HOME; +use move_core_types::{ + identifier::Identifier, + language_storage::{ModuleId, TypeTag}, + u256::U256, +}; +use move_package::{source_package::layout::SourcePackageLayout, BuildConfig}; +use move_unit_test::UnitTestingConfig; +pub use package_hooks::*; +use serde::{Deserialize, Serialize}; +use std::{ + collections::BTreeMap, + convert::TryFrom, + fmt::{Display, Formatter}, + path::{Path, PathBuf}, + str::FromStr, +}; +pub use stored_package::*; +use tokio::task; +use transactional_tests_runner::TransactionalTestOpts; + +/// Tool for Move related operations +/// +/// This tool lets you compile, test, and publish Move code, in addition +/// to run any other tools that help run, verify, or provide information +/// about this code. 
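Every command in this module follows the same shape: a clap `Parser` struct plus an async `CliCommand<T>` implementation whose `command_name`/`execute` pair the CLI wraps with `execute_serialized`. A minimal sketch of what a new subcommand would look like under that assumption; the `EchoExample` struct and its behavior are purely hypothetical and exist only to show the pattern.

```rust
// Hypothetical subcommand, shown only to illustrate the CliCommand pattern the
// real subcommands below follow: a clap Parser struct plus an async `execute`
// returning a typed result that the CLI serializes for display.
use crate::common::types::{CliCommand, CliTypedResult};
use async_trait::async_trait;
use clap::Parser;

/// Echo a message back (illustrative only)
#[derive(Parser)]
pub struct EchoExample {
    /// Message to echo
    #[clap(long)]
    pub message: String,
}

#[async_trait]
impl CliCommand<String> for EchoExample {
    fn command_name(&self) -> &'static str {
        "EchoExample"
    }

    async fn execute(self) -> CliTypedResult<String> {
        Ok(self.message)
    }
}
```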
+#[derive(Subcommand)] +pub enum MoveTool { + Clean(CleanPackage), + Compile(CompilePackage), + CompileScript(CompileScript), + #[clap(subcommand)] + Coverage(coverage::CoveragePackage), + CreateResourceAccountAndPublishPackage(CreateResourceAccountAndPublishPackage), + Disassemble(Disassemble), + Document(DocumentPackage), + Download(DownloadPackage), + Init(InitPackage), + List(ListPackage), + Prove(ProvePackage), + Publish(PublishPackage), + Run(RunFunction), + RunScript(RunScript), + #[clap(subcommand, hide = true)] + Show(show::ShowTool), + Test(TestPackage), + TransactionalTest(TransactionalTestOpts), + VerifyPackage(VerifyPackage), + View(ViewFunction), +} + +impl MoveTool { + pub async fn execute(self) -> CliResult { + match self { + MoveTool::Clean(tool) => tool.execute_serialized().await, + MoveTool::Compile(tool) => tool.execute_serialized().await, + MoveTool::CompileScript(tool) => tool.execute_serialized().await, + MoveTool::Coverage(tool) => tool.execute().await, + MoveTool::CreateResourceAccountAndPublishPackage(tool) => { + tool.execute_serialized_success().await + }, + MoveTool::Disassemble(tool) => tool.execute_serialized().await, + MoveTool::Document(tool) => tool.execute_serialized().await, + MoveTool::Download(tool) => tool.execute_serialized().await, + MoveTool::Init(tool) => tool.execute_serialized_success().await, + MoveTool::List(tool) => tool.execute_serialized().await, + MoveTool::Prove(tool) => tool.execute_serialized().await, + MoveTool::Publish(tool) => tool.execute_serialized().await, + MoveTool::Run(tool) => tool.execute_serialized().await, + MoveTool::RunScript(tool) => tool.execute_serialized().await, + MoveTool::Show(tool) => tool.execute_serialized().await, + MoveTool::Test(tool) => tool.execute_serialized().await, + MoveTool::TransactionalTest(tool) => tool.execute_serialized_success().await, + MoveTool::VerifyPackage(tool) => tool.execute_serialized().await, + MoveTool::View(tool) => tool.execute_serialized().await, + } + } +} + +#[derive(Parser)] +pub struct FrameworkPackageArgs { + /// Git revision or branch for the Aptos framework + /// + /// This is mutually exclusive with `--framework-local-dir` + #[clap(long, group = "framework_package_args")] + pub(crate) framework_git_rev: Option, + + /// Local framework directory for the Aptos framework + /// + /// This is mutually exclusive with `--framework-git-rev` + #[clap(long, parse(from_os_str), group = "framework_package_args")] + pub(crate) framework_local_dir: Option, + + /// Skip pulling the latest git dependencies + /// + /// If you don't have a network connection, the compiler may fail due + /// to no ability to pull git dependencies. This will allow overriding + /// this for local development. 
+ #[clap(long)] + pub(crate) skip_fetch_latest_git_deps: bool, +} + +impl FrameworkPackageArgs { + pub fn init_move_dir( + &self, + package_dir: &Path, + name: &str, + addresses: BTreeMap, + prompt_options: PromptOptions, + ) -> CliTypedResult<()> { + const APTOS_FRAMEWORK: &str = "AptosFramework"; + const APTOS_GIT_PATH: &str = "https://github.com/aptos-labs/aptos-core.git"; + const SUBDIR_PATH: &str = "aptos-move/framework/aptos-framework"; + const DEFAULT_BRANCH: &str = "main"; + + let move_toml = package_dir.join(SourcePackageLayout::Manifest.path()); + check_if_file_exists(move_toml.as_path(), prompt_options)?; + create_dir_if_not_exist( + package_dir + .join(SourcePackageLayout::Sources.path()) + .as_path(), + )?; + + // Add the framework dependency if it's provided + let mut dependencies = BTreeMap::new(); + if let Some(ref path) = self.framework_local_dir { + dependencies.insert(APTOS_FRAMEWORK.to_string(), Dependency { + local: Some(path.display().to_string()), + git: None, + rev: None, + subdir: None, + aptos: None, + address: None, + }); + } else { + let git_rev = self.framework_git_rev.as_deref().unwrap_or(DEFAULT_BRANCH); + dependencies.insert(APTOS_FRAMEWORK.to_string(), Dependency { + local: None, + git: Some(APTOS_GIT_PATH.to_string()), + rev: Some(git_rev.to_string()), + subdir: Some(SUBDIR_PATH.to_string()), + aptos: None, + address: None, + }); + } + + let manifest = MovePackageManifest { + package: PackageInfo { + name: name.to_string(), + version: "1.0.0".to_string(), + author: None, + }, + addresses, + dependencies, + }; + + write_to_file( + move_toml.as_path(), + SourcePackageLayout::Manifest.location_str(), + toml::to_string_pretty(&manifest) + .map_err(|err| CliError::UnexpectedError(err.to_string()))? + .as_bytes(), + ) + } +} + +/// Creates a new Move package at the given location +/// +/// This will create a directory for a Move package and a corresponding +/// `Move.toml` file. +#[derive(Parser)] +pub struct InitPackage { + /// Name of the new Move package + #[clap(long)] + pub(crate) name: String, + + /// Directory to create the new Move package + #[clap(long, parse(from_os_str))] + pub(crate) package_dir: Option, + + /// Named addresses for the move binary + /// + /// Allows for an address to be put into the Move.toml, or a placeholder `_` + /// + /// Example: alice=0x1234,bob=0x5678,greg=_ + /// + /// Note: This will fail if there are duplicates in the Move.toml file remove those first. + #[clap(long, parse(try_from_str = crate::common::utils::parse_map), default_value = "")] + pub(crate) named_addresses: BTreeMap, + + #[clap(flatten)] + pub(crate) prompt_options: PromptOptions, + + #[clap(flatten)] + pub(crate) framework_package_args: FrameworkPackageArgs, +} + +#[async_trait] +impl CliCommand<()> for InitPackage { + fn command_name(&self) -> &'static str { + "InitPackage" + } + + async fn execute(self) -> CliTypedResult<()> { + let package_dir = dir_default_to_current(self.package_dir.clone())?; + let addresses = self + .named_addresses + .into_iter() + .map(|(key, value)| (key, value.account_address.into())) + .collect(); + + self.framework_package_args.init_move_dir( + package_dir.as_path(), + &self.name, + addresses, + self.prompt_options, + ) + } +} + +/// Compiles a package and returns the associated ModuleIds +#[derive(Parser)] +pub struct CompilePackage { + /// Save the package metadata in the package's build directory + /// + /// If set, package metadata should be generated and stored in the package's build directory. 
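// Editorial sketch, not part of the patch: roughly the Move.toml that
// `init_move_dir` writes when no --framework-local-dir is given, using the
// constants defined above ("my_pkg" and "my_addr" are placeholders).
//
//   [package]
//   name = "my_pkg"
//   version = "1.0.0"
//
//   [addresses]
//   my_addr = "_"
//
//   [dependencies.AptosFramework]
//   git = "https://github.com/aptos-labs/aptos-core.git"
//   rev = "main"
//   subdir = "aptos-move/framework/aptos-framework"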
+ /// This metadata can be used to construct a transaction to publish a package. + #[clap(long)] + pub(crate) save_metadata: bool, + + #[clap(flatten)] + pub(crate) included_artifacts_args: IncludedArtifactsArgs, + #[clap(flatten)] + pub(crate) move_options: MovePackageDir, +} + +#[async_trait] +impl CliCommand> for CompilePackage { + fn command_name(&self) -> &'static str { + "CompilePackage" + } + + async fn execute(self) -> CliTypedResult> { + let build_options = BuildOptions { + install_dir: self.move_options.output_dir.clone(), + ..self + .included_artifacts_args + .included_artifacts + .build_options( + self.move_options.skip_fetch_latest_git_deps, + self.move_options.named_addresses(), + self.move_options.bytecode_version, + ) + }; + let pack = BuiltPackage::build(self.move_options.get_package_path()?, build_options) + .map_err(|e| CliError::MoveCompilationError(format!("{:#}", e)))?; + if self.save_metadata { + pack.extract_metadata_and_save()?; + } + let ids = pack + .modules() + .into_iter() + .map(|m| m.self_id().to_string()) + .collect::>(); + Ok(ids) + } +} + +/// Compiles a Move script into bytecode +/// +/// Compiles a script into bytecode and provides a hash of the bytecode. +/// This can then be run with `aptos move run-script` +#[derive(Parser)] +pub struct CompileScript { + #[clap(long, parse(from_os_str))] + pub output_file: Option, + #[clap(flatten)] + pub move_options: MovePackageDir, +} + +#[async_trait] +impl CliCommand for CompileScript { + fn command_name(&self) -> &'static str { + "CompileScript" + } + + async fn execute(self) -> CliTypedResult { + let (bytecode, script_hash) = self.compile_script().await?; + let script_location = self.output_file.unwrap_or_else(|| { + self.move_options + .get_package_path() + .unwrap() + .join("script.mv") + }); + write_to_file(script_location.as_path(), "Script", bytecode.as_slice())?; + Ok(CompileScriptOutput { + script_location, + script_hash, + }) + } +} + +impl CompileScript { + async fn compile_script(&self) -> CliTypedResult<(Vec, HashValue)> { + let build_options = BuildOptions { + install_dir: self.move_options.output_dir.clone(), + ..IncludedArtifacts::None.build_options( + self.move_options.skip_fetch_latest_git_deps, + self.move_options.named_addresses(), + self.move_options.bytecode_version, + ) + }; + let package_dir = self.move_options.get_package_path()?; + let pack = BuiltPackage::build(package_dir, build_options) + .map_err(|e| CliError::MoveCompilationError(format!("{:#}", e)))?; + + let scripts_count = pack.script_count(); + if scripts_count != 1 { + return Err(CliError::UnexpectedError(format!( + "Only one script can be prepared a time. Make sure one and only one script file \ + is included in the Move package. Found {} scripts.", + scripts_count + ))); + } + + let bytecode = pack.extract_script_code().pop().unwrap(); + let script_hash = HashValue::sha3_256_of(bytecode.as_slice()); + Ok((bytecode, script_hash)) + } +} + +#[derive(Debug, Serialize)] +pub struct CompileScriptOutput { + pub script_location: PathBuf, + pub script_hash: HashValue, +} + +/// Runs Move unit tests for a package +/// +/// This will run Move unit tests against a package with debug mode +/// turned on. Note, that move code warnings currently block tests from running. +#[derive(Parser)] +pub struct TestPackage { + /// A filter string to determine which unit tests to run + #[clap(long, short)] + pub filter: Option, + + /// A boolean value to skip warnings. 
+ #[clap(long)] + pub ignore_compile_warnings: bool, + + #[clap(flatten)] + pub(crate) move_options: MovePackageDir, + + /// The maximum number of instructions that can be executed by a test + /// + /// If set, the number of instructions executed by one test will be bounded + // TODO: Remove short, it's against the style guidelines, and update the name here + #[clap( + name = "instructions", + default_value = "100000", + short = 'i', + long = "instructions" + )] + pub instruction_execution_bound: u64, + + /// Collect coverage information for later use with the various `aptos move coverage` subcommands + #[clap(long = "coverage")] + pub compute_coverage: bool, + + /// Dump storage state on failure. + #[clap(long = "dump")] + pub dump_state: bool, +} + +#[async_trait] +impl CliCommand<&'static str> for TestPackage { + fn command_name(&self) -> &'static str { + "TestPackage" + } + + async fn execute(self) -> CliTypedResult<&'static str> { + let mut config = BuildConfig { + additional_named_addresses: self.move_options.named_addresses(), + test_mode: true, + install_dir: self.move_options.output_dir.clone(), + skip_fetch_latest_git_deps: self.move_options.skip_fetch_latest_git_deps, + ..Default::default() + }; + + // Build the Move model for extended checks + let model = &build_model( + self.move_options.get_package_path()?.as_path(), + self.move_options.named_addresses(), + None, + self.move_options.bytecode_version, + )?; + let _ = extended_checks::run_extended_checks(model); + if model.diag_count(Severity::Warning) > 0 { + let mut error_writer = StandardStream::stderr(ColorChoice::Auto); + model.report_diag(&mut error_writer, Severity::Warning); + if model.has_errors() { + return Err(CliError::MoveCompilationError( + "extended checks failed".to_string(), + )); + } + } + let path = self.move_options.get_package_path()?; + let result = move_cli::base::test::run_move_unit_tests( + path.as_path(), + config.clone(), + UnitTestingConfig { + filter: self.filter.clone(), + report_stacktrace_on_abort: true, + report_storage_on_error: self.dump_state, + ignore_compile_warnings: self.ignore_compile_warnings, + ..UnitTestingConfig::default_with_bound(None) + }, + // TODO(Gas): we may want to switch to non-zero costs in the future + aptos_debug_natives::aptos_debug_natives( + NativeGasParameters::zeros(), + AbstractValueSizeGasParameters::zeros(), + ), + None, + self.compute_coverage, + &mut std::io::stdout(), + ) + .map_err(|err| CliError::UnexpectedError(err.to_string()))?; + + // Print coverage summary if --coverage is set + if self.compute_coverage { + config.test_mode = false; + let summary = SummaryCoverage { + summarize_functions: false, + output_csv: false, + filter: self.filter, + move_options: self.move_options, + }; + summary.coverage()?; + + println!("Please use `movement move coverage -h` for more detailed source or bytecode test coverage of this package"); + } + + match result { + UnitTestResult::Success => Ok("Success"), + UnitTestResult::Failure => Err(CliError::MoveTestError), + } + } +} + +#[async_trait] +impl CliCommand<()> for TransactionalTestOpts { + fn command_name(&self) -> &'static str { + "TransactionalTest" + } + + async fn execute(self) -> CliTypedResult<()> { + let root_path = self.root_path.display().to_string(); + + let requirements = vec![transactional_tests_runner::Requirements::new( + run_aptos_test, + "tests".to_string(), + root_path, + self.pattern.clone(), + )]; + + transactional_tests_runner::runner(&self, &requirements) + } +} + +/// Proves a Move package +/// +/// 
This is a tool for formal verification of a Move package using +/// the Move prover +#[derive(Parser)] +pub struct ProvePackage { + #[clap(flatten)] + move_options: MovePackageDir, + + #[clap(flatten)] + prover_options: ProverOptions, +} + +#[async_trait] +impl CliCommand<&'static str> for ProvePackage { + fn command_name(&self) -> &'static str { + "ProvePackage" + } + + async fn execute(self) -> CliTypedResult<&'static str> { + let ProvePackage { + move_options, + prover_options, + } = self; + + let result = task::spawn_blocking(move || { + prover_options.prove( + move_options.get_package_path()?.as_path(), + move_options.named_addresses(), + move_options.bytecode_version, + ) + }) + .await + .map_err(|err| CliError::UnexpectedError(err.to_string()))?; + match result { + Ok(_) => Ok("Success"), + Err(e) => Err(CliError::MoveProverError(format!("{:#}", e))), + } + } +} + +/// Documents a Move package +/// +/// This converts the content of the package into markdown for documentation. +#[derive(Parser)] +pub struct DocumentPackage { + #[clap(flatten)] + move_options: MovePackageDir, + + #[clap(flatten)] + docgen_options: DocgenOptions, +} + +#[async_trait] +impl CliCommand<&'static str> for DocumentPackage { + fn command_name(&self) -> &'static str { + "DocumentPackage" + } + + async fn execute(self) -> CliTypedResult<&'static str> { + let DocumentPackage { + move_options, + docgen_options, + } = self; + let build_options = BuildOptions { + with_srcs: false, + with_abis: false, + with_source_maps: false, + with_error_map: false, + with_docs: true, + install_dir: None, + named_addresses: move_options.named_addresses(), + docgen_options: Some(docgen_options), + skip_fetch_latest_git_deps: move_options.skip_fetch_latest_git_deps, + bytecode_version: move_options.bytecode_version, + }; + BuiltPackage::build(move_options.get_package_path()?, build_options)?; + Ok("succeeded") + } +} + +#[derive(Parser)] +pub struct IncludedArtifactsArgs { + /// Artifacts to be generated when building the package + /// + /// Which artifacts to include in the package. This can be one of `none`, `sparse`, and + /// `all`. `none` is the most compact form and does not allow to reconstruct a source + /// package from chain; `sparse` is the minimal set of artifacts needed to reconstruct + /// a source package; `all` includes all available artifacts. The choice of included + /// artifacts heavily influences the size and therefore gas cost of publishing: `none` + /// is the size of bytecode alone; `sparse` is roughly 2 times as much; and `all` 3-4 + /// as much. 
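// Editorial usage note, not part of the patch. The flag comes from the clap
// field below and defaults to `sparse`; the size/gas trade-off follows the doc
// comment above.
//
//   movement move publish --included-artifacts none   # bytecode only, cheapest to publish
//   movement move publish --included-artifacts all    # full source, ABIs, and source maps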
+ #[clap(long, default_value_t = IncludedArtifacts::Sparse)] + pub(crate) included_artifacts: IncludedArtifacts, +} + +/// Publishes the modules in a Move package to the Aptos blockchain +#[derive(Parser)] +pub struct PublishPackage { + /// Whether to override the check for maximal size of published data + #[clap(long)] + pub(crate) override_size_check: bool, + + #[clap(flatten)] + pub(crate) included_artifacts_args: IncludedArtifactsArgs, + #[clap(flatten)] + pub(crate) move_options: MovePackageDir, + #[clap(flatten)] + pub(crate) txn_options: TransactionOptions, +} + +#[derive(ArgEnum, Clone, Copy, Debug)] +pub enum IncludedArtifacts { + None, + Sparse, + All, +} + +impl Display for IncludedArtifacts { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + use IncludedArtifacts::*; + match self { + None => f.write_str("none"), + Sparse => f.write_str("sparse"), + All => f.write_str("all"), + } + } +} + +impl FromStr for IncludedArtifacts { + type Err = &'static str; + + fn from_str(s: &str) -> Result { + use IncludedArtifacts::*; + match s { + "none" => Ok(None), + "sparse" => Ok(Sparse), + "all" => Ok(All), + _ => Err("unknown variant"), + } + } +} + +impl IncludedArtifacts { + pub(crate) fn build_options( + self, + skip_fetch_latest_git_deps: bool, + named_addresses: BTreeMap, + bytecode_version: Option, + ) -> BuildOptions { + use IncludedArtifacts::*; + match self { + None => BuildOptions { + with_srcs: false, + with_abis: false, + with_source_maps: false, + // Always enable error map bytecode injection + with_error_map: true, + named_addresses, + skip_fetch_latest_git_deps, + bytecode_version, + ..BuildOptions::default() + }, + Sparse => BuildOptions { + with_srcs: true, + with_abis: false, + with_source_maps: false, + with_error_map: true, + named_addresses, + skip_fetch_latest_git_deps, + bytecode_version, + ..BuildOptions::default() + }, + All => BuildOptions { + with_srcs: true, + with_abis: true, + with_source_maps: true, + with_error_map: true, + named_addresses, + skip_fetch_latest_git_deps, + bytecode_version, + ..BuildOptions::default() + }, + } + } +} + +pub const MAX_PUBLISH_PACKAGE_SIZE: usize = 60_000; + +#[async_trait] +impl CliCommand for PublishPackage { + fn command_name(&self) -> &'static str { + "PublishPackage" + } + + async fn execute(self) -> CliTypedResult { + let PublishPackage { + move_options, + txn_options, + override_size_check, + included_artifacts_args, + } = self; + let package_path = move_options.get_package_path()?; + let options = included_artifacts_args.included_artifacts.build_options( + move_options.skip_fetch_latest_git_deps, + move_options.named_addresses(), + move_options.bytecode_version, + ); + let package = BuiltPackage::build(package_path, options)?; + let compiled_units = package.extract_code(); + + // Send the compiled module and metadata using the code::publish_package_txn. + let metadata = package.extract_metadata()?; + let payload = aptos_cached_packages::aptos_stdlib::code_publish_package_txn( + bcs::to_bytes(&metadata).expect("PackageMetadata has BCS"), + compiled_units, + ); + let size = bcs::serialized_size(&payload)?; + println!("package size {} bytes", size); + if !override_size_check && size > MAX_PUBLISH_PACKAGE_SIZE { + return Err(CliError::UnexpectedError(format!( + "The package is larger than {} bytes ({} bytes)! To lower the size \ + you may want to include less artifacts via `--included-artifacts`. 
\ + You can also override this check with `--override-size-check", + MAX_PUBLISH_PACKAGE_SIZE, size + ))); + } + profile_or_submit(payload, &txn_options).await + } +} + +/// Publishes the modules in a Move package to the Aptos blockchain under a resource account +#[derive(Parser)] +pub struct CreateResourceAccountAndPublishPackage { + /// The named address for compiling and using in the contract + /// + /// This will take the derived account address for the resource account and put it in this location + #[clap(long)] + pub(crate) address_name: String, + + /// Whether to override the check for maximal size of published data + /// + /// This won't bypass on chain checks, so if you are not allowed to go over the size check, it + /// will still be blocked from publishing. + #[clap(long)] + pub(crate) override_size_check: bool, + + #[clap(flatten)] + pub(crate) seed_args: ResourceAccountSeed, + #[clap(flatten)] + pub(crate) included_artifacts_args: IncludedArtifactsArgs, + #[clap(flatten)] + pub(crate) move_options: MovePackageDir, + #[clap(flatten)] + pub(crate) txn_options: TransactionOptions, +} + +#[async_trait] +impl CliCommand for CreateResourceAccountAndPublishPackage { + fn command_name(&self) -> &'static str { + "ResourceAccountPublishPackage" + } + + async fn execute(self) -> CliTypedResult { + let CreateResourceAccountAndPublishPackage { + address_name, + mut move_options, + txn_options, + override_size_check, + included_artifacts_args, + seed_args, + } = self; + + let account = if let Some(Some(account)) = CliConfig::load_profile( + txn_options.profile_options.profile_name(), + ConfigSearchMode::CurrentDirAndParents, + )? + .map(|p| p.account) + { + account + } else { + return Err(CliError::CommandArgumentError( + "Please provide an account using --profile or run movement init".to_string(), + )); + }; + let seed = seed_args.seed()?; + + let resource_address = create_resource_address(account, &seed); + move_options.add_named_address(address_name, resource_address.to_string()); + + let package_path = move_options.get_package_path()?; + let options = included_artifacts_args.included_artifacts.build_options( + move_options.skip_fetch_latest_git_deps, + move_options.named_addresses(), + move_options.bytecode_version, + ); + let package = BuiltPackage::build(package_path, options)?; + let compiled_units = package.extract_code(); + + // Send the compiled module and metadata using the code::publish_package_txn. + let metadata = package.extract_metadata()?; + + let message = format!( + "Do you want to publish this package under the resource account's address {}?", + resource_address + ); + prompt_yes_with_override(&message, txn_options.prompt_options)?; + + let payload = aptos_cached_packages::aptos_stdlib::resource_account_create_resource_account_and_publish_package( + seed, + bcs::to_bytes(&metadata).expect("PackageMetadata has BCS"), + compiled_units, + ); + let size = bcs::serialized_size(&payload)?; + println!("package size {} bytes", size); + if !override_size_check && size > MAX_PUBLISH_PACKAGE_SIZE { + return Err(CliError::UnexpectedError(format!( + "The package is larger than {} bytes ({} bytes)! To lower the size \ + you may want to include less artifacts via `--included-artifacts`. 
\ + You can also override this check with `--override-size-check", + MAX_PUBLISH_PACKAGE_SIZE, size + ))); + } + txn_options + .submit_transaction(payload) + .await + .map(TransactionSummary::from) + } +} + +/// Downloads a package and stores it in a directory named after the package +/// +/// This lets you retrieve packages directly from the blockchain for inspection +/// and use as a local dependency in testing. +#[derive(Parser)] +pub struct DownloadPackage { + /// Address of the account containing the package + #[clap(long, parse(try_from_str = crate::common::types::load_account_arg))] + pub(crate) account: AccountAddress, + + /// Name of the package + #[clap(long)] + pub package: String, + + /// Directory to store downloaded package. Defaults to the current directory. + #[clap(long, parse(from_os_str))] + pub output_dir: Option, + + #[clap(flatten)] + pub(crate) rest_options: RestOptions, + #[clap(flatten)] + pub(crate) profile_options: ProfileOptions, +} + +#[async_trait] +impl CliCommand<&'static str> for DownloadPackage { + fn command_name(&self) -> &'static str { + "DownloadPackage" + } + + async fn execute(self) -> CliTypedResult<&'static str> { + let url = self.rest_options.url(&self.profile_options)?; + let registry = CachedPackageRegistry::create(url, self.account).await?; + let output_dir = dir_default_to_current(self.output_dir)?; + + let package = registry + .get_package(self.package) + .await + .map_err(|s| CliError::CommandArgumentError(s.to_string()))?; + if package.upgrade_policy() == UpgradePolicy::arbitrary() { + return Err(CliError::CommandArgumentError( + "A package with upgrade policy `arbitrary` cannot be downloaded \ + since it is not safe to depend on such packages." + .to_owned(), + )); + } + let package_path = output_dir.join(package.name()); + package + .save_package_to_disk(package_path.as_path()) + .map_err(|e| CliError::UnexpectedError(format!("Failed to save package: {}", e)))?; + println!( + "Saved package with {} module(s) to `{}`", + package.module_names().len(), + package_path.display() + ); + Ok("Download succeeded") + } +} + +/// Downloads a package and verifies the bytecode +/// +/// Downloads the package from onchain and verifies the bytecode matches a local compilation of the Move code +#[derive(Parser)] +pub struct VerifyPackage { + /// Address of the account containing the package + #[clap(long, parse(try_from_str = crate::common::types::load_account_arg))] + pub(crate) account: AccountAddress, + + /// Artifacts to be generated when building this package. 
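// Editorial usage note, not part of the patch: downloading an on-chain package
// for inspection with the options defined in `DownloadPackage` above. The
// address, package name, and output directory are placeholders.
//
//   movement move download --account 0x1 --package AptosFramework --output-dir ./deps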
+ #[clap(long, default_value_t = IncludedArtifacts::Sparse)] + pub(crate) included_artifacts: IncludedArtifacts, + + #[clap(flatten)] + pub(crate) move_options: MovePackageDir, + #[clap(flatten)] + pub(crate) rest_options: RestOptions, + #[clap(flatten)] + pub(crate) profile_options: ProfileOptions, +} + +#[async_trait] +impl CliCommand<&'static str> for VerifyPackage { + fn command_name(&self) -> &'static str { + "DownloadPackage" + } + + async fn execute(self) -> CliTypedResult<&'static str> { + // First build the package locally to get the package metadata + let build_options = BuildOptions { + install_dir: self.move_options.output_dir.clone(), + bytecode_version: self.move_options.bytecode_version, + ..self.included_artifacts.build_options( + self.move_options.skip_fetch_latest_git_deps, + self.move_options.named_addresses(), + self.move_options.bytecode_version, + ) + }; + let pack = BuiltPackage::build(self.move_options.get_package_path()?, build_options) + .map_err(|e| CliError::MoveCompilationError(format!("{:#}", e)))?; + let compiled_metadata = pack.extract_metadata()?; + + // Now pull the compiled package + let url = self.rest_options.url(&self.profile_options)?; + let registry = CachedPackageRegistry::create(url, self.account).await?; + let package = registry + .get_package(pack.name()) + .await + .map_err(|s| CliError::CommandArgumentError(s.to_string()))?; + + // We can't check the arbitrary, because it could change on us + if package.upgrade_policy() == UpgradePolicy::arbitrary() { + return Err(CliError::CommandArgumentError( + "A package with upgrade policy `arbitrary` cannot be downloaded \ + since it is not safe to depend on such packages." + .to_owned(), + )); + } + + // Verify that the source digest matches + package.verify(&compiled_metadata)?; + + Ok("Successfully verified source of package") + } +} + +/// Lists information about packages and modules on-chain for an account +#[derive(Parser)] +pub struct ListPackage { + /// Address of the account for which to list packages. + #[clap(long, parse(try_from_str = crate::common::types::load_account_arg))] + pub(crate) account: AccountAddress, + + /// Type of items to query + /// + /// Current supported types `[packages]` + #[clap(long, default_value_t = MoveListQuery::Packages)] + query: MoveListQuery, + + #[clap(flatten)] + rest_options: RestOptions, + #[clap(flatten)] + pub(crate) profile_options: ProfileOptions, +} + +#[derive(ArgEnum, Clone, Copy, Debug)] +pub enum MoveListQuery { + Packages, +} + +impl Display for MoveListQuery { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + f.write_str(match self { + MoveListQuery::Packages => "packages", + }) + } +} + +impl FromStr for MoveListQuery { + type Err = &'static str; + + fn from_str(s: &str) -> Result { + match s.to_lowercase().as_str() { + "packages" => Ok(MoveListQuery::Packages), + _ => Err("Invalid query. 
Valid values are modules, packages"), + } + } +} + +#[async_trait] +impl CliCommand<&'static str> for ListPackage { + fn command_name(&self) -> &'static str { + "ListPackage" + } + + async fn execute(self) -> CliTypedResult<&'static str> { + let url = self.rest_options.url(&self.profile_options)?; + let registry = CachedPackageRegistry::create(url, self.account).await?; + match self.query { + MoveListQuery::Packages => { + for name in registry.package_names() { + let data = registry.get_package(name).await?; + println!("package {}", data.name()); + println!(" upgrade_policy: {}", data.upgrade_policy()); + println!(" upgrade_number: {}", data.upgrade_number()); + println!(" source_digest: {}", data.source_digest()); + println!(" modules: {}", data.module_names().into_iter().join(", ")); + } + }, + } + Ok("list succeeded") + } +} + +/// Cleans derived artifacts of a package. +#[derive(Parser)] +pub struct CleanPackage { + #[clap(flatten)] + pub(crate) move_options: MovePackageDir, + #[clap(flatten)] + pub(crate) prompt_options: PromptOptions, +} + +#[async_trait] +impl CliCommand<&'static str> for CleanPackage { + fn command_name(&self) -> &'static str { + "CleanPackage" + } + + async fn execute(self) -> CliTypedResult<&'static str> { + let path = self.move_options.get_package_path()?; + let build_dir = path.join("build"); + // Only remove the build dir if it exists, allowing for users to still clean their cache + if build_dir.exists() { + std::fs::remove_dir_all(build_dir.as_path()) + .map_err(|e| CliError::IO(build_dir.display().to_string(), e))?; + } + + let move_dir = PathBuf::from(MOVE_HOME.as_str()); + if move_dir.exists() + && prompt_yes_with_override( + &format!( + "Do you also want to delete the local package download cache at `{}`?", + move_dir.display() + ), + self.prompt_options, + ) + .is_ok() + { + std::fs::remove_dir_all(move_dir.as_path()) + .map_err(|e| CliError::IO(move_dir.display().to_string(), e))?; + } + Ok("succeeded") + } +} + +/// Run a Move function +#[derive(Parser)] +pub struct RunFunction { + #[clap(flatten)] + pub(crate) entry_function_args: EntryFunctionArguments, + #[clap(flatten)] + pub(crate) txn_options: TransactionOptions, +} + +#[async_trait] +impl CliCommand for RunFunction { + fn command_name(&self) -> &'static str { + "RunFunction" + } + + async fn execute(self) -> CliTypedResult { + let payload = TransactionPayload::EntryFunction( + self.entry_function_args.create_entry_function_payload()?, + ); + profile_or_submit(payload, &self.txn_options).await + } +} + +/// Run a view function +#[derive(Parser)] +pub struct ViewFunction { + #[clap(flatten)] + pub(crate) entry_function_args: EntryFunctionArguments, + #[clap(flatten)] + pub(crate) txn_options: TransactionOptions, +} + +#[async_trait] +impl CliCommand> for ViewFunction { + fn command_name(&self) -> &'static str { + "RunViewFunction" + } + + async fn execute(self) -> CliTypedResult> { + let mut args: Vec = vec![]; + for arg in self.entry_function_args.arg_vec.args { + args.push(arg.to_json()?); + } + + let view_request = ViewRequest { + function: EntryFunctionId { + module: self.entry_function_args.function_id.module_id.into(), + name: self.entry_function_args.function_id.member_id.into(), + }, + type_arguments: self.entry_function_args.type_args, + arguments: args, + }; + + self.txn_options.view(view_request).await + } +} + +/// Run a Move script +#[derive(Parser)] +pub struct RunScript { + #[clap(flatten)] + pub(crate) txn_options: TransactionOptions, + #[clap(flatten)] + pub(crate) 
compile_proposal_args: CompileScriptFunction, + #[clap(flatten)] + pub(crate) arg_vec: ArgWithTypeVec, + /// TypeTag arguments separated by spaces. + /// + /// Example: `u8 u16 u32 u64 u128 u256 bool address vector signer` + #[clap(long, multiple_values = true)] + pub(crate) type_args: Vec, +} + +#[async_trait] +impl CliCommand for RunScript { + fn command_name(&self) -> &'static str { + "RunScript" + } + + async fn execute(self) -> CliTypedResult { + let (bytecode, _script_hash) = self + .compile_proposal_args + .compile("RunScript", self.txn_options.prompt_options)?; + + let mut args: Vec = vec![]; + for arg in self.arg_vec.args { + args.push(arg.try_into()?); + } + + let mut type_args: Vec = Vec::new(); + + // These TypeArgs are used for generics + for type_arg in self.type_args.into_iter() { + let type_tag = TypeTag::try_from(type_arg) + .map_err(|err| CliError::UnableToParse("--type-args", err.to_string()))?; + type_args.push(type_tag) + } + + let payload = TransactionPayload::Script(Script::new(bytecode, type_args, args)); + + profile_or_submit(payload, &self.txn_options).await + } +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub(crate) enum FunctionArgType { + Address, + Bool, + Hex, + String, + U8, + U16, + U32, + U64, + U128, + U256, + Raw, +} + +impl Display for FunctionArgType { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + FunctionArgType::Address => write!(f, "address"), + FunctionArgType::Bool => write!(f, "bool"), + FunctionArgType::Hex => write!(f, "hex"), + FunctionArgType::String => write!(f, "string"), + FunctionArgType::U8 => write!(f, "u8"), + FunctionArgType::U16 => write!(f, "u16"), + FunctionArgType::U32 => write!(f, "u32"), + FunctionArgType::U64 => write!(f, "u64"), + FunctionArgType::U128 => write!(f, "u128"), + FunctionArgType::U256 => write!(f, "u256"), + FunctionArgType::Raw => write!(f, "raw"), + } + } +} + +impl FunctionArgType { + /// Parse a standalone argument (not a vector) from string slice into BCS representation. 
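// Editorial test sketch, not part of the patch: expected BCS bytes produced by
// `parse_arg_str` (defined just below) for a few standalone arguments, based on
// the BCS rules (little-endian integers, ULEB128 length prefix for strings).
// In the real crate this would live in a test module.
#[test]
fn parse_arg_str_examples() {
    assert_eq!(FunctionArgType::U8.parse_arg_str("255").unwrap(), vec![0xff]);
    assert_eq!(FunctionArgType::Bool.parse_arg_str("true").unwrap(), vec![1]);
    assert_eq!(
        FunctionArgType::U64.parse_arg_str("1").unwrap(),
        vec![1, 0, 0, 0, 0, 0, 0, 0]
    );
    // Strings are length-prefixed by BCS: "hi" -> [2, b'h', b'i'].
    assert_eq!(
        FunctionArgType::String.parse_arg_str("hi").unwrap(),
        vec![2, b'h', b'i']
    );
}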
+ fn parse_arg_str(&self, arg: &str) -> CliTypedResult> { + match self { + FunctionArgType::Address => bcs::to_bytes( + &load_account_arg(arg) + .map_err(|err| CliError::UnableToParse("address", err.to_string()))?, + ) + .map_err(|err| CliError::BCS("arg", err)), + FunctionArgType::Bool => bcs::to_bytes( + &bool::from_str(arg) + .map_err(|err| CliError::UnableToParse("bool", err.to_string()))?, + ) + .map_err(|err| CliError::BCS("arg", err)), + FunctionArgType::Hex => bcs::to_bytes( + &hex::decode(arg).map_err(|err| CliError::UnableToParse("hex", err.to_string()))?, + ) + .map_err(|err| CliError::BCS("arg", err)), + FunctionArgType::String => bcs::to_bytes(arg).map_err(|err| CliError::BCS("arg", err)), + FunctionArgType::U8 => bcs::to_bytes( + &u8::from_str(arg).map_err(|err| CliError::UnableToParse("u8", err.to_string()))?, + ) + .map_err(|err| CliError::BCS("arg", err)), + FunctionArgType::U16 => bcs::to_bytes( + &u16::from_str(arg) + .map_err(|err| CliError::UnableToParse("u16", err.to_string()))?, + ) + .map_err(|err| CliError::BCS("arg", err)), + FunctionArgType::U32 => bcs::to_bytes( + &u32::from_str(arg) + .map_err(|err| CliError::UnableToParse("u32", err.to_string()))?, + ) + .map_err(|err| CliError::BCS("arg", err)), + FunctionArgType::U64 => bcs::to_bytes( + &u64::from_str(arg) + .map_err(|err| CliError::UnableToParse("u64", err.to_string()))?, + ) + .map_err(|err| CliError::BCS("arg", err)), + FunctionArgType::U128 => bcs::to_bytes( + &u128::from_str(arg) + .map_err(|err| CliError::UnableToParse("u128", err.to_string()))?, + ) + .map_err(|err| CliError::BCS("arg", err)), + FunctionArgType::U256 => bcs::to_bytes( + &U256::from_str(arg) + .map_err(|err| CliError::UnableToParse("u256", err.to_string()))?, + ) + .map_err(|err| CliError::BCS("arg", err)), + FunctionArgType::Raw => { + hex::decode(arg).map_err(|err| CliError::UnableToParse("raw", err.to_string())) + }, + } + } + + /// Recursively parse argument JSON into BCS representation. + pub fn parse_arg_json(&self, arg: &serde_json::Value) -> CliTypedResult { + match arg { + serde_json::Value::Bool(value) => Ok(ArgWithType { + _ty: self.clone(), + _vector_depth: 0, + arg: self.parse_arg_str(value.to_string().as_str())?, + }), + serde_json::Value::Number(value) => Ok(ArgWithType { + _ty: self.clone(), + _vector_depth: 0, + arg: self.parse_arg_str(value.to_string().as_str())?, + }), + serde_json::Value::String(value) => Ok(ArgWithType { + _ty: self.clone(), + _vector_depth: 0, + arg: self.parse_arg_str(value.as_str())?, + }), + serde_json::Value::Array(_) => { + let mut bcs: Vec = vec![]; // BCS representation of argument. + let mut common_sub_arg_depth = None; + // Prepend argument sequence length to BCS bytes vector. + write_u64_as_uleb128(&mut bcs, arg.as_array().unwrap().len()); + // Loop over all of the vector's sub-arguments, which may also be vectors: + for sub_arg in arg.as_array().unwrap() { + let ArgWithType { + _ty: _, + _vector_depth: sub_arg_depth, + arg: mut sub_arg_bcs, + } = self.parse_arg_json(sub_arg)?; + // Verify all sub-arguments have same depth. + if let Some(check_depth) = common_sub_arg_depth { + if check_depth != sub_arg_depth { + return Err(CliError::CommandArgumentError( + "Variable vector depth".to_string(), + )); + } + }; + common_sub_arg_depth = Some(sub_arg_depth); + bcs.append(&mut sub_arg_bcs); // Append sub-argument BCS. + } + // Default sub-argument depth is 0 for when no sub-arguments were looped over. 
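// Editorial test sketch, not part of the patch: the length prefix prepended to
// vector arguments above is standard ULEB128, written by the helper
// `write_u64_as_uleb128` defined just below. A few expected encodings:
#[test]
fn uleb128_prefix_examples() {
    let mut buf = vec![];
    write_u64_as_uleb128(&mut buf, 127);
    assert_eq!(buf, vec![0x7f]);

    let mut buf = vec![];
    write_u64_as_uleb128(&mut buf, 128);
    assert_eq!(buf, vec![0x80, 0x01]);

    let mut buf = vec![];
    write_u64_as_uleb128(&mut buf, 300);
    assert_eq!(buf, vec![0xac, 0x02]);
}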
+ Ok(ArgWithType { + _ty: self.clone(), + _vector_depth: common_sub_arg_depth.unwrap_or(0) + 1, + arg: bcs, + }) + }, + serde_json::Value::Null => { + Err(CliError::CommandArgumentError("Null argument".to_string())) + }, + serde_json::Value::Object(_) => Err(CliError::CommandArgumentError( + "JSON object argument".to_string(), + )), + } + } +} + +// TODO use from move_binary_format::file_format_common if it is made public. +fn write_u64_as_uleb128(binary: &mut Vec, mut val: usize) { + loop { + let cur = val & 0x7F; + if cur != val { + binary.push((cur | 0x80) as u8); + val >>= 7; + } else { + binary.push(cur as u8); + break; + } + } +} + +impl FromStr for FunctionArgType { + type Err = CliError; + + fn from_str(s: &str) -> Result { + match s.to_lowercase().as_str() { + "address" => Ok(FunctionArgType::Address), + "bool" => Ok(FunctionArgType::Bool), + "hex" => Ok(FunctionArgType::Hex), + "string" => Ok(FunctionArgType::String), + "u8" => Ok(FunctionArgType::U8), + "u16" => Ok(FunctionArgType::U16), + "u32" => Ok(FunctionArgType::U32), + "u64" => Ok(FunctionArgType::U64), + "u128" => Ok(FunctionArgType::U128), + "u256" => Ok(FunctionArgType::U256), + "raw" => Ok(FunctionArgType::Raw), + str => {Err(CliError::CommandArgumentError(format!( + "Invalid arg type '{}'. Must be one of: ['{}','{}','{}','{}','{}','{}','{}','{}','{}','{}','{}']", + str, + FunctionArgType::Address, + FunctionArgType::Bool, + FunctionArgType::Hex, + FunctionArgType::String, + FunctionArgType::U8, + FunctionArgType::U16, + FunctionArgType::U32, + FunctionArgType::U64, + FunctionArgType::U128, + FunctionArgType::U256, + FunctionArgType::Raw))) + } + } + } +} + +/// A parseable arg with a type separated by a colon +#[derive(Debug)] +pub struct ArgWithType { + pub(crate) _ty: FunctionArgType, + pub(crate) _vector_depth: u8, + pub(crate) arg: Vec, +} + +impl ArgWithType { + pub fn address(account_address: AccountAddress) -> Self { + ArgWithType { + _ty: FunctionArgType::Address, + _vector_depth: 0, + arg: bcs::to_bytes(&account_address).unwrap(), + } + } + + pub fn u64(arg: u64) -> Self { + ArgWithType { + _ty: FunctionArgType::U64, + _vector_depth: 0, + arg: bcs::to_bytes(&arg).unwrap(), + } + } + + pub fn bytes(arg: Vec) -> Self { + ArgWithType { + _ty: FunctionArgType::Raw, + _vector_depth: 0, + arg: bcs::to_bytes(&arg).unwrap(), + } + } + + pub fn raw(arg: Vec) -> Self { + ArgWithType { + _ty: FunctionArgType::Raw, + _vector_depth: 0, + arg, + } + } + + pub fn bcs_value_to_json<'a, T: Deserialize<'a> + Serialize>( + &'a self, + ) -> CliTypedResult { + match self._vector_depth { + 0 => serde_json::to_value(bcs::from_bytes::(&self.arg)?) + .map_err(|err| CliError::UnexpectedError(err.to_string())), + 1 => serde_json::to_value(bcs::from_bytes::>(&self.arg)?) + .map_err(|err| CliError::UnexpectedError(err.to_string())), + + 2 => serde_json::to_value(bcs::from_bytes::>>(&self.arg)?) + .map_err(|err| CliError::UnexpectedError(err.to_string())), + + 3 => serde_json::to_value(bcs::from_bytes::>>>(&self.arg)?) + .map_err(|err| CliError::UnexpectedError(err.to_string())), + + 4 => serde_json::to_value(bcs::from_bytes::>>>>(&self.arg)?) + .map_err(|err| CliError::UnexpectedError(err.to_string())), + 5 => serde_json::to_value(bcs::from_bytes::>>>>>(&self.arg)?) + .map_err(|err| CliError::UnexpectedError(err.to_string())), + 6 => serde_json::to_value(bcs::from_bytes::>>>>>>( + &self.arg, + )?) + .map_err(|err| CliError::UnexpectedError(err.to_string())), + 7 => serde_json::to_value(bcs::from_bytes::>>>>>>>( + &self.arg, + )?) 
+ .map_err(|err| CliError::UnexpectedError(err.to_string())), + depth => Err(CliError::UnexpectedError(format!( + "Vector of depth {depth} is overly nested" + ))), + } + } + + pub fn to_json(&self) -> CliTypedResult { + match self._ty { + FunctionArgType::Address => self.bcs_value_to_json::(), + FunctionArgType::Bool => self.bcs_value_to_json::(), + FunctionArgType::Hex => self.bcs_value_to_json::>(), + FunctionArgType::String => self.bcs_value_to_json::(), + FunctionArgType::U8 => self.bcs_value_to_json::(), + FunctionArgType::U16 => self.bcs_value_to_json::(), + FunctionArgType::U32 => self.bcs_value_to_json::(), + FunctionArgType::U64 => self.bcs_value_to_json::(), + FunctionArgType::U128 => self.bcs_value_to_json::(), + FunctionArgType::U256 => self.bcs_value_to_json::(), + FunctionArgType::Raw => serde_json::to_value(&self.arg) + .map_err(|err| CliError::UnexpectedError(err.to_string())), + } + .map_err(|err| { + CliError::UnexpectedError(format!("Failed to parse argument to JSON {}", err)) + }) + } +} + +/// Does not support string arguments that contain the following characters: +/// +/// * `,` +/// * `[` +/// * `]` +impl FromStr for ArgWithType { + type Err = CliError; + + fn from_str(s: &str) -> Result { + // Splits on the first colon, returning at most `2` elements + // This is required to support args that contain a colon + let parts: Vec<_> = s.splitn(2, ':').collect(); + if parts.len() != 2 { + return Err(CliError::CommandArgumentError( + "Arguments must be pairs of : e.g. bool:true".to_string(), + )); + } + let ty = FunctionArgType::from_str(parts.first().unwrap())?; + let mut arg = String::from(*parts.last().unwrap()); + // May need to surround with quotes if not an array, so arg can be parsed into JSON. + if !arg.starts_with('[') { + if let FunctionArgType::Address + | FunctionArgType::Hex + | FunctionArgType::String + | FunctionArgType::Raw = ty + { + arg = format!("\"{}\"", arg); + } + } + let json = serde_json::from_str::(arg.as_str()) + .map_err(|err| CliError::UnexpectedError(err.to_string()))?; + ty.parse_arg_json(&json) + } +} + +impl TryInto for ArgWithType { + type Error = CliError; + + fn try_into(self) -> Result { + if self._vector_depth > 0 && self._ty != FunctionArgType::U8 { + return Err(CliError::UnexpectedError( + "Unable to parse non-u8 vector to transaction argument".to_string(), + )); + } + match self._ty { + FunctionArgType::Address => Ok(TransactionArgument::Address(txn_arg_parser( + &self.arg, "address", + )?)), + FunctionArgType::Bool => Ok(TransactionArgument::Bool(txn_arg_parser( + &self.arg, "bool", + )?)), + FunctionArgType::Hex => Ok(TransactionArgument::U8Vector(txn_arg_parser( + &self.arg, "hex", + )?)), + FunctionArgType::String => Ok(TransactionArgument::U8Vector(txn_arg_parser( + &self.arg, "string", + )?)), + FunctionArgType::U8 => match self._vector_depth { + 0 => Ok(TransactionArgument::U8(txn_arg_parser(&self.arg, "u8")?)), + 1 => Ok(TransactionArgument::U8Vector(txn_arg_parser( + &self.arg, + "vector", + )?)), + depth => Err(CliError::UnexpectedError(format!( + "Unable to parse u8 vector of depth {} to transaction argument", + depth + ))), + }, + FunctionArgType::U16 => Ok(TransactionArgument::U16(txn_arg_parser(&self.arg, "u16")?)), + FunctionArgType::U32 => Ok(TransactionArgument::U32(txn_arg_parser(&self.arg, "u32")?)), + FunctionArgType::U64 => Ok(TransactionArgument::U64(txn_arg_parser(&self.arg, "u64")?)), + FunctionArgType::U128 => Ok(TransactionArgument::U128(txn_arg_parser( + &self.arg, "u128", + )?)), + FunctionArgType::U256 
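// Editorial test sketch, not part of the patch: how "type:value" strings are
// parsed by the `FromStr` impl for `ArgWithType` above. Scalars go through
// `parse_arg_str`; JSON-array syntax produces a ULEB128-length-prefixed vector.
#[test]
fn arg_with_type_from_str_examples() {
    let arg = ArgWithType::from_str("u64:42").unwrap();
    assert_eq!(arg.arg, 42u64.to_le_bytes().to_vec());
    assert_eq!(arg._vector_depth, 0);

    // Vectors use JSON array syntax: length prefix 3, then the u8 elements.
    let arg = ArgWithType::from_str("u8:[1,2,3]").unwrap();
    assert_eq!(arg.arg, vec![3, 1, 2, 3]);
    assert_eq!(arg._vector_depth, 1);
}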
=> Ok(TransactionArgument::U256(txn_arg_parser( + &self.arg, "u256", + )?)), + FunctionArgType::Raw => Ok(TransactionArgument::U8Vector(txn_arg_parser( + &self.arg, "raw", + )?)), + } + } +} + +fn txn_arg_parser( + data: &[u8], + label: &'static str, +) -> Result { + bcs::from_bytes(data).map_err(|err| CliError::UnableToParse(label, err.to_string())) +} + +/// Identifier of a module member (function or struct). +#[derive(Debug, Clone)] +pub struct MemberId { + pub module_id: ModuleId, + pub member_id: Identifier, +} + +fn parse_member_id(function_id: &str) -> CliTypedResult { + let ids: Vec<&str> = function_id.split_terminator("::").collect(); + if ids.len() != 3 { + return Err(CliError::CommandArgumentError( + "FunctionId is not well formed. Must be of the form
::::" + .to_string(), + )); + } + let address = load_account_arg(ids.first().unwrap())?; + let module = Identifier::from_str(ids.get(1).unwrap()) + .map_err(|err| CliError::UnableToParse("Module Name", err.to_string()))?; + let member_id = Identifier::from_str(ids.get(2).unwrap()) + .map_err(|err| CliError::UnableToParse("Member Name", err.to_string()))?; + let module_id = ModuleId::new(address, module); + Ok(MemberId { + module_id, + member_id, + }) +} + +impl FromStr for MemberId { + type Err = CliError; + + fn from_str(s: &str) -> Result { + parse_member_id(s) + } +} diff --git a/m1/movement/src/move_tool/package_hooks.rs b/m1/movement/src/move_tool/package_hooks.rs new file mode 100644 index 00000000..208d368c --- /dev/null +++ b/m1/movement/src/move_tool/package_hooks.rs @@ -0,0 +1,54 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use crate::{common::types::load_account_arg, move_tool::CachedPackageRegistry}; +use aptos_framework::UPGRADE_POLICY_CUSTOM_FIELD; +use futures::executor::block_on; +use move_package::{ + compilation::package_layout::CompiledPackageLayout, package_hooks::PackageHooks, + source_package::parsed_manifest::CustomDepInfo, +}; +use move_symbol_pool::Symbol; +use reqwest::Url; + +pub fn register_package_hooks() { + move_package::package_hooks::register_package_hooks(Box::new(AptosPackageHooks {})) +} + +struct AptosPackageHooks {} + +impl PackageHooks for AptosPackageHooks { + fn custom_package_info_fields(&self) -> Vec { + vec![UPGRADE_POLICY_CUSTOM_FIELD.to_string()] + } + + fn custom_dependency_key(&self) -> Option { + Some("movement".to_string()) + } + + fn resolve_custom_dependency( + &self, + _dep_name: Symbol, + info: &CustomDepInfo, + ) -> anyhow::Result<()> { + block_on(maybe_download_package(info)) + } +} + +async fn maybe_download_package(info: &CustomDepInfo) -> anyhow::Result<()> { + if !info + .download_to + .join(CompiledPackageLayout::BuildInfo.path()) + .exists() + { + let registry = CachedPackageRegistry::create( + Url::parse(info.node_url.as_str())?, + load_account_arg(info.package_address.as_str())?, + ) + .await?; + let package = registry.get_package(info.package_name).await?; + package.save_package_to_disk(info.download_to.as_path()) + } else { + Ok(()) + } +} diff --git a/m1/movement/src/move_tool/show.rs b/m1/movement/src/move_tool/show.rs new file mode 100644 index 00000000..e1cc0e72 --- /dev/null +++ b/m1/movement/src/move_tool/show.rs @@ -0,0 +1,109 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use super::IncludedArtifactsArgs; +use crate::common::types::{CliCommand, CliError, CliResult, CliTypedResult, MovePackageDir}; +use anyhow::Context; +use aptos_framework::{BuildOptions, BuiltPackage}; +use aptos_types::transaction::EntryABI; +use async_trait::async_trait; +use clap::{Parser, Subcommand}; + +#[derive(Subcommand)] +pub enum ShowTool { + Abi(ShowAbi), +} + +impl ShowTool { + pub async fn execute_serialized(self) -> CliResult { + match self { + Self::Abi(tool) => tool.execute_serialized().await, + } + } +} + +/// Compile the package and show information about the ABIs of the compiled modules. +/// +/// For example, this would show the function `transfer` in the module `coin`: +/// +/// aptos move show abi --modules coin --names transfer +/// +#[derive(Parser)] +pub struct ShowAbi { + /// If provided, only show items from the given Move modules. These should be module + /// names, not file paths. For example, `coin`. 
+ #[clap(long, multiple_values = true)] + modules: Vec, + + /// If provided, only show items with the given names. For example, `transfer`. + #[clap(long, multiple_values = true)] + names: Vec, + + #[clap(flatten)] + included_artifacts_args: IncludedArtifactsArgs, + + #[clap(flatten)] + move_options: MovePackageDir, +} + +#[async_trait] +impl CliCommand> for ShowAbi { + fn command_name(&self) -> &'static str { + "ShowAbi" + } + + async fn execute(self) -> CliTypedResult> { + let build_options = BuildOptions { + install_dir: self.move_options.output_dir.clone(), + with_abis: true, + ..self + .included_artifacts_args + .included_artifacts + .build_options( + self.move_options.skip_fetch_latest_git_deps, + self.move_options.named_addresses(), + self.move_options.bytecode_version, + ) + }; + + // Build the package. + let package = BuiltPackage::build(self.move_options.get_package_path()?, build_options) + .map_err(|e| CliError::MoveCompilationError(format!("{:#}", e)))?; + + // Get ABIs from the package. + let abis = package + .extract_abis() + .context("No ABIs found after compilation")?; + + // Filter the ABIs based on the filters passed in. + let abis = abis + .into_iter() + .filter(|abi| { + let name = abi.name().to_string(); + if !self.names.is_empty() && !self.names.contains(&name) { + return false; + } + match &abi { + EntryABI::EntryFunction(func) => { + if !self.modules.is_empty() + && !self + .modules + .contains(&func.module_name().name().to_string()) + { + return false; + } + }, + EntryABI::TransactionScript(_) => { + // If there were any modules specified we ignore scripts. + if !self.modules.is_empty() { + return false; + } + }, + } + true + }) + .collect(); + + Ok(abis) + } +} diff --git a/m1/movement/src/move_tool/stored_package.rs b/m1/movement/src/move_tool/stored_package.rs new file mode 100644 index 00000000..c957c4c5 --- /dev/null +++ b/m1/movement/src/move_tool/stored_package.rs @@ -0,0 +1,214 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use anyhow::bail; +use aptos_framework::{ + natives::code::{ModuleMetadata, PackageMetadata, PackageRegistry, UpgradePolicy}, + unzip_metadata_str, +}; +use aptos_rest_client::Client; +use aptos_types::account_address::AccountAddress; +use move_package::compilation::package_layout::CompiledPackageLayout; +use reqwest::Url; +use std::{fs, path::Path}; + +// TODO: this is a first naive implementation of the package registry. Before mainnet +// we need to use tables for the package registry. + +/// Represents the package registry at a given account. +pub struct CachedPackageRegistry { + inner: PackageRegistry, +} + +/// Represents the package metadata found in an registry. +pub struct CachedPackageMetadata<'a> { + metadata: &'a PackageMetadata, +} + +/// Represents the package metadata found in an registry. +pub struct CachedModuleMetadata<'a> { + metadata: &'a ModuleMetadata, +} + +impl CachedPackageRegistry { + /// Creates a new registry. + pub async fn create(url: Url, addr: AccountAddress) -> anyhow::Result { + let client = Client::new(url); + // Need to use a different type to deserialize JSON + let inner = client + .get_account_resource_bcs::(addr, "0x1::code::PackageRegistry") + .await? + .into_inner(); + Ok(Self { inner }) + } + + /// Returns the list of packages in this registry by name. + pub fn package_names(&self) -> Vec<&str> { + self.inner + .packages + .iter() + .map(|p| p.name.as_str()) + .collect() + } + + /// Finds the metadata for the given module in the registry by its unique name. 
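// Editorial sketch, not part of the patch: pulling the on-chain package registry
// for an account and saving one package locally with the API defined in
// stored_package.rs. `node_url`, the address, and "MyPackage" are placeholders;
// errors are propagated with anyhow as in the surrounding code.
async fn example_fetch_package(node_url: Url, addr: AccountAddress) -> anyhow::Result<()> {
    let registry = CachedPackageRegistry::create(node_url, addr).await?;
    for name in registry.package_names() {
        println!("on-chain package: {}", name);
    }
    let package = registry.get_package("MyPackage").await?;
    println!("upgrade policy: {}", package.upgrade_policy());
    // Writes Move.toml plus the unzipped sources under ./MyPackage/sources.
    package.save_package_to_disk(Path::new("MyPackage"))
}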
+ pub async fn get_module<'a>( + &self, + name: impl AsRef, + ) -> anyhow::Result> { + let name = name.as_ref(); + for package in &self.inner.packages { + for module in &package.modules { + if module.name == name { + return Ok(CachedModuleMetadata { metadata: module }); + } + } + } + bail!("module `{}` not found", name) + } + + /// Finds the metadata for the given package in the registry by its unique name. + pub async fn get_package<'a>( + &self, + name: impl AsRef, + ) -> anyhow::Result> { + let name = name.as_ref(); + for package in &self.inner.packages { + if package.name == name { + return Ok(CachedPackageMetadata { metadata: package }); + } + } + bail!("package `{}` not found", name) + } +} + +impl<'a> CachedPackageMetadata<'a> { + pub fn name(&self) -> &str { + &self.metadata.name + } + + pub fn upgrade_policy(&self) -> UpgradePolicy { + self.metadata.upgrade_policy + } + + pub fn upgrade_number(&self) -> u64 { + self.metadata.upgrade_number + } + + pub fn source_digest(&self) -> &str { + &self.metadata.source_digest + } + + pub fn manifest(&self) -> anyhow::Result { + unzip_metadata_str(&self.metadata.manifest) + } + + pub fn module_names(&self) -> Vec<&str> { + self.metadata + .modules + .iter() + .map(|s| s.name.as_str()) + .collect() + } + + pub fn module(&self, name: impl AsRef) -> anyhow::Result> { + let name = name.as_ref(); + for module in &self.metadata.modules { + if module.name == name { + return Ok(CachedModuleMetadata { metadata: module }); + } + } + bail!("module `{}` not found", name) + } + + pub fn save_package_to_disk(&self, path: &Path) -> anyhow::Result<()> { + fs::create_dir_all(path)?; + fs::write( + path.join("Move.toml"), + unzip_metadata_str(&self.metadata.manifest)?, + )?; + let sources_dir = path.join(CompiledPackageLayout::Sources.path()); + fs::create_dir_all(&sources_dir)?; + for module in &self.metadata.modules { + let source = match module.source.is_empty() { + true => { + println!("module without code: {}", module.name); + "".into() + }, + false => unzip_metadata_str(&module.source)?, + }; + fs::write(sources_dir.join(format!("{}.move", module.name)), source)?; + } + Ok(()) + } + + pub fn verify(&self, package_metadata: &PackageMetadata) -> anyhow::Result<()> { + let self_metadata = self.metadata; + + if self_metadata.name != package_metadata.name { + bail!( + "Package name doesn't match {} : {}", + package_metadata.name, + self_metadata.name + ) + } else if self_metadata.deps != package_metadata.deps { + bail!( + "Dependencies don't match {:?} : {:?}", + package_metadata.deps, + self_metadata.deps + ) + } else if self_metadata.modules != package_metadata.modules { + bail!( + "Modules don't match {:?} : {:?}", + package_metadata.modules, + self_metadata.modules + ) + } else if self_metadata.manifest != package_metadata.manifest { + bail!( + "Manifest doesn't match {:?} : {:?}", + package_metadata.manifest, + self_metadata.manifest + ) + } else if self_metadata.upgrade_policy != package_metadata.upgrade_policy { + bail!( + "Upgrade policy doesn't match {:?} : {:?}", + package_metadata.upgrade_policy, + self_metadata.upgrade_policy + ) + } else if self_metadata.upgrade_number != package_metadata.upgrade_number { + bail!( + "Upgrade number doesn't match {:?} : {:?}", + package_metadata.upgrade_number, + self_metadata.upgrade_number + ) + } else if self_metadata.extension != package_metadata.extension { + bail!( + "Extensions doesn't match {:?} : {:?}", + package_metadata.extension, + self_metadata.extension + ) + } else if self_metadata.source_digest != 
package_metadata.source_digest { + bail!( + "Source digests doesn't match {:?} : {:?}", + package_metadata.source_digest, + self_metadata.source_digest + ) + } + + Ok(()) + } +} + +impl<'a> CachedModuleMetadata<'a> { + pub fn name(&self) -> &str { + &self.metadata.name + } + + pub fn zipped_source(&self) -> &[u8] { + &self.metadata.source + } + + pub fn zipped_source_map_raw(&self) -> &[u8] { + &self.metadata.source_map + } +} diff --git a/m1/movement/src/move_tool/transactional_tests_runner.rs b/m1/movement/src/move_tool/transactional_tests_runner.rs new file mode 100644 index 00000000..91b504a4 --- /dev/null +++ b/m1/movement/src/move_tool/transactional_tests_runner.rs @@ -0,0 +1,345 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use crate::common::types::{CliError, CliTypedResult}; +/// Most of the code below comes from the crate `datatest-stable`. Because the limitation of `datatest-stable`, +/// we are not able to run transactional tests as a subcommand of the Movement CLI. Therefore, we need to duplicate code +/// here and make minor modifications. +/// +use clap::Parser; +use std::{ + io::{self, Write}, + num::NonZeroUsize, + panic::{catch_unwind, AssertUnwindSafe}, + path::{Path, PathBuf}, + process, + sync::mpsc::{channel, Sender}, + thread, +}; +use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor}; + +type Result = std::result::Result>; + +/// Run Move transactional tests +#[derive(Parser, Clone)] +pub struct TransactionalTestOpts { + /// The filter string is tested against the name of all tests, and only those tests whose names + /// contain the filter are run. + #[clap(long)] + pub filter: Option, + + /// Exactly match filters rather than match by substring + #[clap(long = "exact")] + pub filter_exact: bool, + + /// Number of threads used for running tests in parallel + #[clap(long, default_value = "32")] + pub test_threads: NonZeroUsize, + + /// Output minimal information + #[clap(long)] + pub quiet: bool, + + /// List all tests + #[clap(long)] + pub list: bool, + + /// Path to contain the tests + #[clap(long, parse(from_os_str))] + pub root_path: PathBuf, + + /// Pattern to match the test files + #[clap(long, default_value = r".*\.(mvir|move)$")] + pub pattern: String, +} + +/// Helper function to iterate through all the files in the given directory, skipping hidden files, +/// and return an iterator of their paths. 
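// Editorial test sketch, not part of the patch: the derived test name is
// "<suite name>::<path relative to the root>", as produced by `derive_test_name`
// below. The paths here are placeholders.
#[test]
fn derive_test_name_example() {
    let name = derive_test_name(
        Path::new("/repo/tests"),
        Path::new("/repo/tests/args/pass_vector.move"),
        "tests",
    );
    assert_eq!(name, "tests::args/pass_vector.move");
}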
+pub fn iterate_directory(path: &Path) -> impl Iterator { + walkdir::WalkDir::new(path) + .into_iter() + .map(::std::result::Result::unwrap) + .filter(|entry| { + entry.file_type().is_file() + && entry + .file_name() + .to_str() + .map_or(false, |s| !s.starts_with('.')) // Skip hidden files + }) + .map(|entry| entry.path().to_path_buf()) +} + +pub fn derive_test_name(root: &Path, path: &Path, test_name: &str) -> String { + let relative = path.strip_prefix(root).unwrap_or_else(|_| { + panic!( + "failed to strip prefix '{}' from path '{}'", + root.display(), + path.display() + ) + }); + let mut test_name = test_name.to_string(); + test_name = format!("{}::{}", test_name, relative.display()); + test_name +} + +struct Test { + testfn: Box Result<()> + Send>, + name: String, +} + +enum TestResult { + Ok, + Failed, + FailedWithMsg(String), +} + +pub(crate) fn runner(options: &TransactionalTestOpts, reqs: &[Requirements]) -> CliTypedResult<()> { + let mut tests: Vec = reqs.iter().flat_map(|req| req.expand()).collect(); + tests.sort_by(|a, b| a.name.cmp(&b.name)); + + if options.list { + for test in &tests { + println!("{}: test", test.name); + } + + return Ok(()); + } + + match run_tests(options, tests) { + Ok(true) => Ok(()), + Ok(false) => process::exit(101), + Err(e) => Err(CliError::UnexpectedError(format!( + "error: io error when running tests: {:?}", + e + ))), + } +} + +fn run_tests(options: &TransactionalTestOpts, tests: Vec) -> io::Result { + let total = tests.len(); + + // Filter out tests + let mut remaining = match &options.filter { + None => tests, + Some(filter) => tests + .into_iter() + .filter(|test| { + if options.filter_exact { + test.name == filter[..] + } else { + test.name.contains(&filter[..]) + } + }) + .rev() + .collect(), + }; + + let filtered_out = total - remaining.len(); + let mut summary = TestSummary::new(total, filtered_out); + + if !options.quiet { + summary.write_starting_msg()?; + } + + let (tx, rx) = channel(); + + let mut pending = 0; + while pending > 0 || !remaining.is_empty() { + while pending < options.test_threads.get() && !remaining.is_empty() { + let test = remaining.pop().unwrap(); + run_test(test, tx.clone()); + pending += 1; + } + + let (name, result) = rx.recv().unwrap(); + summary.handle_result(name, result)?; + + pending -= 1; + } + + // Write Test Summary + if !options.quiet { + summary.write_summary()?; + } + + Ok(summary.success()) +} + +fn run_test(test: Test, channel: Sender<(String, TestResult)>) { + let Test { name, testfn } = test; + + let cfg = thread::Builder::new().name(name.clone()); + cfg.spawn(move || { + let result = match catch_unwind(AssertUnwindSafe(testfn)) { + Ok(Ok(())) => TestResult::Ok, + Ok(Err(e)) => TestResult::FailedWithMsg(format!("{:?}", e)), + Err(_) => TestResult::Failed, + }; + + channel.send((name, result)).unwrap(); + }) + .unwrap(); +} + +struct TestSummary { + stdout: StandardStream, + total: usize, + filtered_out: usize, + passed: usize, + failed: Vec, +} + +impl TestSummary { + fn new(total: usize, filtered_out: usize) -> Self { + Self { + stdout: StandardStream::stdout(ColorChoice::Auto), + total, + filtered_out, + passed: 0, + failed: Vec::new(), + } + } + + fn handle_result(&mut self, name: String, result: TestResult) -> io::Result<()> { + write!(self.stdout, "test {} ... 
", name)?; + match result { + TestResult::Ok => { + self.passed += 1; + self.write_ok()?; + }, + TestResult::Failed => { + self.failed.push(name); + self.write_failed()?; + }, + TestResult::FailedWithMsg(msg) => { + self.failed.push(name); + self.write_failed()?; + writeln!(self.stdout)?; + + write!(self.stdout, "Error: {}", msg)?; + }, + } + writeln!(self.stdout)?; + Ok(()) + } + + fn write_ok(&mut self) -> io::Result<()> { + self.stdout + .set_color(ColorSpec::new().set_fg(Some(Color::Green)))?; + write!(self.stdout, "ok")?; + self.stdout.reset()?; + Ok(()) + } + + fn write_failed(&mut self) -> io::Result<()> { + self.stdout + .set_color(ColorSpec::new().set_fg(Some(Color::Red)))?; + write!(self.stdout, "FAILED")?; + self.stdout.reset()?; + Ok(()) + } + + fn write_starting_msg(&mut self) -> io::Result<()> { + writeln!(self.stdout)?; + writeln!( + self.stdout, + "running {} tests", + self.total - self.filtered_out + )?; + Ok(()) + } + + fn write_summary(&mut self) -> io::Result<()> { + // Print out the failing tests + if !self.failed.is_empty() { + writeln!(self.stdout)?; + writeln!(self.stdout, "failures:")?; + for name in &self.failed { + writeln!(self.stdout, " {}", name)?; + } + } + + writeln!(self.stdout)?; + write!(self.stdout, "test result: ")?; + if self.failed.is_empty() { + self.write_ok()?; + } else { + self.write_failed()?; + } + writeln!( + self.stdout, + ". {} passed; {} failed; {} filtered out", + self.passed, + self.failed.len(), + self.filtered_out + )?; + writeln!(self.stdout)?; + Ok(()) + } + + fn success(&self) -> bool { + self.failed.is_empty() + } +} + +#[doc(hidden)] +pub struct Requirements { + test: fn(&Path) -> Result<()>, + test_name: String, + root: String, + pattern: String, +} + +impl Requirements { + #[doc(hidden)] + pub fn new( + test: fn(&Path) -> Result<()>, + test_name: String, + root: String, + pattern: String, + ) -> Self { + Self { + test, + test_name, + root, + pattern, + } + } + + /// Generate standard test descriptors ([`test::TestDescAndFn`]) from the descriptor of + /// `#[datatest::files(..)]`. + /// + /// Scans all files in a given directory, finds matching ones and generates a test descriptor + /// for each of them. + fn expand(&self) -> Vec { + let root = Path::new(&self.root).to_path_buf(); + + let re = regex::Regex::new(&self.pattern) + .unwrap_or_else(|_| panic!("invalid regular expression: '{}'", self.pattern)); + + let tests: Vec<_> = iterate_directory(&root) + .filter_map(|path| { + let input_path = path.to_string_lossy(); + if re.is_match(&input_path) { + let testfn = self.test; + let name = derive_test_name(&root, &path, &self.test_name); + let testfn = Box::new(move || (testfn)(&path)); + + Some(Test { testfn, name }) + } else { + None + } + }) + .collect(); + + // We want to avoid silent fails due to typos in regexp! + if tests.is_empty() { + panic!( + "no test cases found for test '{}'. 
Scanned directory: '{}' with pattern '{}'", + self.test_name, self.root, self.pattern, + ); + } + + tests + } +} diff --git a/m1/movement/src/node/analyze/analyze_validators.rs b/m1/movement/src/node/analyze/analyze_validators.rs new file mode 100644 index 00000000..a5fa20b1 --- /dev/null +++ b/m1/movement/src/node/analyze/analyze_validators.rs @@ -0,0 +1,540 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use super::fetch_metadata::ValidatorInfo; +use anyhow::Result; +use aptos_bitvec::BitVec; +use aptos_rest_client::VersionedNewBlockEvent; +use aptos_storage_interface::{DbReader, Order}; +use aptos_types::{ + account_address::AccountAddress, + account_config::{new_block_event_key, NewBlockEvent}, +}; +use itertools::Itertools; +use std::{cmp::Ordering, collections::HashMap, convert::TryFrom, ops::Add}; + +/// Single validator stats +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +pub struct ValidatorStats { + /// Number of successful proposals + pub proposal_successes: u32, + /// Number of failed proposals + pub proposal_failures: u32, + /// Number of votes proposals + pub votes: u32, + /// Number of transactions in a block + pub transactions: u32, + /// Voting power + pub voting_power: u64, +} + +impl ValidatorStats { + /// Proposal failure rate + pub fn failure_rate(&self) -> f32 { + (self.proposal_failures as f32) / (self.proposal_failures + self.proposal_successes) as f32 + } + + /// Whether node is proposing well enough + pub fn is_reliable(&self) -> bool { + (self.proposal_successes > 0) && (self.failure_rate() < 0.1) + } + + // Whether node is voting well enough + pub fn is_voting_enough(&self, rounds: u32) -> bool { + self.votes as f32 > rounds as f32 * 0.3 + } +} + +#[derive(Debug, Eq, PartialEq, Hash)] +pub enum NodeState { + // Proposal failure < 10%, >30% votes + Reliable, + // Proposal failure < 10%, <30% votes + ReliableLowVotes, + // Has successful proposals, but proposal failure > 10% + AliveUnreliable, + // No successful proposals, but voting + OnlyVoting, + // Not participating in consensus + NotParticipatingInConsensus, + // Not in ValidatorSet + Absent, +} + +impl NodeState { + pub fn to_char(&self) -> &str { + match self { + Self::Reliable => "+", + Self::ReliableLowVotes => "P", + Self::AliveUnreliable => "~", + Self::OnlyVoting => "V", + Self::NotParticipatingInConsensus => "X", + Self::Absent => " ", + } + } + + // Large the value, the worse the node is performing. + pub fn to_order_weight(&self) -> usize { + match self { + Self::Reliable => 0, + Self::ReliableLowVotes => 100, + Self::AliveUnreliable => 10000, + Self::OnlyVoting => 1000000, + Self::NotParticipatingInConsensus => 100000000, + Self::Absent => 1, + } + } +} + +impl Add for ValidatorStats { + type Output = Self; + + fn add(self, other: Self) -> Self { + Self { + proposal_successes: self.proposal_successes + other.proposal_successes, + proposal_failures: self.proposal_failures + other.proposal_failures, + votes: self.votes + other.votes, + transactions: self.transactions + other.transactions, + voting_power: 0, // cannot aggregate voting power. 
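+            // (Voting power is an epoch-specific snapshot, so summing values across epochs
+            // would be misleading; the aggregated stats simply report 0 here.)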
+ } + } +} + +/// Statistics for all validators +#[derive(Clone)] +pub struct EpochStats { + /// Statistics for each of the validators + pub validator_stats: HashMap, + /// Total rounds in an epoch + pub total_rounds: u32, + /// Total transactions in an epoch + pub total_transactions: u32, + /// Successful rounds in an epoch + pub round_successes: u32, + /// Failed rounds in an epoch + pub round_failures: u32, + /// Nil blocks in an epoch + pub nil_blocks: u32, + /// Total voting power + pub total_voting_power: u128, +} + +impl EpochStats { + pub fn to_state(&self, validator: &AccountAddress) -> NodeState { + self.validator_stats + .get(validator) + .map(|b| { + if b.is_reliable() { + if b.is_voting_enough(self.total_rounds) { + NodeState::Reliable + } else { + NodeState::ReliableLowVotes + } + } else if b.proposal_successes > 0 { + NodeState::AliveUnreliable + } else if b.votes > 0 { + NodeState::OnlyVoting + } else { + NodeState::NotParticipatingInConsensus + } + }) + .unwrap_or(NodeState::Absent) + } + + pub fn to_votes(&self, validator: &AccountAddress) -> u32 { + self.validator_stats + .get(validator) + .map(|s| s.votes) + .unwrap_or(0) + } + + pub fn to_voting_power(&self, validator: &AccountAddress) -> u64 { + self.validator_stats + .get(validator) + .map(|s| s.voting_power) + .unwrap_or(0) + } +} + +impl Add for EpochStats { + type Output = Self; + + fn add(self, other: Self) -> Self { + let mut validator_stats = self.validator_stats; + for (key, other_validator_stats) in other.validator_stats.into_iter() { + validator_stats.insert( + key, + other_validator_stats + + *validator_stats.get(&key).unwrap_or(&ValidatorStats { + proposal_failures: 0, + proposal_successes: 0, + votes: 0, + transactions: 0, + voting_power: 0, + }), + ); + } + Self { + validator_stats, + total_rounds: self.total_rounds + other.total_rounds, + round_successes: self.round_successes + other.round_successes, + round_failures: self.round_failures + other.round_failures, + nil_blocks: self.nil_blocks + other.nil_blocks, + total_transactions: self.total_transactions + other.total_transactions, + total_voting_power: 0, + } + } +} + +/// Analyze validator performance +pub struct AnalyzeValidators {} + +impl AnalyzeValidators { + /// Fetch all events from a single epoch from DB. 
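+    // Implementation note: events are paged backwards from the newest NewBlockEvent in batches
+    // of 100 (descending sequence numbers); events from newer epochs are skipped, matching-epoch
+    // events are collected, and the scan stops at the first event from an older epoch.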
+ pub fn fetch_epoch(epoch: u64, aptos_db: &dyn DbReader) -> Result> { + let batch = 100; + + let mut cursor = u64::max_value(); + let mut result: Vec = vec![]; + let ledger_version = aptos_db.get_latest_ledger_info()?.ledger_info().version(); + + loop { + let raw_events = aptos_db.get_events( + &new_block_event_key(), + cursor, + Order::Descending, + batch as u64, + ledger_version, + )?; + let end = raw_events.len() < batch; + for raw_event in raw_events { + if cursor <= raw_event.event.sequence_number() { + println!( + "Duplicate event found for {} : {:?}", + cursor, + raw_event.event.sequence_number() + ); + } else { + cursor = raw_event.event.sequence_number(); + let event = bcs::from_bytes::(raw_event.event.event_data())?; + + match epoch.cmp(&event.epoch()) { + Ordering::Equal => { + result.push(VersionedNewBlockEvent { + event, + version: raw_event.transaction_version, + sequence_number: raw_event.event.sequence_number(), + }); + }, + Ordering::Greater => { + return Ok(result); + }, + Ordering::Less => {}, + }; + } + } + + if end { + return Ok(result); + } + } + } + + /// Analyze single epoch + pub fn analyze(blocks: &[VersionedNewBlockEvent], validators: &[ValidatorInfo]) -> EpochStats { + assert!( + validators.iter().as_slice().windows(2).all(|w| { + w[0].validator_index + .partial_cmp(&w[1].validator_index) + .map(|o| o != Ordering::Greater) + .unwrap_or(false) + }), + "Validators need to be sorted" + ); + assert!( + blocks.iter().as_slice().windows(2).all(|w| { + w[0].event + .round() + .partial_cmp(&w[1].event.round()) + .map(|o| o != Ordering::Greater) + .unwrap_or(false) + }), + "Blocks need to be sorted" + ); + + let mut successes = HashMap::::new(); + let mut failures = HashMap::::new(); + let mut votes = HashMap::::new(); + let mut transactions = HashMap::::new(); + + let mut trimmed_rounds = 0; + let mut nil_blocks = 0; + let mut previous_round = 0; + for (pos, block) in blocks.iter().enumerate() { + let event = &block.event; + let is_nil = event.proposer() == AccountAddress::ZERO; + if is_nil { + nil_blocks += 1; + } + let expected_round = + previous_round + u64::from(!is_nil) + event.failed_proposer_indices().len() as u64; + if event.round() != expected_round { + println!( + "Missing failed AccountAddresss : {} {:?}", + previous_round, &event + ); + assert!(expected_round < event.round()); + trimmed_rounds += event.round() - expected_round; + } + previous_round = event.round(); + + if !is_nil { + *successes.entry(event.proposer()).or_insert(0) += 1; + } + + for failed_proposer_index in event.failed_proposer_indices() { + *failures + .entry(validators[*failed_proposer_index as usize].address) + .or_insert(0) += 1; + } + + let previous_block_votes_bitvec: BitVec = + event.previous_block_votes_bitvec().clone().into(); + assert_eq!( + BitVec::required_buckets(validators.len() as u16), + previous_block_votes_bitvec.num_buckets() + ); + for (i, validator) in validators.iter().enumerate() { + if previous_block_votes_bitvec.is_set(i as u16) { + *votes.entry(validator.address).or_insert(0) += 1; + } + } + + let cur_transactions_option = blocks + .get(pos + 1) + .map(|next| u32::try_from(next.version - block.version - 2).unwrap()); + if let Some(cur_transactions) = cur_transactions_option { + if is_nil { + assert_eq!( + cur_transactions, + 0, + "{} {:?}", + block.version, + blocks.get(pos + 1) + ); + } + *transactions.entry(event.proposer()).or_insert(0) += cur_transactions; + } + } + let total_successes: u32 = successes.values().sum(); + let total_failures: u32 = 
failures.values().sum(); + let total_transactions: u32 = transactions.values().sum(); + let total_rounds = total_successes + total_failures; + assert_eq!( + total_rounds + u32::try_from(trimmed_rounds).unwrap(), + previous_round as u32, + "{} success + {} failures + {} trimmed != {}", + total_successes, + total_failures, + trimmed_rounds, + previous_round + ); + + return EpochStats { + validator_stats: validators + .iter() + .map(|validator| { + (validator.address, ValidatorStats { + proposal_successes: *successes.get(&validator.address).unwrap_or(&0), + proposal_failures: *failures.get(&validator.address).unwrap_or(&0), + votes: *votes.get(&validator.address).unwrap_or(&0), + transactions: *transactions.get(&validator.address).unwrap_or(&0), + voting_power: validator.voting_power, + }) + }) + .collect(), + total_rounds, + total_transactions, + round_successes: total_successes, + round_failures: total_failures, + nil_blocks, + total_voting_power: validators + .iter() + .map(|validator| validator.voting_power as u128) + .sum(), + }; + } + + /// Print validator stats in a table + pub fn print_detailed_epoch_table( + epoch_stats: &EpochStats, + extra: Option<(&str, &HashMap)>, + sort_by_health: bool, + ) { + println!( + "Rounds: {} successes, {} failures, {} NIL blocks, failure rate: {}%, nil block rate: {}%", + epoch_stats.round_successes, epoch_stats.round_failures, epoch_stats.nil_blocks, + 100.0 * epoch_stats.round_failures as f32 / epoch_stats.total_rounds as f32, + 100.0 * epoch_stats.nil_blocks as f32 / epoch_stats.total_rounds as f32, + ); + println!( + "{: <10} | {: <10} | {: <10} | {: <10} | {: <10} | {: <10} | {: <10} | {: <30}", + "elected", + "% rounds", + "% failed", + "succeded", + "failed", + "voted", + "transact", + extra.map(|(column, _)| column).unwrap_or("") + ); + + let mut validator_order: Vec<&AccountAddress> = + epoch_stats.validator_stats.keys().collect(); + if sort_by_health { + validator_order.sort_by_cached_key(|v| { + epoch_stats + .validator_stats + .get(v) + .map(|s| { + ( + if s.proposal_successes > 0 { + (s.failure_rate() * 100000.0) as u32 + } else { + 200000 + }, + -((s.proposal_failures + s.proposal_successes) as i32), + *v, + ) + }) + .unwrap() + }); + } else { + validator_order.sort(); + } + + for validator in validator_order { + let cur_stats = epoch_stats.validator_stats.get(validator).unwrap(); + println!( + "{: <10} | {:5.2}% | {:7.3}% | {: <10} | {: <10} | {: <10} | {: <10} | {}", + cur_stats.proposal_failures + cur_stats.proposal_successes, + 100.0 * (cur_stats.proposal_failures + cur_stats.proposal_successes) as f32 + / (epoch_stats.total_rounds as f32), + 100.0 * cur_stats.failure_rate(), + cur_stats.proposal_successes, + cur_stats.proposal_failures, + cur_stats.votes, + cur_stats.transactions, + if let Some((_, extra_map)) = extra { + format!( + "{: <30} | {}", + extra_map.get(validator).unwrap_or(&"".to_string()), + validator + ) + } else { + format!("{}", validator) + } + ); + } + } + + pub fn print_validator_health_over_time( + stats: &HashMap, + validators: &[AccountAddress], + extra: Option<&HashMap>, + ) { + let epochs: Vec<_> = stats.keys().sorted().collect(); + + let mut sorted_validators = validators.to_vec(); + sorted_validators.sort_by_cached_key(|validator| { + ( + epochs + .iter() + .map(|cur_epoch| { + stats + .get(cur_epoch) + .unwrap() + .to_state(validator) + .to_order_weight() + }) + .sum::(), + *validator, + ) + }); + + for validator in sorted_validators { + print!( + "{}: ", + if let Some(extra_map) = extra { + format!( + "{: 
<30} | {}", + extra_map.get(&validator).unwrap_or(&""), + validator + ) + } else { + format!("{}", validator) + } + ); + for cur_epoch in epochs.iter() { + print!( + "{}", + stats.get(cur_epoch).unwrap().to_state(&validator).to_char() + ); + } + println!(); + } + } + + pub fn print_network_health_over_time( + stats: &HashMap, + validators: &[AccountAddress], + ) { + let epochs = stats.keys().sorted(); + + println!( + "{: <8} | {: <10} | {: <10} | {: <10} | {: <10} | {: <10} | {: <10} | {: <10} | {: <10} | {: <10}", + "epoch", + "reliable", + "r low vote", + "unreliable", + "only vote", + "down(cons)", + "rounds", + "#r failed", + "% failure", + "% stake has >10% of votes", + ); + for cur_epoch in epochs { + let epoch_stats = stats.get(cur_epoch).unwrap(); + + let counts = validators.iter().map(|v| epoch_stats.to_state(v)).counts(); + + let voted_voting_power: u128 = validators + .iter() + .flat_map(|v| { + if epoch_stats.to_votes(v) > epoch_stats.round_successes / 10 { + Some(epoch_stats.to_voting_power(v) as u128) + } else { + None + } + }) + .sum(); + + println!( + "{: <8} | {: <10} | {: <10} | {: <10} | {: <10} | {: <10} | {: <10} | {: <10} | {:10.2} | {:10.2}", + cur_epoch, + counts.get(&NodeState::Reliable).unwrap_or(&0), + counts.get(&NodeState::ReliableLowVotes).unwrap_or(&0), + counts.get(&NodeState::AliveUnreliable).unwrap_or(&0), + counts.get(&NodeState::OnlyVoting).unwrap_or(&0), + counts + .get(&NodeState::NotParticipatingInConsensus) + .unwrap_or(&0), + epoch_stats.total_rounds, + epoch_stats.round_failures, + 100.0 * epoch_stats.round_failures as f32 / epoch_stats.total_rounds as f32, + 100.0 * voted_voting_power as f32 / epoch_stats.total_voting_power as f32, + ); + } + } +} diff --git a/m1/movement/src/node/analyze/fetch_metadata.rs b/m1/movement/src/node/analyze/fetch_metadata.rs new file mode 100644 index 00000000..e9ab24e8 --- /dev/null +++ b/m1/movement/src/node/analyze/fetch_metadata.rs @@ -0,0 +1,337 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use anyhow::{anyhow, Result}; +use aptos_rest_client::{ + aptos_api_types::{IdentifierWrapper, MoveResource, WriteSetChange}, + Client as RestClient, Transaction, VersionedNewBlockEvent, +}; +use aptos_types::account_address::AccountAddress; +use std::str::FromStr; + +const MAX_FETCH_BATCH_SIZE: u16 = 1000; + +#[derive(Eq, PartialEq, Clone, Copy, Debug)] +pub struct ValidatorInfo { + pub address: AccountAddress, + pub voting_power: u64, + pub validator_index: u16, +} + +pub struct EpochInfo { + pub epoch: u64, + pub blocks: Vec, + pub validators: Vec, + pub partial: bool, +} + +pub struct FetchMetadata {} + +impl FetchMetadata { + fn get_validator_addresses( + data: &MoveResource, + field_name: &str, + ) -> Result> { + fn extract_validator_address(validator: &serde_json::Value) -> Result { + Ok(ValidatorInfo { + address: AccountAddress::from_hex_literal( + validator.get("addr").unwrap().as_str().unwrap(), + ) + .map_err(|e| anyhow!("Cannot parse address {:?}", e))?, + voting_power: validator + .get("voting_power") + .unwrap() + .as_str() + .unwrap() + .parse() + .map_err(|e| anyhow!("Cannot parse voting_power {:?}", e))?, + validator_index: validator + .get("config") + .unwrap() + .get("validator_index") + .unwrap() + .as_str() + .unwrap() + .parse() + .map_err(|e| anyhow!("Cannot parse validator_index {:?}", e))?, + }) + } + + let validators_json = data + .data + .0 + .get(&IdentifierWrapper::from_str(field_name).unwrap()) + .unwrap(); + if let serde_json::Value::Array(validators_array) = 
validators_json { + let mut validators: Vec = vec![]; + for validator in validators_array { + validators.push(extract_validator_address(validator)?); + } + Ok(validators) + } else { + Err(anyhow!("{} validators not in json", field_name)) + } + } + + async fn get_transactions_in_range( + client: &RestClient, + start: u64, + last: u64, + ) -> Result> { + let mut result = Vec::new(); + let mut cursor = start; + while cursor < last { + let limit = std::cmp::min(MAX_FETCH_BATCH_SIZE as u64, last - cursor) as u16; + let mut current = client + .get_transactions(Some(cursor), Some(limit)) + .await? + .into_inner(); + if current.is_empty() { + return Err(anyhow!( + "No transactions returned with start={} and limit={}", + cursor, + limit + )); + } + cursor += current.len() as u64; + result.append(&mut current); + } + Ok(result) + } + + fn get_validators_from_transaction(transaction: &Transaction) -> Result> { + if let Ok(info) = transaction.transaction_info() { + for change in &info.changes { + if let WriteSetChange::WriteResource(resource) = change { + if resource.data.typ.name.0.as_str() == "ValidatorSet" { + // No pending at epoch change + assert_eq!( + Vec::::new(), + FetchMetadata::get_validator_addresses( + &resource.data, + "pending_inactive" + )? + ); + assert_eq!( + Vec::::new(), + FetchMetadata::get_validator_addresses( + &resource.data, + "pending_active" + )? + ); + return FetchMetadata::get_validator_addresses( + &resource.data, + "active_validators", + ); + } + } + } + } + Err(anyhow!("Couldn't find ValidatorSet in the transaction")) + } + + pub async fn fetch_new_block_events( + client: &RestClient, + start_epoch: Option, + end_epoch: Option, + ) -> Result> { + let (last_events, state) = client + .get_new_block_events_bcs(None, Some(1)) + .await? + .into_parts(); + let mut start_seq_num = state.oldest_block_height; + assert_eq!(last_events.len(), 1, "{:?}", last_events); + let last_event = last_events.first().unwrap(); + let last_seq_num = last_event.sequence_number; + + let wanted_start_epoch = { + let mut wanted_start_epoch = start_epoch.unwrap_or(2); + if wanted_start_epoch < 0 { + wanted_start_epoch = last_event.event.epoch() as i64 + wanted_start_epoch + 1; + } + + let oldest_event = client + .get_new_block_events_bcs(Some(start_seq_num), Some(1)) + .await? + .into_inner() + .into_iter() + .next() + .ok_or_else(|| anyhow!("No blocks at oldest_block_height {}", start_seq_num))?; + let oldest_fetchable_epoch = std::cmp::max(oldest_event.event.epoch() + 1, 2); + if oldest_fetchable_epoch > wanted_start_epoch as u64 { + println!( + "Oldest full epoch that can be retreived is {} ", + oldest_fetchable_epoch + ); + oldest_fetchable_epoch + } else { + wanted_start_epoch as u64 + } + }; + let wanted_end_epoch = { + let mut wanted_end_epoch = end_epoch.unwrap_or(i64::MAX); + if wanted_end_epoch < 0 { + wanted_end_epoch = last_event.event.epoch() as i64 + wanted_end_epoch + 1; + } + std::cmp::min( + last_event.event.epoch() + 1, + std::cmp::max(2, wanted_end_epoch) as u64, + ) + }; + + if wanted_start_epoch > 2 { + let mut search_end = last_seq_num; + + // Stop when search is close enough, and we can then linearly + // proceed from there. + // Since we are ignoring results we are fetching during binary search + // we want to stop when we are close. + while start_seq_num + 20 < search_end { + let mid = (start_seq_num + search_end) / 2; + + let mid_epoch = client + .get_new_block_events_bcs(Some(mid), Some(1)) + .await? 
+ .into_inner() + .first() + .unwrap() + .event + .epoch(); + + if mid_epoch < wanted_start_epoch { + start_seq_num = mid; + } else { + search_end = mid; + } + } + } + + let mut batch_index = 0; + + println!( + "Fetching {} to {} sequence number, wanting epochs [{}, {}), last version: {} and epoch: {}", + start_seq_num, last_seq_num, wanted_start_epoch, wanted_end_epoch, state.version, state.epoch, + ); + let mut result: Vec = vec![]; + if wanted_start_epoch >= wanted_end_epoch { + return Ok(result); + } + + let mut validators: Vec = vec![]; + let mut current: Vec = vec![]; + let mut epoch = 0; + + let mut cursor = start_seq_num; + loop { + let response = client + .get_new_block_events_bcs(Some(cursor), Some(MAX_FETCH_BATCH_SIZE)) + .await; + + if response.is_err() { + println!( + "Failed to read new_block_events beyond {}, stopping. {:?}", + cursor, + response.unwrap_err() + ); + assert!(!validators.is_empty()); + result.push(EpochInfo { + epoch, + blocks: current, + validators: validators.clone(), + partial: true, + }); + return Ok(result); + } + let events = response.unwrap().into_inner(); + + if events.is_empty() { + return Err(anyhow!( + "No transactions returned with start={} and limit={}", + cursor, + MAX_FETCH_BATCH_SIZE + )); + } + + cursor += events.len() as u64; + batch_index += 1; + + for event in events { + if event.event.epoch() > epoch { + if epoch == 0 { + epoch = event.event.epoch(); + current = vec![]; + } else { + let last = current.last().cloned(); + if let Some(last) = last { + let transactions = FetchMetadata::get_transactions_in_range( + client, + last.version, + event.version, + ) + .await?; + assert_eq!( + transactions.first().unwrap().version().unwrap(), + last.version + ); + for transaction in transactions { + if let Ok(new_validators) = + FetchMetadata::get_validators_from_transaction(&transaction) + { + if epoch >= wanted_start_epoch { + assert!(!validators.is_empty()); + result.push(EpochInfo { + epoch, + blocks: current, + validators: validators.clone(), + partial: false, + }); + } + current = vec![]; + + validators = new_validators; + validators.sort_by_key(|v| v.validator_index); + assert_eq!(epoch + 1, event.event.epoch()); + epoch = event.event.epoch(); + if epoch >= wanted_end_epoch { + return Ok(result); + } + break; + } + } + assert!( + current.is_empty(), + "Couldn't find ValidatorSet change for transactions start={}, limit={} for epoch {}", + last.version, + event.version - last.version, + event.event.epoch(), + ); + } + } + } + current.push(event); + } + + if batch_index % 100 == 0 { + println!( + "Fetched {} epochs (in epoch {} with {} blocks) from {} NewBlockEvents", + result.len(), + epoch, + current.len(), + cursor + ); + } + + if cursor > last_seq_num { + if !validators.is_empty() { + result.push(EpochInfo { + epoch, + blocks: current, + validators: validators.clone(), + partial: true, + }); + } + return Ok(result); + } + } + } +} diff --git a/m1/movement/src/node/analyze/mod.rs b/m1/movement/src/node/analyze/mod.rs new file mode 100644 index 00000000..ac8c7016 --- /dev/null +++ b/m1/movement/src/node/analyze/mod.rs @@ -0,0 +1,5 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +pub mod analyze_validators; +pub mod fetch_metadata; diff --git a/m1/movement/src/node/mod.rs b/m1/movement/src/node/mod.rs new file mode 100644 index 00000000..33caaac4 --- /dev/null +++ b/m1/movement/src/node/mod.rs @@ -0,0 +1,1730 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +pub mod analyze; + +use 
crate::{ + common::{ + types::{ + CliCommand, CliError, CliResult, CliTypedResult, ConfigSearchMode, + OptionalPoolAddressArgs, PoolAddressArgs, ProfileOptions, PromptOptions, RestOptions, + TransactionOptions, TransactionSummary, + }, + utils::{prompt_yes_with_override, read_from_file}, + }, + config::GlobalConfig, + genesis::git::from_yaml, + node::analyze::{ + analyze_validators::{AnalyzeValidators, ValidatorStats}, + fetch_metadata::FetchMetadata, + }, +}; +use aptos_backup_cli::{ + coordinators::restore::{RestoreCoordinator, RestoreCoordinatorOpt}, + metadata::cache::MetadataCacheOpt, + storage::command_adapter::{config::CommandAdapterConfig, CommandAdapter}, + utils::{ConcurrentDownloadsOpt, GlobalRestoreOpt, ReplayConcurrencyLevelOpt, RocksdbOpt}, +}; +use aptos_cached_packages::aptos_stdlib; +use aptos_config::config::NodeConfig; +use aptos_crypto::{bls12381, bls12381::PublicKey, x25519, ValidCryptoMaterialStringExt}; +use aptos_faucet_core::server::{FunderKeyEnum, RunConfig}; +use aptos_genesis::config::{HostAndPort, OperatorConfiguration}; +use aptos_network_checker::args::{ + validate_address, CheckEndpointArgs, HandshakeArgs, NodeAddressArgs, +}; +use aptos_rest_client::{aptos_api_types::VersionedEvent, Client, State}; +use aptos_types::{ + account_address::AccountAddress, + account_config::{BlockResource, CORE_CODE_ADDRESS}, + chain_id::ChainId, + network_address::NetworkAddress, + on_chain_config::{ConfigurationResource, ConsensusScheme, ValidatorSet}, + stake_pool::StakePool, + staking_contract::StakingContractStore, + validator_info::ValidatorInfo, + validator_performances::ValidatorPerformances, + vesting::VestingAdminStore, +}; +use async_trait::async_trait; +use bcs::Result; +use chrono::{DateTime, NaiveDateTime, Utc}; +use clap::Parser; +use futures::FutureExt; +use hex::FromHex; +use rand::{rngs::StdRng, SeedableRng}; +use reqwest::Url; +use serde::{Deserialize, Serialize}; +use std::{ + collections::HashMap, + convert::{TryFrom, TryInto}, + path::PathBuf, + pin::Pin, + sync::Arc, + thread, + time::Duration, +}; +use tokio::time::Instant; + +const SECS_TO_MICROSECS: u64 = 1_000_000; + +/// Tool for operations related to nodes +/// +/// This tool allows you to run a local test node for testing, +/// identify issues with nodes, and show related information. 
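+// (Illustrative only: with clap's default kebab-case renaming these variants surface as
+// subcommands such as `node show-validator-set` or `node run-local-testnet`; the exact
+// invocation depends on how the CLI binary wires in `NodeTool`.)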
+#[derive(Parser)] +pub enum NodeTool { + AnalyzeValidatorPerformance(AnalyzeValidatorPerformance), + BootstrapDbFromBackup(BootstrapDbFromBackup), + CheckNetworkConnectivity(CheckNetworkConnectivity), + GetPerformance(GetPerformance), + GetStakePool(GetStakePool), + InitializeValidator(InitializeValidator), + JoinValidatorSet(JoinValidatorSet), + LeaveValidatorSet(LeaveValidatorSet), + ShowEpochInfo(ShowEpochInfo), + ShowValidatorConfig(ShowValidatorConfig), + ShowValidatorSet(ShowValidatorSet), + ShowValidatorStake(ShowValidatorStake), + RunLocalTestnet(RunLocalTestnet), + UpdateConsensusKey(UpdateConsensusKey), + UpdateValidatorNetworkAddresses(UpdateValidatorNetworkAddresses), +} + +impl NodeTool { + pub async fn execute(self) -> CliResult { + use NodeTool::*; + match self { + AnalyzeValidatorPerformance(tool) => tool.execute_serialized().await, + BootstrapDbFromBackup(tool) => tool.execute_serialized().await, + CheckNetworkConnectivity(tool) => tool.execute_serialized().await, + GetPerformance(tool) => tool.execute_serialized().await, + GetStakePool(tool) => tool.execute_serialized().await, + InitializeValidator(tool) => tool.execute_serialized().await, + JoinValidatorSet(tool) => tool.execute_serialized().await, + LeaveValidatorSet(tool) => tool.execute_serialized().await, + ShowEpochInfo(tool) => tool.execute_serialized().await, + ShowValidatorSet(tool) => tool.execute_serialized().await, + ShowValidatorStake(tool) => tool.execute_serialized().await, + ShowValidatorConfig(tool) => tool.execute_serialized().await, + RunLocalTestnet(tool) => tool.execute_serialized_without_logger().await, + UpdateConsensusKey(tool) => tool.execute_serialized().await, + UpdateValidatorNetworkAddresses(tool) => tool.execute_serialized().await, + } + } +} + +#[derive(Parser)] +pub struct OperatorConfigFileArgs { + /// Operator Configuration file + /// + /// Config file created from the `genesis set-validator-configuration` command + #[clap(long, parse(from_os_str))] + pub(crate) operator_config_file: Option, +} + +impl OperatorConfigFileArgs { + fn load(&self) -> CliTypedResult> { + if let Some(ref file) = self.operator_config_file { + Ok(from_yaml( + &String::from_utf8(read_from_file(file)?).map_err(CliError::from)?, + )?) 
+ } else { + Ok(None) + } + } +} + +#[derive(Parser)] +pub struct ValidatorConsensusKeyArgs { + /// Hex encoded Consensus public key + /// + /// The key should be a BLS12-381 public key + #[clap(long, parse(try_from_str = bls12381::PublicKey::from_encoded_string))] + pub(crate) consensus_public_key: Option, + + /// Hex encoded Consensus proof of possession + /// + /// The key should be a BLS12-381 proof of possession + #[clap(long, parse(try_from_str = bls12381::ProofOfPossession::from_encoded_string))] + pub(crate) proof_of_possession: Option, +} + +impl ValidatorConsensusKeyArgs { + fn get_consensus_public_key<'a>( + &'a self, + operator_config: &'a Option, + ) -> CliTypedResult<&'a bls12381::PublicKey> { + let consensus_public_key = if let Some(ref consensus_public_key) = self.consensus_public_key + { + consensus_public_key + } else if let Some(ref operator_config) = operator_config { + &operator_config.consensus_public_key + } else { + return Err(CliError::CommandArgumentError( + "Must provide either --operator-config-file or --consensus-public-key".to_string(), + )); + }; + Ok(consensus_public_key) + } + + fn get_consensus_proof_of_possession<'a>( + &'a self, + operator_config: &'a Option, + ) -> CliTypedResult<&'a bls12381::ProofOfPossession> { + let proof_of_possession = if let Some(ref proof_of_possession) = self.proof_of_possession { + proof_of_possession + } else if let Some(ref operator_config) = operator_config { + &operator_config.consensus_proof_of_possession + } else { + return Err(CliError::CommandArgumentError( + "Must provide either --operator-config-file or --proof-of-possession".to_string(), + )); + }; + Ok(proof_of_possession) + } +} + +#[derive(Parser)] +pub struct ValidatorNetworkAddressesArgs { + /// Host and port pair for the validator + /// + /// e.g. 127.0.0.1:6180 + #[clap(long)] + pub(crate) validator_host: Option, + + /// Validator x25519 public network key + #[clap(long, parse(try_from_str = x25519::PublicKey::from_encoded_string))] + pub(crate) validator_network_public_key: Option, + + /// Host and port pair for the fullnode + /// + /// e.g. 127.0.0.1:6180. 
Optional + #[clap(long)] + pub(crate) full_node_host: Option, + + /// Full node x25519 public network key + #[clap(long, parse(try_from_str = x25519::PublicKey::from_encoded_string))] + pub(crate) full_node_network_public_key: Option, +} + +impl ValidatorNetworkAddressesArgs { + fn get_network_configs<'a>( + &'a self, + operator_config: &'a Option, + ) -> CliTypedResult<( + x25519::PublicKey, + Option, + &'a HostAndPort, + Option<&'a HostAndPort>, + )> { + let validator_network_public_key = + if let Some(public_key) = self.validator_network_public_key { + public_key + } else if let Some(ref operator_config) = operator_config { + operator_config.validator_network_public_key + } else { + return Err(CliError::CommandArgumentError( + "Must provide either --operator-config-file or --validator-network-public-key" + .to_string(), + )); + }; + + let full_node_network_public_key = + if let Some(public_key) = self.full_node_network_public_key { + Some(public_key) + } else if let Some(ref operator_config) = operator_config { + operator_config.full_node_network_public_key + } else { + None + }; + + let validator_host = if let Some(ref host) = self.validator_host { + host + } else if let Some(ref operator_config) = operator_config { + &operator_config.validator_host + } else { + return Err(CliError::CommandArgumentError( + "Must provide either --operator-config-file or --validator-host".to_string(), + )); + }; + + let full_node_host = if let Some(ref host) = self.full_node_host { + Some(host) + } else if let Some(ref operator_config) = operator_config { + operator_config.full_node_host.as_ref() + } else { + None + }; + + Ok(( + validator_network_public_key, + full_node_network_public_key, + validator_host, + full_node_host, + )) + } +} + +#[derive(Copy, Clone, Debug, Serialize)] +pub enum StakePoolType { + Direct, + StakingContract, + Vesting, +} + +#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize)] +pub enum StakePoolState { + Active, + Inactive, + PendingActive, + PendingInactive, +} + +#[derive(Debug, Serialize)] +pub struct StakePoolResult { + pub state: StakePoolState, + pub pool_address: AccountAddress, + pub operator_address: AccountAddress, + pub voter_address: AccountAddress, + pub pool_type: StakePoolType, + pub total_stake: u64, + pub commission_percentage: u64, + pub commission_not_yet_unlocked: u64, + pub lockup_expiration_utc_time: DateTime, + pub consensus_public_key: String, + pub validator_network_addresses: Vec, + pub fullnode_network_addresses: Vec, + pub epoch_info: EpochInfo, + #[serde(skip_serializing_if = "Option::is_none")] + pub vesting_contract: Option, +} + +/// Show the stake pool +/// +/// Retrieves the associated stake pool from the multiple types for the given owner address +#[derive(Parser)] +pub struct GetStakePool { + /// The owner address of the stake pool + #[clap(long, parse(try_from_str=crate::common::types::load_account_arg))] + pub(crate) owner_address: AccountAddress, + #[clap(flatten)] + pub(crate) rest_options: RestOptions, + #[clap(flatten)] + pub(crate) profile_options: ProfileOptions, +} + +#[async_trait] +impl CliCommand> for GetStakePool { + fn command_name(&self) -> &'static str { + "GetStakePool" + } + + async fn execute(mut self) -> CliTypedResult> { + let owner_address = self.owner_address; + let client = &self.rest_options.client(&self.profile_options)?; + get_stake_pools(client, owner_address).await + } +} + +#[derive(Debug, Serialize)] +pub struct StakePoolPerformance { + current_epoch_successful_proposals: u64, + 
current_epoch_failed_proposals: u64, + previous_epoch_rewards: Vec, + epoch_info: EpochInfo, +} + +/// Show staking performance of the given staking pool +#[derive(Parser)] +pub struct GetPerformance { + #[clap(flatten)] + pub(crate) pool_address_args: PoolAddressArgs, + #[clap(flatten)] + pub(crate) rest_options: RestOptions, + #[clap(flatten)] + pub(crate) profile_options: ProfileOptions, +} + +#[async_trait] +impl CliCommand for GetPerformance { + fn command_name(&self) -> &'static str { + "GetPerformance" + } + + async fn execute(mut self) -> CliTypedResult { + let client = &self.rest_options.client(&self.profile_options)?; + let pool_address = self.pool_address_args.pool_address; + let validator_set = &client + .get_account_resource_bcs::(CORE_CODE_ADDRESS, "0x1::stake::ValidatorSet") + .await? + .into_inner(); + + let mut current_epoch_successful_proposals = 0; + let mut current_epoch_failed_proposals = 0; + let state = get_stake_pool_state(validator_set, &pool_address); + if state == StakePoolState::Active || state == StakePoolState::PendingInactive { + let validator_config = client + .get_account_resource_bcs::( + pool_address, + "0x1::stake::ValidatorConfig", + ) + .await? + .into_inner(); + let validator_performances = &client + .get_account_resource_bcs::( + CORE_CODE_ADDRESS, + "0x1::stake::ValidatorPerformance", + ) + .await? + .into_inner(); + let validator_index = validator_config.validator_index as usize; + current_epoch_successful_proposals = + validator_performances.validators[validator_index].successful_proposals; + current_epoch_failed_proposals = + validator_performances.validators[validator_index].failed_proposals; + }; + + let previous_epoch_rewards = client + .get_account_events( + pool_address, + "0x1::stake::StakePool", + "distribute_rewards_events", + Some(0), + Some(10), + ) + .await + .unwrap() + .into_inner() + .into_iter() + .map(|e: VersionedEvent| { + e.data + .get("rewards_amount") + .unwrap() + .as_str() + .unwrap() + .into() + }) + .collect(); + + Ok(StakePoolPerformance { + current_epoch_successful_proposals, + current_epoch_failed_proposals, + previous_epoch_rewards, + epoch_info: get_epoch_info(client).await?, + }) + } +} + +/// Retrieves all stake pools associated with an account +pub async fn get_stake_pools( + client: &Client, + owner_address: AccountAddress, +) -> CliTypedResult> { + let epoch_info = get_epoch_info(client).await?; + let validator_set = &client + .get_account_resource_bcs::(CORE_CODE_ADDRESS, "0x1::stake::ValidatorSet") + .await? + .into_inner(); + let mut stake_pool_results: Vec = vec![]; + // Add direct stake pool if any. + let direct_stake_pool = get_stake_pool_info( + client, + owner_address, + StakePoolType::Direct, + 0, + 0, + epoch_info.clone(), + validator_set, + None, + ) + .await; + if let Ok(direct_stake_pool) = direct_stake_pool { + stake_pool_results.push(direct_stake_pool); + }; + + // Fetch all stake pools managed via staking contracts. + let staking_contract_pools = get_staking_contract_pools( + client, + owner_address, + StakePoolType::StakingContract, + epoch_info.clone(), + validator_set, + None, + ) + .await; + if let Ok(mut staking_contract_pools) = staking_contract_pools { + stake_pool_results.append(&mut staking_contract_pools); + }; + + // Fetch all stake pools managed via employee vesting accounts. 
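+    // Each vesting contract listed in the owner's `0x1::vesting::AdminStore` is itself a staker
+    // with a `0x1::staking_contract::Store`, so the same `get_staking_contract_pools` helper is
+    // reused here with `StakePoolType::Vesting`.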
+ let vesting_admin_store = client + .get_account_resource_bcs::(owner_address, "0x1::vesting::AdminStore") + .await; + if let Ok(vesting_admin_store) = vesting_admin_store { + let vesting_contracts = vesting_admin_store.into_inner().vesting_contracts; + for vesting_contract in vesting_contracts { + let mut staking_contract_pools = get_staking_contract_pools( + client, + vesting_contract, + StakePoolType::Vesting, + epoch_info.clone(), + validator_set, + Some(vesting_contract), + ) + .await + .unwrap(); + stake_pool_results.append(&mut staking_contract_pools); + } + }; + + Ok(stake_pool_results) +} + +/// Retrieve 0x1::staking_contract related pools +pub async fn get_staking_contract_pools( + client: &Client, + staker_address: AccountAddress, + pool_type: StakePoolType, + epoch_info: EpochInfo, + validator_set: &ValidatorSet, + vesting_contract: Option, +) -> CliTypedResult> { + let mut stake_pool_results: Vec = vec![]; + let staking_contract_store = client + .get_account_resource_bcs::( + staker_address, + "0x1::staking_contract::Store", + ) + .await?; + let staking_contracts = staking_contract_store.into_inner().staking_contracts; + for staking_contract in staking_contracts { + let stake_pool_address = get_stake_pool_info( + client, + staking_contract.value.pool_address, + pool_type, + staking_contract.value.principal, + staking_contract.value.commission_percentage, + epoch_info.clone(), + validator_set, + vesting_contract, + ) + .await + .unwrap(); + stake_pool_results.push(stake_pool_address); + } + Ok(stake_pool_results) +} + +pub async fn get_stake_pool_info( + client: &Client, + pool_address: AccountAddress, + pool_type: StakePoolType, + principal: u64, + commission_percentage: u64, + epoch_info: EpochInfo, + validator_set: &ValidatorSet, + vesting_contract: Option, +) -> CliTypedResult { + let stake_pool = client + .get_account_resource_bcs::(pool_address, "0x1::stake::StakePool") + .await? + .into_inner(); + let validator_config = client + .get_account_resource_bcs::(pool_address, "0x1::stake::ValidatorConfig") + .await? 
+ .into_inner(); + let total_stake = stake_pool.get_total_staked_amount(); + let commission_not_yet_unlocked = (total_stake - principal) * commission_percentage / 100; + let state = get_stake_pool_state(validator_set, &pool_address); + + let consensus_public_key = if validator_config.consensus_public_key.is_empty() { + "".into() + } else { + PublicKey::try_from(&validator_config.consensus_public_key[..]) + .unwrap() + .to_encoded_string() + .unwrap() + }; + Ok(StakePoolResult { + state, + pool_address, + operator_address: stake_pool.operator_address, + voter_address: stake_pool.delegated_voter, + pool_type, + total_stake, + commission_percentage, + commission_not_yet_unlocked, + lockup_expiration_utc_time: Time::new_seconds(stake_pool.locked_until_secs).utc_time, + consensus_public_key, + validator_network_addresses: validator_config + .validator_network_addresses() + .unwrap_or_default(), + fullnode_network_addresses: validator_config + .fullnode_network_addresses() + .unwrap_or_default(), + epoch_info, + vesting_contract, + }) +} + +fn get_stake_pool_state( + validator_set: &ValidatorSet, + pool_address: &AccountAddress, +) -> StakePoolState { + if validator_set.active_validators().contains(pool_address) { + StakePoolState::Active + } else if validator_set + .pending_active_validators() + .contains(pool_address) + { + StakePoolState::PendingActive + } else if validator_set + .pending_inactive_validators() + .contains(pool_address) + { + StakePoolState::PendingInactive + } else { + StakePoolState::Inactive + } +} + +/// Register the current account as a validator +/// +/// This will create a new stake pool for the given account. The voter and operator fields will be +/// defaulted to the stake pool account if not provided. +#[derive(Parser)] +pub struct InitializeValidator { + #[clap(flatten)] + pub(crate) txn_options: TransactionOptions, + #[clap(flatten)] + pub(crate) operator_config_file_args: OperatorConfigFileArgs, + #[clap(flatten)] + pub(crate) validator_consensus_key_args: ValidatorConsensusKeyArgs, + #[clap(flatten)] + pub(crate) validator_network_addresses_args: ValidatorNetworkAddressesArgs, +} + +#[async_trait] +impl CliCommand for InitializeValidator { + fn command_name(&self) -> &'static str { + "InitializeValidator" + } + + async fn execute(mut self) -> CliTypedResult { + let operator_config = self.operator_config_file_args.load()?; + let consensus_public_key = self + .validator_consensus_key_args + .get_consensus_public_key(&operator_config)?; + let consensus_proof_of_possession = self + .validator_consensus_key_args + .get_consensus_proof_of_possession(&operator_config)?; + let ( + validator_network_public_key, + full_node_network_public_key, + validator_host, + full_node_host, + ) = self + .validator_network_addresses_args + .get_network_configs(&operator_config)?; + let validator_network_addresses = + vec![validator_host.as_network_address(validator_network_public_key)?]; + let full_node_network_addresses = + match (full_node_host.as_ref(), full_node_network_public_key) { + (Some(host), Some(public_key)) => vec![host.as_network_address(public_key)?], + (None, None) => vec![], + _ => { + return Err(CliError::CommandArgumentError( + "If specifying fullnode addresses, both host and public key are required." 
+ .to_string(), + )) + }, + }; + + self.txn_options + .submit_transaction(aptos_stdlib::stake_initialize_validator( + consensus_public_key.to_bytes().to_vec(), + consensus_proof_of_possession.to_bytes().to_vec(), + // BCS encode, so that we can hide the original type + bcs::to_bytes(&validator_network_addresses)?, + bcs::to_bytes(&full_node_network_addresses)?, + )) + .await + .map(|inner| inner.into()) + } +} + +/// Arguments used for operator of the staking pool +#[derive(Parser)] +pub struct OperatorArgs { + #[clap(flatten)] + pub(crate) pool_address_args: OptionalPoolAddressArgs, +} + +impl OperatorArgs { + fn address_fallback_to_profile( + &self, + profile_options: &ProfileOptions, + ) -> CliTypedResult { + if let Some(address) = self.pool_address_args.pool_address { + Ok(address) + } else { + profile_options.account_address() + } + } + + fn address_fallback_to_txn( + &self, + transaction_options: &TransactionOptions, + ) -> CliTypedResult { + if let Some(address) = self.pool_address_args.pool_address { + Ok(address) + } else { + transaction_options.sender_address() + } + } +} + +/// Join the validator set after meeting staking requirements +/// +/// Joining the validator set requires sufficient stake. Once the transaction +/// succeeds, you will join the validator set in the next epoch. +#[derive(Parser)] +pub struct JoinValidatorSet { + #[clap(flatten)] + pub(crate) txn_options: TransactionOptions, + #[clap(flatten)] + pub(crate) operator_args: OperatorArgs, +} + +#[async_trait] +impl CliCommand for JoinValidatorSet { + fn command_name(&self) -> &'static str { + "JoinValidatorSet" + } + + async fn execute(mut self) -> CliTypedResult { + let address = self + .operator_args + .address_fallback_to_txn(&self.txn_options)?; + + self.txn_options + .submit_transaction(aptos_stdlib::stake_join_validator_set(address)) + .await + .map(|inner| inner.into()) + } +} + +/// Leave the validator set +/// +/// Leaving the validator set will require you to have unlocked and withdrawn all stake. After this +/// transaction is successful, you will leave the validator set in the next epoch. +#[derive(Parser)] +pub struct LeaveValidatorSet { + #[clap(flatten)] + pub(crate) txn_options: TransactionOptions, + #[clap(flatten)] + pub(crate) operator_args: OperatorArgs, +} + +#[async_trait] +impl CliCommand for LeaveValidatorSet { + fn command_name(&self) -> &'static str { + "LeaveValidatorSet" + } + + async fn execute(mut self) -> CliTypedResult { + let address = self + .operator_args + .address_fallback_to_txn(&self.txn_options)?; + + self.txn_options + .submit_transaction(aptos_stdlib::stake_leave_validator_set(address)) + .await + .map(|inner| inner.into()) + } +} + +/// Show validator stake information for a specific validator +/// +/// This will show information about a specific validator, given its +/// `--pool-address`. 
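+// (Example, assuming the default kebab-case subcommand naming and a `node` parent command:
+// `node show-validator-stake --pool-address <address>` returns the raw `0x1::stake::StakePool`
+// resource for that pool; without `--pool-address` the profile's account address is used.)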
+#[derive(Parser)] +pub struct ShowValidatorStake { + #[clap(flatten)] + pub(crate) profile_options: ProfileOptions, + #[clap(flatten)] + pub(crate) rest_options: RestOptions, + #[clap(flatten)] + pub(crate) operator_args: OperatorArgs, +} + +#[async_trait] +impl CliCommand for ShowValidatorStake { + fn command_name(&self) -> &'static str { + "ShowValidatorStake" + } + + async fn execute(mut self) -> CliTypedResult { + let client = self.rest_options.client(&self.profile_options)?; + let address = self + .operator_args + .address_fallback_to_profile(&self.profile_options)?; + let response = client + .get_resource(address, "0x1::stake::StakePool") + .await?; + Ok(response.into_inner()) + } +} + +/// Show validator configuration for a specific validator +/// +/// This will show information about a specific validator, given its +/// `--pool-address`. +#[derive(Parser)] +pub struct ShowValidatorConfig { + #[clap(flatten)] + pub(crate) profile_options: ProfileOptions, + #[clap(flatten)] + pub(crate) rest_options: RestOptions, + #[clap(flatten)] + pub(crate) operator_args: OperatorArgs, +} + +#[async_trait] +impl CliCommand for ShowValidatorConfig { + fn command_name(&self) -> &'static str { + "ShowValidatorConfig" + } + + async fn execute(mut self) -> CliTypedResult { + let client = self.rest_options.client(&self.profile_options)?; + let address = self + .operator_args + .address_fallback_to_profile(&self.profile_options)?; + let validator_config: ValidatorConfig = client + .get_account_resource_bcs(address, "0x1::stake::ValidatorConfig") + .await? + .into_inner(); + Ok((&validator_config) + .try_into() + .map_err(|err| CliError::BCS("Validator config", err))?) + } +} + +/// Show validator details of the validator set +/// +/// This will show information about the validators including their voting power, addresses, and +/// public keys. +#[derive(Parser)] +pub struct ShowValidatorSet { + #[clap(flatten)] + pub(crate) profile_options: ProfileOptions, + #[clap(flatten)] + pub(crate) rest_options: RestOptions, +} + +#[async_trait] +impl CliCommand for ShowValidatorSet { + fn command_name(&self) -> &'static str { + "ShowValidatorSet" + } + + async fn execute(mut self) -> CliTypedResult { + let client = self.rest_options.client(&self.profile_options)?; + let validator_set: ValidatorSet = client + .get_account_resource_bcs(CORE_CODE_ADDRESS, "0x1::stake::ValidatorSet") + .await? 
+ .into_inner(); + + ValidatorSetSummary::try_from(&validator_set) + .map_err(|err| CliError::BCS("Validator Set", err)) + } +} + +#[derive(Clone, Debug, Eq, PartialEq, Serialize)] +pub struct ValidatorSetSummary { + pub scheme: ConsensusScheme, + pub active_validators: Vec, + pub pending_inactive: Vec, + pub pending_active: Vec, + pub total_voting_power: u128, + pub total_joining_power: u128, +} + +impl TryFrom<&ValidatorSet> for ValidatorSetSummary { + type Error = bcs::Error; + + fn try_from(set: &ValidatorSet) -> Result { + Ok(ValidatorSetSummary { + scheme: set.scheme, + active_validators: set + .active_validators + .iter() + .filter_map(|validator| validator.try_into().ok()) + .collect(), + pending_inactive: set + .pending_inactive + .iter() + .filter_map(|validator| validator.try_into().ok()) + .collect(), + pending_active: set + .pending_active + .iter() + .filter_map(|validator| validator.try_into().ok()) + .collect(), + total_voting_power: set.total_voting_power, + total_joining_power: set.total_joining_power, + }) + } +} + +impl From<&ValidatorSetSummary> for ValidatorSet { + fn from(summary: &ValidatorSetSummary) -> Self { + ValidatorSet { + scheme: summary.scheme, + active_validators: summary + .active_validators + .iter() + .map(|validator| validator.into()) + .collect(), + pending_inactive: summary + .pending_inactive + .iter() + .map(|validator| validator.into()) + .collect(), + pending_active: summary + .pending_active + .iter() + .map(|validator| validator.into()) + .collect(), + total_voting_power: summary.total_voting_power, + total_joining_power: summary.total_joining_power, + } + } +} + +#[derive(Clone, Debug, Eq, PartialEq, Serialize)] +pub struct ValidatorInfoSummary { + // The validator's account address. AccountAddresses are initially derived from the account + // auth pubkey; however, the auth key can be rotated, so one should not rely on this + // initial property. 
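+    // (Display-oriented mirror of the on-chain `ValidatorInfo`: the nested config summary
+    // re-encodes the consensus key as a human-readable string and BCS-decodes the network
+    // address bytes so they serialize legibly.)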
+    pub account_address: AccountAddress,
+    // Voting power of this validator
+    consensus_voting_power: u64,
+    // Validator config
+    config: ValidatorConfigSummary,
+}
+
+impl TryFrom<&ValidatorInfo> for ValidatorInfoSummary {
+    type Error = bcs::Error;
+
+    fn try_from(info: &ValidatorInfo) -> Result<Self, Self::Error> {
+        let config = info.config();
+        let config = ValidatorConfig {
+            consensus_public_key: config.consensus_public_key.to_bytes().to_vec(),
+            validator_network_addresses: config.validator_network_addresses.clone(),
+            fullnode_network_addresses: config.fullnode_network_addresses.clone(),
+            validator_index: config.validator_index,
+        };
+        Ok(ValidatorInfoSummary {
+            account_address: info.account_address,
+            consensus_voting_power: info.consensus_voting_power(),
+            config: ValidatorConfigSummary::try_from(&config)?,
+        })
+    }
+}
+
+impl From<&ValidatorInfoSummary> for ValidatorInfo {
+    fn from(summary: &ValidatorInfoSummary) -> Self {
+        let config = &summary.config;
+        ValidatorInfo::new(
+            summary.account_address,
+            summary.consensus_voting_power,
+            aptos_types::validator_config::ValidatorConfig::new(
+                PublicKey::from_encoded_string(&config.consensus_public_key).unwrap(),
+                bcs::to_bytes(&config.validator_network_addresses).unwrap(),
+                bcs::to_bytes(&config.fullnode_network_addresses).unwrap(),
+                config.validator_index,
+            ),
+        )
+    }
+}
+
+#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
+pub struct ValidatorConfig {
+    pub consensus_public_key: Vec<u8>,
+    pub validator_network_addresses: Vec<u8>,
+    pub fullnode_network_addresses: Vec<u8>,
+    pub validator_index: u64,
+}
+
+impl ValidatorConfig {
+    pub fn new(
+        consensus_public_key: Vec<u8>,
+        validator_network_addresses: Vec<u8>,
+        fullnode_network_addresses: Vec<u8>,
+        validator_index: u64,
+    ) -> Self {
+        ValidatorConfig {
+            consensus_public_key,
+            validator_network_addresses,
+            fullnode_network_addresses,
+            validator_index,
+        }
+    }
+
+    pub fn fullnode_network_addresses(&self) -> Result<Vec<NetworkAddress>, bcs::Error> {
+        bcs::from_bytes(&self.fullnode_network_addresses)
+    }
+
+    pub fn validator_network_addresses(&self) -> Result<Vec<NetworkAddress>, bcs::Error> {
+        bcs::from_bytes(&self.validator_network_addresses)
+    }
+}
+
+#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
+pub struct ValidatorConfigSummary {
+    pub consensus_public_key: String,
+    /// Decoded from the on-chain BCS-serialized `Vec<NetworkAddress>`
+    pub validator_network_addresses: Vec<NetworkAddress>,
+    /// Decoded from the on-chain BCS-serialized `Vec<NetworkAddress>`
+    pub fullnode_network_addresses: Vec<NetworkAddress>,
+    pub validator_index: u64,
+}
+
+impl TryFrom<&ValidatorConfig> for ValidatorConfigSummary {
+    type Error = bcs::Error;
+
+    fn try_from(config: &ValidatorConfig) -> Result<Self, Self::Error> {
+        let consensus_public_key = if config.consensus_public_key.is_empty() {
+            "".into()
+        } else {
+            PublicKey::try_from(&config.consensus_public_key[..])
+                .unwrap()
+                .to_encoded_string()
+                .unwrap()
+        };
+        Ok(ValidatorConfigSummary {
+            consensus_public_key,
+            // TODO: We should handle if some of these are not parsable
+            validator_network_addresses: config.validator_network_addresses()?,
+            fullnode_network_addresses: config.fullnode_network_addresses()?,
+            validator_index: config.validator_index,
+        })
+    }
+}
+
+impl From<&ValidatorConfigSummary> for ValidatorConfig {
+    fn from(summary: &ValidatorConfigSummary) -> Self {
+        let consensus_public_key = if summary.consensus_public_key.is_empty() {
+            vec![]
+        } else {
+            summary.consensus_public_key.as_bytes().to_vec()
+        };
+        ValidatorConfig {
+            consensus_public_key,
+            validator_network_addresses: bcs::to_bytes(&summary.validator_network_addresses)
+                .unwrap(),
+            fullnode_network_addresses: bcs::to_bytes(&summary.fullnode_network_addresses)
+                .unwrap(),
+            validator_index: summary.validator_index,
+        }
+    }
+}
+
+const MAX_WAIT_S: u64 = 30;
+const WAIT_INTERVAL_MS: u64 = 100;
+const TESTNET_FOLDER: &str = "testnet";
+
+/// Run local testnet
+///
+/// This local testnet will run its own genesis and run as a single node
+/// network locally. Optionally, a faucet can be added for minting APT coins.
+#[derive(Parser)]
+pub struct RunLocalTestnet {
+    /// An overridable config template for the test node
+    ///
+    /// If provided, the config will be used, and any needed configuration for the local testnet
+    /// will override the config's values
+    #[clap(long, parse(from_os_str))]
+    config_path: Option<PathBuf>,
+
+    /// The directory to save all files for the node
+    ///
+    /// Defaults to .aptos/testnet
+    #[clap(long, parse(from_os_str))]
+    test_dir: Option<PathBuf>,
+
+    /// Random seed for key generation in test mode
+    ///
+    /// This allows you to have deterministic keys for testing
+    #[clap(long, parse(try_from_str = FromHex::from_hex))]
+    seed: Option<[u8; 32]>,
+
+    /// Clean the state and start with a new chain at genesis
+    ///
+    /// This will wipe the aptosdb in `test-dir` to remove any incompatible changes, and start
+    /// the chain fresh. Note that you will need to publish the module again and distribute funds
+    /// from the faucet accordingly
+    #[clap(long)]
+    force_restart: bool,
+
+    /// Run a faucet alongside the node
+    ///
+    /// Allows you to run a faucet alongside the node to create and fund accounts for testing
+    #[clap(long)]
+    with_faucet: bool,
+
+    /// Port to run the faucet on
+    ///
+    /// When running, you'll be able to use the faucet at `http://localhost:<port>/mint` e.g.
+    /// `http://localhost:8080/mint`
+    #[clap(long, default_value = "8081")]
+    faucet_port: u16,
+
+    /// Disable the delegation of faucet minting to a dedicated account
+    #[clap(long)]
+    do_not_delegate: bool,
+
+    #[clap(flatten)]
+    prompt_options: PromptOptions,
+}
+
+#[async_trait]
+impl CliCommand<()> for RunLocalTestnet {
+    fn command_name(&self) -> &'static str {
+        "RunLocalTestnet"
+    }
+
+    async fn execute(mut self) -> CliTypedResult<()> {
+        let rng = self
+            .seed
+            .map(StdRng::from_seed)
+            .unwrap_or_else(StdRng::from_entropy);
+
+        let global_config = GlobalConfig::load()?;
+        let test_dir = match self.test_dir {
+            Some(test_dir) => test_dir,
+            None => global_config
+                .get_config_location(ConfigSearchMode::CurrentDirAndParents)?
+ .join(TESTNET_FOLDER), + }; + + // Remove the current test directory and start with a new node + if self.force_restart && test_dir.exists() { + prompt_yes_with_override( + "Are you sure you want to delete the existing chain?", + self.prompt_options, + )?; + std::fs::remove_dir_all(test_dir.as_path()).map_err(|err| { + CliError::IO(format!("Failed to delete {}", test_dir.display()), err) + })?; + } + + // Spawn the node in a separate thread + let config_path = self.config_path.clone(); + let test_dir_copy = test_dir.clone(); + let node_thread_handle = thread::spawn(move || { + let result = aptos_node::setup_test_environment_and_start_node( + config_path, + Some(test_dir_copy), + false, + false, + aptos_cached_packages::head_release_bundle(), + rng, + ); + eprintln!("Node stopped unexpectedly {:#?}", result); + }); + + // Run faucet if selected + let maybe_faucet_future = if self.with_faucet { + let max_wait = Duration::from_secs(MAX_WAIT_S); + let wait_interval = Duration::from_millis(WAIT_INTERVAL_MS); + + // Load the config to get the rest port + let config_path = test_dir.join("0").join("node.yaml"); + + // We have to wait for the node to be configured above in the other thread + let mut config = None; + let start = Instant::now(); + while start.elapsed() < max_wait { + if let Ok(loaded_config) = NodeConfig::load_from_path(&config_path) { + config = Some(loaded_config); + break; + } + tokio::time::sleep(wait_interval).await; + } + + // Retrieve the port from the local node + let port = if let Some(config) = config { + config.api.address.port() + } else { + return Err(CliError::UnexpectedError( + "Failed to find node configuration to start faucet".to_string(), + )); + }; + + // Check that the REST API is ready + let rest_url = Url::parse(&format!("http://localhost:{}", port)).map_err(|err| { + CliError::UnexpectedError(format!("Failed to parse localhost URL {}", err)) + })?; + let rest_client = aptos_rest_client::Client::new(rest_url.clone()); + let start = Instant::now(); + let mut started_successfully = false; + + while start.elapsed() < max_wait { + if rest_client.get_index().await.is_ok() { + started_successfully = true; + break; + } + tokio::time::sleep(wait_interval).await + } + + if !started_successfully { + return Err(CliError::UnexpectedError(format!( + "Local node at {} did not start up before faucet", + rest_url + ))); + } + + // Build the config for the faucet service. + let faucet_config = RunConfig::build_for_cli( + rest_url, + self.faucet_port, + FunderKeyEnum::KeyFile(test_dir.join("mint.key")), + self.do_not_delegate, + None, + ); + + // Start the faucet + Some(faucet_config.run().map(|result| { + eprintln!("Faucet stopped unexpectedly {:#?}", result); + })) + } else { + None + }; + + // Collect futures that should never end. + let mut futures: Vec + Send>>> = Vec::new(); + + // This future just waits for the node thread. + let node_future = async move { + loop { + if node_thread_handle.is_finished() { + return; + } + tokio::time::sleep(Duration::from_millis(500)).await; + } + }; + + // Wait for all the futures. We should never get past this point unless + // something goes wrong or the user signals for the process to end. 
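+        // Note: `select_all` resolves as soon as the first future in the list completes. Both
+        // the node-watcher future and the faucet future are expected to run indefinitely, so
+        // reaching the error below means one of the components has exited.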
+ futures.push(Box::pin(node_future)); + if let Some(faucet_future) = maybe_faucet_future { + futures.push(Box::pin(faucet_future)); + } + futures::future::select_all(futures).await; + + Err(CliError::UnexpectedError( + "One of the components stopped unexpectedly".to_string(), + )) + } +} + +/// Update consensus key for the validator node +/// +/// This will take effect in the next epoch +#[derive(Parser)] +pub struct UpdateConsensusKey { + #[clap(flatten)] + pub(crate) txn_options: TransactionOptions, + #[clap(flatten)] + pub(crate) operator_args: OperatorArgs, + #[clap(flatten)] + pub(crate) operator_config_file_args: OperatorConfigFileArgs, + #[clap(flatten)] + pub(crate) validator_consensus_key_args: ValidatorConsensusKeyArgs, +} + +#[async_trait] +impl CliCommand for UpdateConsensusKey { + fn command_name(&self) -> &'static str { + "UpdateConsensusKey" + } + + async fn execute(mut self) -> CliTypedResult { + let address = self + .operator_args + .address_fallback_to_txn(&self.txn_options)?; + + let operator_config = self.operator_config_file_args.load()?; + let consensus_public_key = self + .validator_consensus_key_args + .get_consensus_public_key(&operator_config)?; + let consensus_proof_of_possession = self + .validator_consensus_key_args + .get_consensus_proof_of_possession(&operator_config)?; + self.txn_options + .submit_transaction(aptos_stdlib::stake_rotate_consensus_key( + address, + consensus_public_key.to_bytes().to_vec(), + consensus_proof_of_possession.to_bytes().to_vec(), + )) + .await + .map(|inner| inner.into()) + } +} + +/// Update the current validator's network and fullnode network addresses +/// +/// This will take effect in the next epoch +#[derive(Parser)] +pub struct UpdateValidatorNetworkAddresses { + #[clap(flatten)] + pub(crate) txn_options: TransactionOptions, + #[clap(flatten)] + pub(crate) operator_args: OperatorArgs, + #[clap(flatten)] + pub(crate) operator_config_file_args: OperatorConfigFileArgs, + #[clap(flatten)] + pub(crate) validator_network_addresses_args: ValidatorNetworkAddressesArgs, +} + +#[async_trait] +impl CliCommand for UpdateValidatorNetworkAddresses { + fn command_name(&self) -> &'static str { + "UpdateValidatorNetworkAddresses" + } + + async fn execute(mut self) -> CliTypedResult { + let address = self + .operator_args + .address_fallback_to_txn(&self.txn_options)?; + + let validator_config = self.operator_config_file_args.load()?; + let ( + validator_network_public_key, + full_node_network_public_key, + validator_host, + full_node_host, + ) = self + .validator_network_addresses_args + .get_network_configs(&validator_config)?; + let validator_network_addresses = + vec![validator_host.as_network_address(validator_network_public_key)?]; + let full_node_network_addresses = + match (full_node_host.as_ref(), full_node_network_public_key) { + (Some(host), Some(public_key)) => vec![host.as_network_address(public_key)?], + (None, None) => vec![], + _ => { + return Err(CliError::CommandArgumentError( + "If specifying fullnode addresses, both host and public key are required." 
+                            .to_string(),
+                    ))
+                },
+            };
+
+        self.txn_options
+            .submit_transaction(aptos_stdlib::stake_update_network_and_fullnode_addresses(
+                address,
+                // BCS encode, so that we can hide the original type
+                bcs::to_bytes(&validator_network_addresses)?,
+                bcs::to_bytes(&full_node_network_addresses)?,
+            ))
+            .await
+            .map(|inner| inner.into())
+    }
+}
+
+/// Analyze the performance of one or more validators
+#[derive(Parser)]
+pub struct AnalyzeValidatorPerformance {
+    /// First epoch to analyze
+    ///
+    /// Defaults to the first epoch
+    #[clap(long, default_value = "-2")]
+    pub start_epoch: i64,
+
+    /// Last epoch to analyze
+    ///
+    /// Defaults to the latest epoch
+    #[clap(long)]
+    pub end_epoch: Option<i64>,
+
+    /// Analyze mode for the validator: [All, DetailedEpochTable, ValidatorHealthOverTime, NetworkHealthOverTime]
+    #[clap(arg_enum, long)]
+    pub(crate) analyze_mode: AnalyzeMode,
+
+    /// Filter of stake pool addresses to analyze
+    ///
+    /// Defaults to all stake pool addresses
+    #[clap(long, multiple_values = true, parse(try_from_str=crate::common::types::load_account_arg))]
+    pub pool_addresses: Vec<AccountAddress>,
+
+    #[clap(flatten)]
+    pub(crate) rest_options: RestOptions,
+    #[clap(flatten)]
+    pub(crate) profile_options: ProfileOptions,
+}
+
+#[derive(PartialEq, Eq, clap::ArgEnum, Clone)]
+pub enum AnalyzeMode {
+    /// Print all other modes simultaneously
+    All,
+    /// For each epoch, print a detailed table containing performance
+    /// of each of the validators.
+    DetailedEpochTable,
+    /// For each validator, summarize its performance in an epoch into
+    /// one of the predefined reliability buckets,
+    /// and print its performance across epochs.
+    ValidatorHealthOverTime,
+    /// For each epoch, summarize how many validators were in
+    /// each of the reliability buckets.
+ NetworkHealthOverTime, +} + +#[async_trait] +impl CliCommand<()> for AnalyzeValidatorPerformance { + fn command_name(&self) -> &'static str { + "AnalyzeValidatorPerformance" + } + + async fn execute(mut self) -> CliTypedResult<()> { + let client = self.rest_options.client(&self.profile_options)?; + + let epochs = + FetchMetadata::fetch_new_block_events(&client, Some(self.start_epoch), self.end_epoch) + .await?; + let mut stats = HashMap::new(); + + let print_detailed = self.analyze_mode == AnalyzeMode::DetailedEpochTable + || self.analyze_mode == AnalyzeMode::All; + for epoch_info in epochs { + let mut epoch_stats = + AnalyzeValidators::analyze(&epoch_info.blocks, &epoch_info.validators); + if !self.pool_addresses.is_empty() { + let mut filtered_stats: HashMap = HashMap::new(); + for pool_address in &self.pool_addresses { + filtered_stats.insert( + *pool_address, + *epoch_stats.validator_stats.get(pool_address).unwrap(), + ); + } + epoch_stats.validator_stats = filtered_stats; + } + if print_detailed { + println!( + "Detailed table for {}epoch {}:", + if epoch_info.partial { "partial " } else { "" }, + epoch_info.epoch + ); + AnalyzeValidators::print_detailed_epoch_table( + &epoch_stats, + Some(( + "voting_power", + &epoch_info + .validators + .iter() + .map(|v| (v.address, v.voting_power.to_string())) + .collect::>(), + )), + true, + ); + } + if !epoch_info.partial { + stats.insert(epoch_info.epoch, epoch_stats); + } + } + + if stats.is_empty() { + println!("No data found for given input"); + return Ok(()); + } + let total_stats = stats.values().cloned().reduce(|a, b| a + b).unwrap(); + if print_detailed { + println!( + "Detailed table for all epochs [{}, {}]:", + stats.keys().min().unwrap(), + stats.keys().max().unwrap() + ); + AnalyzeValidators::print_detailed_epoch_table(&total_stats, None, true); + } + let all_validators: Vec<_> = total_stats.validator_stats.keys().cloned().collect(); + if self.analyze_mode == AnalyzeMode::ValidatorHealthOverTime + || self.analyze_mode == AnalyzeMode::All + { + println!( + "Validator health over epochs [{}, {}]:", + stats.keys().min().unwrap(), + stats.keys().max().unwrap() + ); + AnalyzeValidators::print_validator_health_over_time(&stats, &all_validators, None); + } + if self.analyze_mode == AnalyzeMode::NetworkHealthOverTime + || self.analyze_mode == AnalyzeMode::All + { + println!( + "Network health over epochs [{}, {}]:", + stats.keys().min().unwrap(), + stats.keys().max().unwrap() + ); + AnalyzeValidators::print_network_health_over_time(&stats, &all_validators); + } + Ok(()) + } +} + +/// Bootstrap AptosDB from a backup +/// +/// Enables users to load from a backup to catch their node's DB up to a known state. +#[derive(Parser)] +pub struct BootstrapDbFromBackup { + /// Config file for the source backup + /// + /// This file configures if we should use local files or cloud storage, and how to access + /// the backup. + #[clap(long, parse(from_os_str))] + config_path: PathBuf, + + /// Target database directory + /// + /// The directory to create the AptosDB with snapshots and transactions from the backup. + /// The data folder can later be used to start an Aptos node. e.g. 
/opt/aptos/data/db + #[clap(long = "target-db-dir", parse(from_os_str))] + pub db_dir: PathBuf, + + #[clap(flatten)] + pub metadata_cache_opt: MetadataCacheOpt, + + #[clap(flatten)] + pub concurrent_downloads: ConcurrentDownloadsOpt, + + #[clap(flatten)] + pub replay_concurrency_level: ReplayConcurrencyLevelOpt, +} + +#[async_trait] +impl CliCommand<()> for BootstrapDbFromBackup { + fn command_name(&self) -> &'static str { + "BootstrapDbFromBackup" + } + + async fn execute(self) -> CliTypedResult<()> { + let opt = RestoreCoordinatorOpt { + metadata_cache_opt: self.metadata_cache_opt, + replay_all: false, + ledger_history_start_version: None, + skip_epoch_endings: false, + }; + let global_opt = GlobalRestoreOpt { + dry_run: false, + db_dir: Some(self.db_dir), + target_version: None, + trusted_waypoints: Default::default(), + rocksdb_opt: RocksdbOpt::default(), + concurrent_downloads: self.concurrent_downloads, + replay_concurrency_level: self.replay_concurrency_level, + } + .try_into()?; + let storage = Arc::new(CommandAdapter::new( + CommandAdapterConfig::load_from_file(&self.config_path).await?, + )); + + // hack: get around this error, related to use of `async_trait`: + // error: higher-ranked lifetime error + // ... + // = note: could not prove for<'r, 's> Pin>>>: CoerceUnsized> + std::marker::Send + 's)>>> + tokio::task::spawn_blocking(|| { + let runtime = tokio::runtime::Runtime::new().unwrap(); + runtime.block_on(RestoreCoordinator::new(opt, global_opt, storage).run()) + }) + .await + .unwrap()?; + Ok(()) + } +} + +/// Checks the network connectivity of a node +/// +/// Checks network connectivity by dialing the node and attempting +/// to establish a connection with a noise handshake. +#[derive(Parser)] +pub struct CheckNetworkConnectivity { + /// `NetworkAddress` of remote server interface. + /// Examples include: + /// - `/dns/example.com/tcp/6180/noise-ik//handshake/1` + /// - `/ip4//tcp/6182/noise-ik//handshake/0` + #[clap(long, value_parser = validate_address)] + pub address: NetworkAddress, + + /// `ChainId` of remote server. + /// Examples include: + /// - Chain numbers, e.g., `2`, `3` and `25`. + /// - Chain names, e.g., `devnet`, `testnet`, `mainnet` and `testing` (for local test networks). 
+ #[clap(long)] + pub chain_id: ChainId, + + #[clap(flatten)] + pub handshake_args: HandshakeArgs, +} + +#[async_trait] +impl CliCommand for CheckNetworkConnectivity { + fn command_name(&self) -> &'static str { + "CheckNetworkConnectivity" + } + + async fn execute(self) -> CliTypedResult { + // Create the check endpoint args for the checker + let node_address_args = NodeAddressArgs { + address: self.address, + chain_id: self.chain_id, + }; + let check_endpoint_args = CheckEndpointArgs { + node_address_args, + handshake_args: self.handshake_args, + }; + + // Check the endpoint + aptos_network_checker::check_endpoint(&check_endpoint_args, None) + .await + .map_err(|error| CliError::UnexpectedError(error.to_string())) + } +} + +/// Show epoch information +/// +/// Displays the current epoch, the epoch length, and the estimated time of the next epoch +#[derive(Parser)] +pub struct ShowEpochInfo { + #[clap(flatten)] + pub(crate) profile_options: ProfileOptions, + #[clap(flatten)] + pub(crate) rest_options: RestOptions, +} + +#[async_trait] +impl CliCommand for ShowEpochInfo { + fn command_name(&self) -> &'static str { + "ShowEpochInfo" + } + + async fn execute(self) -> CliTypedResult { + let client = &self.rest_options.client(&self.profile_options)?; + get_epoch_info(client).await + } +} + +async fn get_epoch_info(client: &Client) -> CliTypedResult { + let (block_resource, state): (BlockResource, State) = client + .get_account_resource_bcs(CORE_CODE_ADDRESS, "0x1::block::BlockResource") + .await? + .into_parts(); + let reconfig_resource: ConfigurationResource = client + .get_account_resource_at_version_bcs( + CORE_CODE_ADDRESS, + "0x1::reconfiguration::Configuration", + state.version, + ) + .await? + .into_inner(); + + let epoch_interval = block_resource.epoch_interval(); + let epoch_interval_secs = epoch_interval / SECS_TO_MICROSECS; + let last_reconfig = reconfig_resource.last_reconfiguration_time(); + Ok(EpochInfo { + epoch: reconfig_resource.epoch(), + epoch_interval_secs, + current_epoch_start_time: Time::new_micros(last_reconfig), + next_epoch_start_time: Time::new_micros(last_reconfig + epoch_interval), + }) +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct EpochInfo { + epoch: u64, + epoch_interval_secs: u64, + current_epoch_start_time: Time, + next_epoch_start_time: Time, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct Time { + unix_time: u128, + utc_time: DateTime, +} + +impl Time { + pub fn new(time: Duration) -> Self { + let date_time = + NaiveDateTime::from_timestamp_opt(time.as_secs() as i64, time.subsec_nanos()).unwrap(); + let utc_time = DateTime::from_utc(date_time, Utc); + // TODO: Allow configurable time zone + Self { + unix_time: time.as_micros(), + utc_time, + } + } + + pub fn new_micros(microseconds: u64) -> Self { + Self::new(Duration::from_micros(microseconds)) + } + + pub fn new_seconds(seconds: u64) -> Self { + Self::new(Duration::from_secs(seconds)) + } +} + +#[cfg(test)] +mod tests { + use crate::{CliResult, Tool}; + use clap::Parser; + + // TODO: there have to be cleaner ways to test things. Maybe a CLI test framework? 
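+    // Note: these tests exercise only argument parsing and error handling; the final case relies
+    // on a connection timeout against an unreachable address rather than a live peer.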
+ + #[tokio::test] + // Verifies basic properties about the network connectivity checker + async fn test_check_network_connectivity() { + // Verify the help function works + let args = &["movement", "node", "check-network-connectivity", "--help"]; + let help_message = run_tool_with_args(args).await.unwrap_err(); + assert_contains(help_message, "USAGE:"); // We expect the command to return USAGE info + + // Verify that an invalid address will return an error + let args = &[ + "movement", + "node", + "check-network-connectivity", + "--address", + "invalid-address", + "--chain-id", + "mainnet", + ]; + let error_message = run_tool_with_args(args).await.unwrap_err(); + assert_contains(error_message, "Invalid address"); + + // Verify that an invalid chain-id will return an error + let args = &["movement", "node", "check-network-connectivity", "--address", "/ip4/34.70.116.169/tcp/6182/noise-ik/0x249f3301db104705652e0a0c471b46d13172b2baf14e31f007413f3baee46b0c/handshake/0", "--chain-id", "invalid-chain"]; + let error_message = run_tool_with_args(args).await.unwrap_err(); + assert_contains(error_message, "Invalid value"); + + // Verify that a failure to connect will return a timeout + let args = &["movement", "node", "check-network-connectivity", "--address", "/ip4/31.71.116.169/tcp/0001/noise-ik/0x249f3301db104705652e0a0c471b46d13172b2baf14e31f007413f3baee46b0c/handshake/0", "--chain-id", "testnet"]; + let error_message = run_tool_with_args(args).await.unwrap_err(); + assert_contains(error_message, "Timed out while checking endpoint"); + } + + async fn run_tool_with_args(args: &[&str]) -> CliResult { + let tool: Tool = Tool::try_parse_from(args).map_err(|msg| msg.to_string())?; + tool.execute().await + } + + fn assert_contains(message: String, expected_string: &str) { + if !message.contains(expected_string) { + panic!( + "Expected message to contain {:?}, but it did not! Message: {:?}", + expected_string, message + ); + } + } +} diff --git a/m1/movement/src/op/key.rs b/m1/movement/src/op/key.rs new file mode 100644 index 00000000..aac2d31f --- /dev/null +++ b/m1/movement/src/op/key.rs @@ -0,0 +1,396 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use crate::{ + common::{ + types::{ + account_address_from_public_key, CliError, CliTypedResult, EncodingOptions, + EncodingType, KeyType, RngArgs, SaveFile, + }, + utils::{ + append_file_extension, check_if_file_exists, generate_vanity_account_ed25519, + write_to_file, + }, + }, + CliCommand, CliResult, +}; +use aptos_config::config::{Peer, PeerRole}; +use aptos_crypto::{bls12381, ed25519, x25519, PrivateKey, ValidCryptoMaterial}; +use aptos_genesis::config::HostAndPort; +use aptos_types::account_address::{ + create_multisig_account_address, from_identity_public_key, AccountAddress, +}; +use async_trait::async_trait; +use clap::{Parser, Subcommand}; +use std::{ + collections::{HashMap, HashSet}, + path::{Path, PathBuf}, +}; + +pub const PUBLIC_KEY_EXTENSION: &str = "pub"; + +/// Tool for generating, inspecting, and interacting with keys +/// +/// This tool allows users to generate and extract related information +/// with all key types used on the Aptos blockchain. 
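+///
+/// As an illustrative sketch (assuming this tool is mounted under a `key` subcommand, as in the
+/// upstream Aptos CLI), generating an encoded ed25519 key pair could look like:
+/// `movement key generate --key-type ed25519 --output-file output.key`
+/// which writes the private key to `output.key` and the public key to `output.key.pub`.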
+#[derive(Debug, Subcommand)] +pub enum KeyTool { + Generate(GenerateKey), + ExtractPeer(ExtractPeer), +} + +impl KeyTool { + pub async fn execute(self) -> CliResult { + match self { + KeyTool::Generate(tool) => tool.execute_serialized().await, + KeyTool::ExtractPeer(tool) => tool.execute_serialized().await, + } + } +} + +/// Extract full peer information for an upstream peer +/// +/// This command builds a YAML blob that can be copied into a user's network configuration. +/// A host is required to build the network address used for the connection, and the +/// network key is required to identify the peer. +/// +/// A `private-network-key` or `public-network-key` can be given encoded on the command line, or +/// a `private-network-key-file` or a `public-network-key-file` can be given to read from. +/// The `output-file` will be a YAML serialized peer information for use in network config. +#[derive(Debug, Parser)] +pub struct ExtractPeer { + /// Host and port of the full node + /// + /// e.g. 127.0.0.1:6180 or my-awesome-dns.com:6180 + #[clap(long)] + pub(crate) host: HostAndPort, + + #[clap(flatten)] + pub(crate) network_key_input_options: NetworkKeyInputOptions, + #[clap(flatten)] + pub(crate) output_file_options: SaveFile, + #[clap(flatten)] + pub(crate) encoding_options: EncodingOptions, +} + +#[async_trait] +impl CliCommand> for ExtractPeer { + fn command_name(&self) -> &'static str { + "ExtractPeer" + } + + async fn execute(self) -> CliTypedResult> { + // Load key based on public or private + let public_key = self + .network_key_input_options + .extract_public_network_key(self.encoding_options.encoding)?; + + // Check output file exists + self.output_file_options.check_file()?; + + // Build peer info + let peer_id = from_identity_public_key(public_key); + let mut public_keys = HashSet::new(); + public_keys.insert(public_key); + + let address = self.host.as_network_address(public_key).map_err(|err| { + CliError::UnexpectedError(format!("Failed to build network address: {}", err)) + })?; + + let peer = Peer::new(vec![address], public_keys, PeerRole::Upstream); + + let mut map = HashMap::new(); + map.insert(peer_id, peer); + + // Save to file + let yaml = serde_yaml::to_string(&map) + .map_err(|err| CliError::UnexpectedError(err.to_string()))?; + self.output_file_options + .save_to_file("Extracted peer", yaml.as_bytes())?; + Ok(map) + } +} + +#[derive(Debug, Default, Parser)] +pub struct NetworkKeyInputOptions { + /// x25519 Private key input file name + #[clap(long, group = "network_key_input", parse(from_os_str))] + private_network_key_file: Option, + + /// x25519 Private key encoded in a type as shown in `encoding` + #[clap(long, group = "network_key_input")] + private_network_key: Option, + + /// x25519 Public key input file name + #[clap(long, group = "network_key_input", parse(from_os_str))] + public_network_key_file: Option, + + /// x25519 Public key encoded in a type as shown in `encoding` + #[clap(long, group = "network_key_input")] + public_network_key: Option, +} + +impl NetworkKeyInputOptions { + pub fn from_private_key_file(file: PathBuf) -> Self { + Self { + private_network_key_file: Some(file), + private_network_key: None, + public_network_key_file: None, + public_network_key: None, + } + } + + pub fn extract_public_network_key( + self, + encoding: EncodingType, + ) -> CliTypedResult { + // The grouping above prevents there from being more than one, but just in case + match (self.public_network_key, self.public_network_key_file, self.private_network_key, 
self.private_network_key_file){ + (Some(public_network_key), None, None, None) => encoding.decode_key("--public-network-key", public_network_key.as_bytes().to_vec()), + (None, Some(public_network_key_file),None, None) => encoding.load_key("--public-network-key-file", public_network_key_file.as_path()), + (None, None, Some(private_network_key), None) => { + let private_network_key: x25519::PrivateKey = encoding.decode_key("--private-network-key", private_network_key.as_bytes().to_vec())?; + Ok(private_network_key.public_key()) + }, + (None, None, None, Some(private_network_key_file)) => { + let private_network_key: x25519::PrivateKey = encoding.load_key("--private-network-key-file", private_network_key_file.as_path())?; + Ok(private_network_key.public_key()) + }, + _ => Err(CliError::CommandArgumentError("Must provide exactly one of [--public-network-key, --public-network-key-file, --private-network-key, --private-network-key-file]".to_string())) + } + } +} + +/// Generates a `x25519` or `ed25519` key. +/// +/// This can be used for generating an identity. Two files will be created +/// `output_file` and `output_file.pub`. `output_file` will contain the private +/// key encoded with the `encoding` and `output_file.pub` will contain the public +/// key encoded with the `encoding`. +#[derive(Debug, Parser)] +pub struct GenerateKey { + /// Key type to generate. Must be one of [x25519, ed25519, bls12381] + #[clap(long, default_value_t = KeyType::Ed25519)] + pub(crate) key_type: KeyType, + /// Vanity prefix that resultant account address should start with, e.g. 0xaceface or d00d. Each + /// additional character multiplies by a factor of 16 the computational difficulty associated + /// with generating an address, so try out shorter prefixes first and be prepared to wait for + /// longer ones + #[clap(long)] + pub vanity_prefix: Option, + /// Use this flag when vanity prefix is for a multisig account. This mines a private key for + /// a single signer account that can, as its first transaction, create a multisig account with + /// the given vanity prefix + #[clap(long)] + pub vanity_multisig: bool, + #[clap(flatten)] + pub rng_args: RngArgs, + #[clap(flatten)] + pub(crate) save_params: SaveKey, +} + +#[async_trait] +impl CliCommand> for GenerateKey { + fn command_name(&self) -> &'static str { + "GenerateKey" + } + + async fn execute(self) -> CliTypedResult> { + if self.vanity_prefix.is_some() && !matches!(self.key_type, KeyType::Ed25519) { + return Err(CliError::CommandArgumentError(format!( + "Vanity prefixes are only accepted for {} keys", + KeyType::Ed25519 + ))); + } + if self.vanity_multisig && self.vanity_prefix.is_none() { + return Err(CliError::CommandArgumentError( + "No vanity prefix provided".to_string(), + )); + } + self.save_params.check_key_file()?; + let mut keygen = self.rng_args.key_generator()?; + match self.key_type { + KeyType::X25519 => { + let private_key = keygen.generate_x25519_private_key().map_err(|err| { + CliError::UnexpectedError(format!( + "Failed to convert ed25519 to x25519 {:?}", + err + )) + })?; + self.save_params.save_key(&private_key, "x25519") + }, + KeyType::Ed25519 => { + // If no vanity prefix specified, generate a standard Ed25519 private key. + let private_key = if self.vanity_prefix.is_none() { + keygen.generate_ed25519_private_key() + } else { + // If a vanity prefix is specified, generate vanity Ed25519 account from it. + generate_vanity_account_ed25519( + self.vanity_prefix.clone().unwrap().as_str(), + self.vanity_multisig, + )? 
+                };
+                // Store CLI result from key save operation, to append vanity address(es) if needed.
+                let mut result_map = self.save_params.save_key(&private_key, "ed25519").unwrap();
+                if self.vanity_prefix.is_some() {
+                    let account_address = account_address_from_public_key(
+                        &ed25519::Ed25519PublicKey::from(&private_key),
+                    );
+                    // Store account address in a PathBuf so it can be displayed in CLI result.
+                    result_map.insert(
+                        "Account Address:",
+                        PathBuf::from(account_address.to_hex_literal()),
+                    );
+                    if self.vanity_multisig {
+                        let multisig_account_address =
+                            create_multisig_account_address(account_address, 0);
+                        result_map.insert(
+                            "Multisig Account Address:",
+                            PathBuf::from(multisig_account_address.to_hex_literal()),
+                        );
+                    }
+                }
+                return Ok(result_map);
+            },
+            KeyType::Bls12381 => {
+                let private_key = keygen.generate_bls12381_private_key();
+                self.save_params.save_bls_key(&private_key, "bls12381")
+            },
+        }
+    }
+}
+
+impl GenerateKey {
+    /// A test friendly typed key generation for x25519 keys.
+    pub async fn generate_x25519(
+        encoding: EncodingType,
+        key_file: &Path,
+    ) -> CliTypedResult<(x25519::PrivateKey, x25519::PublicKey)> {
+        let args = format!(
+            "generate --key-type {key_type:?} --output-file {key_file} --encoding {encoding:?} --assume-yes",
+            key_type = KeyType::X25519,
+            key_file = key_file.display(),
+            encoding = encoding,
+        );
+        let command = GenerateKey::parse_from(args.split_whitespace());
+        command.execute().await?;
+        Ok((
+            encoding.load_key("private_key", key_file)?,
+            encoding.load_key(
+                "public_key",
+                &append_file_extension(key_file, PUBLIC_KEY_EXTENSION)?,
+            )?,
+        ))
+    }
+
+    /// A test friendly typed key generation for ed25519 keys.
+    pub async fn generate_ed25519(
+        encoding: EncodingType,
+        key_file: &Path,
+    ) -> CliTypedResult<(ed25519::Ed25519PrivateKey, ed25519::Ed25519PublicKey)> {
+        let args = format!(
+            "generate --key-type {key_type:?} --output-file {key_file} --encoding {encoding:?} --assume-yes",
+            key_type = KeyType::Ed25519,
+            key_file = key_file.display(),
+            encoding = encoding,
+        );
+        let command = GenerateKey::parse_from(args.split_whitespace());
+        command.execute().await?;
+        Ok((
+            encoding.load_key("private_key", key_file)?,
+            encoding.load_key(
+                "public_key",
+                &append_file_extension(key_file, PUBLIC_KEY_EXTENSION)?,
+            )?,
+        ))
+    }
+}
+
+#[derive(Debug, Parser)]
+pub struct SaveKey {
+    #[clap(flatten)]
+    pub(crate) file_options: SaveFile,
+    #[clap(flatten)]
+    pub(crate) encoding_options: EncodingOptions,
+}
+
+impl SaveKey {
+    /// Public key file name
+    fn public_key_file(&self) -> CliTypedResult<PathBuf> {
+        append_file_extension(
+            self.file_options.output_file.as_path(),
+            PUBLIC_KEY_EXTENSION,
+        )
+    }
+
+    /// Proof of possession file name
+    fn proof_of_possession_file(&self) -> CliTypedResult<PathBuf> {
+        append_file_extension(self.file_options.output_file.as_path(), "pop")
+    }
+
+    /// Check if the key file exists already
+    pub fn check_key_file(&self) -> CliTypedResult<()> {
+        // Check if file already exists
+        self.file_options.check_file()?;
+        check_if_file_exists(&self.public_key_file()?, self.file_options.prompt_options)
+    }
+
+    /// Saves a key to a file encoded in a string
+    pub fn save_key<Key: PrivateKey + ValidCryptoMaterial>(
+        self,
+        key: &Key,
+        key_name: &'static str,
+    ) -> CliTypedResult<HashMap<&'static str, PathBuf>> {
+        let encoded_private_key = self.encoding_options.encoding.encode_key(key_name, key)?;
+        let encoded_public_key = self
+            .encoding_options
+            .encoding
+            .encode_key(key_name, &key.public_key())?;
+
+        // Write private and public keys to files
+        let public_key_file = self.public_key_file()?;
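+        // Note: `save_to_file_confidential` is used for the private key (intended to restrict
+        // file permissions), while the public key is written as an ordinary file alongside it.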
self.file_options + .save_to_file_confidential(key_name, &encoded_private_key)?; + write_to_file(&public_key_file, key_name, &encoded_public_key)?; + + let mut map = HashMap::new(); + map.insert("PrivateKey Path", self.file_options.output_file); + map.insert("PublicKey Path", public_key_file); + Ok(map) + } + + /// Saves a key to a file encoded in a string + pub fn save_bls_key( + self, + key: &bls12381::PrivateKey, + key_name: &'static str, + ) -> CliTypedResult> { + let encoded_private_key = self.encoding_options.encoding.encode_key(key_name, key)?; + let encoded_public_key = self + .encoding_options + .encoding + .encode_key(key_name, &key.public_key())?; + let encoded_proof_of_posession = self + .encoding_options + .encoding + .encode_key(key_name, &bls12381::ProofOfPossession::create(key))?; + + // Write private and public keys to files + let public_key_file = self.public_key_file()?; + let proof_of_possession_file = self.proof_of_possession_file()?; + self.file_options + .save_to_file_confidential(key_name, &encoded_private_key)?; + write_to_file(&public_key_file, key_name, &encoded_public_key)?; + write_to_file( + &proof_of_possession_file, + key_name, + &encoded_proof_of_posession, + )?; + + let mut map = HashMap::new(); + map.insert("PrivateKey Path", self.file_options.output_file); + map.insert("PublicKey Path", public_key_file); + map.insert("Proof of possession Path", proof_of_possession_file); + Ok(map) + } +} diff --git a/m1/movement/src/op/mod.rs b/m1/movement/src/op/mod.rs new file mode 100644 index 00000000..989a2bb8 --- /dev/null +++ b/m1/movement/src/op/mod.rs @@ -0,0 +1,4 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +pub mod key; diff --git a/m1/movement/src/stake/mod.rs b/m1/movement/src/stake/mod.rs new file mode 100644 index 00000000..45168a70 --- /dev/null +++ b/m1/movement/src/stake/mod.rs @@ -0,0 +1,668 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use crate::{ + common::{ + types::{ + CliCommand, CliError, CliResult, CliTypedResult, TransactionOptions, TransactionSummary, + }, + utils::prompt_yes_with_override, + }, + node::{get_stake_pools, StakePoolType}, +}; +use aptos_cached_packages::aptos_stdlib; +use aptos_types::{ + account_address::{ + create_vesting_contract_address, default_stake_pool_address, AccountAddress, + }, + vesting::VestingAdminStore, +}; +use async_trait::async_trait; +use clap::Parser; + +/// Tool for manipulating stake and stake pools +/// +#[derive(Parser)] +pub enum StakeTool { + AddStake(AddStake), + CreateStakingContract(CreateStakingContract), + DistributeVestedCoins(DistributeVestedCoins), + IncreaseLockup(IncreaseLockup), + InitializeStakeOwner(InitializeStakeOwner), + RequestCommission(RequestCommission), + SetDelegatedVoter(SetDelegatedVoter), + SetOperator(SetOperator), + UnlockStake(UnlockStake), + UnlockVestedCoins(UnlockVestedCoins), + WithdrawStake(WithdrawStake), +} + +impl StakeTool { + pub async fn execute(self) -> CliResult { + use StakeTool::*; + match self { + AddStake(tool) => tool.execute_serialized().await, + CreateStakingContract(tool) => tool.execute_serialized().await, + DistributeVestedCoins(tool) => tool.execute_serialized().await, + IncreaseLockup(tool) => tool.execute_serialized().await, + InitializeStakeOwner(tool) => tool.execute_serialized().await, + RequestCommission(tool) => tool.execute_serialized().await, + SetDelegatedVoter(tool) => tool.execute_serialized().await, + SetOperator(tool) => tool.execute_serialized().await, + 
UnlockStake(tool) => tool.execute_serialized().await, + UnlockVestedCoins(tool) => tool.execute_serialized().await, + WithdrawStake(tool) => tool.execute_serialized().await, + } + } +} + +/// Add APT to a stake pool +/// +/// This command allows stake pool owners to add APT to their stake. +#[derive(Parser)] +pub struct AddStake { + /// Amount of Octas (10^-8 APT) to add to stake + #[clap(long)] + pub amount: u64, + + #[clap(flatten)] + pub(crate) txn_options: TransactionOptions, +} + +#[async_trait] +impl CliCommand> for AddStake { + fn command_name(&self) -> &'static str { + "AddStake" + } + + async fn execute(mut self) -> CliTypedResult> { + let client = self + .txn_options + .rest_options + .client(&self.txn_options.profile_options)?; + let amount = self.amount; + let owner_address = self.txn_options.sender_address()?; + let mut transaction_summaries: Vec = vec![]; + + let stake_pool_results = get_stake_pools(&client, owner_address).await?; + for stake_pool in stake_pool_results { + match stake_pool.pool_type { + StakePoolType::Direct => { + transaction_summaries.push( + self.txn_options + .submit_transaction(aptos_stdlib::stake_add_stake(amount)) + .await + .map(|inner| inner.into())?, + ); + }, + StakePoolType::StakingContract => { + transaction_summaries.push( + self.txn_options + .submit_transaction(aptos_stdlib::staking_contract_add_stake( + stake_pool.operator_address, + amount, + )) + .await + .map(|inner| inner.into())?, + ); + }, + StakePoolType::Vesting => { + return Err(CliError::UnexpectedError( + "Adding stake is not supported for vesting contracts".into(), + )) + }, + } + } + Ok(transaction_summaries) + } +} + +/// Unlock staked APT in a stake pool +/// +/// APT coins can only be unlocked if they no longer have an applied lockup period +#[derive(Parser)] +pub struct UnlockStake { + /// Amount of Octas (10^-8 APT) to unlock + #[clap(long)] + pub amount: u64, + + #[clap(flatten)] + pub(crate) txn_options: TransactionOptions, +} + +#[async_trait] +impl CliCommand> for UnlockStake { + fn command_name(&self) -> &'static str { + "UnlockStake" + } + + async fn execute(mut self) -> CliTypedResult> { + let client = self + .txn_options + .rest_options + .client(&self.txn_options.profile_options)?; + let amount = self.amount; + let owner_address = self.txn_options.sender_address()?; + let mut transaction_summaries: Vec = vec![]; + + let stake_pool_results = get_stake_pools(&client, owner_address).await?; + for stake_pool in stake_pool_results { + match stake_pool.pool_type { + StakePoolType::Direct => { + transaction_summaries.push( + self.txn_options + .submit_transaction(aptos_stdlib::stake_unlock(amount)) + .await + .map(|inner| inner.into())?, + ); + }, + StakePoolType::StakingContract => { + transaction_summaries.push( + self.txn_options + .submit_transaction(aptos_stdlib::staking_contract_unlock_stake( + stake_pool.operator_address, + amount, + )) + .await + .map(|inner| inner.into())?, + ); + }, + StakePoolType::Vesting => { + return Err(CliError::UnexpectedError( + "Unlocking stake is not supported for vesting contracts".into(), + )) + }, + } + } + Ok(transaction_summaries) + } +} + +/// Withdraw unlocked staked APT from a stake pool +/// +/// This allows users to withdraw stake back into their CoinStore. +/// Before calling `WithdrawStake`, `UnlockStake` must be called first. +#[derive(Parser)] +pub struct WithdrawStake { + /// Amount of Octas (10^-8 APT) to withdraw. 
+ /// This only applies to stake pools owned directly by the owner account, instead of via + /// a staking contract. In the latter case, when withdrawal is issued, all coins are distributed + #[clap(long)] + pub amount: u64, + + #[clap(flatten)] + pub(crate) node_op_options: TransactionOptions, +} + +#[async_trait] +impl CliCommand> for WithdrawStake { + fn command_name(&self) -> &'static str { + "WithdrawStake" + } + + async fn execute(mut self) -> CliTypedResult> { + let client = self + .node_op_options + .rest_options + .client(&self.node_op_options.profile_options)?; + let amount = self.amount; + let owner_address = self.node_op_options.sender_address()?; + let mut transaction_summaries: Vec = vec![]; + + let stake_pool_results = get_stake_pools(&client, owner_address).await?; + for stake_pool in stake_pool_results { + match stake_pool.pool_type { + StakePoolType::Direct => { + transaction_summaries.push( + self.node_op_options + .submit_transaction(aptos_stdlib::stake_withdraw(amount)) + .await + .map(|inner| inner.into())?, + ); + }, + StakePoolType::StakingContract => { + transaction_summaries.push( + self.node_op_options + .submit_transaction(aptos_stdlib::staking_contract_distribute( + owner_address, + stake_pool.operator_address, + )) + .await + .map(|inner| inner.into())?, + ); + }, + StakePoolType::Vesting => { + return Err(CliError::UnexpectedError( + "Stake withdrawal from vesting contract should use distribute-vested-coins" + .into(), + )) + }, + } + } + Ok(transaction_summaries) + } +} + +/// Increase lockup of all staked APT in a stake pool +/// +/// Lockup may need to be increased in order to vote on a proposal. +#[derive(Parser)] +pub struct IncreaseLockup { + #[clap(flatten)] + pub(crate) txn_options: TransactionOptions, +} + +#[async_trait] +impl CliCommand> for IncreaseLockup { + fn command_name(&self) -> &'static str { + "IncreaseLockup" + } + + async fn execute(mut self) -> CliTypedResult> { + let client = self + .txn_options + .rest_options + .client(&self.txn_options.profile_options)?; + let owner_address = self.txn_options.sender_address()?; + let mut transaction_summaries: Vec = vec![]; + + let stake_pool_results = get_stake_pools(&client, owner_address).await?; + for stake_pool in stake_pool_results { + match stake_pool.pool_type { + StakePoolType::Direct => { + transaction_summaries.push( + self.txn_options + .submit_transaction(aptos_stdlib::stake_increase_lockup()) + .await + .map(|inner| inner.into())?, + ); + }, + StakePoolType::StakingContract => { + transaction_summaries.push( + self.txn_options + .submit_transaction(aptos_stdlib::staking_contract_reset_lockup( + stake_pool.operator_address, + )) + .await + .map(|inner| inner.into())?, + ); + }, + StakePoolType::Vesting => { + transaction_summaries.push( + self.txn_options + .submit_transaction(aptos_stdlib::vesting_reset_lockup( + stake_pool.vesting_contract.unwrap(), + )) + .await + .map(|inner| inner.into())?, + ); + }, + } + } + Ok(transaction_summaries) + } +} + +/// Initialize a stake pool owner +/// +/// Initializing stake owner adds the capability to delegate the +/// stake pool to an operator, or delegate voting to a different account. 
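+///
+/// As a sketch (assuming the stake tooling is exposed under a `stake` subcommand, as in the
+/// upstream Aptos CLI, and clap's default kebab-case flag names), staking 1 APT as owner could be:
+/// `movement stake initialize-stake-owner --initial-stake-amount 100000000`
+/// Omitting `--operator-address` and `--voter-address` defaults both to the owner account.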
+#[derive(Parser)]
+pub struct InitializeStakeOwner {
+    /// Initial amount of Octas (10^-8 APT) to be staked
+    #[clap(long)]
+    pub initial_stake_amount: u64,
+
+    /// Account Address of delegated operator
+    #[clap(long, parse(try_from_str=crate::common::types::load_account_arg))]
+    pub operator_address: Option<AccountAddress>,
+
+    /// Account address of delegated voter
+    #[clap(long, parse(try_from_str=crate::common::types::load_account_arg))]
+    pub voter_address: Option<AccountAddress>,
+
+    #[clap(flatten)]
+    pub(crate) txn_options: TransactionOptions,
+}
+
+#[async_trait]
+impl CliCommand<TransactionSummary> for InitializeStakeOwner {
+    fn command_name(&self) -> &'static str {
+        "InitializeStakeOwner"
+    }
+
+    async fn execute(mut self) -> CliTypedResult<TransactionSummary> {
+        let owner_address = self.txn_options.sender_address()?;
+        self.txn_options
+            .submit_transaction(aptos_stdlib::stake_initialize_stake_owner(
+                self.initial_stake_amount,
+                self.operator_address.unwrap_or(owner_address),
+                self.voter_address.unwrap_or(owner_address),
+            ))
+            .await
+            .map(|inner| inner.into())
+    }
+}
+
+/// Delegate operator capability to another account
+///
+/// This changes the operator capability from its current operator to a different operator.
+/// By default, the operator of a stake pool is the owner of the stake pool
+#[derive(Parser)]
+pub struct SetOperator {
+    /// Account Address of delegated operator
+    ///
+    /// If not specified, it will be the same as the owner
+    #[clap(long, parse(try_from_str=crate::common::types::load_account_arg))]
+    pub operator_address: AccountAddress,
+
+    #[clap(flatten)]
+    pub(crate) txn_options: TransactionOptions,
+}
+
+#[async_trait]
+impl CliCommand<Vec<TransactionSummary>> for SetOperator {
+    fn command_name(&self) -> &'static str {
+        "SetOperator"
+    }
+
+    async fn execute(mut self) -> CliTypedResult<Vec<TransactionSummary>> {
+        let client = self
+            .txn_options
+            .rest_options
+            .client(&self.txn_options.profile_options)?;
+        let owner_address = self.txn_options.sender_address()?;
+        let new_operator_address = self.operator_address;
+        let mut transaction_summaries: Vec<TransactionSummary> = vec![];
+
+        let stake_pool_results = get_stake_pools(&client, owner_address).await?;
+        for stake_pool in stake_pool_results {
+            match stake_pool.pool_type {
+                StakePoolType::Direct => {
+                    transaction_summaries.push(
+                        self.txn_options
+                            .submit_transaction(aptos_stdlib::stake_set_operator(
+                                new_operator_address,
+                            ))
+                            .await
+                            .map(|inner| inner.into())?,
+                    );
+                },
+                StakePoolType::StakingContract => {
+                    transaction_summaries.push(
+                        self.txn_options
+                            .submit_transaction(
+                                aptos_stdlib::staking_contract_switch_operator_with_same_commission(
+                                    stake_pool.operator_address,
+                                    new_operator_address,
+                                ),
+                            )
+                            .await
+                            .map(|inner| inner.into())?,
+                    );
+                },
+                StakePoolType::Vesting => {
+                    transaction_summaries.push(
+                        self.txn_options
+                            .submit_transaction(
+                                aptos_stdlib::vesting_update_operator_with_same_commission(
+                                    stake_pool.vesting_contract.unwrap(),
+                                    new_operator_address,
+                                ),
+                            )
+                            .await
+                            .map(|inner| inner.into())?,
+                    );
+                },
+            }
+        }
+        Ok(transaction_summaries)
+    }
+}
+
+/// Delegate voting capability to another account
+///
+/// Delegates voting capability from its current voter to a different voter.
+/// By default, the voter of a stake pool is the owner of the stake pool +#[derive(Parser)] +pub struct SetDelegatedVoter { + /// Account Address of delegated voter + /// + /// If not specified, it will be the same as the owner + #[clap(long, parse(try_from_str=crate::common::types::load_account_arg))] + pub voter_address: AccountAddress, + + #[clap(flatten)] + pub(crate) txn_options: TransactionOptions, +} + +#[async_trait] +impl CliCommand> for SetDelegatedVoter { + fn command_name(&self) -> &'static str { + "SetDelegatedVoter" + } + + async fn execute(mut self) -> CliTypedResult> { + let client = self + .txn_options + .rest_options + .client(&self.txn_options.profile_options)?; + let owner_address = self.txn_options.sender_address()?; + let new_voter_address = self.voter_address; + let mut transaction_summaries: Vec = vec![]; + + let stake_pool_results = get_stake_pools(&client, owner_address).await?; + for stake_pool in stake_pool_results { + match stake_pool.pool_type { + StakePoolType::Direct => { + transaction_summaries.push( + self.txn_options + .submit_transaction(aptos_stdlib::stake_set_delegated_voter( + new_voter_address, + )) + .await + .map(|inner| inner.into())?, + ); + }, + StakePoolType::StakingContract => { + transaction_summaries.push( + self.txn_options + .submit_transaction(aptos_stdlib::staking_contract_update_voter( + stake_pool.operator_address, + new_voter_address, + )) + .await + .map(|inner| inner.into())?, + ); + }, + StakePoolType::Vesting => { + transaction_summaries.push( + self.txn_options + .submit_transaction(aptos_stdlib::vesting_update_voter( + stake_pool.vesting_contract.unwrap(), + new_voter_address, + )) + .await + .map(|inner| inner.into())?, + ); + }, + } + } + Ok(transaction_summaries) + } +} + +/// Create a staking contract stake pool +/// +/// +#[derive(Parser)] +pub struct CreateStakingContract { + /// Account Address of operator + #[clap(long, parse(try_from_str=crate::common::types::load_account_arg))] + pub operator: AccountAddress, + + /// Account Address of delegated voter + #[clap(long, parse(try_from_str=crate::common::types::load_account_arg))] + pub voter: AccountAddress, + + /// Amount to create the staking contract with + #[clap(long)] + pub amount: u64, + + /// Percentage of accumulated rewards to pay the operator as commission + #[clap(long)] + pub commission_percentage: u64, + + #[clap(flatten)] + pub(crate) txn_options: TransactionOptions, +} + +#[async_trait] +impl CliCommand for CreateStakingContract { + fn command_name(&self) -> &'static str { + "CreateStakingContract" + } + + async fn execute(mut self) -> CliTypedResult { + let pool_address = default_stake_pool_address( + self.txn_options.profile_options.account_address()?, + self.operator, + ); + prompt_yes_with_override( + &format!( + "Creating a new staking contract with pool address 0x{}. Confirm?", + pool_address + ), + self.txn_options.prompt_options, + )?; + + self.txn_options + .submit_transaction(aptos_stdlib::staking_contract_create_staking_contract( + self.operator, + self.voter, + self.amount, + self.commission_percentage, + vec![], + )) + .await + .map(|inner| inner.into()) + } +} + +/// Distribute fully unlocked coins from vesting +/// +/// Distribute fully unlocked coins (rewards and/or vested coins) from the vesting contract +/// to shareholders. +#[derive(Parser)] +pub struct DistributeVestedCoins { + /// Address of the vesting contract's admin. 
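+    ///
+    /// Note: the vesting contract address is derived deterministically from this admin address
+    /// with a creation nonce of 0 (see `create_vesting_contract_address` below), so only the
+    /// admin address needs to be supplied here.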
+ #[clap(long, parse(try_from_str=crate::common::types::load_account_arg))] + pub admin_address: AccountAddress, + + #[clap(flatten)] + pub(crate) txn_options: TransactionOptions, +} + +#[async_trait] +impl CliCommand for DistributeVestedCoins { + fn command_name(&self) -> &'static str { + "DistributeVestedCoins" + } + + async fn execute(mut self) -> CliTypedResult { + let vesting_contract_address = create_vesting_contract_address(self.admin_address, 0, &[]); + self.txn_options + .submit_transaction(aptos_stdlib::vesting_distribute(vesting_contract_address)) + .await + .map(|inner| inner.into()) + } +} + +/// Unlock vested coins +/// +/// Unlock vested coins according to the vesting contract's schedule. +/// This also unlocks any accumulated staking rewards and pays commission to the operator of the +/// vesting contract's stake pool first. +/// +/// The unlocked vested tokens and staking rewards are still subject to the staking lockup and +/// cannot be withdrawn until after the lockup expires. +#[derive(Parser)] +pub struct UnlockVestedCoins { + /// Address of the vesting contract's admin. + #[clap(long, parse(try_from_str=crate::common::types::load_account_arg))] + pub admin_address: AccountAddress, + + #[clap(flatten)] + pub(crate) txn_options: TransactionOptions, +} + +#[async_trait] +impl CliCommand for UnlockVestedCoins { + fn command_name(&self) -> &'static str { + "UnlockVestedCoins" + } + + async fn execute(mut self) -> CliTypedResult { + let vesting_contract_address = create_vesting_contract_address(self.admin_address, 0, &[]); + self.txn_options + .submit_transaction(aptos_stdlib::vesting_vest(vesting_contract_address)) + .await + .map(|inner| inner.into()) + } +} + +/// Request commission from running a stake pool +/// +/// Allows operators or owners to request commission from running a stake pool (only if there's a +/// staking contract set up with the staker). The commission will be withdrawable at the end of the +/// stake pool's current lockup period. 
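+///
+/// As an illustrative sketch (assuming a `stake` subcommand and clap's default kebab-case flag
+/// names), an operator could request commission with:
+/// `movement stake request-commission --owner-address <owner> --operator-address <operator>`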
+#[derive(Parser)] +pub struct RequestCommission { + /// Address of the owner of the stake pool + #[clap(long, parse(try_from_str=crate::common::types::load_account_arg))] + pub owner_address: AccountAddress, + + /// Address of the operator of the stake pool + #[clap(long, parse(try_from_str=crate::common::types::load_account_arg))] + pub operator_address: AccountAddress, + + #[clap(flatten)] + pub(crate) txn_options: TransactionOptions, +} + +#[async_trait] +impl CliCommand for RequestCommission { + fn command_name(&self) -> &'static str { + "RequestCommission" + } + + async fn execute(mut self) -> CliTypedResult { + let client = self + .txn_options + .rest_options + .client(&self.txn_options.profile_options)?; + + // If this is a vesting stake pool, retrieve the associated vesting contract + let vesting_admin_store = client + .get_account_resource_bcs::( + self.owner_address, + "0x1::vesting::AdminStore", + ) + .await; + + // Note: this only works if the vesting contract has exactly one staking contract + // associated + let staker_address = if let Ok(vesting_admin_store) = vesting_admin_store { + vesting_admin_store.into_inner().vesting_contracts[0] + } else { + self.owner_address + }; + self.txn_options + .submit_transaction(aptos_stdlib::staking_contract_request_commission( + staker_address, + self.operator_address, + )) + .await + .map(|inner| inner.into()) + } +} diff --git a/m1/movement/src/test/mod.rs b/m1/movement/src/test/mod.rs new file mode 100644 index 00000000..f80891b1 --- /dev/null +++ b/m1/movement/src/test/mod.rs @@ -0,0 +1,1221 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use crate::{ + account::{ + create::{CreateAccount, DEFAULT_FUNDED_COINS}, + fund::FundWithFaucet, + key_rotation::{LookupAddress, RotateKey, RotateSummary}, + list::{ListAccount, ListQuery}, + transfer::{TransferCoins, TransferSummary}, + }, + common::{ + init::{InitTool, Network}, + types::{ + account_address_from_public_key, AccountAddressWrapper, ArgWithTypeVec, CliError, + CliTypedResult, EncodingOptions, EntryFunctionArguments, FaucetOptions, GasOptions, + KeyType, MoveManifestAccountWrapper, MovePackageDir, OptionalPoolAddressArgs, + PoolAddressArgs, PrivateKeyInputOptions, PromptOptions, PublicKeyInputOptions, + RestOptions, RngArgs, SaveFile, TransactionOptions, TransactionSummary, + }, + utils::write_to_file, + }, + governance::{ + CompileScriptFunction, ProposalSubmissionSummary, SubmitProposal, SubmitVote, + VerifyProposal, VerifyProposalResponse, + }, + move_tool::{ + ArgWithType, CompilePackage, DownloadPackage, FrameworkPackageArgs, IncludedArtifacts, + IncludedArtifactsArgs, InitPackage, MemberId, PublishPackage, RunFunction, RunScript, + TestPackage, + }, + node::{ + AnalyzeMode, AnalyzeValidatorPerformance, GetStakePool, InitializeValidator, + JoinValidatorSet, LeaveValidatorSet, OperatorArgs, OperatorConfigFileArgs, + ShowValidatorConfig, ShowValidatorSet, ShowValidatorStake, StakePoolResult, + UpdateConsensusKey, UpdateValidatorNetworkAddresses, ValidatorConfig, + ValidatorConsensusKeyArgs, ValidatorNetworkAddressesArgs, + }, + op::key::{ExtractPeer, GenerateKey, NetworkKeyInputOptions, SaveKey}, + stake::{ + AddStake, IncreaseLockup, InitializeStakeOwner, SetDelegatedVoter, SetOperator, + UnlockStake, WithdrawStake, + }, + CliCommand, +}; +use aptos_config::config::Peer; +use aptos_crypto::{ + bls12381, + ed25519::{Ed25519PrivateKey, Ed25519PublicKey}, + x25519, PrivateKey, +}; +use aptos_genesis::config::HostAndPort; +use aptos_keygen::KeyGen; 
+use aptos_logger::warn; +use aptos_rest_client::{ + aptos_api_types::{MoveStructTag, MoveType}, + Transaction, +}; +use aptos_sdk::move_types::{account_address::AccountAddress, language_storage::ModuleId}; +use aptos_temppath::TempPath; +use aptos_types::on_chain_config::ValidatorSet; +use move_core_types::ident_str; +use reqwest::Url; +use serde::{Deserialize, Serialize}; +use serde_json::Value; +use std::{ + collections::{BTreeMap, HashMap}, + mem, + path::PathBuf, + str::FromStr, + time::Duration, +}; +use tempfile::TempDir; +use thiserror::__private::PathAsDisplay; +#[cfg(feature = "cli-framework-test-move")] +use thiserror::__private::PathAsDisplay; +use tokio::time::{sleep, Instant}; + +#[cfg(test)] +mod tests; + +pub const INVALID_ACCOUNT: &str = "0xDEADBEEFCAFEBABE"; + +pub const FIRST_MOVE_FILE: &str = " +module NamedAddress0::store { + use std::string; + use aptos_framework::coin::{Self}; + + struct CoolCoin has key {} + + public entry fun init( + account: &signer, + decimals: u64, + monitor_supply: bool + ) { + let (_, _) = coin::initialize(account, string::utf8(b\"CoolCoin\"), string::utf8(b\"COOL\"), decimals, monitor_supply); + coin::register(account); + } +}"; + +/// A framework for testing the CLI +pub struct CliTestFramework { + account_addresses: Vec, + account_keys: Vec, + endpoint: Url, + faucet_endpoint: Url, + move_dir: Option, +} + +impl CliTestFramework { + pub fn local_new(num_accounts: usize) -> CliTestFramework { + let dummy_url = Url::parse("http://localhost").unwrap(); + let mut framework = CliTestFramework { + account_addresses: Vec::new(), + account_keys: Vec::new(), + endpoint: dummy_url.clone(), + faucet_endpoint: dummy_url, + move_dir: None, + }; + let mut keygen = KeyGen::from_seed([0; 32]); + for _ in 0..num_accounts { + let key = keygen.generate_ed25519_private_key(); + framework.add_account_to_cli(key); + } + framework + } + + pub async fn new(endpoint: Url, faucet_endpoint: Url, num_accounts: usize) -> CliTestFramework { + let mut framework = CliTestFramework { + account_addresses: Vec::new(), + account_keys: Vec::new(), + endpoint, + faucet_endpoint, + move_dir: None, + }; + let mut keygen = KeyGen::from_seed([0; 32]); + + for _ in 0..num_accounts { + framework + .create_cli_account_from_faucet(keygen.generate_ed25519_private_key(), None) + .await + .unwrap(); + } + + framework + } + + pub fn addresses(&self) -> Vec { + self.account_addresses.clone() + } + + async fn check_account_exists(&self, index: usize) -> bool { + // Create account if it doesn't exist (and there's a faucet) + let client = aptos_rest_client::Client::new(self.endpoint.clone()); + let address = self.account_id(index); + client.get_account(address).await.is_ok() + } + + pub fn add_account_to_cli(&mut self, private_key: Ed25519PrivateKey) -> usize { + let address = account_address_from_public_key(&private_key.public_key()); + self.account_addresses.push(address); + self.account_keys.push(private_key); + println!( + "Account: {} (index: {})", + address.to_hex_literal(), + self.account_keys.len() - 1 + ); + self.account_keys.len() - 1 + } + + pub fn add_account_with_address_to_cli( + &mut self, + private_key: Ed25519PrivateKey, + address: AccountAddress, + ) -> usize { + self.account_addresses.push(address); + self.account_keys.push(private_key); + self.account_keys.len() - 1 + } + + pub async fn create_cli_account( + &mut self, + private_key: Ed25519PrivateKey, + sender_index: usize, + ) -> CliTypedResult { + let index = self.add_account_to_cli(private_key); + if 
self.check_account_exists(index).await { + return Err(CliError::UnexpectedError( + "Account already exists".to_string(), + )); + } + CreateAccount { + txn_options: self.transaction_options(sender_index, None), + account: self.account_id(index), + } + .execute() + .await?; + + Ok(index) + } + + pub async fn create_cli_account_from_faucet( + &mut self, + private_key: Ed25519PrivateKey, + amount: Option, + ) -> CliTypedResult { + let index = self.add_account_to_cli(private_key); + if self.check_account_exists(index).await { + return Err(CliError::UnexpectedError( + "Account already exists".to_string(), + )); + } + + self.fund_account(index, amount).await?; + warn!( + "Funded account {:?} with {:?} OCTA", + self.account_id(index), + amount.unwrap_or(DEFAULT_FUNDED_COINS) + ); + Ok(index) + } + + pub async fn fund_account(&self, index: usize, amount: Option) -> CliTypedResult { + FundWithFaucet { + profile_options: Default::default(), + account: self.account_id(index), + faucet_options: self.faucet_options(), + amount: amount.unwrap_or(DEFAULT_FUNDED_COINS), + rest_options: self.rest_options(), + } + .execute() + .await + } + + pub async fn lookup_address( + &self, + public_key: &Ed25519PublicKey, + ) -> CliTypedResult { + LookupAddress { + public_key_options: PublicKeyInputOptions::from_key(public_key), + rest_options: self.rest_options(), + encoding_options: Default::default(), + profile_options: Default::default(), + } + .execute() + .await + } + + pub async fn rotate_key( + &mut self, + index: usize, + new_private_key: String, + gas_options: Option, + ) -> CliTypedResult { + let response = RotateKey { + txn_options: TransactionOptions { + private_key_options: PrivateKeyInputOptions::from_private_key( + self.private_key(index), + ) + .unwrap(), + sender_account: Some(self.account_id(index)), + rest_options: self.rest_options(), + gas_options: gas_options.unwrap_or_default(), + prompt_options: PromptOptions::yes(), + ..Default::default() + }, + new_private_key: Some(new_private_key), + save_to_profile: None, + new_private_key_file: None, + skip_saving_profile: true, + } + .execute() + .await?; + + Ok(response) + } + + pub async fn list_account(&self, index: usize, query: ListQuery) -> CliTypedResult> { + ListAccount { + rest_options: self.rest_options(), + profile_options: Default::default(), + account: Some(self.account_id(index)), + query, + } + .execute() + .await + } + + pub async fn transfer_coins( + &self, + sender_index: usize, + receiver_index: usize, + amount: u64, + gas_options: Option, + ) -> CliTypedResult { + TransferCoins { + txn_options: self.transaction_options(sender_index, gas_options), + account: self.account_id(receiver_index), + amount, + } + .execute() + .await + } + + pub async fn transfer_invalid_addr( + &self, + sender_index: usize, + amount: u64, + gas_options: Option, + ) -> CliTypedResult { + RunFunction { + entry_function_args: EntryFunctionArguments { + function_id: MemberId { + module_id: ModuleId::new(AccountAddress::ONE, ident_str!("coin").into()), + member_id: ident_str!("transfer").into(), + }, + arg_vec: ArgWithTypeVec { + args: vec![ + ArgWithType::from_str("address:0xdeadbeefcafebabe").unwrap(), + ArgWithType::from_str(&format!("u64:{}", amount)).unwrap(), + ], + }, + type_args: vec![MoveType::Struct(MoveStructTag::new( + AccountAddress::ONE.into(), + ident_str!("aptos_coin").into(), + ident_str!("AptosCoin").into(), + vec![], + ))], + }, + txn_options: self.transaction_options(sender_index, gas_options), + } + .execute() + .await + } + + pub async fn 
show_validator_config( + &self, + pool_index: usize, + ) -> CliTypedResult { + ShowValidatorConfig { + rest_options: self.rest_options(), + profile_options: Default::default(), + operator_args: self.operator_args(Some(pool_index)), + } + .execute() + .await + .map(|v| (&v).into()) + } + + pub async fn show_validator_set(&self) -> CliTypedResult { + ShowValidatorSet { + rest_options: self.rest_options(), + profile_options: Default::default(), + } + .execute() + .await + .map(|v| (&v).into()) + } + + pub async fn show_validator_stake(&self, pool_index: usize) -> CliTypedResult { + ShowValidatorStake { + rest_options: self.rest_options(), + profile_options: Default::default(), + operator_args: self.operator_args(Some(pool_index)), + } + .execute() + .await + } + + pub async fn initialize_validator( + &self, + index: usize, + consensus_public_key: bls12381::PublicKey, + proof_of_possession: bls12381::ProofOfPossession, + validator_host: HostAndPort, + validator_network_public_key: x25519::PublicKey, + ) -> CliTypedResult { + InitializeValidator { + txn_options: self.transaction_options(index, None), + operator_config_file_args: OperatorConfigFileArgs { + operator_config_file: None, + }, + validator_consensus_key_args: ValidatorConsensusKeyArgs { + consensus_public_key: Some(consensus_public_key), + proof_of_possession: Some(proof_of_possession), + }, + validator_network_addresses_args: ValidatorNetworkAddressesArgs { + validator_host: Some(validator_host), + validator_network_public_key: Some(validator_network_public_key), + full_node_host: None, + full_node_network_public_key: None, + }, + } + .execute() + .await + } + + pub async fn add_stake( + &self, + index: usize, + amount: u64, + ) -> CliTypedResult> { + AddStake { + txn_options: self.transaction_options(index, None), + amount, + } + .execute() + .await + } + + pub async fn unlock_stake( + &self, + index: usize, + amount: u64, + ) -> CliTypedResult> { + UnlockStake { + txn_options: self.transaction_options(index, None), + amount, + } + .execute() + .await + } + + pub async fn withdraw_stake( + &self, + index: usize, + amount: u64, + ) -> CliTypedResult> { + WithdrawStake { + node_op_options: self.transaction_options(index, None), + amount, + } + .execute() + .await + } + + pub async fn increase_lockup(&self, index: usize) -> CliTypedResult> { + IncreaseLockup { + txn_options: self.transaction_options(index, None), + } + .execute() + .await + } + + pub async fn join_validator_set( + &self, + operator_index: usize, + pool_index: Option, + ) -> CliTypedResult { + JoinValidatorSet { + txn_options: self.transaction_options(operator_index, None), + operator_args: self.operator_args(pool_index), + } + .execute() + .await + } + + pub async fn leave_validator_set( + &self, + operator_index: usize, + pool_index: Option, + ) -> CliTypedResult { + LeaveValidatorSet { + txn_options: self.transaction_options(operator_index, None), + operator_args: self.operator_args(pool_index), + } + .execute() + .await + } + + pub async fn update_validator_network_addresses( + &self, + operator_index: usize, + pool_index: Option, + validator_host: HostAndPort, + validator_network_public_key: x25519::PublicKey, + ) -> CliTypedResult { + UpdateValidatorNetworkAddresses { + txn_options: self.transaction_options(operator_index, None), + operator_args: self.operator_args(pool_index), + operator_config_file_args: OperatorConfigFileArgs { + operator_config_file: None, + }, + validator_network_addresses_args: ValidatorNetworkAddressesArgs { + validator_host: 
Some(validator_host), + validator_network_public_key: Some(validator_network_public_key), + full_node_host: None, + full_node_network_public_key: None, + }, + } + .execute() + .await + } + + pub async fn analyze_validator_performance( + &self, + start_epoch: Option, + end_epoch: Option, + ) -> CliTypedResult<()> { + AnalyzeValidatorPerformance { + start_epoch: start_epoch.unwrap_or(-2), + end_epoch, + rest_options: self.rest_options(), + profile_options: Default::default(), + analyze_mode: AnalyzeMode::All, + pool_addresses: vec![], + } + .execute() + .await + } + + pub async fn update_consensus_key( + &self, + operator_index: usize, + pool_index: Option, + consensus_public_key: bls12381::PublicKey, + proof_of_possession: bls12381::ProofOfPossession, + ) -> CliTypedResult { + UpdateConsensusKey { + txn_options: self.transaction_options(operator_index, None), + operator_args: self.operator_args(pool_index), + operator_config_file_args: OperatorConfigFileArgs { + operator_config_file: None, + }, + validator_consensus_key_args: ValidatorConsensusKeyArgs { + consensus_public_key: Some(consensus_public_key), + proof_of_possession: Some(proof_of_possession), + }, + } + .execute() + .await + } + + pub async fn init(&self, private_key: &Ed25519PrivateKey) -> CliTypedResult<()> { + InitTool { + network: Some(Network::Custom), + rest_url: Some(self.endpoint.clone()), + faucet_url: Some(self.faucet_endpoint.clone()), + rng_args: RngArgs::from_seed([0; 32]), + private_key_options: PrivateKeyInputOptions::from_private_key(private_key)?, + profile_options: Default::default(), + prompt_options: PromptOptions::yes(), + encoding_options: EncodingOptions::default(), + skip_faucet: false, + } + .execute() + .await + } + + pub async fn get_pool_address( + &self, + owner_index: usize, + ) -> CliTypedResult> { + GetStakePool { + owner_address: self.account_id(owner_index), + rest_options: self.rest_options(), + profile_options: Default::default(), + } + .execute() + .await + } + + pub async fn initialize_stake_owner( + &self, + owner_index: usize, + initial_stake_amount: u64, + voter_index: Option, + operator_index: Option, + ) -> CliTypedResult { + InitializeStakeOwner { + txn_options: self.transaction_options(owner_index, None), + initial_stake_amount, + operator_address: operator_index.map(|idx| self.account_id(idx)), + voter_address: voter_index.map(|idx| self.account_id(idx)), + } + .execute() + .await + } + + pub async fn create_stake_pool( + &self, + owner_index: usize, + operator_index: usize, + voter_index: usize, + amount: u64, + commission_percentage: u64, + ) -> CliTypedResult { + RunFunction { + entry_function_args: EntryFunctionArguments { + function_id: MemberId::from_str("0x1::staking_contract::create_staking_contract") + .unwrap(), + arg_vec: ArgWithTypeVec { + args: vec![ + ArgWithType::address(self.account_id(operator_index)), + ArgWithType::address(self.account_id(voter_index)), + ArgWithType::u64(amount), + ArgWithType::u64(commission_percentage), + ArgWithType::bytes(vec![]), + ], + }, + type_args: vec![], + }, + txn_options: self.transaction_options(owner_index, None), + } + .execute() + .await + } + + pub async fn set_operator( + &self, + owner_index: usize, + operator_index: usize, + ) -> CliTypedResult> { + SetOperator { + txn_options: self.transaction_options(owner_index, None), + operator_address: self.account_id(operator_index), + } + .execute() + .await + } + + pub async fn set_delegated_voter( + &self, + owner_index: usize, + voter_index: usize, + ) -> CliTypedResult> { + 
SetDelegatedVoter { + txn_options: self.transaction_options(owner_index, None), + voter_address: self.account_id(voter_index), + } + .execute() + .await + } + + /// Wait for an account to exist + pub async fn wait_for_account(&self, index: usize) -> CliTypedResult> { + let mut result = self.list_account(index, ListQuery::Balance).await; + let start = Instant::now(); + while start.elapsed() < Duration::from_secs(10) { + match result { + Ok(_) => return result, + _ => { + sleep(Duration::from_millis(500)).await; + result = self.list_account(index, ListQuery::Balance).await; + }, + }; + } + + result + } + + pub async fn account_balance_now(&self, index: usize) -> CliTypedResult { + let result = self.list_account(index, ListQuery::Balance).await?; + Ok(json_account_to_balance(result.first().unwrap())) + } + + pub async fn assert_account_balance_now(&self, index: usize, expected: u64) { + let result = self.list_account(index, ListQuery::Balance).await; + assert!( + result.is_ok(), + "Account {} not yet created, {}, last 10 transactions: {}", + self.account_id(index), + result.unwrap_err(), + self.last_n_transactions_details(10).await + ); + let accounts = result.unwrap(); + let account = accounts.first().unwrap(); + let coin = json_account_to_balance(account); + assert_eq!( + coin, + expected, + "Account {} with state: {:?}, last 10 transactions: {}", + self.account_id(index), + account, + self.last_n_transactions_details(10).await + ); + } + + async fn last_n_transactions_details(&self, count: u16) -> String { + let result = aptos_rest_client::Client::new(self.endpoint.clone()) + .get_transactions(None, Some(count)) + .await; + if let Err(e) = result { + return format!("Err({:?})", e); + } + let lines = result + .unwrap() + .inner() + .iter() + .map(|t| { + if let Transaction::UserTransaction(u) = t { + format!( + " * [{}] {}: sender={}, payload={:?}", + t.version().unwrap_or(0), + t.vm_status(), + u.request.sender, + u.request.payload + ) + } else { + format!( + " * [{}] {}: {}", + t.version().unwrap_or(0), + t.vm_status(), + t.type_str() + ) + } + }) + .collect::>(); + format!("\n{}\n", lines.join("\n")) + } + + pub async fn generate_x25519_key( + &self, + output_file: PathBuf, + seed: [u8; 32], + ) -> CliTypedResult> { + GenerateKey { + key_type: KeyType::X25519, + rng_args: RngArgs::from_seed(seed), + save_params: SaveKey { + file_options: SaveFile { + output_file, + prompt_options: PromptOptions::yes(), + }, + encoding_options: Default::default(), + }, + vanity_prefix: None, + vanity_multisig: false, + } + .execute() + .await + } + + pub async fn extract_peer( + &self, + host: HostAndPort, + private_key_file: PathBuf, + output_file: PathBuf, + ) -> CliTypedResult> { + ExtractPeer { + host, + network_key_input_options: NetworkKeyInputOptions::from_private_key_file( + private_key_file, + ), + output_file_options: SaveFile { + output_file, + prompt_options: PromptOptions::yes(), + }, + encoding_options: Default::default(), + } + .execute() + .await + } + + pub fn init_move_dir(&mut self) { + let move_dir = TempPath::new(); + move_dir + .create_as_dir() + .expect("Expected to be able to create move temp dir"); + self.move_dir = Some(move_dir.path().to_path_buf()); + } + + #[cfg(feature = "cli-framework-test-move")] + pub fn add_move_files(&self) { + let move_dir = self.move_dir(); + let sources_dir = move_dir.join("sources"); + + let hello_blockchain_contents = include_str!( + "../../../../aptos-move/move-examples/hello_blockchain/sources/hello_blockchain.move" + ); + let source_path = 
sources_dir.join("hello_blockchain.move"); + write_to_file( + source_path.as_path(), + &source_path.as_display().to_string(), + hello_blockchain_contents.as_bytes(), + ) + .unwrap(); + + let hello_blockchain_test_contents = include_str!("../../../../aptos-move/move-examples/hello_blockchain/sources/hello_blockchain_test.move"); + let test_path = sources_dir.join("hello_blockchain_test.move"); + write_to_file( + test_path.as_path(), + &test_path.as_display().to_string(), + hello_blockchain_test_contents.as_bytes(), + ) + .unwrap(); + } + + pub fn move_dir(&self) -> PathBuf { + assert!(self.move_dir.is_some(), "Must have initialized the temp move directory with `CliTestFramework::init_move_dir()` first"); + self.move_dir.as_ref().cloned().unwrap() + } + + pub async fn init_package( + &self, + name: String, + account_strs: BTreeMap<&str, &str>, + framework_dir: Option, + ) -> CliTypedResult<()> { + InitPackage { + name, + package_dir: Some(self.move_dir()), + named_addresses: Self::move_manifest_named_addresses(account_strs), + prompt_options: PromptOptions { + assume_yes: false, + assume_no: true, + }, + framework_package_args: FrameworkPackageArgs { + framework_git_rev: None, + framework_local_dir: framework_dir, + skip_fetch_latest_git_deps: false, + }, + } + .execute() + .await + } + + pub async fn compile_package( + &self, + account_strs: BTreeMap<&str, &str>, + included_artifacts: Option, + ) -> CliTypedResult> { + CompilePackage { + move_options: self.move_options(account_strs), + save_metadata: false, + included_artifacts_args: IncludedArtifactsArgs { + included_artifacts: included_artifacts.unwrap_or(IncludedArtifacts::Sparse), + }, + } + .execute() + .await + } + + pub async fn test_package( + &self, + account_strs: BTreeMap<&str, &str>, + filter: Option<&str>, + ) -> CliTypedResult<&'static str> { + TestPackage { + instruction_execution_bound: 100_000, + move_options: self.move_options(account_strs), + filter: filter.map(|str| str.to_string()), + ignore_compile_warnings: false, + compute_coverage: false, + dump_state: false, + } + .execute() + .await + } + + pub async fn publish_package( + &self, + index: usize, + gas_options: Option, + account_strs: BTreeMap<&str, &str>, + included_artifacts: Option, + ) -> CliTypedResult { + PublishPackage { + move_options: self.move_options(account_strs), + txn_options: self.transaction_options(index, gas_options), + override_size_check: false, + included_artifacts_args: IncludedArtifactsArgs { + included_artifacts: included_artifacts.unwrap_or(IncludedArtifacts::Sparse), + }, + } + .execute() + .await + } + + pub async fn download_package( + &self, + index: usize, + package: String, + output_dir: PathBuf, + ) -> CliTypedResult<&'static str> { + DownloadPackage { + rest_options: self.rest_options(), + profile_options: Default::default(), + account: self.account_id(index), + package, + output_dir: Some(output_dir), + } + .execute() + .await + } + + pub async fn run_function( + &self, + index: usize, + gas_options: Option, + function_id: MemberId, + args: Vec<&str>, + type_args: Vec<&str>, + ) -> CliTypedResult { + let mut parsed_args = Vec::new(); + for arg in args { + parsed_args.push( + ArgWithType::from_str(arg) + .map_err(|err| CliError::UnexpectedError(err.to_string()))?, + ) + } + + let mut parsed_type_args = Vec::new(); + for arg in type_args { + parsed_type_args.push( + MoveType::from_str(arg) + .map_err(|err| CliError::UnexpectedError(err.to_string()))?, + ) + } + + RunFunction { + txn_options: self.transaction_options(index, 
gas_options), + entry_function_args: EntryFunctionArguments { + function_id, + arg_vec: ArgWithTypeVec { args: parsed_args }, + type_args: parsed_type_args, + }, + } + .execute() + .await + } + + /// Runs the given script contents using the local aptos_framework directory. + pub async fn run_script( + &self, + index: usize, + script_contents: &str, + ) -> CliTypedResult { + self.run_script_with_framework_package(index, script_contents, FrameworkPackageArgs { + framework_git_rev: None, + framework_local_dir: Some(Self::aptos_framework_dir()), + skip_fetch_latest_git_deps: false, + }) + .await + } + + /// Runs the given script contents using the aptos_framework from aptos-core git repository. + pub async fn run_script_with_default_framework( + &self, + index: usize, + script_contents: &str, + ) -> CliTypedResult { + self.run_script_with_framework_package(index, script_contents, FrameworkPackageArgs { + framework_git_rev: None, + framework_local_dir: None, + skip_fetch_latest_git_deps: false, + }) + .await + } + + /// Runs the given script with the provided framework package arguments + pub async fn run_script_with_framework_package( + &self, + index: usize, + script_contents: &str, + framework_package_args: FrameworkPackageArgs, + ) -> CliTypedResult { + // Make a temporary directory for compilation + let temp_dir = TempDir::new().map_err(|err| { + CliError::UnexpectedError(format!("Failed to create temporary directory {}", err)) + })?; + + let source_path = temp_dir.path().join("script.move"); + write_to_file( + source_path.as_path(), + &source_path.as_display().to_string(), + script_contents.as_bytes(), + ) + .unwrap(); + + RunScript { + txn_options: self.transaction_options(index, None), + compile_proposal_args: CompileScriptFunction { + script_path: Some(source_path), + compiled_script_path: None, + framework_package_args, + bytecode_version: None, + }, + arg_vec: ArgWithTypeVec { args: Vec::new() }, + type_args: Vec::new(), + } + .execute() + .await + } + + pub async fn run_script_with_script_path( + &self, + index: usize, + script_path: &str, + args: Vec, + type_args: Vec, + ) -> CliTypedResult { + RunScript { + txn_options: self.transaction_options(index, None), + compile_proposal_args: CompileScriptFunction { + script_path: Some(script_path.parse().unwrap()), + compiled_script_path: None, + framework_package_args: FrameworkPackageArgs { + framework_git_rev: None, + framework_local_dir: Some(Self::aptos_framework_dir()), + skip_fetch_latest_git_deps: false, + }, + bytecode_version: None, + }, + arg_vec: ArgWithTypeVec { args }, + type_args, + } + .execute() + .await + } + + fn aptos_framework_dir() -> PathBuf { + PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .join("..") + .join("..") + .join("aptos-move") + .join("framework") + .join("aptos-framework") + } + + pub fn move_options(&self, account_strs: BTreeMap<&str, &str>) -> MovePackageDir { + MovePackageDir { + package_dir: Some(self.move_dir()), + output_dir: None, + named_addresses: Self::named_addresses(account_strs), + skip_fetch_latest_git_deps: true, + bytecode_version: None, + } + } + + pub fn move_manifest_named_addresses( + account_strs: BTreeMap<&str, &str>, + ) -> BTreeMap { + account_strs + .iter() + .map(|(key, value)| { + ( + key.to_string(), + MoveManifestAccountWrapper::from_str(value).unwrap(), + ) + }) + .collect() + } + + pub fn named_addresses( + account_strs: BTreeMap<&str, &str>, + ) -> BTreeMap { + account_strs + .iter() + .map(|(key, value)| { + ( + key.to_string(), + 
AccountAddressWrapper::from_str(value).unwrap(), + ) + }) + .collect() + } + + pub fn rest_options(&self) -> RestOptions { + RestOptions::new(Some(self.endpoint.clone()), None) + } + + pub fn faucet_options(&self) -> FaucetOptions { + FaucetOptions::new(Some(self.faucet_endpoint.clone())) + } + + fn transaction_options( + &self, + index: usize, + gas_options: Option, + ) -> TransactionOptions { + TransactionOptions { + private_key_options: PrivateKeyInputOptions::from_private_key(self.private_key(index)) + .unwrap(), + sender_account: Some(self.account_id(index)), + rest_options: self.rest_options(), + gas_options: gas_options.unwrap_or_default(), + prompt_options: PromptOptions::yes(), + ..Default::default() + } + } + + fn operator_args(&self, pool_index: Option) -> OperatorArgs { + OperatorArgs { + pool_address_args: OptionalPoolAddressArgs { + pool_address: pool_index.map(|idx| self.account_id(idx)), + }, + } + } + + pub fn private_key(&self, index: usize) -> &Ed25519PrivateKey { + self.account_keys.get(index).unwrap() + } + + pub fn set_private_key( + &mut self, + index: usize, + new_key: Ed25519PrivateKey, + ) -> Ed25519PrivateKey { + // Insert the new private key into the test framework, returning the old one + mem::replace(&mut self.account_keys[index], new_key) + } + + pub fn account_id(&self, index: usize) -> AccountAddress { + *self.account_addresses.get(index).unwrap() + } + + pub async fn create_proposal( + &mut self, + index: usize, + metadata_url: &str, + script_path: PathBuf, + pool_address: AccountAddress, + is_multi_step: bool, + ) -> CliTypedResult { + SubmitProposal { + metadata_url: Url::parse(metadata_url).unwrap(), + pool_address_args: PoolAddressArgs { pool_address }, + txn_options: self.transaction_options(index, None), + is_multi_step, + compile_proposal_args: CompileScriptFunction { + script_path: Some(script_path), + compiled_script_path: None, + framework_package_args: FrameworkPackageArgs { + framework_git_rev: None, + framework_local_dir: Some(Self::aptos_framework_dir()), + skip_fetch_latest_git_deps: false, + }, + bytecode_version: None, + }, + } + .execute() + .await + } + + pub async fn vote( + &self, + index: usize, + proposal_id: u64, + yes: bool, + no: bool, + pool_addresses: Vec, + ) { + SubmitVote { + proposal_id, + yes, + no, + pool_addresses, + txn_options: self.transaction_options(index, None), + } + .execute() + .await + .expect("Successfully voted."); + } + + pub async fn verify_proposal( + &self, + proposal_id: u64, + script_path: &str, + ) -> CliTypedResult { + VerifyProposal { + proposal_id, + compile_proposal_args: CompileScriptFunction { + script_path: Some(script_path.parse().unwrap()), + compiled_script_path: None, + framework_package_args: FrameworkPackageArgs { + framework_git_rev: None, + framework_local_dir: Some(Self::aptos_framework_dir()), + skip_fetch_latest_git_deps: false, + }, + bytecode_version: None, + }, + rest_options: self.rest_options(), + profile: Default::default(), + prompt_options: PromptOptions::yes(), + } + .execute() + .await + } +} + +// ValidatorConfig/ValidatorSet doesn't match Move ValidatorSet struct, +// and json is serialized with different types from both, so hardcoding deserialization. 
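+// Expects the first `ListQuery::Balance` entry to be shaped like
+// {"coin": {"value": "100000000"}}; panics if the object layout differs or the
+// value is not a stringified u64.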
+ +fn json_account_to_balance(value: &Value) -> u64 { + u64::from_str( + value + .as_object() + .unwrap() + .get("coin") + .unwrap() + .as_object() + .unwrap() + .get("value") + .unwrap() + .as_str() + .unwrap(), + ) + .unwrap() +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct IndividualValidatorPerformance { + successful_proposals: String, + failed_proposals: String, +} + +impl IndividualValidatorPerformance { + pub fn successful_proposals(&self) -> u32 { + self.successful_proposals.parse().unwrap() + } + + pub fn failed_proposals(&self) -> u32 { + self.failed_proposals.parse().unwrap() + } +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct ValidatorPerformance { + pub validators: Vec, +} diff --git a/m1/movement/src/test/tests.rs b/m1/movement/src/test/tests.rs new file mode 100644 index 00000000..62a8d997 --- /dev/null +++ b/m1/movement/src/test/tests.rs @@ -0,0 +1,136 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use crate::{ + move_tool::{ArgWithType, FunctionArgType}, + CliResult, Tool, +}; +use clap::Parser; +use std::str::FromStr; + +/// In order to ensure that there aren't duplicate input arguments for untested CLI commands, +/// we call help on every command to ensure it at least runs +#[tokio::test] +async fn ensure_every_command_args_work() { + assert_cmd_not_panic(&["movement"]).await; + + assert_cmd_not_panic(&["movement", "account"]).await; + assert_cmd_not_panic(&["movement", "account", "create", "--help"]).await; + assert_cmd_not_panic(&["movement", "account", "create-resource-account", "--help"]).await; + assert_cmd_not_panic(&["movement", "account", "fund-with-faucet", "--help"]).await; + assert_cmd_not_panic(&["movement", "account", "list", "--help"]).await; + assert_cmd_not_panic(&["movement", "account", "lookup-address", "--help"]).await; + assert_cmd_not_panic(&["movement", "account", "rotate-key", "--help"]).await; + assert_cmd_not_panic(&["movement", "account", "transfer", "--help"]).await; + + assert_cmd_not_panic(&["movement", "config"]).await; + assert_cmd_not_panic(&["movement", "config", "generate-shell-completions", "--help"]).await; + assert_cmd_not_panic(&["movement", "config", "init", "--help"]).await; + assert_cmd_not_panic(&["movement", "config", "set-global-config", "--help"]).await; + assert_cmd_not_panic(&["movement", "config", "show-global-config"]).await; + assert_cmd_not_panic(&["movement", "config", "show-profiles"]).await; + + assert_cmd_not_panic(&["movement", "genesis"]).await; + assert_cmd_not_panic(&["movement", "genesis", "generate-genesis", "--help"]).await; + assert_cmd_not_panic(&["movement", "genesis", "generate-keys", "--help"]).await; + assert_cmd_not_panic(&["movement", "genesis", "generate-layout-template", "--help"]).await; + assert_cmd_not_panic(&["movement", "genesis", "set-validator-configuration", "--help"]).await; + assert_cmd_not_panic(&["movement", "genesis", "setup-git", "--help"]).await; + assert_cmd_not_panic(&["movement", "genesis", "generate-admin-write-set", "--help"]).await; + + assert_cmd_not_panic(&["movement", "governance"]).await; + assert_cmd_not_panic(&["movement", "governance", "execute-proposal", "--help"]).await; + assert_cmd_not_panic(&["movement", "governance", "generate-upgrade-proposal", "--help"]).await; + assert_cmd_not_panic(&["movement", "governance", "propose", "--help"]).await; + assert_cmd_not_panic(&["movement", "governance", "vote", "--help"]).await; + + assert_cmd_not_panic(&["movement", "info"]).await; + + assert_cmd_not_panic(&["movement", 
"init", "--help"]).await; + + assert_cmd_not_panic(&["movement", "key"]).await; + assert_cmd_not_panic(&["movement", "key", "generate", "--help"]).await; + assert_cmd_not_panic(&["movement", "key", "extract-peer", "--help"]).await; + + assert_cmd_not_panic(&["movement", "move"]).await; + assert_cmd_not_panic(&["movement", "move", "clean", "--help"]).await; + assert_cmd_not_panic(&["movement", "move", "compile", "--help"]).await; + assert_cmd_not_panic(&["movement", "move", "compile-script", "--help"]).await; + assert_cmd_not_panic(&["movement", "move", "download", "--help"]).await; + assert_cmd_not_panic(&["movement", "move", "init", "--help"]).await; + assert_cmd_not_panic(&["movement", "move", "list", "--help"]).await; + assert_cmd_not_panic(&["movement", "move", "prove", "--help"]).await; + assert_cmd_not_panic(&["movement", "move", "publish", "--help"]).await; + assert_cmd_not_panic(&["movement", "move", "run", "--help"]).await; + assert_cmd_not_panic(&["movement", "move", "run-script", "--help"]).await; + assert_cmd_not_panic(&["movement", "move", "test", "--help"]).await; + assert_cmd_not_panic(&["movement", "move", "transactional-test", "--help"]).await; + assert_cmd_not_panic(&["movement", "move", "view", "--help"]).await; + + assert_cmd_not_panic(&["movement", "node"]).await; + assert_cmd_not_panic(&["movement", "node", "check-network-connectivity", "--help"]).await; + assert_cmd_not_panic(&["movement", "node", "get-stake-pool", "--help"]).await; + assert_cmd_not_panic(&["movement", "node", "analyze-validator-performance", "--help"]).await; + assert_cmd_not_panic(&["movement", "node", "bootstrap-db-from-backup", "--help"]).await; + assert_cmd_not_panic(&["movement", "node", "initialize-validator", "--help"]).await; + assert_cmd_not_panic(&["movement", "node", "join-validator-set", "--help"]).await; + assert_cmd_not_panic(&["movement", "node", "leave-validator-set", "--help"]).await; + assert_cmd_not_panic(&["movement", "node", "run-local-testnet", "--help"]).await; + assert_cmd_not_panic(&["movement", "node", "show-validator-config", "--help"]).await; + assert_cmd_not_panic(&["movement", "node", "show-validator-set", "--help"]).await; + assert_cmd_not_panic(&["movement", "node", "show-validator-stake", "--help"]).await; + assert_cmd_not_panic(&["movement", "node", "update-consensus-key", "--help"]).await; + assert_cmd_not_panic(&[ + "movement", + "node", + "update-validator-network-addresses", + "--help", + ]) + .await; + + assert_cmd_not_panic(&["movement", "stake"]).await; + assert_cmd_not_panic(&["movement", "stake", "add-stake", "--help"]).await; + assert_cmd_not_panic(&["movement", "stake", "increase-lockup", "--help"]).await; + assert_cmd_not_panic(&["movement", "stake", "initialize-stake-owner", "--help"]).await; + assert_cmd_not_panic(&["movement", "stake", "set-delegated-voter", "--help"]).await; + assert_cmd_not_panic(&["movement", "stake", "set-operator", "--help"]).await; + assert_cmd_not_panic(&["movement", "stake", "unlock-stake", "--help"]).await; + assert_cmd_not_panic(&["movement", "stake", "withdraw-stake", "--help"]).await; +} + +/// Ensure we can parse URLs for args +#[tokio::test] +async fn ensure_can_parse_args_with_urls() { + let result = ArgWithType::from_str("string:https://aptoslabs.com").unwrap(); + matches!(result._ty, FunctionArgType::String); + assert_eq!( + result.arg, + bcs::to_bytes(&"https://aptoslabs.com".to_string()).unwrap() + ); +} + +async fn assert_cmd_not_panic(args: &[&str]) { + // When a command fails, it will have a panic in it due to an 
improperly setup command + // thread 'main' panicked at 'Command propose: Argument names must be unique, but 'assume-yes' is + // in use by more than one argument or group', ... + + match run_cmd(args).await { + Ok(inner) => assert!( + !inner.contains("panic"), + "Failed to not panic cmd {}: {}", + args.join(" "), + inner + ), + Err(inner) => assert!( + !inner.contains("panic"), + "Failed to not panic cmd {}: {}", + args.join(" "), + inner + ), + } +} + +async fn run_cmd(args: &[&str]) -> CliResult { + let tool: Tool = Tool::try_parse_from(args).map_err(|msg| msg.to_string())?; + tool.execute().await +} diff --git a/m1/movement/src/update/helpers.rs b/m1/movement/src/update/helpers.rs new file mode 100644 index 00000000..b54a0c75 --- /dev/null +++ b/m1/movement/src/update/helpers.rs @@ -0,0 +1,77 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use anyhow::{anyhow, Context, Result}; +use self_update::{backends::github::ReleaseList, cargo_crate_version, version::bump_is_greater}; + +#[derive(Debug)] +pub struct UpdateRequiredInfo { + pub update_required: bool, + pub current_version: String, + pub latest_version: String, + pub latest_version_tag: String, +} + +/// Return information about whether an update is required. +pub fn check_if_update_required(repo_owner: &str, repo_name: &str) -> Result { + // Build a configuration for determining the latest release. + let config = ReleaseList::configure() + .repo_owner(repo_owner) + .repo_name(repo_name) + .build() + .map_err(|e| anyhow!("Failed to build configuration to fetch releases: {:#}", e))?; + + // Get the most recent releases. + let releases = config + .fetch() + .map_err(|e| anyhow!("Failed to fetch releases: {:#}", e))?; + + // Find the latest release of the CLI, in which we filter for the CLI tag. + // If the release isn't in the last 30 items (the default API page size) + // this will fail. See https://github.com/aptos-labs/aptos-core/issues/6411. + let mut releases = releases.into_iter(); + let latest_release = loop { + let release = match releases.next() { + Some(release) => release, + None => return Err(anyhow!("Failed to find latest CLI release")), + }; + if release.version.starts_with("movement-cli-") { + break release; + } + }; + let latest_version_tag = latest_release.version; + let latest_version = latest_version_tag.split("-v").last().unwrap(); + + // Return early if we're up to date already. + let current_version = cargo_crate_version!(); + let update_required = bump_is_greater(current_version, latest_version) + .context("Failed to compare current and latest CLI versions")?; + + Ok(UpdateRequiredInfo { + update_required, + current_version: current_version.to_string(), + latest_version: latest_version.to_string(), + latest_version_tag, + }) +} + +pub enum InstallationMethod { + Source, + Homebrew, + Other, +} + +impl InstallationMethod { + pub fn from_env() -> Result { + // Determine update instructions based on what we detect about the installation. 
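+        // Heuristic only: an executable path containing "brew" is treated as a
+        // Homebrew install, one containing "target" as a local source build, and
+        // anything else falls back to InstallationMethod::Other.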
+ let exe_path = std::env::current_exe()?; + let installation_method = if exe_path.to_string_lossy().contains("brew") { + InstallationMethod::Homebrew + } else if exe_path.to_string_lossy().contains("target") { + InstallationMethod::Source + } else { + InstallationMethod::Other + }; + Ok(installation_method) + } +} diff --git a/m1/movement/src/update/mod.rs b/m1/movement/src/update/mod.rs new file mode 100644 index 00000000..17dab6d1 --- /dev/null +++ b/m1/movement/src/update/mod.rs @@ -0,0 +1,8 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +mod helpers; +mod tool; + +use helpers::check_if_update_required; +pub use tool::UpdateTool; diff --git a/m1/movement/src/update/tool.rs b/m1/movement/src/update/tool.rs new file mode 100644 index 00000000..1528ff85 --- /dev/null +++ b/m1/movement/src/update/tool.rs @@ -0,0 +1,145 @@ +// Copyright © Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use super::{check_if_update_required, helpers::InstallationMethod}; +use crate::common::{ + types::{CliCommand, CliTypedResult}, + utils::cli_build_information, +}; +use anyhow::{anyhow, Context}; +use aptos_build_info::BUILD_OS; +use async_trait::async_trait; +use clap::Parser; +use self_update::{backends::github::Update, cargo_crate_version, Status}; +use std::process::Command; + +/// Update the CLI itself +/// +/// This can be used to update the CLI to the latest version. This is useful if you +/// installed the CLI via the install script / by downloading the binary directly. +#[derive(Debug, Parser)] +pub struct UpdateTool { + /// The owner of the repo to download the binary from. + #[clap(long, default_value = "movemnt")] + repo_owner: String, + + /// The name of the repo to download the binary from. + #[clap(long, default_value = "subnet")] // TODO: update CI/CD to include this binar release in GitHub workflows + repo_name: String, +} + +impl UpdateTool { + // Out of the box this crate assumes that you have releases named a specific way + // with the crate name, version, and target triple in a specific format. We don't + // do this with our releases, we have other GitHub releases beyond just the CLI, + // and we don't build for all major target triples, so we have to do some of the + // work ourselves first to figure out what the latest version of the CLI is and + // which binary to download based on the current OS. Then we can plug that into + // the library which takes care of the rest. + fn update(&self) -> CliTypedResult { + let installation_method = + InstallationMethod::from_env().context("Failed to determine installation method")?; + match installation_method { + InstallationMethod::Source => { + return Err( + anyhow!("Detected this CLI was built from source, refusing to update").into(), + ); + }, + InstallationMethod::Homebrew => { + return Err(anyhow!( + "Detected this CLI comes from homebrew, use `brew upgrade aptos` instead" + ) + .into()); + }, + InstallationMethod::Other => {}, + } + + let info = check_if_update_required(&self.repo_owner, &self.repo_name)?; + if !info.update_required { + return Ok(format!("CLI already up to date (v{})", info.latest_version)); + } + + // Determine the target we should download. This is necessary because we don't + // name our binary releases using the target triples nor do we build specifically + // for all major triples, so we have to generalize to one of the binaries we do + // happen to build. We figure this out based on what system the CLI was built on. 
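+        // Only the BUILD_OS values matched below (linux-x86_64, macos-x86_64,
+        // windows-x86_64) are supported; any other value returns an error asking
+        // the user to download the binary manually.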
+ let build_info = cli_build_information(); + let target = match build_info.get(BUILD_OS).context("Failed to determine build info of current CLI")?.as_str() { + "linux-x86_64" => { + // In the case of Linux, which build to use depends on the OpenSSL + // library on the host machine. So we try to determine that here. + // This code below parses the output of the `openssl version` command, + // where the version string is the 1th (0-indexing) item in the string + // when split by whitespace. + let output = Command::new("openssl") + .args(["version"]) + .output(); + let version = match output { + Ok(output) => { + let stdout = String::from_utf8(output.stdout).unwrap(); + stdout.split_whitespace().collect::>()[1].to_string() + }, + Err(e) => { + println!("Failed to determine OpenSSL version, assuming an older version: {:#}", e); + "1.0.0".to_string() + } + }; + // On Ubuntu < 22.04 the bundled OpenSSL is version 1.x.x, whereas on + // 22.04+ it is 3.x.x. Unfortunately if you build the CLI on a system + // with one major version of OpenSSL, you cannot use it on a system + // with a different version. Accordingly, if the current system uses + // OpenSSL 3.x.x, we use the version of the CLI built on a system with + // OpenSSL 3.x.x, meaning Ubuntu 22.04. Otherwise we use the one built + // on 20.04. + if version.starts_with('3') { + "Ubuntu-22.04-x86_64" + } else { + "Ubuntu-x86_64" + } + }, + "macos-x86_64" => "MacOSX-x86_64", + "windows-x86_64" => "Windows-x86_64", + wildcard => return Err(anyhow!("Self-updating is not supported on your OS right now, please download the binary manually: {}", wildcard).into()), + }; + + // Build a new configuration that will direct the library to download the + // binary with the target version tag and target that we determined above. + let config = Update::configure() + .repo_owner(&self.repo_owner) + .repo_name(&self.repo_name) + .bin_name("movement") + .current_version(cargo_crate_version!()) + .target_version_tag(&info.latest_version_tag) + .target(target) + .build() + .map_err(|e| anyhow!("Failed to build self-update configuration: {:#}", e))?; + + // Update the binary. + let result = config + .update() + .map_err(|e| anyhow!("Failed to update Movement CLI: {:#}", e))?; + + let message = match result { + Status::UpToDate(_) => panic!("We should have caught this already"), + Status::Updated(_) => format!( + "Successfully updated from v{} to v{}", + info.current_version, info.latest_version + ), + }; + + Ok(message) + } +} + +#[async_trait] +impl CliCommand for UpdateTool { + fn command_name(&self) -> &'static str { + "Update" + } + + async fn execute(self) -> CliTypedResult { + tokio::task::spawn_blocking(move || self.update()) + .await + .context("Failed to self-update Movement CLI")? 
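+        // update() is run via spawn_blocking above because the self_update crate
+        // performs blocking network and file I/O.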
+ } +} diff --git a/m1/subnet/Cargo.toml b/m1/subnet/Cargo.toml new file mode 100644 index 00000000..c70a118c --- /dev/null +++ b/m1/subnet/Cargo.toml @@ -0,0 +1,49 @@ +[package] +name = "subnet" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +avalanche-types = { version = "0.0.398", features = ["subnet", "codec_base64"] } +tokio = { version = "1.25.0", features = ["fs", "rt-multi-thread"] } +tonic = { version = "0.8.3", features = ["gzip"] } +serde = { version = "1.0.152", features = ["derive"] } +serde_json = "1.0.93" # https://github.com/serde-rs/json/releases +serde_with = { version = "2.2.0", features = ["hex"] } +log = "0.4.17" +dirs = "5.0.1" +hex = "0.4.3" +bytes = "1.4.0" +env_logger = "0.10.0" +base64 = { version = "0.21.0" } +chrono = "0.4.23" +derivative = "2.2.0" +jsonrpc-core = "18.0.0" +jsonrpc-core-client = { version = "18.0.0" } +jsonrpc-derive = "18.0.0" + +anyhow = { workspace = true } +aptos-config = { workspace = true } +aptos-consensus-types = { workspace = true } +aptos-mempool = { workspace = true } +aptos-runtimes = { workspace = true } +aptos-crypto = { workspace = true } +aptos-db = { workspace = true, features = ["fuzzing"] } +aptos-executor = { workspace = true } +aptos-logger = { workspace = true } +aptos-api-types = { workspace = true } +aptos-api = { workspace = true } +aptos-executor-types = { workspace = true } +aptos-genesis = { workspace = true } +aptos-sdk = { workspace = true } +aptos-state-view = { workspace = true } +aptos-storage-interface = { workspace = true } +aptos-temppath = { workspace = true } +aptos-types = { workspace = true } +aptos-vm = { workspace = true } +futures = { workspace = true } +aptos-vm-genesis = { workspace = true } +rand = { workspace = true } +bcs = { workspace = true } \ No newline at end of file diff --git a/m1/subnet/src/api/chain_handlers.rs b/m1/subnet/src/api/chain_handlers.rs new file mode 100644 index 00000000..cf86476d --- /dev/null +++ b/m1/subnet/src/api/chain_handlers.rs @@ -0,0 +1,463 @@ +use std::io; +use std::marker::PhantomData; + +use avalanche_types::proto::http::Element; +use avalanche_types::subnet::rpc::http::handle::Handle; +use bytes::Bytes; +use jsonrpc_core::{BoxFuture, Error, ErrorCode, IoHandler, Result}; +use jsonrpc_derive::rpc; +use serde::{Deserialize, Serialize}; +use aptos_api::accept_type::AcceptType; +use aptos_api_types::U64; + +use crate::api::de_request; +use crate::vm::Vm; + +#[rpc] +pub trait Rpc { + /*******************************TRANSACTION START***************************************/ + #[rpc(name = "getTransactions", alias("aptosvm.getTransactions"))] + fn get_transactions(&self, args: PageArgs) -> BoxFuture>; + + #[rpc(name = "submitTransaction", alias("aptosvm.submitTransaction"))] + fn submit_transaction(&self, args: RpcReq) -> BoxFuture>; + + #[rpc(name = "submitTransactionBatch", alias("aptosvm.submitTransactionBatch"))] + fn submit_transaction_batch(&self, args: RpcReq) -> BoxFuture>; + + #[rpc(name = "getTransactionByHash", alias("aptosvm.getTransactionByHash"))] + fn get_transaction_by_hash(&self, args: RpcReq) -> BoxFuture>; + + #[rpc(name = "getTransactionByVersion", alias("aptosvm.getTransactionByVersion"))] + fn get_transaction_by_version(&self, args: GetTransactionByVersionArgs) -> BoxFuture>; + + #[rpc(name = "getAccountsTransactions", alias("aptosvm.getAccountsTransactions"))] + fn get_accounts_transactions(&self, args: RpcReq) -> BoxFuture>; + + #[rpc(name = 
"simulateTransaction", alias("aptosvm.simulateTransaction"))] + fn simulate_transaction(&self, args: RpcReq) -> BoxFuture>; + + #[rpc(name = "encodeSubmission", alias("aptosvm.encodeSubmission"))] + fn encode_submission(&self, args: RpcReq) -> BoxFuture>; + + #[rpc(name = "estimateGasPrice", alias("aptosvm.estimateGasPrice"))] + fn estimate_gas_price(&self) -> BoxFuture>; + /*******************************TRANSACTION END***************************************/ + + + /*******************************HELPER API START***************************************/ + #[rpc(name = "faucet", alias("aptosvm.faucet"))] + fn faucet_apt(&self, args: RpcReq) -> BoxFuture>; + + #[rpc(name = "createAccount", alias("aptosvm.createAccount"))] + fn create_account(&self, args: RpcReq) -> BoxFuture>; + + /*******************************HELPER API END***************************************/ + + + /******************************* ACCOUNT START ***************************************/ + + #[rpc(name = "getAccount", alias("aptosvm.getAccount"))] + fn get_account(&self, args: RpcReq) -> BoxFuture>; + + #[rpc(name = "getAccountResources", alias("aptosvm.getAccountResources"))] + fn get_account_resources(&self, args: RpcReq) -> BoxFuture>; + + #[rpc(name = "getAccountModules", alias("aptosvm.getAccountModules"))] + fn get_account_modules(&self, args: RpcReq) -> BoxFuture>; + + #[rpc(name = "getAccountResourcesState", alias("aptosvm.getAccountResourcesState"))] + fn get_account_resources_state(&self, args: AccountStateArgs) -> BoxFuture>; + + #[rpc(name = "getAccountModulesState", alias("aptosvm.getAccountModulesState"))] + fn get_account_modules_state(&self, args: AccountStateArgs) -> BoxFuture>; + /******************************* ACCOUNT END ***************************************/ + + + /*******************************BLOCK START***************************************/ + #[rpc(name = "getBlockByHeight", alias("aptosvm.getBlockByHeight"))] + fn get_block_by_height(&self, args: BlockArgs) -> BoxFuture>; + + #[rpc(name = "getBlockByVersion", alias("aptosvm.getBlockByVersion"))] + fn get_block_by_version(&self, args: BlockArgs) -> BoxFuture>; + /*******************************BLOCK END***************************************/ + + #[rpc(name = "viewFunction", alias("aptosvm.viewFunction"))] + fn view_function(&self, args: RpcReq) -> BoxFuture>; + + #[rpc(name = "getTableItem", alias("aptosvm.getTableItem"))] + fn get_table_item(&self, args: RpcTableReq) -> BoxFuture>; + + #[rpc(name = "getRawTableItem", alias("aptosvm.getRawTableItem"))] + fn get_raw_table_item(&self, args: RpcTableReq) -> BoxFuture>; + + #[rpc(name = "getEventsByCreationNumber", alias("aptosvm.getEventsByCreationNumber"))] + fn get_events_by_creation_number(&self, args: RpcEventNumReq) -> BoxFuture>; + + #[rpc(name = "getEventsByEventHandle", alias("aptosvm.getEventsByEventHandle"))] + fn get_events_by_event_handle(&self, args: RpcEventHandleReq) -> BoxFuture>; + + #[rpc(name = "getLedgerInfo", alias("aptosvm.getLedgerInfo"))] + fn get_ledger_info(&self) -> BoxFuture>; +} + + +#[derive(Deserialize, Serialize, Debug, Clone)] +pub struct GetTableItemArgs { + pub table_handle: String, + pub key_type: String, + pub value_type: String, + pub key: String, + pub is_bsc_format: Option, +} + +#[derive(Deserialize, Serialize, Debug, Clone)] +pub struct RpcReq { + pub data: String, + pub ledger_version: Option, + pub start: Option, + pub limit: Option, + pub is_bsc_format: Option, +} + +#[derive(Deserialize, Serialize, Debug, Clone)] +pub struct RpcRes { + pub data: 
String, + pub header: String, +} + + +#[derive(Deserialize, Serialize, Debug, Clone)] +pub struct RpcTableReq { + pub query: String, + pub body: String, + pub ledger_version: Option, + pub is_bsc_format: Option, +} + +#[derive(Deserialize, Serialize, Debug, Clone)] +pub struct RpcEventNumReq { + pub address: String, + pub creation_number: U64, + pub start: Option, + pub limit: Option, + pub is_bsc_format: Option, +} + +#[derive(Deserialize, Serialize, Debug, Clone)] +pub struct RpcEventHandleReq { + pub start: Option, + pub limit: Option, + pub address: String, + pub event_handle: String, + pub field_name: String, + pub is_bsc_format: Option, +} + +#[derive(Deserialize, Serialize, Debug, Clone)] +pub struct BlockArgs { + pub height_or_version: u64, + pub with_transactions: Option, + pub is_bsc_format: Option, +} + +#[derive(Deserialize, Serialize, Debug, Clone)] +pub struct GetTransactionByVersionArgs { + pub version: U64, + pub is_bsc_format: Option, +} + +#[derive(Deserialize, Serialize, Debug, Clone)] +pub struct AccountStateArgs { + pub account: String, + pub resource: String, + pub ledger_version: Option, + pub is_bsc_format: Option, +} + +#[derive(Deserialize, Serialize, Debug, Clone)] +pub struct PageArgs { + pub start: Option, + pub limit: Option, + pub is_bsc_format: Option, +} + + +#[derive(Clone)] +pub struct ChainService { + pub vm: Vm, +} + +impl ChainService { + pub fn new(vm: Vm) -> Self { + Self { vm } + } +} + + +impl Rpc for ChainService { + fn get_transactions(&self, args: PageArgs) -> BoxFuture> { + let vm = self.vm.clone(); + Box::pin(async move { + let ret = vm.get_transactions(args).await; + return Ok(ret); + }) + } + + fn submit_transaction(&self, args: RpcReq) -> BoxFuture> { + log::debug!("submit_transaction called"); + let vm = self.vm.clone(); + Box::pin(async move { + let accept = if args.is_bsc_format.unwrap_or(false) { + AcceptType::Bcs + } else { + AcceptType::Json + }; + let r = vm.submit_transaction(hex::decode(args.data).unwrap(), accept).await; + Ok(r) + }) + } + + fn submit_transaction_batch(&self, args: RpcReq) -> BoxFuture> { + let vm = self.vm.clone(); + Box::pin(async move { + let accept = if args.is_bsc_format.unwrap_or(false) { + AcceptType::Bcs + } else { + AcceptType::Json + }; + let r = vm.submit_transaction_batch(hex::decode(args.data).unwrap(), accept).await; + Ok(r) + }) + } + + fn get_transaction_by_hash(&self, args: RpcReq) -> BoxFuture> { + let vm = self.vm.clone(); + Box::pin(async move { + let ret = vm.get_transaction_by_hash(args).await; + return Ok(ret); + }) + } + + fn get_transaction_by_version(&self, args: GetTransactionByVersionArgs) -> BoxFuture> { + let vm = self.vm.clone(); + Box::pin(async move { + let ret = vm.get_transaction_by_version(args).await; + return Ok(ret); + }) + } + + fn get_accounts_transactions(&self, args: RpcReq) -> BoxFuture> { + let vm = self.vm.clone(); + Box::pin(async move { + let ret = vm.get_accounts_transactions(args).await; + return Ok(ret); + }) + } + + fn simulate_transaction(&self, args: RpcReq) -> BoxFuture> { + let vm = self.vm.clone(); + Box::pin(async move { + let data = hex::decode(args.data).unwrap(); + let accept = if args.is_bsc_format.unwrap_or(false) { + AcceptType::Bcs + } else { + AcceptType::Json + }; + let ret = vm.simulate_transaction(data, accept).await; + Ok(ret) + }) + } + + fn encode_submission(&self, args: RpcReq) -> BoxFuture> { + let vm = self.vm.clone(); + Box::pin(async move { + let ret = vm.encode_submission(args.data.as_str()).await; + return Ok(ret); + }) + } + + fn 
estimate_gas_price(&self) -> BoxFuture> { + let vm = self.vm.clone(); + Box::pin(async move { + let ret = vm.estimate_gas_price().await; + Ok(ret) + }) + } + + fn faucet_apt(&self, args: RpcReq) -> BoxFuture> { + let vm = self.vm.clone(); + Box::pin(async move { + let acc = hex::decode(args.data).unwrap(); + let accept = if args.is_bsc_format.unwrap_or(false) { + AcceptType::Bcs + } else { + AcceptType::Json + }; + let ret = vm.faucet_apt(acc, accept).await; + Ok(ret) + }) + } + + fn create_account(&self, args: RpcReq) -> BoxFuture> { + let vm = self.vm.clone(); + Box::pin(async move { + let accept = if args.is_bsc_format.unwrap_or(false) { + AcceptType::Bcs + } else { + AcceptType::Json + }; + let ret = vm.create_account(args.data.as_str(), accept).await; + Ok(ret) + }) + } + + fn get_account(&self, args: RpcReq) -> BoxFuture> { + let vm = self.vm.clone(); + Box::pin(async move { + let ret = vm.get_account(args).await; + return Ok(ret); + }) + } + + fn get_account_resources(&self, args: RpcReq) -> BoxFuture> { + let vm = self.vm.clone(); + Box::pin(async move { + let ret = vm.get_account_resources(args).await; + return Ok(ret); + }) + } + + fn get_account_modules(&self, args: RpcReq) -> BoxFuture> { + let vm = self.vm.clone(); + Box::pin(async move { + let ret = vm.get_account_modules(args).await; + return Ok(ret); + }) + } + + fn get_account_resources_state(&self, args: AccountStateArgs) -> BoxFuture> { + let vm = self.vm.clone(); + Box::pin(async move { + let ret = vm.get_account_resources_state(args).await; + return Ok(ret); + }) + } + + fn get_account_modules_state(&self, args: AccountStateArgs) -> BoxFuture> { + let vm = self.vm.clone(); + Box::pin(async move { + let ret = vm.get_account_modules_state(args).await; + return Ok(ret); + }) + } + + fn get_block_by_height(&self, args: BlockArgs) -> BoxFuture> { + let vm = self.vm.clone(); + Box::pin(async move { + let ret = vm.get_block_by_height(args).await; + return Ok(ret); + }) + } + + fn get_block_by_version(&self, args: BlockArgs) -> BoxFuture> { + let vm = self.vm.clone(); + Box::pin(async move { + let ret = vm.get_block_by_version(args).await; + return Ok(ret); + }) + } + + fn view_function(&self, args: RpcReq) -> BoxFuture> { + let vm = self.vm.clone(); + Box::pin(async move { + log::info!("view_function called {}",args.data.clone()); + let ret = vm.view_function(args).await; + return Ok(ret); + }) + } + + fn get_table_item(&self, args: RpcTableReq) -> BoxFuture> { + let vm = self.vm.clone(); + Box::pin(async move { + let ret = vm.get_table_item(args).await; + return Ok(ret); + }) + } + + fn get_raw_table_item(&self, args: RpcTableReq) -> BoxFuture> { + let vm = self.vm.clone(); + Box::pin(async move { + let ret = vm.get_raw_table_item(args).await; + return Ok(ret); + }) + } + + fn get_events_by_creation_number(&self, args: RpcEventNumReq) -> BoxFuture> { + let vm = self.vm.clone(); + Box::pin(async move { + let ret = vm.get_events_by_creation_number(args).await; + return Ok(ret); + }) + } + + fn get_events_by_event_handle(&self, args: RpcEventHandleReq) -> BoxFuture> { + let vm = self.vm.clone(); + Box::pin(async move { + let ret = vm.get_events_by_event_handle(args).await; + return Ok(ret); + }) + } + + fn get_ledger_info(&self) -> BoxFuture> { + let vm = self.vm.clone(); + Box::pin(async move { + let ret = vm.get_ledger_info().await; + return Ok(ret); + }) + } +} + +#[derive(Clone, Debug)] +pub struct ChainHandler { + pub handler: IoHandler, + _marker: PhantomData, +} + +#[tonic::async_trait] +impl Handle for ChainHandler + 
where + T: Rpc + Send + Sync + Clone + 'static, +{ + async fn request( + &self, + req: &Bytes, + _headers: &[Element], + ) -> io::Result<(Bytes, Vec)> { + match self.handler.handle_request(&de_request(req)?).await { + Some(resp) => Ok((Bytes::from(resp), Vec::new())), + None => Err(io::Error::new( + io::ErrorKind::Other, + "failed to handle request", + )), + } + } +} + +impl ChainHandler { + pub fn new(service: T) -> Self { + let mut handler = jsonrpc_core::IoHandler::new(); + handler.extend_with(Rpc::to_delegate(service)); + Self { + handler, + _marker: PhantomData, + } + } +} + + +fn create_jsonrpc_error(e: std::io::Error) -> Error { + let mut error = Error::new(ErrorCode::InternalError); + error.message = format!("{}", e); + error +} diff --git a/m1/subnet/src/api/mod.rs b/m1/subnet/src/api/mod.rs new file mode 100644 index 00000000..1e5bcc57 --- /dev/null +++ b/m1/subnet/src/api/mod.rs @@ -0,0 +1,30 @@ +//! Implementation of timestampvm APIs, to be registered via +//! `create_static_handlers` and `create_handlers` in the [`vm`](crate::vm) crate. + +use std::io; + +use bytes::Bytes; +use jsonrpc_core::MethodCall; +use serde::{Deserialize, Serialize}; + +pub mod chain_handlers; +pub mod static_handlers; + +#[derive(Deserialize, Serialize, Debug, Clone)] +pub struct PingResponse { + pub success: bool, +} +pub fn de_request(req: &Bytes) -> io::Result { + let method_call: MethodCall = serde_json::from_slice(req).map_err(|e| { + io::Error::new( + io::ErrorKind::Other, + format!("failed to deserialize request: {e}"), + ) + })?; + serde_json::to_string(&method_call).map_err(|e| { + io::Error::new( + io::ErrorKind::Other, + format!("failed to serialize request: {e}"), + ) + }) +} diff --git a/m1/subnet/src/api/static_handlers.rs b/m1/subnet/src/api/static_handlers.rs new file mode 100644 index 00000000..392e5b37 --- /dev/null +++ b/m1/subnet/src/api/static_handlers.rs @@ -0,0 +1,69 @@ +//! Implements static handlers specific to this VM. +//! To be served via `[HOST]/ext/vm/[VM ID]/static`. + +use std::io; + +use avalanche_types::proto::http::Element; +use avalanche_types::subnet::rpc::http::handle::Handle; +use bytes::Bytes; +use jsonrpc_core::{BoxFuture, IoHandler, Result}; +use jsonrpc_derive::rpc; + +use crate::api::de_request; + +/// Defines static handler RPCs for this VM. +#[rpc] +pub trait Rpc { + #[rpc(name = "ping", alias("timestampvm.ping"))] + fn ping(&self) -> BoxFuture>; +} + +/// Implements API services for the static handlers. 
+#[derive(Default)]
+pub struct StaticService {}
+
+impl StaticService {
+    #[must_use]
+    pub fn new() -> Self {
+        Self {}
+    }
+}
+
+impl Rpc for StaticService {
+    fn ping(&self) -> BoxFuture<Result<crate::api::PingResponse>> {
+        log::debug!("ping called");
+        Box::pin(async move { Ok(crate::api::PingResponse { success: true }) })
+    }
+}
+
+#[derive(Clone)]
+pub struct StaticHandler {
+    pub handler: IoHandler,
+}
+
+impl StaticHandler {
+    #[must_use]
+    pub fn new(service: StaticService) -> Self {
+        let mut handler = jsonrpc_core::IoHandler::new();
+        handler.extend_with(Rpc::to_delegate(service));
+        Self { handler }
+    }
+}
+
+#[tonic::async_trait]
+impl Handle for StaticHandler {
+    async fn request(
+        &self,
+        req: &Bytes,
+        _headers: &[Element],
+    ) -> std::io::Result<(Bytes, Vec<Element>)> {
+        match self.handler.handle_request(&de_request(req)?).await {
+            Some(resp) => Ok((Bytes::from(resp), Vec::new())),
+            None => Err(io::Error::new(
+                io::ErrorKind::Other,
+                "failed to handle request",
+            )),
+        }
+    }
+}
diff --git a/m1/subnet/src/block/mod.rs b/m1/subnet/src/block/mod.rs
new file mode 100644
index 00000000..aee144d0
--- /dev/null
+++ b/m1/subnet/src/block/mod.rs
@@ -0,0 +1,296 @@
+//! Implementation of [`snowman.Block`](https://pkg.go.dev/github.com/ava-labs/avalanchego/snow/consensus/snowman#Block) interface for timestampvm.
+
+use std::{
+    fmt,
+    io::{self, Error, ErrorKind},
+};
+
+use avalanche_types::{choices, ids, subnet};
+use chrono::{Duration, Utc};
+use derivative::{self, Derivative};
+use serde::{Deserialize, Serialize};
+use serde_with::serde_as;
+
+use crate::state;
+
+/// Represents a block, specific to [`Vm`](crate::vm::Vm).
+#[serde_as]
+#[derive(Serialize, Deserialize, Clone, Derivative)]
+#[derivative(Debug, PartialEq, Eq)]
+pub struct Block {
+    /// The block Id of the parent block.
+    parent_id: ids::Id,
+    /// This block's height.
+    /// The height of the genesis block is 0.
+    height: u64,
+    /// Unix second when this block was proposed.
+    timestamp: u64,
+
+    data: Vec<u8>,
+
+    /// Current block status.
+    #[serde(skip)]
+    status: choices::status::Status,
+    /// This block's encoded bytes.
+    #[serde(skip)]
+    bytes: Vec<u8>,
+    /// Generated block Id.
+    #[serde(skip)]
+    id: ids::Id,
+
+    /// Reference to the Vm state manager for blocks.
+    #[derivative(Debug = "ignore", PartialEq = "ignore")]
+    #[serde(skip)]
+    state: state::State,
+}
+
+impl Block {
+    pub fn new(
+        parent_id: ids::Id,
+        height: u64,
+        timestamp: u64,
+        data: Vec<u8>,
+        status: choices::status::Status,
+    ) -> io::Result<Self> {
+        let mut b = Self {
+            parent_id,
+            height,
+            timestamp,
+            data,
+            status: choices::status::Status::default(),
+            bytes: Vec::new(),
+            id: ids::Id::empty(),
+            state: state::State::default(),
+        };
+
+        b.status = status;
+        b.bytes = b.to_slice()?;
+        b.id = ids::Id::sha256(&b.bytes);
+
+        Ok(b)
+    }
+
+    pub fn to_json_string(&self) -> io::Result<String> {
+        serde_json::to_string(&self).map_err(|e| {
+            Error::new(
+                ErrorKind::Other,
+                format!("failed to serialize Block to JSON string: {}", e),
+            )
+        })
+    }
+
+    /// Encodes the [`Block`](Block) to JSON in bytes.
+    pub fn to_slice(&self) -> io::Result<Vec<u8>> {
+        serde_json::to_vec(&self).map_err(|e| {
+            Error::new(
+                ErrorKind::Other,
+                format!("failed to serialize Block to JSON bytes: {}", e),
+            )
+        })
+    }
+
+    /// Loads [`Block`](Block) from JSON bytes.
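+    /// The `bytes` and `id` fields are recomputed from the raw input, since both are
+    /// skipped during (de)serialization.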
+ pub fn from_slice(d: impl AsRef<[u8]>) -> io::Result { + let dd = d.as_ref(); + let mut b: Self = serde_json::from_slice(dd).map_err(|e| { + Error::new( + ErrorKind::Other, + format!("failed to deserialize Block from JSON {}", e), + ) + })?; + + b.bytes = dd.to_vec(); + b.id = ids::Id::sha256(&b.bytes); + + Ok(b) + } + + /// Returns the parent block Id. + pub fn parent_id(&self) -> ids::Id { + self.parent_id + } + + /// Returns the height of this block. + pub fn height(&self) -> u64 { + self.height + } + + /// Returns the timestamp of this block. + pub fn timestamp(&self) -> u64 { + self.timestamp + } + + /// Returns the data of this block. + pub fn data(&self) -> &[u8] { + &self.data + } + + /// Returns the status of this block. + pub fn status(&self) -> choices::status::Status { + self.status.clone() + } + + /// Updates the status of this block. + pub fn set_status(&mut self, status: choices::status::Status) { + self.status = status; + } + + /// Returns the byte representation of this block. + pub fn bytes(&self) -> &[u8] { + &self.bytes + } + + /// Returns the ID of this block + pub fn id(&self) -> ids::Id { + self.id + } + + /// Updates the state of the block. + pub fn set_state(&mut self, state: state::State) { + self.state = state; + } + + /// Verifies [`Block`](Block) properties (e.g., heights), + /// and once verified, records it to the [`State`](crate::state::State). + pub async fn verify(&mut self) -> io::Result<()> { + if self.height == 0 && self.parent_id == ids::Id::empty() { + log::debug!( + "block {} has an empty parent Id since it's a genesis block -- skipping verify", + self.id + ); + self.state.add_verified(&self.clone()).await; + return Ok(()); + } + + // if already exists in database, it means it's already accepted + // thus no need to verify once more + if self.state.get_block(&self.id).await.is_ok() { + log::debug!("block {} already verified", self.id); + return Ok(()); + } + let prnt_blk = self.state.get_block(&self.parent_id).await?; + + // ensure the height of the block is immediately following its parent + if prnt_blk.height != self.height - 1 { + return Err(Error::new( + ErrorKind::InvalidData, + format!( + "parent block height {} != current block height {} - 1", + prnt_blk.height, self.height + ), + )); + } + + // ensure block timestamp is after its parent + if prnt_blk.timestamp > self.timestamp { + return Err(Error::new( + ErrorKind::InvalidData, + format!( + "parent block timestamp {} > current block timestamp {}", + prnt_blk.timestamp, self.timestamp + ), + )); + } + + // ensure block timestamp is no more than an hour ahead of this nodes time + if self.timestamp >= (Utc::now() + Duration::hours(1)).timestamp() as u64 { + return Err(Error::new( + ErrorKind::InvalidData, + format!( + "block timestamp {} is more than 1 hour ahead of local time", + self.timestamp + ), + )); + } + + // add newly verified block to memory + self.state.add_verified(&self.clone()).await; + Ok(()) + } + + /// Mark this [`Block`](Block) accepted and updates [`State`](crate::state::State) accordingly. 
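+    /// Acceptance first executes the block's payload through the Vm (`inner_build`),
+    /// then persists the block, records it as the last accepted block, and removes it
+    /// from the in-memory verified set.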
+ pub async fn accept(&mut self) -> io::Result<()> { + self.inner_build().await?; + println!("-----accept----1---"); + self.set_status(choices::status::Status::Accepted); + // only decided blocks are persistent -- no reorg + self.state.write_block(&self.clone()).await?; + self.state.set_last_accepted_block(&self.id()).await?; + self.state.remove_verified(&self.id()).await; + Ok(()) + } + + async fn inner_build(&self) -> io::Result<()> { + if let Some(vm_) = self.state.vm.as_ref() { + let vm = vm_.read().await; + return vm.inner_build_block(self.data.clone()).await; + } + return Ok(()); + } + + /// Mark this [`Block`](Block) rejected and updates [`State`](crate::state::State) accordingly. + pub async fn reject(&mut self) -> io::Result<()> { + self.set_status(choices::status::Status::Rejected); + println!("-----reject----1---"); + // only decided blocks are persistent -- no reorg + self.state.write_block(&self.clone()).await?; + + self.state.remove_verified(&self.id()).await; + Ok(()) + } +} + +/// ref. https://doc.rust-lang.org/std/string/trait.ToString.html +/// ref. https://doc.rust-lang.org/std/fmt/trait.Display.html +/// Use "Self.to_string()" to directly invoke this +impl fmt::Display for Block { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let serialized = self.to_json_string().unwrap(); + write!(f, "{serialized}") + } +} + + +#[tonic::async_trait] +impl subnet::rpc::consensus::snowman::Block for Block { + async fn bytes(&self) -> &[u8] { + return self.bytes.as_ref(); + } + + async fn height(&self) -> u64 { + self.height + } + + async fn timestamp(&self) -> u64 { + self.timestamp + } + + async fn parent(&self) -> ids::Id { + self.parent_id + } + + async fn verify(&mut self) -> io::Result<()> { + self.verify().await + } +} + +#[tonic::async_trait] +impl subnet::rpc::consensus::snowman::Decidable for Block { + async fn id(&self) -> ids::Id { + self.id + } + + /// Implements "snowman.Block.choices.Decidable" + async fn status(&self) -> choices::status::Status { + self.status.clone() + } + + async fn accept(&mut self) -> io::Result<()> { + self.accept().await + } + + async fn reject(&mut self) -> io::Result<()> { + self.reject().await + } +} + diff --git a/m1/subnet/src/main.rs b/m1/subnet/src/main.rs new file mode 100644 index 00000000..f79bd7bb --- /dev/null +++ b/m1/subnet/src/main.rs @@ -0,0 +1,19 @@ +use std::io; + +use avalanche_types::subnet; +use tokio::sync::broadcast::{self, Receiver, Sender}; + +mod vm; +mod state; +mod block; +mod api; + +#[tokio::main] +async fn main() -> io::Result<()> { + env_logger::init_from_env( + env_logger::Env::default().filter_or(env_logger::DEFAULT_FILTER_ENV, "info"), + ); + let (stop_ch_tx, stop_ch_rx): (Sender<()>, Receiver<()>) = broadcast::channel(1); + let vm_server = subnet::rpc::vm::server::Server::new(vm::Vm::new(), stop_ch_tx); + subnet::rpc::vm::serve(vm_server, stop_ch_rx).await +} diff --git a/m1/subnet/src/state/mod.rs b/m1/subnet/src/state/mod.rs new file mode 100644 index 00000000..1a21945a --- /dev/null +++ b/m1/subnet/src/state/mod.rs @@ -0,0 +1,182 @@ +//! Manages the virtual machine states. + +use std::{ + collections::HashMap, + io::{self, Error, ErrorKind}, + sync::Arc, +}; + +use avalanche_types::{choices, ids, subnet}; +use serde::{Deserialize, Serialize}; +use tokio::sync::RwLock; + +use crate::block::Block; +use crate::vm::{ Vm}; + +/// Manages block and chain states for this Vm, both in-memory and persistent. +#[derive(Clone)] +pub struct State { + pub db: Arc>>, + + /// Maps block Id to Block. 
+ /// Each element is verified but not yet accepted/rejected (e.g., preferred). + pub verified_blocks: Arc>>, + // pub vm: Option>>, + pub vm: Option>>, +} + +impl Default for State { + fn default() -> State { + Self { + db: Arc::new(RwLock::new(subnet::rpc::database::memdb::Database::new())), + verified_blocks: Arc::new(RwLock::new(HashMap::new())), + vm: None, + } + } +} + +const LAST_ACCEPTED_BLOCK_KEY: &[u8] = b"last_accepted_block"; + +const STATUS_PREFIX: u8 = 0x0; + +const DELIMITER: u8 = b'/'; + +/// Returns a vec of bytes used as a key for identifying blocks in state. +/// 'STATUS_PREFIX' + 'BYTE_DELIMITER' + [block_id] +fn block_with_status_key(blk_id: &ids::Id) -> Vec { + let mut k: Vec = Vec::with_capacity(ids::LEN + 2); + k.push(STATUS_PREFIX); + k.push(DELIMITER); + k.extend_from_slice(&blk_id.to_vec()); + k +} + +/// Wraps a [`Block`](crate::block::Block) and its status. +/// This is the data format that [`State`](State) uses to persist blocks. +#[derive(Serialize, Deserialize, Clone)] +struct BlockWithStatus { + block_bytes: Vec, + status: choices::status::Status, +} + +impl BlockWithStatus { + fn encode(&self) -> io::Result> { + serde_json::to_vec(&self).map_err(|e| { + Error::new( + ErrorKind::Other, + format!("failed to serialize BlockStatus to JSON bytes: {}", e), + ) + }) + } + + fn from_slice(d: impl AsRef<[u8]>) -> io::Result { + let dd = d.as_ref(); + serde_json::from_slice(dd).map_err(|e| { + Error::new( + ErrorKind::Other, + format!("failed to deserialize BlockStatus from JSON: {}", e), + ) + }) + } +} + +impl State { + /// Persists the last accepted block Id to state. + pub async fn set_last_accepted_block(&self, blk_id: &ids::Id) -> io::Result<()> { + let mut db = self.db.write().await; + db.put(LAST_ACCEPTED_BLOCK_KEY, &blk_id.to_vec()) + .await + .map_err(|e| { + Error::new( + ErrorKind::Other, + format!("failed to put last accepted block: {:?}", e), + ) + }) + } + + pub fn set_vm(&mut self, vm:Vm) { + self.vm = Some(Arc::new(RwLock::new(vm))); + } + + /// Returns "true" if there's a last accepted block found. + pub async fn has_last_accepted_block(&self) -> io::Result { + let db = self.db.read().await; + match db.has(LAST_ACCEPTED_BLOCK_KEY).await { + Ok(found) => Ok(found), + Err(e) => Err(Error::new( + ErrorKind::Other, + format!("failed to load last accepted block: {}", e), + )), + } + } + + /// Returns the last accepted block Id from state. + pub async fn get_last_accepted_block_id(&self) -> io::Result { + let db = self.db.read().await; + match db.get(LAST_ACCEPTED_BLOCK_KEY).await { + Ok(d) => Ok(ids::Id::from_slice(&d)), + Err(e) => { + if subnet::rpc::errors::is_not_found(&e) { + return Ok(ids::Id::empty()); + } + Err(e) + } + } + } + + /// Adds a block to "verified_blocks". + pub async fn add_verified(&mut self, block: &Block) { + let blk_id = block.id(); + let mut verified_blocks = self.verified_blocks.write().await; + verified_blocks.insert(blk_id, block.clone()); + } + + /// Removes a block from "verified_blocks". + pub async fn remove_verified(&mut self, blk_id: &ids::Id) { + let mut verified_blocks = self.verified_blocks.write().await; + verified_blocks.remove(blk_id); + } + + /// Returns "true" if the block Id has been already verified. + pub async fn has_verified(&self, blk_id: &ids::Id) -> bool { + let verified_blocks = self.verified_blocks.read().await; + verified_blocks.contains_key(blk_id) + } + + /// Writes a block to the state storage. 
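+    /// The block is wrapped in a `BlockWithStatus` so that its status, which `Block`
+    /// itself skips during serialization, is persisted together with the block bytes.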
+ pub async fn write_block(&mut self, block: &Block) -> io::Result<()> { + let blk_id = block.id(); + let blk_bytes = block.to_slice()?; + + let mut db = self.db.write().await; + + let blk_status = BlockWithStatus { + block_bytes: blk_bytes, + status: block.status(), + }; + let blk_status_bytes = blk_status.encode()?; + + db.put(&block_with_status_key(&blk_id), &blk_status_bytes) + .await + .map_err(|e| Error::new(ErrorKind::Other, format!("failed to put block: {:?}", e))) + } + + /// Reads a block from the state storage using the block_with_status_key. + pub async fn get_block(&self, blk_id: &ids::Id) -> io::Result { + // check if the block exists in memory as previously verified. + let verified_blocks = self.verified_blocks.read().await; + if let Some(b) = verified_blocks.get(blk_id) { + return Ok(b.clone()); + } + let db = self.db.read().await; + + let blk_status_bytes = db.get(&block_with_status_key(blk_id)).await?; + let blk_status = BlockWithStatus::from_slice(blk_status_bytes)?; + + let mut blk = Block::from_slice(&blk_status.block_bytes)?; + blk.set_status(blk_status.status); + + Ok(blk) + } +} + diff --git a/m1/subnet/src/vm/mod.rs b/m1/subnet/src/vm/mod.rs new file mode 100644 index 00000000..bbdefb38 --- /dev/null +++ b/m1/subnet/src/vm/mod.rs @@ -0,0 +1,1743 @@ +use std::{collections::HashMap, fs, io::{self, Error, ErrorKind}, sync::Arc}; +use std::str::FromStr; +use std::time::{Duration}; +use avalanche_types::{ + choices, ids, + subnet::{self, rpc::snow}, +}; +use avalanche_types::subnet::rpc::database::manager::{DatabaseManager, Manager}; +use avalanche_types::subnet::rpc::health::Checkable; +use avalanche_types::subnet::rpc::snow::engine::common::appsender::AppSender; +use avalanche_types::subnet::rpc::snow::engine::common::appsender::client::AppSenderClient; +use avalanche_types::subnet::rpc::snow::engine::common::engine::{AppHandler, CrossChainAppHandler, NetworkAppHandler}; +use avalanche_types::subnet::rpc::snow::engine::common::http_handler::{HttpHandler, LockOptions}; +use avalanche_types::subnet::rpc::snow::engine::common::message::Message::PendingTxs; +use avalanche_types::subnet::rpc::snow::engine::common::vm::{CommonVm, Connector}; +use avalanche_types::subnet::rpc::snow::validators::client::ValidatorStateClient; +use avalanche_types::subnet::rpc::snowman::block::{BatchedChainVm, ChainVm, Getter, Parser}; +use bytes::Bytes; +use chrono::{DateTime, Utc}; +use futures::{channel::mpsc as futures_mpsc, StreamExt}; +use hex; +use serde::{Deserialize, Serialize}; +use tokio::sync::{mpsc::Sender, RwLock}; + +use aptos_api::{Context, get_raw_api_service, RawApi}; +use aptos_api::accept_type::AcceptType; +use aptos_api::response::{AptosResponseContent, BasicResponse}; +use aptos_api::transactions::{SubmitTransactionPost, SubmitTransactionResponse, SubmitTransactionsBatchPost, SubmitTransactionsBatchResponse}; +use aptos_api_types::{Address, EncodeSubmissionRequest, IdentifierWrapper, MoveStructTag, RawTableItemRequest, StateKeyWrapper, TableItemRequest, ViewRequest}; +use aptos_config::config::NodeConfig; +use aptos_crypto::{HashValue, ValidCryptoMaterialStringExt}; +use aptos_crypto::ed25519::Ed25519PublicKey; +use aptos_db::AptosDB; +use aptos_executor::block_executor::BlockExecutor; +use aptos_executor::db_bootstrapper::{generate_waypoint, maybe_bootstrap}; +use aptos_executor_types::BlockExecutorTrait; +use aptos_mempool::{MempoolClientRequest, MempoolClientSender, SubmissionStatus}; +use aptos_mempool::core_mempool::{CoreMempool, TimelineState}; +use 
aptos_sdk::rest_client::aptos_api_types::MAX_RECURSIVE_TYPES_ALLOWED; +use aptos_sdk::transaction_builder::TransactionFactory; +use aptos_sdk::types::{AccountKey, LocalAccount}; +use aptos_state_view::account_with_state_view::AsAccountWithStateView; +use aptos_storage_interface::DbReaderWriter; +use aptos_storage_interface::state_view::DbStateViewAtVersion; +use aptos_types::account_address::AccountAddress; +use aptos_types::account_config::aptos_test_root_address; +use aptos_types::account_view::AccountView; +use aptos_types::block_info::BlockInfo; +use aptos_types::block_metadata::BlockMetadata; +use aptos_types::chain_id::ChainId; +use aptos_types::ledger_info::{generate_ledger_info_with_sig, LedgerInfo}; +use aptos_types::mempool_status::{MempoolStatus, MempoolStatusCode}; +use aptos_types::transaction::{SignedTransaction, Transaction, WriteSetPayload}; +use aptos_types::transaction::Transaction::UserTransaction; +use aptos_types::validator_signer::ValidatorSigner; +use aptos_vm::AptosVM; +use aptos_vm_genesis::{GENESIS_KEYPAIR, test_genesis_change_set_and_validators}; + +use crate::{block::Block, state}; +use crate::api::chain_handlers::{AccountStateArgs, BlockArgs, ChainHandler, ChainService, GetTransactionByVersionArgs, PageArgs, RpcEventHandleReq, RpcEventNumReq, RpcReq, RpcRes, RpcTableReq}; +use crate::api::static_handlers::{StaticHandler, StaticService}; + +const VERSION: &str = env!("CARGO_PKG_VERSION"); +const MOVE_DB_DIR: &str = ".move-chain-data"; + +#[derive(Serialize, Deserialize, Clone)] +pub struct AptosData( + pub Vec, // block info + pub HashValue, // block id + pub HashValue, + pub u64, + pub u64, +); + +#[derive(Serialize, Deserialize, Clone)] +pub struct AptosHeader { + chain_id: u8, + ledger_version: u64, + ledger_oldest_version: u64, + ledger_timestamp_usec: u64, + epoch: u64, + block_height: u64, + oldest_block_height: u64, + cursor: Option, +} + + +/// Represents VM-specific states. +/// Defined in a separate struct, for interior mutability in [`Vm`](Vm). +/// To be protected with `Arc` and `RwLock`. +pub struct VmState { + pub ctx: Option>, + + /// Represents persistent Vm state. + pub state: Option, + /// Currently preferred block Id. + pub preferred: ids::Id, + + /// Set "true" to indicate that the Vm has finished bootstrapping + /// for the chain. + pub bootstrapped: bool, +} + +impl Default for VmState { + fn default() -> Self { + Self { + ctx: None, + state: None, + preferred: ids::Id::empty(), + bootstrapped: false, + } + } +} + +/// Implements [`snowman.block.ChainVM`](https://pkg.go.dev/github.com/ava-labs/avalanchego/snow/engine/snowman/block#ChainVM) interface. +#[derive(Clone)] +pub struct Vm { + pub state: Arc>, + + pub app_sender: Option, + + pub api_service: Option, + + pub api_context: Option, + + pub core_mempool: Option>>, + + /// Channel to send messages to the snowman consensus engine. 
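+    /// `notify_block_ready` sends `PendingTxs` over this channel when the mempool has
+    /// transactions waiting to be built into a block.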
+ pub to_engine: Option>>>, + + pub db: Option>>, + + pub signer: Option, + + pub executor: Option>>>, + + pub build_status: Arc>, + // 0 done 1 building + pub has_pending_tx: Arc>, + +} + + +impl Default for Vm + +{ + fn default() -> Self { + Self::new() + } +} + +impl Vm { + pub fn new() -> Self { + Self { + state: Arc::new(RwLock::new(VmState::default())), + app_sender: None, + api_service: None, + api_context: None, + core_mempool: None, + to_engine: None, + signer: None, + executor: None, + db: None, + build_status: Arc::new(RwLock::new(0)), + has_pending_tx: Arc::new(RwLock::new(false)), + } + } + + pub async fn is_bootstrapped(&self) -> bool { + let vm_state = self.state.read().await; + vm_state.bootstrapped + } + + pub async fn get_transactions(&self, args: PageArgs) -> RpcRes { + let accept = if args.is_bsc_format.unwrap_or(false) { + AcceptType::Bcs + } else { + AcceptType::Json + }; + let transactions_api = self.api_service.as_ref().unwrap().transactions_api.clone(); + let ret = transactions_api.get_transactions_raw(accept, args.start, args.limit).await; + let ret = ret.unwrap(); + let header; + let ret = match ret { + BasicResponse::Ok(c, a, b, d, e, f, g, h, k) => { + header = AptosHeader { + chain_id: a, + ledger_version: b, + ledger_oldest_version: d, + ledger_timestamp_usec: e, + epoch: f, + block_height: g, + oldest_block_height: h, + cursor: k, + }; + match c { + AptosResponseContent::Json(json) => { + serde_json::to_string(&json.0).unwrap() + } + AptosResponseContent::Bcs(bytes) => { + format!("{}", hex::encode(bytes.0)) + } + } + } + }; + RpcRes { data: ret, header: serde_json::to_string(&header).unwrap() } + } + + pub async fn get_block_by_height(&self, args: BlockArgs) -> RpcRes { + let accept = if args.is_bsc_format.unwrap_or(false) { + AcceptType::Bcs + } else { + AcceptType::Json + }; + let blocks_api = self.api_service.as_ref().unwrap().blocks_api.clone(); + let ret = blocks_api.get_block_by_height_raw(accept, args.height_or_version, args.with_transactions).await; + let ret = ret.unwrap(); + let header; + let ret = match ret { + BasicResponse::Ok(c, a, b, d, e, f, g, h, k) => { + header = AptosHeader { + chain_id: a, + ledger_version: b, + ledger_oldest_version: d, + ledger_timestamp_usec: e, + epoch: f, + block_height: g, + oldest_block_height: h, + cursor: k, + }; + match c { + AptosResponseContent::Json(json) => { + serde_json::to_string(&json.0).unwrap() + } + AptosResponseContent::Bcs(bytes) => { + format!("{}", hex::encode(bytes.0)) + } + } + } + }; + RpcRes { data: ret, header: serde_json::to_string(&header).unwrap() } + } + + pub async fn get_block_by_version(&self, args: BlockArgs) -> RpcRes { + let accept = if args.is_bsc_format.unwrap_or(false) { + AcceptType::Bcs + } else { + AcceptType::Json + }; + let blocks_api = self.api_service.as_ref().unwrap().blocks_api.clone(); + let ret = blocks_api.get_block_by_version_raw(accept, args.height_or_version, args.with_transactions).await; + let ret = ret.unwrap(); + let header; + let ret = match ret { + BasicResponse::Ok(c, a, b, d, e, f, g, h, k) => { + header = AptosHeader { + chain_id: a, + ledger_version: b, + ledger_oldest_version: d, + ledger_timestamp_usec: e, + epoch: f, + block_height: g, + oldest_block_height: h, + cursor: k, + }; + match c { + AptosResponseContent::Json(json) => { + serde_json::to_string(&json.0).unwrap() + } + AptosResponseContent::Bcs(bytes) => { + format!("{}", hex::encode(bytes.0)) + } + } + } + }; + RpcRes { data: ret, header: serde_json::to_string(&header).unwrap() } + } + + pub 
async fn get_accounts_transactions(&self, args: RpcReq) -> RpcRes { + let accept = if args.is_bsc_format.unwrap_or(false) { + AcceptType::Bcs + } else { + AcceptType::Json + }; + let account = args.data.as_str(); + let accounts_api = self.api_service.as_ref().unwrap().accounts_api.clone(); + let start = match args.start { + None => None, + Some(_) => Some(StateKeyWrapper::from_str(args.start.unwrap().as_str()).unwrap()) + }; + let ret = accounts_api.get_account_resources_raw( + accept, + Address::from_str(account).unwrap(), + args.ledger_version, + start, + args.limit, + ).await.unwrap(); + let header; + let ret = match ret { + BasicResponse::Ok(c, a, b, d, e, f, g, h, k) => { + header = AptosHeader { + chain_id: a, + ledger_version: b, + ledger_oldest_version: d, + ledger_timestamp_usec: e, + epoch: f, + block_height: g, + oldest_block_height: h, + cursor: k, + }; + match c { + AptosResponseContent::Json(json) => { + serde_json::to_string(&json.0).unwrap() + } + AptosResponseContent::Bcs(bytes) => { + format!("{}", hex::encode(bytes.0)) + } + } + } + }; + RpcRes { data: ret, header: serde_json::to_string(&header).unwrap() } + } + + pub async fn get_account_resources(&self, args: RpcReq) -> RpcRes { + let accept = if args.is_bsc_format.unwrap_or(false) { + AcceptType::Bcs + } else { + AcceptType::Json + }; + let account = args.data.as_str(); + let accounts_api = self.api_service.as_ref().unwrap().accounts_api.clone(); + let start = match args.start { + None => None, + Some(_) => Some(StateKeyWrapper::from_str(args.start.unwrap().as_str()).unwrap()) + }; + let ret = accounts_api.get_account_resources_raw( + accept, + Address::from_str(account).unwrap(), + args.ledger_version, + start, + args.limit, + ).await.unwrap(); + let header; + let ret = match ret { + BasicResponse::Ok(c, a, b, d, e, f, g, h, k) => { + header = AptosHeader { + chain_id: a, + ledger_version: b, + ledger_oldest_version: d, + ledger_timestamp_usec: e, + epoch: f, + block_height: g, + oldest_block_height: h, + cursor: k, + }; + match c { + AptosResponseContent::Json(json) => { + serde_json::to_string(&json.0).unwrap() + } + AptosResponseContent::Bcs(bytes) => { + format!("{}", hex::encode(bytes.0)) + } + } + } + }; + RpcRes { data: ret, header: serde_json::to_string(&header).unwrap() } + } + + pub async fn get_account(&self, args: RpcReq) -> RpcRes { + let accept = if args.is_bsc_format.unwrap_or(false) { + AcceptType::Bcs + } else { + AcceptType::Json + }; + let account = args.data.as_str(); + let accounts_api = self.api_service.as_ref().unwrap().accounts_api.clone(); + let ret = accounts_api.get_account_raw(accept, + Address::from_str(account).unwrap(), args.ledger_version).await.unwrap(); + let header; + let ret = match ret { + BasicResponse::Ok(c, a, b, d, e, f, g, h, k) => { + header = AptosHeader { + chain_id: a, + ledger_version: b, + ledger_oldest_version: d, + ledger_timestamp_usec: e, + epoch: f, + block_height: g, + oldest_block_height: h, + cursor: k, + }; + match c { + AptosResponseContent::Json(json) => { + serde_json::to_string(&json.0).unwrap() + } + AptosResponseContent::Bcs(bytes) => { + format!("{}", hex::encode(bytes.0)) + } + } + } + }; + RpcRes { data: ret, header: serde_json::to_string(&header).unwrap() } + } + pub async fn get_account_modules_state(&self, args: AccountStateArgs) -> RpcRes { + let accept = if args.is_bsc_format.unwrap_or(false) { + AcceptType::Bcs + } else { + AcceptType::Json + }; + let account = args.account.as_str(); + let module_name = args.resource.as_str(); + let module_name = 
IdentifierWrapper::from_str(module_name).unwrap().clone(); + let state_api = self.api_service.as_ref().unwrap().state_api.clone(); + let ret = state_api.get_account_module_raw( + accept, + Address::from_str(account).unwrap(), + module_name, args.ledger_version).await.unwrap(); + let header; + let ret = match ret { + BasicResponse::Ok(c, a, b, d, e, f, g, h, k) => { + header = AptosHeader { + chain_id: a, + ledger_version: b, + ledger_oldest_version: d, + ledger_timestamp_usec: e, + epoch: f, + block_height: g, + oldest_block_height: h, + cursor: k, + }; + match c { + AptosResponseContent::Json(json) => { + serde_json::to_string(&json.0).unwrap() + } + AptosResponseContent::Bcs(bytes) => { + format!("{}", hex::encode(bytes.0)) + } + } + } + }; + RpcRes { data: ret, header: serde_json::to_string(&header).unwrap() } + } + + pub async fn get_account_resources_state(&self, args: AccountStateArgs) -> RpcRes { + let accept = if args.is_bsc_format.unwrap_or(false) { + AcceptType::Bcs + } else { + AcceptType::Json + }; + let account = args.account.as_str(); + let resource = args.resource.as_str(); + let state_api = self.api_service.as_ref().unwrap().state_api.clone(); + let ret = state_api.get_account_resource_raw(accept, + Address::from_str(account).unwrap(), + MoveStructTag::from_str(resource).unwrap(), + args.ledger_version).await.unwrap(); + let header; + let ret = match ret { + BasicResponse::Ok(c, a, b, d, e, f, g, h, k) => { + header = AptosHeader { + chain_id: a, + ledger_version: b, + ledger_oldest_version: d, + ledger_timestamp_usec: e, + epoch: f, + block_height: g, + oldest_block_height: h, + cursor: k, + }; + match c { + AptosResponseContent::Json(json) => { + serde_json::to_string(&json.0).unwrap() + } + AptosResponseContent::Bcs(bytes) => { + format!("{}", hex::encode(bytes.0)) + } + } + } + }; + RpcRes { data: ret, header: serde_json::to_string(&header).unwrap() } + } + + pub async fn get_account_modules(&self, args: RpcReq) -> RpcRes { + let accept = if args.is_bsc_format.unwrap_or(false) { + AcceptType::Bcs + } else { + AcceptType::Json + }; + let account = args.data.as_str(); + let start = match args.start { + None => None, + Some(_) => Some(StateKeyWrapper::from_str(args.start.unwrap().as_str()).unwrap()) + }; + let accounts_api = self.api_service.as_ref().unwrap().accounts_api.clone(); + let address = Address::from_str(account).unwrap(); + let ret = accounts_api.get_account_modules_raw(accept, + address, + args.ledger_version, + start, + args.limit).await.unwrap(); + let header; + let ret = match ret { + BasicResponse::Ok(c, a, b, d, e, f, g, h, k) => { + header = AptosHeader { + chain_id: a, + ledger_version: b, + ledger_oldest_version: d, + ledger_timestamp_usec: e, + epoch: f, + block_height: g, + oldest_block_height: h, + cursor: k, + }; + match c { + AptosResponseContent::Json(json) => { + serde_json::to_string(&json.0).unwrap() + } + AptosResponseContent::Bcs(bytes) => { + format!("{}", hex::encode(bytes.0)) + } + } + } + }; + RpcRes { data: ret, header: serde_json::to_string(&header).unwrap() } + } + + pub async fn get_ledger_info(&self) -> RpcRes { + let index_api = self.api_service.as_ref().unwrap().index_api.clone(); + let ret = index_api.get_ledger_info_raw(AcceptType::Json).await.unwrap(); + let header; + let ret = match ret { + BasicResponse::Ok(c, a, b, d, e, f, g, h, k) => { + header = AptosHeader { + chain_id: a, + ledger_version: b, + ledger_oldest_version: d, + ledger_timestamp_usec: e, + epoch: f, + block_height: g, + oldest_block_height: h, + cursor: k, + }; 
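+                // JSON content is re-serialized to a String; BCS content is hex-encoded.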
+ match c { + AptosResponseContent::Json(json) => { + serde_json::to_string(&json.0).unwrap() + } + AptosResponseContent::Bcs(bytes) => { + format!("{}", hex::encode(bytes.0)) + } + } + } + }; + RpcRes { data: ret, header: serde_json::to_string(&header).unwrap() } + } + + pub async fn view_function(&self, args: RpcReq) -> RpcRes { + let accept = if args.is_bsc_format.unwrap_or(false) { + AcceptType::Bcs + } else { + AcceptType::Json + }; + let view_function_api = self.api_service.as_ref().unwrap().view_function_api.clone(); + let req = serde_json::from_str::(args.data.as_str()).unwrap(); + let ret = view_function_api.view_function_raw( + accept, + req, + args.ledger_version, + ).await; + let ret = ret.unwrap(); + let header; + let ret = match ret { + BasicResponse::Ok(c, a, b, d, e, f, g, h, k) => { + header = AptosHeader { + chain_id: a, + ledger_version: b, + ledger_oldest_version: d, + ledger_timestamp_usec: e, + epoch: f, + block_height: g, + oldest_block_height: h, + cursor: k, + }; + match c { + AptosResponseContent::Json(json) => { + serde_json::to_string(&json.0).unwrap() + } + AptosResponseContent::Bcs(bytes) => { + format!("{}", hex::encode(bytes.0)) + } + } + } + }; + RpcRes { data: ret, header: serde_json::to_string(&header).unwrap() } + } + + pub async fn get_transaction_by_hash(&self, args: RpcReq) -> RpcRes { + let accept = if args.is_bsc_format.unwrap_or(false) { + AcceptType::Bcs + } else { + AcceptType::Json + }; + let h = args.data.as_str(); + let h1 = HashValue::from_hex(h).unwrap(); + let hash = aptos_api_types::hash::HashValue::from(h1); + let transactions_api = self.api_service.as_ref().unwrap().transactions_api.clone(); + let ret = transactions_api.get_transaction_by_hash_raw(accept, + hash).await; + let ret = ret.unwrap(); + let header; + let ret = match ret { + BasicResponse::Ok(c, a, b, d, e, f, g, h, k) => { + header = AptosHeader { + chain_id: a, + ledger_version: b, + ledger_oldest_version: d, + ledger_timestamp_usec: e, + epoch: f, + block_height: g, + oldest_block_height: h, + cursor: k, + }; + match c { + AptosResponseContent::Json(json) => { + serde_json::to_string(&json.0).unwrap() + } + AptosResponseContent::Bcs(bytes) => { + format!("{}", hex::encode(bytes.0)) + } + } + } + }; + RpcRes { data: ret, header: serde_json::to_string(&header).unwrap() } + } + + pub async fn get_transaction_by_version(&self, args: GetTransactionByVersionArgs) -> RpcRes { + let accept = if args.is_bsc_format.unwrap_or(false) { + AcceptType::Bcs + } else { + AcceptType::Json + }; + let transactions_api = self.api_service.as_ref().unwrap().transactions_api.clone(); + let ret = transactions_api.get_transaction_by_version_raw(accept, + args.version).await; + let ret = ret.unwrap(); + let header; + let ret = match ret { + BasicResponse::Ok(c, a, b, d, e, f, g, h, k) => { + header = AptosHeader { + chain_id: a, + ledger_version: b, + ledger_oldest_version: d, + ledger_timestamp_usec: e, + epoch: f, + block_height: g, + oldest_block_height: h, + cursor: k, + }; + match c { + AptosResponseContent::Json(json) => { + serde_json::to_string(&json.0).unwrap() + } + AptosResponseContent::Bcs(bytes) => { + format!("{}", hex::encode(bytes.0)) + } + } + } + }; + RpcRes { data: ret, header: serde_json::to_string(&header).unwrap() } + } + + pub async fn encode_submission(&self, data: &str) -> RpcRes { + let transactions_api = self.api_service.as_ref().unwrap().transactions_api.clone(); + let payload = serde_json::from_str::(data).unwrap(); + let ret = + 
transactions_api.encode_submission_raw(AcceptType::Json, payload).await; + let ret = ret.unwrap(); + let header; + let ret = match ret { + BasicResponse::Ok(c, a, b, d, e, f, g, h, k) => { + header = AptosHeader { + chain_id: a, + ledger_version: b, + ledger_oldest_version: d, + ledger_timestamp_usec: e, + epoch: f, + block_height: g, + oldest_block_height: h, + cursor: k, + }; + match c { + AptosResponseContent::Json(json) => { + serde_json::to_string(&json.0).unwrap() + } + AptosResponseContent::Bcs(bytes) => { + format!("{}", hex::encode(bytes.0)) + } + } + } + }; + RpcRes { data: ret, header: serde_json::to_string(&header).unwrap() } + } + + pub async fn submit_transaction(&self, data: Vec, accept: AcceptType) -> RpcRes { + log::info!("submit_transaction length {}",{data.len()}); + let transacions_api = self.api_service.as_ref().unwrap().transactions_api.clone(); + let payload = SubmitTransactionPost::Bcs(aptos_api::bcs_payload::Bcs(data.clone())); + let ret = + transacions_api.submit_transaction_raw(accept, payload).await; + let ret = ret.unwrap(); + let header; + let ret = match ret { + SubmitTransactionResponse::Accepted(c, a, b, d, e, f, g, h, k) => { + header = AptosHeader { + chain_id: a, + ledger_version: b, + ledger_oldest_version: d, + ledger_timestamp_usec: e, + epoch: f, + block_height: g, + oldest_block_height: h, + cursor: k, + }; + match c { + AptosResponseContent::Json(json) => { + serde_json::to_string(&json.0).unwrap() + } + AptosResponseContent::Bcs(bytes) => { + format!("{}", hex::encode(bytes.0)) + } + } + } + }; + let signed_transaction: SignedTransaction = + bcs::from_bytes_with_limit(&data, + MAX_RECURSIVE_TYPES_ALLOWED as usize).unwrap(); + let sender = self.app_sender.as_ref().unwrap(); + sender.send_app_gossip(serde_json::to_vec(&signed_transaction.clone()).unwrap()).await.unwrap(); + self.add_pool(signed_transaction).await; + self.notify_block_ready().await; + RpcRes { data: ret, header: serde_json::to_string(&header).unwrap() } + } + + pub async fn submit_transaction_batch(&self, data: Vec, accept: AcceptType) -> RpcRes { + log::info!("submit_transaction_batch length {}",{data.len()}); + let transactions_api = self.api_service.as_ref().unwrap().transactions_api.clone(); + let payload = SubmitTransactionsBatchPost::Bcs(aptos_api::bcs_payload::Bcs(data.clone())); + let ret = transactions_api.submit_transactions_batch_raw(accept, + payload).await; + let ret = ret.unwrap(); + let mut failed_index = vec![]; + let header; + let ret = match ret { + SubmitTransactionsBatchResponse::Accepted(c, a, b, d, e, f, g, h, k) => { + header = AptosHeader { + chain_id: a, + ledger_version: b, + ledger_oldest_version: d, + ledger_timestamp_usec: e, + epoch: f, + block_height: g, + oldest_block_height: h, + cursor: k, + }; + match c { + AptosResponseContent::Json(json) => { + serde_json::to_string(&json.0).unwrap() + } + AptosResponseContent::Bcs(bytes) => { + format!("{}", hex::encode(bytes.0)) + } + } + } + SubmitTransactionsBatchResponse::AcceptedPartial(c, a, b, d, e, f, g, h, k) => { + header = AptosHeader { + chain_id: a, + ledger_version: b, + ledger_oldest_version: d, + ledger_timestamp_usec: e, + epoch: f, + block_height: g, + oldest_block_height: h, + cursor: k, + }; + match c { + AptosResponseContent::Json(json) => { + for x in &json.transaction_failures { + failed_index.push(x.transaction_index.clone()); + } + serde_json::to_string(&json.0).unwrap() + } + AptosResponseContent::Bcs(bytes) => { + format!("{}", hex::encode(bytes.0)) + } + } + } + }; + let 
signed_transactions: Vec = + bcs::from_bytes(&data).unwrap(); + let sender = self.app_sender.as_ref().unwrap(); + let mut exist_count = 0; + for (i, signed_transaction) in signed_transactions.iter().enumerate() { + if !failed_index.contains(&i) { + sender.send_app_gossip(serde_json::to_vec(signed_transaction).unwrap()).await.unwrap(); + self.add_pool(signed_transaction.clone()).await; + } else { + exist_count += 1; + } + } + if exist_count > 0 { + self.notify_block_ready().await; + } + RpcRes { data: ret, header: serde_json::to_string(&header).unwrap() } + } + pub async fn get_table_item(&self, args: RpcTableReq) -> RpcRes { + let accept = if args.is_bsc_format.unwrap_or(false) { + AcceptType::Bcs + } else { + AcceptType::Json + }; + let account = args.query; + let body = args.body; + let payload = serde_json::from_str::(body.as_str()).unwrap(); + let state_api = self.api_service.as_ref().unwrap().state_api.clone(); + let ret = state_api.get_table_item_raw( + accept, + Address::from_str(account.as_str()).unwrap(), + payload, + args.ledger_version).await; + let ret = ret.unwrap(); + let header; + let ret = match ret { + BasicResponse::Ok(c, a, b, d, e, f, g, h, k) => { + header = AptosHeader { + chain_id: a, + ledger_version: b, + ledger_oldest_version: d, + ledger_timestamp_usec: e, + epoch: f, + block_height: g, + oldest_block_height: h, + cursor: k, + }; + match c { + AptosResponseContent::Json(json) => { + serde_json::to_string(&json.0).unwrap() + } + AptosResponseContent::Bcs(bytes) => { + format!("{}", hex::encode(bytes.0)) + } + } + } + }; + RpcRes { data: ret, header: serde_json::to_string(&header).unwrap() } + } + + pub async fn get_raw_table_item(&self, args: RpcTableReq) -> RpcRes { + let accept = if args.is_bsc_format.unwrap_or(false) { + AcceptType::Bcs + } else { + AcceptType::Json + }; + let account = args.query; + let body = args.body; + let payload = serde_json::from_str::(body.as_str()).unwrap(); + let state_api = self.api_service.as_ref().unwrap().state_api.clone(); + let ret = state_api.get_raw_table_item_raw( + accept, + Address::from_str(account.as_str()).unwrap(), + payload, + args.ledger_version).await; + let ret = ret.unwrap(); + let header; + let ret = match ret { + BasicResponse::Ok(c, a, b, d, e, f, g, h, k) => { + header = AptosHeader { + chain_id: a, + ledger_version: b, + ledger_oldest_version: d, + ledger_timestamp_usec: e, + epoch: f, + block_height: g, + oldest_block_height: h, + cursor: k, + }; + match c { + AptosResponseContent::Json(json) => { + serde_json::to_string(&json.0).unwrap() + } + AptosResponseContent::Bcs(bytes) => { + format!("{}", hex::encode(bytes.0)) + } + } + } + }; + RpcRes { data: ret, header: serde_json::to_string(&header).unwrap() } + } + + pub async fn get_events_by_creation_number(&self, args: RpcEventNumReq) -> RpcRes { + let accept = if args.is_bsc_format.unwrap_or(false) { + AcceptType::Bcs + } else { + AcceptType::Json + }; + let events_api = self.api_service.as_ref().unwrap().events_api.clone(); + let ret = events_api.get_events_by_creation_number_raw( + accept, + Address::from_str(args.address.as_str()).unwrap(), + args.creation_number, + args.start, args.limit).await; + let ret = ret.unwrap(); + let header; + let ret = match ret { + BasicResponse::Ok(c, a, b, d, e, f, g, h, k) => { + header = AptosHeader { + chain_id: a, + ledger_version: b, + ledger_oldest_version: d, + ledger_timestamp_usec: e, + epoch: f, + block_height: g, + oldest_block_height: h, + cursor: k, + }; + match c { + AptosResponseContent::Json(json) => { + 
serde_json::to_string(&json.0).unwrap() + } + AptosResponseContent::Bcs(bytes) => { + format!("{}", hex::encode(bytes.0)) + } + } + } + }; + RpcRes { data: ret, header: serde_json::to_string(&header).unwrap() } + } + pub async fn get_events_by_event_handle(&self, args: RpcEventHandleReq) -> RpcRes { + let accept = if args.is_bsc_format.unwrap_or(false) { + AcceptType::Bcs + } else { + AcceptType::Json + }; + let event_handle = MoveStructTag::from_str(args.event_handle.as_str()).unwrap(); + let field_name = IdentifierWrapper::from_str(args.field_name.as_str()).unwrap(); + let events_api = self.api_service.as_ref().unwrap().events_api.clone(); + let ret = events_api.get_events_by_event_handle_raw( + accept, + Address::from_str(args.address.as_str()).unwrap(), + event_handle, + field_name, args.start, args.limit).await; + let ret = ret.unwrap(); + let header; + let ret = match ret { + BasicResponse::Ok(c, a, b, d, e, f, g, h, k) => { + header = AptosHeader { + chain_id: a, + ledger_version: b, + ledger_oldest_version: d, + ledger_timestamp_usec: e, + epoch: f, + block_height: g, + oldest_block_height: h, + cursor: k, + }; + match c { + AptosResponseContent::Json(json) => { + serde_json::to_string(&json.0).unwrap() + } + AptosResponseContent::Bcs(bytes) => { + format!("{}", hex::encode(bytes.0)) + } + } + } + }; + RpcRes { data: ret, header: serde_json::to_string(&header).unwrap() } + } + + async fn add_pool(&self, signed_transaction: SignedTransaction) { + let mut core_pool = self.core_mempool.as_ref().unwrap().write().await; + core_pool.add_txn(signed_transaction.clone(), + 0, + signed_transaction.clone().sequence_number(), + TimelineState::NonQualified); + drop(core_pool); + } + async fn get_pending_tx(&self, count: u64) -> Vec { + let core_pool = self.core_mempool.as_ref().unwrap().read().await; + core_pool.get_batch(count, + 1024 * 5 * 1000, + true, + true, vec![]) + } + + async fn check_pending_tx(&self) { + let shared_self = Arc::new(self.clone()); + let check_duration = Duration::from_millis(2000); + tokio::spawn(async move { + loop { + _ = tokio::time::sleep(check_duration).await; + let status = shared_self.build_status.try_read(); + match status { + Ok(s_) => { + let s = s_.clone(); + drop(s_); + if let 0 = s { + let more = shared_self.has_pending_tx.try_read(); + match more { + Ok(t_) => { + let t = t_.clone(); + drop(t_); + if t == true { + shared_self.update_pending_tx_flag(false).await; + shared_self.notify_block_ready2().await; + } + } + _ => {} + } + } + } + _ => {} + } + } + }); + } + + async fn update_build_block_status(&self, s: u8) { + let mut status = self.build_status.write().await; + if *status != s { + *status = s; + } + } + + async fn update_pending_tx_flag(&self, n: bool) { + let mut tx = self.has_pending_tx.write().await; + if *tx != n { + *tx = n; + } + } + + async fn notify_block_ready2(&self) { + if let Some(to_engine) = &self.to_engine { + let send_result = { + let to_engine = to_engine.read().await; + to_engine.send(PendingTxs).await + }; + if send_result.is_ok() { + self.update_build_block_status(1).await; + println!("----------notify_block_ready----success------------------"); + } else { + log::info!("send tx to_engine error ") + } + } else { + log::info!("send tx to_engine error ") + } + } + + async fn notify_block_ready(&self) { + let status_ = self.build_status.read().await; + let tx_ = self.has_pending_tx.read().await; + let status = status_.clone(); + let tx = tx_.clone(); + drop(tx_); + drop(status_); + match status { + 1 => { // building + if tx == 
false { + self.update_pending_tx_flag(true).await; + } else {} + println!("----------notify_block_ready----ignore------------------"); + } + 0 => {// done + self.notify_block_ready2().await; + } + _ => {} + } + } + + pub async fn simulate_transaction(&self, data: Vec, accept: AcceptType) -> RpcRes { + let transactions_api = self.api_service.as_ref().unwrap().transactions_api.clone(); + let ret = transactions_api.simulate_transaction_raw( + accept, + Some(true), + Some(false), + Some(true), + SubmitTransactionPost::Bcs(aptos_api::bcs_payload::Bcs(data))).await; + let ret = ret.unwrap(); + let header; + let ret = match ret { + BasicResponse::Ok(c, a, b, d, e, f, g, h, k) => { + header = AptosHeader { + chain_id: a, + ledger_version: b, + ledger_oldest_version: d, + ledger_timestamp_usec: e, + epoch: f, + block_height: g, + oldest_block_height: h, + cursor: k, + }; + match c { + AptosResponseContent::Json(json) => { + serde_json::to_string(&json.0).unwrap() + } + AptosResponseContent::Bcs(bytes) => { + format!("{}", hex::encode(bytes.0)) + } + } + } + }; + RpcRes { data: ret, header: serde_json::to_string(&header).unwrap() } + } + + pub async fn estimate_gas_price(&self) -> RpcRes { + let transactions_api = self.api_service.as_ref().unwrap().transactions_api.clone(); + let ret = transactions_api.estimate_gas_price_raw( + AcceptType::Json).await; + let ret = ret.unwrap(); + let header; + let ret = match ret { + BasicResponse::Ok(c, a, b, d, e, f, g, h, k) => { + header = AptosHeader { + chain_id: a, + ledger_version: b, + ledger_oldest_version: d, + ledger_timestamp_usec: e, + epoch: f, + block_height: g, + oldest_block_height: h, + cursor: k, + }; + match c { + AptosResponseContent::Json(json) => { + serde_json::to_string(&json.0).unwrap() + } + AptosResponseContent::Bcs(bytes) => { + format!("{}", hex::encode(bytes.0)) + } + } + } + }; + RpcRes { data: ret, header: serde_json::to_string(&header).unwrap() } + } + + pub async fn faucet_apt(&self, acc: Vec, accept: AcceptType) -> RpcRes { + let to = AccountAddress::from_bytes(acc).unwrap(); + let db = self.db.as_ref().unwrap().read().await; + let mut core_account = self.get_core_account(&db).await; + let tx_factory = TransactionFactory::new(ChainId::test()); + let tx_acc_mint = core_account + .sign_with_transaction_builder( + tx_factory.mint(to, 10 * 100_000_000) + ); + return self.submit_transaction(bcs::to_bytes(&tx_acc_mint).unwrap(), accept).await; + } + + pub async fn create_account(&self, key: &str, accept: AcceptType) -> RpcRes { + let to = Ed25519PublicKey::from_encoded_string(key).unwrap(); + let db = self.db.as_ref().unwrap().read().await; + let mut core_account = self.get_core_account(&db).await; + let tx_factory = TransactionFactory::new(ChainId::test()); + let tx_acc_create = core_account + .sign_with_transaction_builder( + tx_factory.create_user_account(&to) + ); + return self.submit_transaction(bcs::to_bytes(&tx_acc_create).unwrap(), accept).await; + } + + /// Sets the state of the Vm. + /// # Errors + /// Will fail if the `snow::State` is syncing + pub async fn set_state(&self, snow_state: snow::State) -> io::Result<()> { + let mut vm_state = self.state.write().await; + match snow_state { + // called by chains manager when it is creating the chain. 
+ snow::State::Initializing => { + log::info!("set_state: initializing"); + vm_state.bootstrapped = false; + Ok(()) + } + + snow::State::StateSyncing => { + log::info!("set_state: state syncing"); + Err(Error::new(ErrorKind::Other, "state sync is not supported")) + } + + // called by the bootstrapper to signal bootstrapping has started. + snow::State::Bootstrapping => { + log::info!("set_state: bootstrapping"); + vm_state.bootstrapped = false; + Ok(()) + } + + // called when consensus has started signalling bootstrap phase is complete. + snow::State::NormalOp => { + log::info!("set_state: normal op"); + vm_state.bootstrapped = true; + Ok(()) + } + } + } + + + /// Sets the container preference of the Vm. + pub async fn set_preference(&self, id: ids::Id) -> io::Result<()> { + let mut vm_state = self.state.write().await; + vm_state.preferred = id; + + Ok(()) + } + + /// Returns the last accepted block Id. + pub async fn last_accepted(&self) -> io::Result { + let vm_state = self.state.read().await; + if let Some(state) = &vm_state.state { + let blk_id = state.get_last_accepted_block_id().await?; + return Ok(blk_id); + } + Err(Error::new(ErrorKind::NotFound, "state manager not found")) + } + + pub async fn get_core_account(&self, db: &DbReaderWriter) -> LocalAccount { + let acc = aptos_test_root_address(); + let state_proof = db.reader.get_latest_ledger_info().unwrap(); + let current_version = state_proof.ledger_info().version(); + let db_state_view = db.reader.state_view_at_version(Some(current_version)).unwrap(); + let view = db_state_view.as_account_with_state_view(&acc); + let av = view.get_account_resource().unwrap(); + let sn = av.unwrap().sequence_number(); + LocalAccount::new( + aptos_test_root_address(), + AccountKey::from_private_key(GENESIS_KEYPAIR.0.clone()), + sn, + ) + } + + + pub async fn inner_build_block(&self, data: Vec) -> io::Result<()> { + + + // obtain the executor guard for reading + let executor = self.executor.as_ref().unwrap().read().await; + + // build the aptos data from the slice + let AptosData(txs, block_id, parent_block_id, next_epoch, ts) + = serde_json::from_slice::(&data).unwrap(); + let block_tx = serde_json::from_slice::>(&txs).unwrap(); + let block_meta = block_tx.get(0).unwrap().try_as_block_metadata().unwrap(); + let block_id_now = block_meta.id(); + + if block_id_now.ne(&block_id) { + return Err(Error::new( + ErrorKind::Interrupted, + "block format error", + )); + } + + let parent_block_id_now = executor.committed_block_id(); + if parent_block_id.ne(&parent_block_id_now) { + return Err(Error::new( + ErrorKind::Interrupted, + "block error,maybe not sync ", + )); + } + println!("------------inner_build_block {}----", block_id); + + // execute the block + let output = executor + .execute_block((block_id, block_tx.clone()), parent_block_id) + .unwrap(); + + // sign for the the ledger + let ledger_info = LedgerInfo::new( + BlockInfo::new( + next_epoch, + 0, + block_id, + output.root_hash(), + output.version(), + ts, + output.epoch_state().clone(), + ), + HashValue::zero(), + ); + let li = generate_ledger_info_with_sig(&[self.signer.as_ref().unwrap().clone()], ledger_info); + executor.commit_blocks(vec![block_id], li.clone()).unwrap(); + + // write the transactions to the mempool + let mut core_pool = self.core_mempool.as_ref().unwrap().write().await; + for t in block_tx.iter() { + match t { + UserTransaction(t) => { + let sender = t.sender(); + let sequence_number = t.sequence_number(); + core_pool.commit_transaction(&AccountAddress::from(sender), 
sequence_number); + } + _ => {} + } + } + drop(core_pool); + self.update_build_block_status(0).await; + Ok(()) + } + + async fn init_aptos(&mut self) { + + // generate the test genesis + let (genesis, validators) = test_genesis_change_set_and_validators(Some(1)); + let signer = ValidatorSigner::new( + validators[0].data.owner_address, + validators[0].consensus_key.clone(), + ); + self.signer = Some(signer.clone()); + + // write the genesis transaction + let genesis_txn = Transaction::GenesisTransaction(WriteSetPayload::Direct(genesis)); + let p = format!("{}/{}", + dirs::home_dir().unwrap().to_str().unwrap(), + MOVE_DB_DIR); + if !fs::metadata(p.clone().as_str()).is_ok() { + fs::create_dir_all(p.as_str()).unwrap(); + } + + // initialize aptos db + let db = DbReaderWriter::wrap( + AptosDB::new_for_test(p.as_str())); + let waypoint = generate_waypoint::(&db.1, &genesis_txn); + match waypoint { + Ok(w) => { + maybe_bootstrap::(&db.1, &genesis_txn, w).unwrap(); + } + _ => {} + } + // BLOCK-STM + // AptosVM::set_concurrency_level_once(2); + self.db = Some(Arc::new(RwLock::new(db.1.clone()))); + let executor = BlockExecutor::new(db.1.clone()); + self.executor = Some(Arc::new(RwLock::new(executor))); + + // set up the mempool + let (mempool_client_sender, + mut mempool_client_receiver) = futures_mpsc::channel::(10); + let sender = MempoolClientSender::from(mempool_client_sender); + let node_config = NodeConfig::default(); + let context = Context::new(ChainId::test(), + db.1.reader.clone(), + sender, node_config.clone()); + + // initialze the raw api + self.api_context = Some(context.clone()); + let service = get_raw_api_service(Arc::new(context)); + self.api_service = Some(service); + self.core_mempool = Some(Arc::new(RwLock::new(CoreMempool::new(&node_config)))); + self.check_pending_tx().await; + + // sart the mempool client + tokio::task::spawn(async move { + while let Some(request) = mempool_client_receiver.next().await { + match request { + MempoolClientRequest::SubmitTransaction(_t, callback) => { + // accept all the transaction + let ms = MempoolStatus::new(MempoolStatusCode::Accepted); + let status: SubmissionStatus = (ms, None); + callback.send( + Ok(status) + ).unwrap(); + } + MempoolClientRequest::GetTransactionByHash(_, _) => {} + } + } + }); + + + } +} + +#[tonic::async_trait] +impl BatchedChainVm for Vm { + type Block = Block; + + async fn get_ancestors( + &self, + _block_id: ids::Id, + _max_block_num: i32, + _max_block_size: i32, + _max_block_retrival_time: Duration, + ) -> io::Result> { + Err(Error::new( + ErrorKind::Unsupported, + "get_ancestors not implemented", + )) + } + + async fn batched_parse_block(&self, _blocks: &[Vec]) -> io::Result> { + Err(Error::new( + ErrorKind::Unsupported, + "batched_parse_block not implemented", + )) + } +} + +#[tonic::async_trait] +impl ChainVm for Vm +{ + type Block = Block; + + async fn build_block( + &self, + ) -> io::Result<::Block> { + let vm_state = self.state.read().await; + if let Some(state_b) = vm_state.state.as_ref() { + let prnt_blk = state_b.get_block(&vm_state.preferred).await.unwrap(); + let unix_now = Utc::now().timestamp() as u64; + let tx_arr = self.get_pending_tx(10000).await; + println!("----build_block pool tx count-------{}------", tx_arr.clone().len()); + let executor = self.executor.as_ref().unwrap().read().await; + let signer = self.signer.as_ref().unwrap(); + let db = self.db.as_ref().unwrap().read().await; + let latest_ledger_info = db.reader.get_latest_ledger_info().unwrap(); + let next_epoch = 
latest_ledger_info.ledger_info().next_block_epoch(); + let block_id = HashValue::random(); + let block_meta = Transaction::BlockMetadata(BlockMetadata::new( + block_id, + next_epoch, + 0, + signer.author(), + vec![], + vec![], + unix_now, + )); + let mut txs = vec![]; + for tx in tx_arr.iter() { + txs.push(UserTransaction(tx.clone())); + } + let mut block_tx: Vec<_> = vec![]; + block_tx.push(block_meta); + block_tx.append(&mut txs); + block_tx.push(Transaction::StateCheckpoint(HashValue::random())); + let parent_block_id = executor.committed_block_id(); + let block_tx_bytes = serde_json::to_vec(&block_tx).unwrap(); + let data = AptosData(block_tx_bytes, + block_id.clone(), + parent_block_id, + next_epoch, + unix_now); + + let mut block_ = Block::new( + prnt_blk.id(), + prnt_blk.height() + 1, + unix_now, + serde_json::to_vec(&data).unwrap(), + choices::status::Status::Processing, + ).unwrap(); + block_.set_state(state_b.clone()); + println!("--------vm_build_block------{}---", block_.id()); + block_.verify().await.unwrap(); + return Ok(block_); + } + Err(Error::new( + ErrorKind::Other, + "not implement", + )) + } + + async fn issue_tx( + &self, + ) -> io::Result<::Block> { + Err(Error::new( + ErrorKind::Unsupported, + "issue_tx not implemented", + )) + } + + async fn set_preference(&self, id: ids::Id) -> io::Result<()> { + self.set_preference(id).await + } + + async fn last_accepted(&self) -> io::Result { + self.last_accepted().await + } +} + +#[tonic::async_trait] +impl NetworkAppHandler for Vm +{ + /// Currently, no app-specific messages, so returning Ok. + async fn app_request( + &self, + _node_id: &ids::node::Id, + _request_id: u32, + _deadline: DateTime, + _request: &[u8], + ) -> io::Result<()> { + Ok(()) + } + + /// Currently, no app-specific messages, so returning Ok. + async fn app_request_failed( + &self, + _node_id: &ids::node::Id, + _request_id: u32, + ) -> io::Result<()> { + Ok(()) + } + + /// Currently, no app-specific messages, so returning Ok. + async fn app_response( + &self, + _node_id: &ids::node::Id, + _request_id: u32, + _response: &[u8], + ) -> io::Result<()> { + Ok(()) + } + + async fn app_gossip(&self, _node_id: &ids::node::Id, msg: &[u8]) -> io::Result<()> { + match serde_json::from_slice::(msg) { + Ok(s) => { + self.add_pool(s).await; + } + Err(_) => {} + } + Ok(()) + } +} + +#[tonic::async_trait] +impl CrossChainAppHandler for Vm +{ + /// Currently, no cross chain specific messages, so returning Ok. + async fn cross_chain_app_request( + &self, + _chain_id: &ids::Id, + _request_id: u32, + _deadline: DateTime, + _request: &[u8], + ) -> io::Result<()> { + Ok(()) + } + + /// Currently, no cross chain specific messages, so returning Ok. + async fn cross_chain_app_request_failed( + &self, + _chain_id: &ids::Id, + _request_id: u32, + ) -> io::Result<()> { + Ok(()) + } + + /// Currently, no cross chain specific messages, so returning Ok. 
+    async fn cross_chain_app_response(
+        &self,
+        _chain_id: &ids::Id,
+        _request_id: u32,
+        _response: &[u8],
+    ) -> io::Result<()> {
+        Ok(())
+    }
+}
+
+impl AppHandler for Vm {}
+
+#[tonic::async_trait]
+impl Connector for Vm
+{
+    async fn connected(&self, _id: &ids::node::Id) -> io::Result<()> {
+        // no-op
+        Ok(())
+    }
+
+    async fn disconnected(&self, _id: &ids::node::Id) -> io::Result<()> {
+        // no-op
+        Ok(())
+    }
+}
+
+#[tonic::async_trait]
+impl Checkable for Vm
+{
+    async fn health_check(&self) -> io::Result<Vec<u8>> {
+        Ok("200".as_bytes().to_vec())
+    }
+}
+
+#[tonic::async_trait]
+impl Getter for Vm
+{
+    type Block = Block;
+
+    async fn get_block(
+        &self,
+        blk_id: ids::Id,
+    ) -> io::Result<<Self as Getter>::Block> {
+        let vm_state = self.state.read().await;
+        if let Some(state) = &vm_state.state {
+            let mut block = state.get_block(&blk_id).await?;
+            let mut new_state = state.clone();
+            new_state.set_vm(self.clone());
+            block.set_state(new_state);
+            return Ok(block);
+        }
+        Err(Error::new(ErrorKind::NotFound, "state manager not found"))
+    }
+}
+
+#[tonic::async_trait]
+impl Parser for Vm
+{
+    type Block = Block;
+    async fn parse_block(
+        &self,
+        bytes: &[u8],
+    ) -> io::Result<<Self as Parser>::Block> {
+        let vm_state = self.state.read().await;
+        if let Some(state) = vm_state.state.as_ref() {
+            let mut new_block = Block::from_slice(bytes)?;
+            new_block.set_status(choices::status::Status::Processing);
+            let mut new_state = state.clone();
+            new_state.set_vm(self.clone());
+            new_block.set_state(new_state);
+            // prefer a previously stored copy of the block, if one exists
+            return match state.get_block(&new_block.id()).await {
+                Ok(prev) => {
+                    Ok(prev)
+                }
+                Err(_) => {
+                    Ok(new_block)
+                }
+            };
+        }
+        Err(Error::new(ErrorKind::NotFound, "state manager not found"))
+    }
+}
+
+#[tonic::async_trait]
+impl CommonVm for Vm
+{
+    type DatabaseManager = DatabaseManager;
+    type AppSender = AppSenderClient;
+    type ChainHandler = ChainHandler;
+    type StaticHandler = StaticHandler;
+    type ValidatorState = ValidatorStateClient;
+
+    async fn initialize(
+        &mut self,
+        ctx: Option<subnet::rpc::context::Context<Self::ValidatorState>>,
+        db_manager: Self::DatabaseManager,
+        genesis_bytes: &[u8],
+        _upgrade_bytes: &[u8],
+        _config_bytes: &[u8],
+        to_engine: Sender<snow::engine::common::message::Message>,
+        _fxs: &[snow::engine::common::vm::Fx],
+        app_sender: Self::AppSender,
+    ) -> io::Result<()> {
+        let mut vm_state = self.state.write().await;
+        vm_state.ctx = ctx.clone();
+        let current = db_manager.current().await?;
+        let state = state::State {
+            db: Arc::new(RwLock::new(current.clone().db)),
+            verified_blocks: Arc::new(RwLock::new(HashMap::new())),
+            vm: None,
+        };
+        vm_state.state = Some(state.clone());
+        self.to_engine = Some(Arc::new(RwLock::new(to_engine)));
+        self.app_sender = Some(app_sender);
+        drop(vm_state);
+
+        self.init_aptos().await;
+        let mut vm_state = self.state.write().await;
+        let genesis = "hello world";
+        let has_last_accepted = state.has_last_accepted_block().await?;
+        if has_last_accepted {
+            let last_accepted_blk_id = state.get_last_accepted_block_id().await?;
+            vm_state.preferred = last_accepted_blk_id;
+        } else {
+            let genesis_bytes = genesis.as_bytes().to_vec();
+            let data = AptosData(genesis_bytes.clone(),
+                                 HashValue::zero(),
+                                 HashValue::zero(),
+                                 0,
+                                 0);
+            let mut genesis_block = Block::new(
+                ids::Id::empty(),
+                0,
+                0,
+                serde_json::to_vec(&data).unwrap(),
+                choices::status::Status::default(),
+            ).unwrap();
+            genesis_block.set_state(state.clone());
+            genesis_block.accept().await?;
+
+            let genesis_blk_id = genesis_block.id();
+            vm_state.preferred = genesis_blk_id;
+        }
+        log::info!("successfully initialized Vm");
+        Ok(())
+    }
+
+    async fn set_state(&self,
snow_state: snow::State) -> io::Result<()> { + self.set_state(snow_state).await + } + + /// Called when the node is shutting down. + async fn shutdown(&self) -> io::Result<()> { + Ok(()) + } + + async fn version(&self) -> io::Result { + Ok(String::from(VERSION)) + } + + async fn create_static_handlers( + &mut self, + ) -> io::Result>> { + let handler = StaticHandler::new(StaticService::new()); + let mut handlers = HashMap::new(); + handlers.insert( + "/static".to_string(), + HttpHandler { + lock_option: LockOptions::WriteLock, + handler, + server_addr: None, + }, + ); + + Ok(handlers) + } + + async fn create_handlers( + &mut self, + ) -> io::Result>> { + let handler = ChainHandler::new(ChainService::new(self.clone())); + let mut handlers = HashMap::new(); + handlers.insert( + "/rpc".to_string(), + HttpHandler { + lock_option: LockOptions::WriteLock, + handler, + server_addr: None, + }, + ); + + Ok(handlers) + } +} \ No newline at end of file diff --git a/rsc/movement_logo.png b/rsc/movement_logo.png new file mode 100644 index 0000000000000000000000000000000000000000..829f2a5970f4909a0a8345f0f55a5f87b0cc9b95 GIT binary patch literal 18016 zcmXt=V_+R^w6>$hjqNtJZ8mOf+l_5Dw%ypaoixUdZQE$hyytx1kDdKzX79y}Nr*jFh@F(u&p75KcvLIZ#Gn%YQ!FF1QiO(!rgEb{+8;8IFt*I-~I zU{YejD(+bqx-jl4ria44{F`Rb@A;u>mnz2CpvqC$y%5wi#X zf=T^nd`?YRNI{bvOUP3Mj;huANqtcF^CLOwPVgwxi_(oEQMzB2<%nx^!5pkl z%70hi0!a#SqKDP6BEQs3;vzynreA|B*y_Ddv3*@#iQRuvB!+L$Zc?JgFU27)OG7oc z(gbMMELnTx#G*ju<4@nsLkVjP-@O~=9Q{NHysm;(LV*ImR@Hj$syH|`XaDGq{L~Iz z8lEQcO)-TIGRzn=IW1l~BX!SNlJ-x%q-0`>OT3&nV4|YOLO9yn+q)bQ5?il`YUwMU!yuI$vJd( zO{FrGgul^B+R-GbD)!zWt0p{^4v(B%I@ta*zG?k^4H3s0Asw2s9W=@8J{m9L z(}r?hj6qe3H-hJM`()L8xb35Es?9x9rj|5a_;n!_p zgC4;XkO9+n*DrP)#Jx&2YmD;Sxo=!Pug=9Ac@aNP`Qv}Yt5qZuSX!CLQ^IKBP2b%C zuLoEFLOsXcq{uuW1=8LN{}d?&3#0KJqQSRbuf$8`*Yx|h9=JdS1CGoiguuAax9~@P zX)iK|pP2MojrLE*6t^6X5G;3bG70$n6M-Zq*L|4Tx&Bq-;1vN?^`PV z)%(PK5=p<^1(NWEdr@69)zmN&6(0{8%ggX-IDCfx#tWJ86=PRcn#}Ikci@|z-t+jE zlsHI}YGZAlxw--7+_I2b_CBE#FAJU?;U|p}?9tub?Eq`%aFuTLiw!T?cqHZk8WBfm zUEcYPtfg)U1vPQg=vijl!#DW&8>7P)JM88rPRLH4UYP`a$Lr*AF#W(BZ(`zH)Yc=M zI-yCadVFj3=9lhpR^Nm%TVYN{b_f$SXJaK+X00Q?z1ug-c2@Da8_Br5M7GlvY=TBGU_z6=XPcG?1 z7q-Lt^za~_tY^xRnS6OD_(y`Gj7FA@L2(=`XZ-G+QWZ=m=j_66xLEbecJ_iNOmF7o z;m`Mv52snZA0^om1&T0GP_?(3V-7zSqR3h`h$7N$l%5e2bzD(_Bf-DBt*SzFj+v~I zlW*mXnQ>&QvYJh7Yc<H`4_SLJ7IOBPyC=?FL{wn3V z2#q*RwphPX?W}85lxp_l{e9t?eRz}kBy==LvN*$-5_@bXRLMJcLd|#xZFh-ms0-)Z zJA`qVVZ+DU`%zp>Ecp55g}KC5Qt~%M5pTeTD6BG3(J70jvPe|;C!K(f+9y4DmWZhx zOeSTeBQCKd|1HzXp?L*}2a%MGkjGgFQ}4j;-=k-Oic1|TxH8Cvhwzbfjm#_W5k0f8 zb*=BfvSNLVciMf(fGN*;i_Lyp=&Q7Ir5`b?8DeT&oKOspTls0)G@bOlismVel*1Eb znUsuMi3tG&B_QM}52kyBz9r4{yli`Xp6j#t2c;c%$e(Gt}E;Z*MOFR0j|g zfRpPURZUtr zH&@k_4PGIm6(sXo{{XvQjB00*8a3!7d|{`hMVL2NEgd#Kv;5=fuR|c|UK%lz-hu0% z;&vh2#s?L4_44{kq*ZScUTnM8OtqqlWBmg^r_`0iJjcaBvLCK&H%jQzRicDk1-c_Q zcPo6e%M0N+3jrQp$@l>uOC1#ivClXC7O@xXpq##xC@l?Vk#ktMCm!1g96S_5$ou*y zn+OMI6xuH0phj$pWTexm&QJ_&`cC}-Un<9?g_V;&-?R4Z`B_I!QW6#gU!$~+njF-u zj+8GIgb-B7z=R<{YR~?yc1xsIn=Zw=O1lLq;`eWoTJ@spGiM$@h_@ zt-;8WzrVlF=qJ|=I|qkaz=jo5U-1g0?qaA%5FPHYVWPz>5gT@D$vJ-l#GRatxOHM#sd}>Rx?JyTd zmJUlx!@2LRs7vV@3){%D{_Bx55}bA=ZZ9)%2DcAYYvwb0EAMms{{G%Q5{wsB_d`O0 z=B7!v!&N7doViFf;1o{2X5ZtV=uK$)UG!^5TP~a>oZX4a-qqFTH>3TMT;E+KWo68Q zAF+oSWMl39l0KV@abem12SYG2ZR_Z<8>T=$wJ7Pc*Jfsv8tZsltBf=G5%ojH9)Ee3 zTivKmBhTe*v0yNYTWus}h)FIqi>G<`j_)(mJt6)s8tm`_Mz$@{>9WQ3Vs)s+az3%qB3)_gyp9=pIC9}`$!*q`?#?&Fr4w~8r-h*8kgE0&X%0y|L}vkT?MS`jhJMNj*cc96~L#+&bPZ*PwnAL{;uiDPB0o}p!9DeGzc_b zJu9-Pp`@UAl)w~VV33biHmE(FR1TO5r8|D`M4!IX=YGLv&|Yn4IMxndLg{QNAu>NWY4XO0 
ztXPF*$hx#*2r?4Vzt;d5BBrmnEKnEDAssii$OUkLd1$ux*!wiRD+hMwBwBLj$bS!L zya6#m;JO)FT2c=QVZ?EDqMb2y^=N($aNxk4q;eD7o3S$T>QyhjHq|&tNl#DDj*ssn zr=+a#e7d|)i)*ME3dE#l4pnhQHo()8(<)E>BSCMHZgJspxk0VNo}UdL5wjo;tUg587BJ6*mc z2#{3#kGCh}iShBhgoK34EMufTAVu0oYJDJ^+?XP0%&0SF;NkJ)Z~iSYLf{&A0eJ2n zq^`~U1kW&R;LKE+7cKd4Yp#?9JL8B>TW&6~^VMdT?$J~>^LnxObsr2q_C8e~vo*{b zd*qyV?sI-A%Oqr4kXl31DLEBrZ+BPYm%9yimq8x{QrAtiL}~qazoo}5B3DFuM>ews zyK-6e{E4(;xw)z7fy?OvTv2=ZtkzE{kYykV)nJWaoIJq`oLJ5Q|5TW34A`8&bog1U3z)z&eNK0F3BzG3I#c`b#N`vTD| z3>(Hc%6HK=)pU5D2X{UQ*-zL6s#2r9Kb|GUw!N>cqyqtt^NaXc-r=rmSA1+<9*oi- z;WE6!n04&<__*u4Sn^u4Z3ll`5EHBN^S%&NIR5mdWX8lYm-g>?^{@J zgL(9Hu_0S&wq3*Js-frO)6r$jli4u7js=Px3~&aIwG+d6A{1oU%N0n}Ai4kw28L>d zQxWAFSLOOREdk6L6y^Co3immpWl`H@?P}!``IoV=v9!19awm$c3Ah$HK5?qT!+$Wc zfbW`!5^I!J0rI%k-{0RlGozr|m1XbQB@lP?lDR8LPD#LwOUM>Dat`kFH~*Cg1;yU! z{rM(fAPo7)(aA{@$=f!7q3j}2yeT_*OpzjMjhKOc6*5l7C0kxSFx`=xn>(?gzP|Qb zNV(tVzb551pbS-U@*OPm55bB4&%mzJ_^DD4dWccKRpPK&C|rPCbB>G;#+dhQ-QGn1 z)1qpY1EyjkJUlHHpV#~4x2v99d@lPuqus%9kc^Y&xi^KJ7`Z)aYcK_>@{b38dU{)m z#S%c}%GIlNG#geKQVOu*Rska#f~;MTK;uL?(}9G1^u3qM<`(FY9>nbh4; ze;@c0Eican*V&5LT;oE+ehk9fcJbVH;hCXU&0Dg7J8$uDzxC{DO~rd`(yREAEj{#@ zhNP4)U3>QMigr$&8XwP0E+CK-86B;vno33r$|jEUm!H!y5`$_3zt?X_-<5+;YHIMMS`)vZwr8kjK*7=P`eCjSXgY@H_HW5Yn_D3~zxdOcqUez?QY2=Ho|J(|*wT&Y!;l>ykUA}r8(BiAwnclC@3vDiWA~4M3FZr` z=%Q8Z7bcvqD6EcL`#$zKuX_By*7T3MJ}O~v1^>Ye#^72i-!dR1s(@CGtg{{0x_t9}j^|krc{*FMUVVK7q;Q+b%19FhqL>-80}5AI`z)Fg z#A>8;mn^j(cKKpQAuV+)a6tI#nlCS!o6yU?%bBgmLsZS=3B zqE@RjN$Kc9SMQ>vP#z{FPiR=jrzWdnFe<_7gl;9?0iD<<&EsUlhmojF%&FAWm&jT0 z6^S*3ELyU==JsmktcZ1HZy5%Kxl}OHlPh?q(<@oVuo;l|xwp#>Mxus2CYh;Z9|1kJ z!QCd37RI37eqQj4RhBS?tz9QomIBu0H;mk%33U#pWa3X4=EA#8JfRW=p?Bh8R;i2A zYv;BW_uqQl$lC{Bg|URn-i2U~H&%Oh8~XdMk>qXbE@=U3BLR%9IX~h@wXSpRWCaAz+E)M1 z{7>xEZyq~*t_mnKU&4IEc?nC}OiWB{dhs5Bu#5?j^AI{9MQ4=r#{F(uu9j$qtC~&Ad41GblyZhY@t* zoJQ^@Kes#*g}Y~v`vd3*>7yPeJ<=#|`1EUwOlfzV3%Qk<@vI%IPl=w^Q>Hb9XGv5~ ze1|+%rTN|N#dOc5*lY6(M!S;+|6|wk*yRytFdF}hXL5!=gUWf?zWTfcTiVmVtyFzq zZljCUru8IhWsFm$jD7>Rm)p0s?WjgCa0_>W;nKJ#PP$e|MY18k;5XRFgA= zj#eth$93H`{Z-mXUdra?o)(?z*4^Hs>0=(0Kp-p0k|>~#{{7qEVQ=_>h}j70u#5J< zvB|!86U(^+hHJ)(&y5IQvdl#165K2uAkh}g}aul8aSme)P4G3fCL|W zkNJ}&Iy_`W=i&lbo~K()i=JD{hH<0=PU(_vTNRXR{pfkYGRlJ;Qx=-N z=2xzitF3s02+IatA07%Rc+xyK!W#ArEO=ut*}%(Gr|f-XLcaI)xX{9$tiy?PxP-Bvh41*1$^L^Z6zo8(*YhP>tu}2VR!VHV?2izfDdeKte zJ)5N7JI;PrORlNg1D7wxt)KveuB%;8uoQYP9`1>=B%AO^(F&p=f4k^#yK0=N1`GD3@M8S=H{EEOh^JV^!_lPLV>cB5Wyf(@eS^jdV~#k)@y zerDaRNbhTn<)ejn_GNZ8m*`LZ7~9CU|L#$EJ7;18p2*6!=lG4;zMI}a)CTh$mh+}@ z!kWBop3**I`CaaKeRtO&jQ|Qi7<6>>3pAhTN>sA4i)<;bqyoSr)6HR zl%lu|z|;&s@UEky%jR+(2FbzeK>LEVIGmB;>g_*9PP_NN-1Y9OY^cxsc)kk267748 z9H)<&1OkYs?`&WMyS>rnTr6DH--7 zdaUCcH#~U$CSq>0hmOe4PXzsvD=|AeI>#-A$#~o2AMazYd-nvSfJuW!0#9Vli?qizXn(XJ7dt!n z_dlNuF&C@~YGhdd7;U{k-zA3HKZ&R1Z?B_D!5xrW?*2{>~$5enBLhx}%B?NE;<6eEeRZVJ}?ZSBZ}zgN7Pc?;{>D=!nR znNp`&R0SKYvHxmf;~lId8PoBzu{GZJ!4S)qvwf~_M9rW3cl*k1`1|f)ynP&dT^iZO z@)I5N5IzqNiI`8Qe^!<*K7|WS`&^L(yPm9*A+hC$hc4H0`0S6yzrfxLFWCG+1tVF_ zPLm{cYhTf?P}*S33C$@H2u#`NU=L@Jbp1w#%fsY0HvZl$nEiFT zb<>R5Yf?GwOV$?cdy>@Wee%zsK8rv5*!eg9mseNFdn2)a7z5Tyh}o+@*cT{d##nd= z$1|)y*QHZce@(HUS1!IzF#pEql}4;#&xV|2{6^&Xc+tFaB43qHz=7qeeKqJKbN;&ryv@xD142&Rw>US2fxM_??|9%)t_t`EGCxSmsK zzw1Z`xCkhilnq9E}e*dJY{#YdsHb!Ry2W)$+6Dpl%#NCF^#b_JSS@^@OlS z%A024P&35{&&JjDF%0&8dWe%+vzP+x_rpF_fVYUYkbxtw#cUPr8QK{(_Bc&-77X~` z^fcxw>7nxackLljp*1nJ`iP7;S9(3@SrKU!dn)LY)`krSlZE~D{o#3I!JS9!L zd~B|{Wz;AcOJ7BJ%<2c=w^GX|XtvD7U2=a$Vxi0wvqLpn4z*v@di4qXs{SWWw>RX| z-?)<(<$RN&dHzdr(tO?u|5^b(<3&W)7|%vaYxM~BK6j9lI#dsGG#(YTu+;Mp*`@D& zZPohyGECglCb*<_chZ4(hYg`=jU9UCPYbpQ)S*I0Of)43g2&JiC_EeYfFb6W++}G& 
zr>%|YM###i>N;IjLg5$~6wf){tM%at_!9xehv9FD0n}rds$Qz4ZfkcC?^e^NY>@G7%CIexM&M*!N$Q z#8CzjkjtFT%V>mPt!6zkP}Mum6Xm``?ovh>7nMyRp3FT~QFsq8Y%f-&!B6bYS1#OT z4X``7d~P!ha_i2wMKv;zJpHtR)Wly}BM82=#!_E5ZwdM(azE1{v`q-Qmj3LCcyo5$ z-+6Ch@M>1?E}j^^a=re#xL!c5`UrPE2?ybOjt*SmgzCDx;am3a) z9O)eLkKmo_l?Vnr!|kOzl}{o6#s=vtCYX1e6)S$St0UCH|c zHB-Pu>LR!2l&_}AEXZF>RpGr13wX!hzI|)q z8snf0*P9GIxkur4CrR8-P7umU6pa`x%e^8F#LBKex2$Zk^x5BDm|JTvXm8gm3QiKy zjF)s2kC)Uf%r0ze^l|n~X0{)(UKKIrN<;#^zc%Wb<#;B-&E@$1aW(Da>fCAgeq3uv zZ|x{Nr&EeK^A%bYmlCUN4_VU1?F-Mj-{IBBDrbkRXW1XbhAohY1^>anS8%W*4-G1l zt7Wx^8Cdh{{6SiQkqp5FO1CWXuE7yudS)!}|IY$U{o`-G8M?)Fs+P7YGK4?+DKs%^ z=t3MzmLW>d;}*P53PUs5(D z-7^*?jq@=qc9Xbq=M9>>ThqPx{ZlX7nsC!rhrJ(b?Q<&1f3>9=TAiU+WqtCbFg{@I zbXa6lS;HB8_AZeR!x3<~$U0b^YewRR82q-7y6aqWyF!Ix30Rh)+s8R(KpL;qtF@(z zTK2A7;|yX*y*-4$pv6%YXg$IAxw zQw15~c_VT-t$mUmW0>xwrUT;&Ox(R=e`>>eTaar6I5rG5m3s)2#1Z z{On}NL%%RZ)Xi@*$W2i~01@&Z0Cn2p1|v!NC3C?Kq3L;I^@?_03+eC6%y%(|t8k!z zEN~qNKJGm$bgjF)kbBi}!T>#8++x^?)*Iu~112M+-d?cd_tj#&ip}#Sv61DIpBfRi zKG_awUzSXt^e^(8GNi3;gpT#D0ku9_yvjKKw3x?h?`~Raf}9y*cM!CPCPMOz`#F$L z-kJo2nZ3OMGMYpe__pQ&R+}jE1WG4yZyi0G7eMmw5%{<45y3+;zAUvF}Sp+L*<>dBL z8}v{(@CEc3(Np7IM6LqCBanK1ea#}BR`%t((EZnYVK~RORKkXM8+9N7Ig$)nS>(}L zGR4jr!Ua*Laqdm%&8_rAETDI^M#bV9dQ-Mi+$lea37`PWtd~+r{aCRbqicr7&D0S4 zGgVM%VwcSuX5^vi_7=eH8(#fNaty>7cnmMc=YK%3ThMa4KtHpTHw1s$Ej;*-$)LScZR2LPEciL0M^0bMJt90k{aun&>X+d>Y1I z4{PM{*i&E8!awCG<}u!!8}F0pO}zUTM#iV zcZsWSP1}>&UV+tb9AGX$v^7?XAGW!DW$n>X8vE2MQCjQ8Lk0g=g{HAurfK4>O*MI< zU=#*5?Kw@pE(phC>k(cwdPwlhi;oiE@n*PjXJg0=S1XTcnidzxe%sBgSh;}lLp=Tc zUE^IV2gY(?G`sqD|$8bzTj|~zpz(=|vRX)42kZVV+9Ze3=Kg@ ze3evXAQiSHi)~+5q$Ovny=2{a$X-&m0P>3>y&6XL%*mM^zfTA&a@8fd%6zB>W->Xs zgR10nhb|(m$5HcSJ)Gux-q!s{@`vTu1#(8usItka4O6}k0ZTY`j*mM6iWxr^BI`7_-whZ^sDH%l7rmX!2@G!N#wAgO`l12B3c#Zl?m4CiMz6 zuG3FjeuCCWQWrUy@y+M3U6UYI#U-ZG$&)rEq8_eVL*0&SBz5K<8`9Ilf=V`rFFo4i&$r0QFUOwz z2yLcU?jSwXL#st?^1KE(=3F7Mo=qs{{1-6Yg4v{I%Xw%buXAP&llWl@PV7A6$DBT} z^s`T;SwA_7rV6^UE@$Z5O+l!9-3HAyOL$W_;G%Ky@rtjvOVPJC9g9~4W&Lqy9>OR*_EUH; zD(22mrH`*G$0lxmlCio?^m+C_e*bM-$MC8){)SkQKXOvc08LaMQM>CjA;U;~ejMXf z>f!)TJ}Ff7EHrpqYdEmE9p|1iLOwM2D_*RI+@Dz$Jq*!^JPitk)7*g@V+#inh1ZJf ze+$&piY~m$X9A~1!?jYN9kaTs+g2AYTGW+r`cy>2`!_Dg6H%ixgfuY>nYf#Rk}@oj z>@}!f**`;Uu{Ww&Q4~+DAV$v)U0U>pp9675v^NHltiRpG6r``=5sI$cj@=2V`*dX} zif1$uWe`Mt{&7%_ljvO{#4vrp`TXq9O%{wfD>)>hT6{bQVV-BrmXQGL@TLj)yg~v< zRFlj-!|rf0(3vIT(+I?iTZYyWbSF*>CFH0KKsx(`ppL|@{f2BtK&P-Iffx--1cgK- zsR0+7H*$y}d@*km!$}ZImHE|e1msc@!{fdTZy+(2YT-b$#aGxFCj(QMMc_L9qX zF^pz8IO7Rm&yr}9|IPs@0PqB7?UlM6i%9ntCD0gTplo3V7Nh-baJpE60oCPV994sR zt-U3Fc?_Y$Q1(ag;K$D$=kRlD?DQr8Y78qz0^Qm`YqC<|1qp1e<@Y$?(cLJ=3( zuByg!dfMsw<69O|TD+z_ycnShe}A0vfoTMqS2-q2yU+><5FJkE_Shdzq_2x2^A}3^ z8}}o2KT?EcSzE?D8eisjVQ%Q_ssyXz+1MZO6*71QHVOZ+X`q4TDpf&YcV@bOZtNAf zsw&P$ZHCI-L-D)Pz}qHil?`A3c(=K^r&tU$Re)2d2q-UO*E$$7iW0=5LH zQo2dhV*sRl8~k`WtLce(K5v+Aoy_E<)gaI2KnbfN| ziyO*naShVhoH!@>bb zFz#7_qs1EiX=Z-_tVAPp#20(WCUBJPYLvS7Z?@H_57z}U>^LN}u4e-0^;yA=i}fIfkM zr?3;{o4%t_1PDUNc6Ax&v4V=F6Ai(em^F^Y6^Rk4{C&b|ykm>GVAqR&X%pJ7iOvVd z7{WQ9V{a(4Eu86MfigdIq(np@IZEKq5AI*!UaqY6q{)Ls-m*OrcL}Qt7ey&1Q_u9n&jP*Ou8Q)s#dDk8V_$ z0hD)ibd*ZO=g#E2)$Ox>doYganCAo^5*Qg9>sCA8rCZJ$MQw!(jeDlH)-O~Y3jR02 zY$j%qS(@wJKdtT%LzfSwqd+CKeNnBhrS<6r=x(~#5V6&Xmp0}e3o#B2lOhW|lQc{- zE1CgP?wZA0dKE}gnY`fU205{3)a_6gce5^i?DJ1doITCt2Uc#wBrNX(^Brs^B<Ill^}1iN`A|fcGkBV8-%n9EI16tfKXM z+)|wv{j{j084L#v{D_2MIWtG);A_BtEf=2yWt7z>R)ld78`5gz#oZi`E@^amK5U!k z`lM*L+ULvU(au2DES*6DMz6UpdCIj{{#UvN##)6;4DlG@27OrM{1_D_Bhxr>w?2Z& zXpqNWmril$#WA3448`Ti;5V@#^q(*!Ly+sUKq!ypuP>=2oH&T4G|BM8F@$fdU{HCh zzCeEt{!QmQ^902~7cmOw)>+q8HCYGQMx7mAwk;Rfi~yMw_8_7-kn;)IgOW`PJK% 
z{E-7gAR7o%+(nc8H=(hSO#=WCl7J?(gVW(eS!HtvY{O8*5PcHTS!Wwn*;#*W<0-Q1 zs&=W&hX-&?yNy-{(#J4j-)+~+bxlp%)QT(UAYnr$oU9RQ<+NxpnXr`YI@U#LMcZ6H z561vl(;>lsH?cGh3GpOU-3rm#!b*VzVCIQ%`9MWsG5Ab>bsbj(*NeZ-C z6}Hb>BrQCV7It@h7|sEZ&k=uIqm9a1UDaLS7larCSK9K6wU%E;R%CRV$);m+uH|N= z57}ukNJM>IUL+%bw5k{^m#PUnZ~xLJ&aQO>90+K!O4}N`J@J(YU_j+obT}Y*z_a}s z+||%W;m|48>FDXd4w9uW>3PX1(&mwVfrkDAUxSut(HyVx|MubKNno=4h)_|%$5R=% z0G?*7sIah9V`Qm4j?RN(i+Vyr>HmJes+>16+lfGZ6~G&~oR1a$#gqPMRoC+fkRJ=S{M9pY*?+w7=S>y%YDti}Y-={Wv{3q%>bduQAYDg84ENyJ58 zlX7TUYWOpQ?F1VPR>Yr}-&4im{#dbE4>-CW^hMZR7^GO$1M4Dy^G1lf+en{_ zVCKJzW9N9DQeRH{{cAW~ZL)rVY$F2|&8Y&IJDs(rs66+RptIH{E-&Mf1=~Ro^w+?j zf*))zi9ZE)092z`r^}PwO}kL3?A!nw%8)T+b^UY3(xZkA*i0e^T0gZ$LlIr5sHj;W zD^=gSNlu+06tlnKUl`luH{Fzg1{_#pr!n&1qPSH62HIJ!Gj99xiIAEU^MPMqCWCk`WaZWfqpWZKpmxu&gOkuI_mTmn=tb zg1_zAEWX^ju?wkF;_m_X_HF^g~M0}-IhNL3=eI*A`)2H;x8(?4lwyjp7k$HQ%LIzmTkI2Z#Heg$yy^<{z za${s9Ot}IC-=r12(~LX{`4hln5TfB7XF1Gs9#t#UeDXk6s4?(}6+ZW~KXeLEcn%cG zjg{dGX48HsW&6tuVdLp$RDe$BfL8PXfLh#cdR_6KlN7XjzpyM>1cL!!*dZHUG#b8& zg3kXPzQ$obubAui@1@Jb!$Zg2y%kOMv1QXU(pp92VZ(xrhS_?`f~`5xhl<)bhDhK7 z{KkKVX-=m|DS69n%g6pd>Ra&R9{uUE#DS6OxbK|awRps(~ac~;;LxBdv(ed>&#sO2MRcP0)S`-pSvHKmvUABP{kp16l zI<2bT`(4BmITR>I?2v+IjqbA3A5tF1!dBb`j7TgzTd|}Uj}-w9n3jdav*W7bx>(WW z^>nGx%7WQ3a=zZC9G&yXE01G0aEMXzGdIQGFC)PMb{on*|69Lh5fG?r%_i}n8Ilh5 zeP761d_O#==*d#*>dEkcmq{<}Qrw+bC7(w*gYe+eo4nz5)nyOtFrWALi`W4S84bb2 zHiw8mDL^A(hOQw(2Q6d;s<;DUpj(;?$pFx|uII~r8Pa^$y^!V1#>4jP@_KFPCUqJZ zz^Fo2O-+pyN^s2q4WE>&kvq?Qu-4Fo!)XM7r_pIO9$SMk<^TQna+JH}0i&Y|oHL`R zUNC7_rvcq&fh?!z4#5YmS31YgV%{4~$cqJ!fB=(U?z`O!{s6>3uGnv7^latVc+t>3 zThfUd;dLkW=%@RpS2OvL=YSucb$Z+Z9lqf@qKjYMFBC_L3xC8B5F4n0K?k$xbVTR@ z_5BdaTJ+@*t+MmJzP@W9`|>3(J5OcQw_7eo`zj7=Kan)B%p>g{IW(tT-LHH`4Z$Cv zSDJ)F+7#w_oFBFgfJIl)hK+8U9zT4}mzgi0Z}zf&pp{=p@?^(+^<~POq^0Ih0h0&A z&LVHqH60N~OvAaG;=QW$J}F9V)od^;b8>WS?fJO)mGwipMz3o*PJ-Yy6c`mK62rsm zlKpQrF!R|eWdV-~!q)em&A}n9q7W_})~-@Z23}uE(=a|_=vJYQyA_qx#N{Lm5##DX z+PED@rs2t4Q4hEfU*Ov*sG|JVabZ6b*02tNA#tp5SUqxxMVjLvfJ-Z|Lx`a% zl&0^zB@&x4=JkI5=WNH6A$m=A>6WQ(4lliz+Q#tB=KtKSYg3q_3tT)`a6#E}S zF*Qo0;cZSw{WU%RR>9Xqe1KD|D&=`SZpoJDB&Camj|f&TeSLjNZ-Nua=b0WtabkOy zvfTY^EGk0L6=cjOGrny3JZ(s3)dx5}j1RY^@1iNdv4PD48=kD~u1 zv5qlh6GHX^GKuY~md({#?-$46H#r*WVgU>^wElc$0DPagG`)X#*r2qVy1JEfd>R09 z1(J1vQNOFT7DfUdXN%m1u#dH0JqH==fZo@ALjfYiUnD{v`Q{RVUFC0eS&cGW@mOiNBuwC7r384ks4kGykyi0{^bGe{>Xk7~MV`En@1%)^~ z+o$JecRyerBD(j|_ppe3;fHPyCNVKGFoV-UK|Y9qiCKoa|09;@pfjVk|0{w%Fc1TR zeO+E&CaC~P8&KA=Q^x_7w>BVFK>)5vIvRwsPx&)PC6~(~a6p_a#&~!*foerD8W1rr z|DK*t)%ul<>50Z*EsrH$m($>n&1wiCut0)_O(3|9UaT~PF)>g=@f43XTg-}VG}~%3 z2nud-l8|hF2Q>Qdgk*YrJP05ZbfCcWr4p{TdUDs=U1$T=G4oGjXn|pcjbb@`yX)<~ zK;W0)(kJC{or!vnH#^-&qFS_p(}Ds1F&7L>*ygl!_|-8Mr*(TQ*pM^n91!R7!a_2; z%k{Pl0H7Cj_dI>@?b?(l=!JEG|?aI(Es8W*W7X z!OO_i;YF7sVW6V+0WsNdRPF^Z3~2*DB`972Fetu&4M_iO`o~s)2^tdk6)W&cg@KJN z*k&{Y4}A+rsYG}}R_ED6VQ?^1HNdUwC@G;$ItZe4QcDmK z5`G;|p$`Jw(6fJMAS_-S^Ydle4@2MQ0W-{%d)5|3+6Br>O$}?Yv4ZKR06>0H0&lzc znTg3X<=hn+OoG+5{UawOC3MA(b9{WCm=Mr&)UT&W|s$AMIP2eATClY4qIr3Xm6(V|bpd?c25E4VXw zFCK6Liu@(|_spyLBFbh60W6^A60FE!Iw1 zu+Wbq0wp7(Kb{k5%;#tF**qKgSXhsd?w+ZCSx>3-H?7IIMulT=?P!wA*#rfN^?V*c z`o4hld;Ql)pctCY6`a;$-dh4vpdzpIj8ZErP&wvOo$(JkCsiYBUBn>S3^r;qZf=hs z0Nv5BQM8NGhIeFVRtT~nVN3;!(J#P=jgAfhGLJtPTLDT0cdR_F=cVr_CAmq*-UDUD z!06lSu~|yQ!6lObPD98^-MZIXosdCC)XXA<>%t^Q{5x zz7m;OfB;`i=M6}LVC+^K!=)|(9Au5q4sgs%GyOm9Xv1rDMb1D+L9xAp2M{e}<)v<) zw;`Y=AlJwP66A3*7d;)aoyta9)l@K!xVbTD%Oa{c0UV`ZP%I|Z0GM)9u!(OZ) zjiPuee97+&stuPV-Su+Sie<~}oSbUU&yOc&b)vKNJ>ZF2X@L=+vb*DKZ1cXcBHprl zIoUuNC7lY;A40vkZXd7SbaOzEH@<*55c}N6Zxk>{-AoGtOpcB8f%^MpTZO8v0>bqnd 
z0V7fWYq!?z!&`m=pLDDeH5WzA!jf>d(ZR{-dahAC^>n_H*d^$EJY$H9iz_ZHEF2YX zpVEYQ?MQv)N>QSk|A(~Ip$v4Y6e7}h7;CKj8xUO#pjJ1=5zq$PR;$#=Z}PP>TvUF*Nb6~2Tg<(XVrP_ejxB+l{${)hm7~UirzUkE8Q5NR1 z5S{~I1bdrCGt4_>KXV1H0cJP(a;wMB&C=46)0-nPD z(_+MJpcGU4YkQsR`x@SwdHiSztmgVcnQ|M@B+^&`x*BGXyTbmBPDfL%fCWwAGa2-j z0U=)qL`2@DRV7g}OMec@z|zQHRsfB{7O6VqBeiW7MjSdcbGdlpe1L-|3L>z*0_Nkh zfztMtPu8(-;GyHH+t`1z-KF9cD0QUaw#CGvMKr#(cs<$K{RamJ!=WowsRQbtI6gLJ zD5zg=A~g)$Q11Cw&*#g+f=Vw|Uz&D}3XLW)%S10L(1w%G`-Rn8gS0GB5Kr%1YRH9% z&BHl67QUUs>&6iH_6`tbxpa(-)4g4Tola19A5U8~d>*%jr{^mTX{l3~7#JvS=o^|m zKCf&oK<7vtAm}wb_JF=qD{!BgALmU=dNN+S;gp9@fQ^yR(w?>K@0$o=5_b(t7bS%J zW-_5vzSHG-*avfb`bIJ?=>%uhmK}}Igv+-rW}mwEVZ6zWl{%5hdG$X5%>y$0w>RE+ zqu|1Y3-fQ>xRHi?_V>U4-7cx$efM4U&6_tBVh?p9BF`XHF&}u~0p-Af1DR_7$}6w1 zupQD@Uwswz+;h*38#{LF)@#4kG$&)9eq*q|!#LAT`v12XkZTHSQ z@1)G1KY#MHY13YxFk!;QK7IP!LRCLJJe)~=v9b;&e;GQcR4UaiF=>a*oXUAuOv3knKY6&cyxL`|0F&!5k2+_+IOckWzOe0)6f z6r(zd&O97O0RZZE-+edw`RAX%=b2}oS@ihhj~^T}W=wI99z9ChwQC3H;KNWJwmrfg z_L!-M;bV^D(B5S!peRLA8^DxZxi)|RHJME1X0y4{!uKTrOHvujcG0B3!$kygiFgg{ zt;Vj8>A~2%0S~4gJ$k^HF=Jr<{P`to*RCzzzkmP1Lx&D6Dl9C#2LPh+-kx~k37i>j zk9V(Ly;_AfcVS_n0&Q+0qGm^@PMzZ3c;gN3x#yl!VjJOvgaiisg9i_CfBDN_xXF_z zv*7>$pgn&4c*NUpzuj%ejvdohty;Bg_Uze*$BY?sdf2dG-wqfspbSBlo14pQUIbl2 zLIPmft-rrNEA&8fKs*qtT<9c30HcIQHM?cYmhAtax}BY!4LJXM(4axgrk*ruQrWzD z^S*uNnP*PF_S$QQ3knLBUAuN|+CTsK&u*oqr4jg<`}OOG>Nl$llYl*A#thY-J$rE4 zsPg*t>onbj$h&Cu>ebl%0K;>bji*?;bg63W*s<8Bo`sLlyS!`HE^hPY&D^Y6vpBVy z`TGz&z5vkb(xpo&M~)mBvT4(%8H*P$UUc7m_dWga!w>IVvSi8GrAwE7vv~31e=l6P z@VD8sXP3^JHLHB~?Ai8(-~ai~e=c9TbZO}mPdxG4vSrKuy?XWPZ`Q0?b9Uv*l{=q( z_SvVmY}vBtqmMqC@xu>43=!4tR(Ngv3>alV6&%L|<0Mf`R>9AloSdv&wrrW|@ZrO% kqM{-kLP-7BIQP^412$OIqRpvC8vp Date: Wed, 26 Jul 2023 23:02:32 -0700 Subject: [PATCH 03/13] fixing merge conflict. --- README.md | 316 ------------------------------------------------------ 1 file changed, 316 deletions(-) diff --git a/README.md b/README.md index eea9cee4..af8a4dc8 100644 --- a/README.md +++ b/README.md @@ -1,318 +1,3 @@ -<<<<<<< HEAD -# Movement VM - -The Move programming language poses numerous benefits to builders including direct -interaction with digital assets through custom resource types, flexibility with transaction script -declaration, on-chain verification, and bytecode safety privileges. However, the benefits of Move -are currently limited to Aptos and Sui, creating a divide for builders who also want to reap the -benefits of brilliant platforms like Avalanche. The Movement MoveVM is designed for the -modern-day Avalanche subnet, allowing users to seamlessly interact with and build on the Move -language underneath the established Avalanche umbrella. Through Avalanche warp messaging, -the Movement subnet will fundamentally be the first L1 built on Avalanche, creating a moat -around the Move language by launching multiple blockchains to adapt to Aptos MoveVM as -well as Sui MoveVM. Through multi-chain dynamic load balancing to address throughput -bottlenecks and parallel processing on top of the Avalanche consensus layer, Movement will be -able to hit 160,000+ theoretical TPS as the project scales to provide much needed performance to -protocols. In addition, Move bytecode verifiers and interpreters provide native solvency for the -reentrancy attacks and security woes that have plagued Solidity developers for years, resulting in -$3 billion lost last year. Movement will be the hub of smart contract development providing -performance to Aptos developers and security for Avalanche protocols. 
- -## Status -`Movement VM` is considered **ALPHA** software and is not safe to use in -production. The framework is under active development and may change -significantly over the coming months as its modules are optimized and -audited. - -We created the `Movement VM` with Avalanche's [`hypersdk`](https://github.com/ava-labs/hypersdk) -to test out our design decisions and abstractions. - -## Terminology -* `aptos-vm`: our first implementation of move execution layer, based on [`Aptos Move`](https://github.com/aptos-labs/aptos-core) -* `smart contract`: refers to smart contracts using [`Move`](https://github.com/aptos-labs/move) language -* `transactions`: refers to aptos transaction built by [`aptos-client`](https://github.com/aptos-labs/aptos-core) - -**Note** - -The `Movement VM` is still under very early stage, there will be some testing purpose codes -in the repo for development. Also the compiling & testinng actions may fail because of frequently -commit and integration. - - -## Features -### Movement CLI -To allow builders to develop on the subnet locally, we offer a Movement command line interface -where users can launch smart contracts, debug, and operate nodes. Our website will feature an -in-depth documentation guide to show builders how to utilize the CLI to make requests to the -Avalanche subnet. Some critical functions of the CLI include `run` (executes a Move function), -`download` (downloads a package and stores it in a directory), `init` (creates a new Move package -at the given location), `prove` (validates Move package), `publish` (pushes Move modules in a -package to the Movement MoveVM subnet), and `test` (runs Move unit tests for a particular -package). The CLI will primarily be used for running local testnets, monitoring and executing -transactions between accounts, publishing Move packages, and debugging modules. - -### Move Smart Contracts -For the enthusiastic Move developer looking to bring their talents to the Avalanche network, we -provide seamless integration with our MoveVM, bridging the gap between `Aptos` developers and -`Avalanche` builders. The Movement subnet will utilize the MoveVM for all operations: `account -management`, `Move module publishing`, and `fund transfer`. After a user writes the Move contract -designating the functionality of the module, the user then compiles the module through the CLI -preparing it for deployment. Finally, the user publishes the module to the designated account,s -storing it on the Movement `Move subnet` running on the `Avalanche` network. Developers can -then write smart contracts interacting with these published modules or use the CLI to see the -transaction and access the data. Move modules have built-in entry functions which are access -points that allow other transactions to make requests to the modules. The Movement team will -have detailed documentation on the website in the coming months outlining how to learn the -Move language, implement smart contracts, and connect to the Avalanche network. - -### Movement SDK -For a builder looking to reap the benefits of the MoveVM without the technical expertise to write -Move smart contracts, the Movement TypeScript SDK is key. Bridging the gap between Web2 -builders and blockchain technologies, Movement will allow developers to seamlessly interact -with the Move subnet without writing a single line of Move code. Furthermore, the SDK is not -just limited to `Typescript`. 
In the future, we will launch integrations like `Python` and `Rust` to bring -in developers from different realms into the ecosystem. - -## Movement VM Transactions -### Transaction -```rust -pub struct RawTransaction { - /// Sender's address. - sender: AccountAddress, - - /// Sequence number of this transaction. This must match the sequence number - /// stored in the sender's account at the time the transaction executes. - sequence_number: u64, - - /// The transaction payload, e.g., a script to execute. - payload: TransactionPayload, - - /// Maximal total gas to spend for this transaction. - max_gas_amount: u64, - - /// Price to be paid per gas unit. - gas_unit_price: u64, - - /// Expiration timestamp for this transaction, represented - /// as seconds from the Unix Epoch. If the current blockchain timestamp - /// is greater than or equal to this time, then the transaction has - /// expired and will be discarded. This can be set to a large value far - /// in the future to indicate that a transaction does not expire. - expiration_timestamp_secs: u64, - - /// Chain ID of the Movement VM network this transaction is intended for. - chain_id: ChainId, -} -``` - -### TransactionPayload -```rust -pub enum TransactionPayload { - /// A transaction that executes code. - Script(Script), - /// A transaction that publishes multiple modules at the same time. - ModuleBundle(ModuleBundle), - /// A transaction that executes an existing entry function published on-chain. - EntryFunction(EntryFunction), -} -``` - -`Movement VM` will have three different transactions supported, this might change due -to the progress of the development. - -## Movement VM Blocks -### BlockInfo -```rust -pub struct BlockInfo { - /// The epoch to which the block belongs. - epoch: u64, - /// The consensus protocol is executed in rounds, which monotonically increase per epoch. - round: Round, - /// The identifier (hash) of the block. - id: HashValue, - /// The accumulator root hash after executing this block. - executed_state_id: HashValue, - /// The version of the latest transaction after executing this block. - version: Version, - /// The timestamp this block was proposed by a proposer. - timestamp_usecs: u64, - /// An optional field containing the next epoch info - next_epoch_state: Option, -} -``` - -### BlockMetadata -```rust -/// Struct that will be persisted on chain to store the information of the current block. -/// -/// The flow will look like following: -/// 1. The executor will pass this struct to VM at the end of a block proposal. -/// 2. The VM will use this struct to create a special system transaction that will emit an event -/// represents the information of the current block. This transaction can't -/// be emitted by regular users and is generated by each of the validators on the fly. Such -/// transaction will be executed before all of the user-submitted transactions in the blocks. -/// 3. Once that special resource is modified, the other user transactions can read the consensus -/// info by calling into the read method of that resource, which would thus give users the -/// information such as the current leader. 
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -pub struct BlockMetadata { - id: HashValue, - epoch: u64, - round: u64, - proposer: AccountAddress, - #[serde(with = "serde_bytes")] - previous_block_votes_bitvec: Vec, - failed_proposer_indices: Vec, - timestamp_usecs: u64, -} -``` - -## Avalanche HyperVM SDK -### Controller -```golang -type Controller interface { - Initialize( - inner *VM, - snowCtx *snow.Context, - gatherer metrics.MultiGatherer, - genesisBytes []byte, - upgradeBytes []byte, - configBytes []byte, - ) ( - config Config, - genesis Genesis, - builder builder.Builder, - gossiper gossiper.Gossiper, - blockDB KVDatabase, - stateDB database.Database, - handler Handlers, - actionRegistry chain.ActionRegistry, - authRegistry chain.AuthRegistry, - err error, - ) - - Rules(t int64) chain.Rules - - Accepted(ctx context.Context, blk *chain.StatelessBlock) error - Rejected(ctx context.Context, blk *chain.StatelessBlock) error -} -``` - -The controller interface is the key component that integrates Avalanche Network -and Movement VM. - -You can view what this looks like in the indexvm by clicking this [`link`](https://github.com/ava-labs/indexvm/blob/main/controller/controller.go). - -### Genesis -```golang -type Genesis interface { - GetHRP() string - Load(context.Context, atrace.Tracer, chain.Database) error -} -``` - -Genesis is typically the list of initial balances that accounts have at the start -of the network and a list of default configurations that exist at the start of the -network (fee price, enabled txs, etc.). The serialized genesis of any hyperchain is -persisted on the P-Chain for anyone to see when the network is created. - -You can view what this looks like in the indexvm by clicking this [`link`](https://github.com/ava-labs/indexvm/blob/main/genesis/genesis.go). - -### Action -```golang -type Action interface { - MaxUnits(Rules) uint64 - ValidRange(Rules) (start int64, end int64) - - StateKeys(Auth) [][]byte - Execute(ctx context.Context, r Rules, db Database, timestamp int64, auth Auth, txID ids.ID) (result *Result, err error) - - Marshal(p *codec.Packer) -} -``` - -Actions are the heart of Movement VM. They define how users interact with the blockchain -runtime. - -### Auth -```golang -type Auth interface { - MaxUnits(Rules) uint64 - ValidRange(Rules) (start int64, end int64) - - StateKeys() [][]byte - AsyncVerify(msg []byte) error - Verify(ctx context.Context, r Rules, db Database, action Action) (units uint64, err error) - - Payer() []byte - CanDeduct(ctx context.Context, db Database, amount uint64) error - Deduct(ctx context.Context, db Database, amount uint64) error - Refund(ctx context.Context, db Database, amount uint64) error - - Marshal(p *codec.Packer) -} -``` - -`Movement VM` will need to implement the `Auth` interface of `hypersdk` to bridge Aptos's ED25519 -signature verification. - -## Running the `Movement VM` - -1. set up avalanch subnet env, you can refer to -[avalanche-network-runner](https://github.com/ava-labs/avalanche-network-runner#network-runner-rpc-server-timestampvm-example) and [timestampvm-rs](https://github.com/ava-labs/timestampvm-rs) - -2. clone this repo and build subnet binary -``` -cd movement-subnet/vm/aptos-vm/subnet -cargo build -``` -3 start network and install subnet. 
move this subnet binary to /home/ubuntu/aavx-ruuner/plugins and create file name genesis.json - -``` -# start runner -avalanche-network-runner server --log-level debug --port=":8080" --grpc-gateway-port=":8081" - -# install subnet -curl -X POST -k http://localhost:8081/v1/control/start -d '{"execPath":"'${AVALANCHEGO_EXEC_PATH}'","numNodes":5,"logLevel":"INFO","pluginDir":"/home/ubuntu/aavx-ruuner/plugins","blockchainSpecs":[{"vmName":"subnet","genesis":"/home/ubuntu/aavx-ruuner/genesis.json","blockchain_alias":"timestamp"}]}' - -``` - -4 create account and faucet -``` -# create account -curl -X POST --data '{ - "jsonrpc": "2.0", - "id" : 1, - "method" : "aptosvm.createAccount", - "params" : [{"account":"0x61c66dea4265716facb3484ac5e2f366cf6c6e52e58120626f3434babb375eb1"}] -}' -H 'content-type:application/json;' 127.0.0.1:9650/ext/bc/241UUZZ1gqpynKomM7DPJP4sm91XT8zwi3ttexHMFs8DznzVDs/rpc - - -{"jsonrpc":"2.0","result":{"data":"99ddf6ae010fc534e848b5fdf9d3cb5d49407de99db36415c022e6e110e4b121"},"id":1} - - -# faucet aptos token -curl -X POST --data '{ - "jsonrpc": "2.0", - "id" : 1, - "method" : "aptosvm.faucet", - "params" : [{"account":"7e95b0c90bf89fab82a8f98fbf8062f7bed3ca721aaa2d91dbb712a5b7e8ea6a"}] -}' -H 'content-type:application/json;' 127.0.0.1:9650/ext/bc/241UUZZ1gqpynKomM7DPJP4sm91XT8zwi3ttexHMFs8DznzVDs/rpc - -{"jsonrpc":"2.0","result":{"data":"3b1d120f5cb3c2ab25062541193b2e72dbbb4f2dafca0020c9375a68c33a918d"},"id":1} - -# check balance -curl -X POST --data '{ - "jsonrpc": "2.0", - "id" : 1, - "method" : "aptosvm.getBalance", - "params" : [{"account":"7e95b0c90bf89fab82a8f98fbf8062f7bed3ca721aaa2d91dbb712a5b7e8ea6a"}] -}' -H 'content-type:application/json;' 127.0.0.1:9650/ext/bc/241UUZZ1gqpynKomM7DPJP4sm91XT8zwi3ttexHMFs8DznzVDs/rpc - -{"jsonrpc":"2.0","result":{"data":100000000000},"id":1} - -``` -=======
Project Logo @@ -384,4 +69,3 @@ Please submit and review/comment on issues before contributing. Review [CONTRIBU This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details. ->>>>>>> fix From 533e5421dc624fa9163ded9ac270346c5ab20ae7 Mon Sep 17 00:00:00 2001 From: Liam Monninger <79056955+l-monninger@users.noreply.github.com> Date: Thu, 27 Jul 2023 07:31:41 -0700 Subject: [PATCH 04/13] Update check.yml --- .github/workflows/check.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/check.yml b/.github/workflows/check.yml index 9ee4c8d2..8e259b71 100644 --- a/.github/workflows/check.yml +++ b/.github/workflows/check.yml @@ -19,9 +19,9 @@ jobs: uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: stable + toolchain: nightly override: true - name: Run Cargo Check - working-directory: ./subnet # Ensures we're in the correct directory - run: cargo check + working-directory: subnet # Ensures we're in the correct directory + run: RUSTFLAGS="--cfg tokio_unstable" cargo check From a5d71f0e01c2c6e525a2ecc5bcf46829b431e853 Mon Sep 17 00:00:00 2001 From: Liam Monninger Date: Thu, 27 Jul 2023 10:16:11 -0700 Subject: [PATCH 05/13] testing --- .github/workflows/check.yml | 2 +- .github/workflows/coverage.yml | 8 +++-- .github/workflows/mirror.yml | 9 +++--- .github/workflows/release.yml | 11 ++++--- .github/workflows/rust.yml | 29 ------------------- .github/workflows/test.yml | 10 +++---- .../workflows/platform => platform}/dev.yml | 0 .../workflows/platform => platform}/prod.yml | 0 .../workflows/platform => platform}/test.yml | 0 9 files changed, 21 insertions(+), 48 deletions(-) delete mode 100644 .github/workflows/rust.yml rename {.github/workflows/platform => platform}/dev.yml (100%) rename {.github/workflows/platform => platform}/prod.yml (100%) rename {.github/workflows/platform => platform}/test.yml (100%) diff --git a/.github/workflows/check.yml b/.github/workflows/check.yml index 9ee4c8d2..6ab02b46 100644 --- a/.github/workflows/check.yml +++ b/.github/workflows/check.yml @@ -23,5 +23,5 @@ jobs: override: true - name: Run Cargo Check - working-directory: ./subnet # Ensures we're in the correct directory + working-directory: ./m1 # Ensures we're in the correct directory run: cargo check diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index b983ae85..3d712151 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -1,13 +1,15 @@ name: Code Coverage on: - push: - branches: - - main + workflow_run: + workflows: ["Cargo Check"] + types: + - completed jobs: coverage: runs-on: ubuntu-latest + if: ${{ github.event.workflow_run.conclusion == 'success' }} steps: - name: Checkout Repository uses: actions/checkout@v2 diff --git a/.github/workflows/mirror.yml b/.github/workflows/mirror.yml index a7700c0b..2503efea 100644 --- a/.github/workflows/mirror.yml +++ b/.github/workflows/mirror.yml @@ -1,14 +1,15 @@ name: Force Push to Movement Framework Mirror on: - push: - branches: - - main # Change this to the branch you want to trigger the action on + workflow_run: + workflows: ["Cargo Check"] + types: + - completed jobs: force-push: runs-on: ubuntu-latest - + if: ${{ github.event.workflow_run.conclusion == 'success' }} steps: - name: Checkout repository uses: actions/checkout@v2 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 4cec4e8e..ade40ecf 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,14 +1,15 
@@ name: Binary Release on: - push: - branches: - - main + workflow_run: + workflows: ["Cargo Check"] + types: + - completed jobs: build: runs-on: ubuntu-latest - + if: ${{ github.event.workflow_run.conclusion == 'success' }} steps: - name: Checkout Repository uses: actions/checkout@v2 @@ -47,8 +48,6 @@ jobs: - name: Set up cross-compilation environment uses: crazy-max/ghaction-docker-buildx@v3 - with: - version: latest - name: Build binaries for macOS run: | diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml deleted file mode 100644 index d6a6d319..00000000 --- a/.github/workflows/rust.yml +++ /dev/null @@ -1,29 +0,0 @@ -name: Rust - -on: - push: - branches: [ "main" ] - pull_request: - branches: [ "main" ] - -env: - CARGO_TERM_COLOR: always - -jobs: - build: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - - name: Install Rust - uses: actions-rs/toolchain@v1 - with: - toolchain: stable - profile: minimal - override: true - - - name: Check Rust version - run: rustc --version - - - name: current directory - run: echo ${PWD} diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 4a662d6c..cfe47e5d 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -1,15 +1,15 @@ name: Rust Tests on: - push: - branches: - - main - pull_request: + workflow_run: + workflows: ["Cargo Check"] + types: + - completed jobs: test: runs-on: ubuntu-latest - + if: ${{ github.event.workflow_run.conclusion == 'success' }} steps: - name: Checkout code uses: actions/checkout@v2 diff --git a/.github/workflows/platform/dev.yml b/platform/dev.yml similarity index 100% rename from .github/workflows/platform/dev.yml rename to platform/dev.yml diff --git a/.github/workflows/platform/prod.yml b/platform/prod.yml similarity index 100% rename from .github/workflows/platform/prod.yml rename to platform/prod.yml diff --git a/.github/workflows/platform/test.yml b/platform/test.yml similarity index 100% rename from .github/workflows/platform/test.yml rename to platform/test.yml From 6c9a7c59b428761ba2d94f2a8780b34e59764c3b Mon Sep 17 00:00:00 2001 From: Liam Monninger Date: Thu, 27 Jul 2023 10:33:23 -0700 Subject: [PATCH 06/13] simplifying. 
--- .github/workflows/check.yml | 2 +- .github/workflows/coverage.yml | 2 +- .github/workflows/mirror.yml | 26 -------------------------- .github/workflows/release.yml | 2 +- 4 files changed, 3 insertions(+), 29 deletions(-) delete mode 100644 .github/workflows/mirror.yml diff --git a/.github/workflows/check.yml b/.github/workflows/check.yml index e397b207..cb36e4d8 100644 --- a/.github/workflows/check.yml +++ b/.github/workflows/check.yml @@ -24,4 +24,4 @@ jobs: - name: Run Cargo Check working-directory: ./m1 # Ensures we're in the correct directory - run: cargo check + run: RUSTFLAGS="--cfg tokio_unstable" cargo check diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index 1a99086e..3dc4ef29 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -20,7 +20,7 @@ jobs: uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: stable + toolchain: nightly components: rustfmt override: true diff --git a/.github/workflows/mirror.yml b/.github/workflows/mirror.yml deleted file mode 100644 index 2503efea..00000000 --- a/.github/workflows/mirror.yml +++ /dev/null @@ -1,26 +0,0 @@ -name: Force Push to Movement Framework Mirror - -on: - workflow_run: - workflows: ["Cargo Check"] - types: - - completed - -jobs: - force-push: - runs-on: ubuntu-latest - if: ${{ github.event.workflow_run.conclusion == 'success' }} - steps: - - name: Checkout repository - uses: actions/checkout@v2 - - - name: Pushes to another repository - uses: cpina/github-action-push-to-another-repository@main - env: - API_TOKEN_GITHUB: ${{ secrets.API_TOKEN_GITHUB }} - with: - source-directory: 'vm/aptos-vm/aptos-move' - destination-github-username: 'movemntdev' - destination-repository-name: 'movement-framework-mirror' - user-email: l.mak.monninger@gmail.com - target-branch: main # maybe this should be changed diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index ade40ecf..f7b0d51d 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -20,7 +20,7 @@ jobs: uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: stable + toolchain: nightly override: true - name: Install Dependencies From 272eedc271d066de52fcadbb1b7680ccc11b87ed Mon Sep 17 00:00:00 2001 From: Liam Monninger Date: Thu, 27 Jul 2023 10:37:38 -0700 Subject: [PATCH 07/13] caching reintroduced. 
--- .github/workflows/check.yml | 20 +++++++++++++++++++- platform/dev.yml | 0 platform/prod.yml | 0 platform/test.yml | 0 4 files changed, 19 insertions(+), 1 deletion(-) delete mode 100644 platform/dev.yml delete mode 100644 platform/prod.yml delete mode 100644 platform/test.yml diff --git a/.github/workflows/check.yml b/.github/workflows/check.yml index cb36e4d8..145ba63c 100644 --- a/.github/workflows/check.yml +++ b/.github/workflows/check.yml @@ -22,6 +22,24 @@ jobs: toolchain: nightly override: true + - name: Cache cargo registry + uses: actions/cache@v2 + with: + path: ~/.cargo/registry + key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }} + + - name: Cache cargo index + uses: actions/cache@v2 + with: + path: ~/.cargo/git + key: ${{ runner.os }}-cargo-git-${{ hashFiles('**/Cargo.lock') }} + + - name: Cache cargo build + uses: actions/cache@v2 + with: + path: ./m1/target + key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }} + - name: Run Cargo Check - working-directory: ./m1 # Ensures we're in the correct directory + working-directory: ./m1 run: RUSTFLAGS="--cfg tokio_unstable" cargo check diff --git a/platform/dev.yml b/platform/dev.yml deleted file mode 100644 index e69de29b..00000000 diff --git a/platform/prod.yml b/platform/prod.yml deleted file mode 100644 index e69de29b..00000000 diff --git a/platform/test.yml b/platform/test.yml deleted file mode 100644 index e69de29b..00000000 From 74c15080561df7ed3b1ec8168fe49b7a7eee404b Mon Sep 17 00:00:00 2001 From: Liam Monninger Date: Thu, 27 Jul 2023 11:04:02 -0700 Subject: [PATCH 08/13] adjusting kcov strategy. --- .github/workflows/coverage.yml | 18 ++++++++++++++++++ .github/workflows/release.yml | 18 ++++++++++++++++++ .github/workflows/test.yml | 20 ++++++++++++++++++++ 3 files changed, 56 insertions(+) diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index 3dc4ef29..62d35740 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -23,6 +23,24 @@ jobs: toolchain: nightly components: rustfmt override: true + + - name: Cache cargo registry + uses: actions/cache@v2 + with: + path: ~/.cargo/registry + key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }} + + - name: Cache cargo index + uses: actions/cache@v2 + with: + path: ~/.cargo/git + key: ${{ runner.os }}-cargo-git-${{ hashFiles('**/Cargo.lock') }} + + - name: Cache cargo build + uses: actions/cache@v2 + with: + path: ./m1/target + key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }} - name: Install kcov run: | diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index f7b0d51d..288d8b92 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -23,6 +23,24 @@ jobs: toolchain: nightly override: true + - name: Cache cargo registry + uses: actions/cache@v2 + with: + path: ~/.cargo/registry + key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }} + + - name: Cache cargo index + uses: actions/cache@v2 + with: + path: ~/.cargo/git + key: ${{ runner.os }}-cargo-git-${{ hashFiles('**/Cargo.lock') }} + + - name: Cache cargo build + uses: actions/cache@v2 + with: + path: ./m1/target + key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }} + - name: Install Dependencies run: | sudo apt-get update diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index e6c02b58..2abc6d1d 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -13,6 +13,8 
@@ jobs: steps: - name: Checkout code uses: actions/checkout@v2 + with: + submodules: 'recursive' - name: Set up Rust uses: actions-rs/toolchain@v1 @@ -20,6 +22,24 @@ jobs: profile: minimal toolchain: nightly + - name: Cache cargo registry + uses: actions/cache@v2 + with: + path: ~/.cargo/registry + key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }} + + - name: Cache cargo index + uses: actions/cache@v2 + with: + path: ~/.cargo/git + key: ${{ runner.os }}-cargo-git-${{ hashFiles('**/Cargo.lock') }} + + - name: Cache cargo build + uses: actions/cache@v2 + with: + path: ./m1/target + key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }} + - name: Build and Test Subnet run: | cd ./m1 From adf889df50664388e2e98711f7a403e031ede7cb Mon Sep 17 00:00:00 2001 From: Liam Monninger Date: Thu, 27 Jul 2023 11:44:37 -0700 Subject: [PATCH 09/13] contributors template. --- .github/mirror.sh | 22 -------------------- CODE_OF_CONDUCT.md | 43 +++++++++++++++++++++++++++++++++++++++ CONTRIBUTING.md | 44 ++++++++++++++++++++++++++++++++++++++++ LICENSE | 2 +- LICENSE.header | 2 +- PULL_REQUEST_TEMPLATE.md | 33 ++++++++++++++++++++++++++++++ 6 files changed, 122 insertions(+), 24 deletions(-) delete mode 100755 .github/mirror.sh create mode 100644 CODE_OF_CONDUCT.md create mode 100644 CONTRIBUTING.md create mode 100644 PULL_REQUEST_TEMPLATE.md diff --git a/.github/mirror.sh b/.github/mirror.sh deleted file mode 100755 index 9b826131..00000000 --- a/.github/mirror.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/bin/bash - -git config --global user.email "l.mak.monninger@gmail.com" -git config --global user.name "CI Bot" - -# Clone the Movement Framework Mirror repository -git clone https://github.com/movemntdev/movement-framework-mirror.git - -# Copy the aptos-move directory to the Movement Framework Mirror repository -cp -r vm/aptos-vm/aptos-move movement-framework-mirror/ - -# Change directory to the Movement Framework Mirror repository -cd movement-framework-mirror/ - -# Add all changes to Git -git add -A - -# Commit the changes -git commit -m "Mirror aptos-move directory from current repo" - -# Push the changes with force -git push --force diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 00000000..7f5e3190 --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,43 @@ +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to make participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at [INSERT EMAIL ADDRESS]. All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant](https://www.contributor-covenant.org/version/2/0/code_of_conduct.html), version 2.0, available at https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000..972456a4 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,44 @@ +# Contributing to Our Project + +Firstly, thank you for your interest in contributing to our project! Every contribution helps, and we truly appreciate the effort. This document explains our guidelines and the steps for contributing. Following these guidelines helps to communicate that you respect the time of the developers managing and developing this open source project. In return, they should reciprocate that respect in addressing your issue, assessing changes, and helping you finalize your pull requests. + +## Using GitHub Issues + +We use GitHub issues to track public bugs and suggest enhancements. 
These are a great place to ask questions and get help for the project. + +### Before Submitting a New Issue + +Before you submit an issue, please do a search in open issues to see if the issue or enhancement has already been filed. If it has and the issue is still open, add a comment to the existing issue instead of opening a new one. If you find a Closed issue that seems like it is the same thing that you're experiencing, open a new issue and include a link to the original issue. + +### How to Submit a New Issue + +1. Navigate to the main page of the repository. +2. Click the "Issues" tab. +3. Click the "New Issue" button. +4. Choose "Bug Report" to create a report to help us improve or "Feature Request" to suggest an idea for the project. +5. Use the provided template to create your issue. + +## Code Contributions + +Here's a quick guide on how to contribute code: + +1. Fork the repo. +2. Clone your fork down to your local environment. +3. Make your changes in your local environment. +4. Commit and push your changes to your fork. +5. Submit a pull request with a comprehensive description of the changes. +6. Wait for the maintainers to review your pull request and merge it. + +## Pull Requests + +When you submit a pull request, please make sure to link the issue you're addressing. For example, by writing "Resolves #123" in the pull request description, the issue #123 will automatically close when the pull request is merged. + +## Community + +You can chat with the core team on [insert communication platform]. Please use this community to ask for help, discuss features, and bounce ideas around. + +## Code of Conduct + +We have a [Code of Conduct](./CODE_OF_CONDUCT.md) in place. Please follow it in all your interactions with the project. + +Again, thank you for your contributions! We appreciate your work and are excited to see what you will bring to the project. diff --git a/LICENSE b/LICENSE index dc33b08c..5d5a068b 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ BSD 3-Clause License -Copyright (c) 2022, Ava Labs, Inc. +Copyright (c) 2023, Movement Labs, Inc. All rights reserved. Redistribution and use in source and binary forms, with or without diff --git a/LICENSE.header b/LICENSE.header index 6a6e5707..bac3732e 100644 --- a/LICENSE.header +++ b/LICENSE.header @@ -1,2 +1,2 @@ -Copyright (C) 2023, Ava Labs, Inc. All rights reserved. +Copyright (C) 2023, Movement Labs, Inc. All rights reserved. See the file LICENSE for licensing terms. diff --git a/PULL_REQUEST_TEMPLATE.md b/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 00000000..1ea7e89f --- /dev/null +++ b/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,33 @@ +## Pull Request + +### Description + +- [ ] This contribution represent a trivial change (skip the following sections). + +### Motivation and Context + + +### Changes Made + + +### Testing + + +### Screenshots (if applicable) + + +### Related Issues + + +### Checklist + +- [ ] I have read and followed the project's contributing guidelines. +- [ ] I have tested the changes thoroughly. +- [ ] My code follows the style and formatting guidelines of the project. +- [ ] I have updated the relevant documentation, if applicable. +- [ ] I have added tests, if appropriate. +- [ ] The code builds without any errors or warnings. +- [ ] All existing tests pass successfully. 
+ +### Additional Notes + \ No newline at end of file From cce5d0be757113d54174e4b525b2d4f34112b04f Mon Sep 17 00:00:00 2001 From: Liam Monninger <79056955+l-monninger@users.noreply.github.com> Date: Thu, 27 Jul 2023 12:13:54 -0700 Subject: [PATCH 10/13] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index af8a4dc8..f28a2645 100644 --- a/README.md +++ b/README.md @@ -67,5 +67,5 @@ Please submit and review/comment on issues before contributing. Review [CONTRIBU ## License -This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details. +This project is licensed under the BSD-3-Clause License - see the [LICENSE](LICENSE) file for details. From c0a4640ee4ee089c59cb430a4983eb95e47a5cbd Mon Sep 17 00:00:00 2001 From: Liam Monninger Date: Thu, 27 Jul 2023 12:16:04 -0700 Subject: [PATCH 11/13] readme changes. --- .github/workflows/release.yml | 22 +++++++++++----------- README.md | 8 ++++---- 2 files changed, 15 insertions(+), 15 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 288d8b92..bea0a652 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -52,9 +52,9 @@ jobs: chmod -R 755 ./scripts echo "yes" | ./scripts/dev_setup.sh - - name: Build subnet binaries + - name: Build binaries run: | - cd "$GITHUB_WORKSPACE/subnet" + cd "$GITHUB_WORKSPACE/m1" cargo build --release - name: Archive binaries @@ -62,14 +62,14 @@ jobs: with: name: binaries path: | - $GITHUB_WORKSPACE/subnet/target/release/* + $GITHUB_WORKSPACE/m1/target/release/* - name: Set up cross-compilation environment uses: crazy-max/ghaction-docker-buildx@v3 - name: Build binaries for macOS run: | - cd "$GITHUB_WORKSPACE/subnet" + cd "$GITHUB_WORKSPACE/m1" docker buildx create --name mybuilder docker buildx use mybuilder docker buildx inspect --bootstrap @@ -80,11 +80,11 @@ jobs: with: name: macos-binaries path: | - $GITHUB_WORKSPACE/subnet/target/x86_64-apple-darwin/release/* + $GITHUB_WORKSPACE/m1/target/x86_64-apple-darwin/release/* - name: Build binaries for Linux arm64 run: | - cd "$GITHUB_WORKSPACE/subnet" + cd "$GITHUB_WORKSPACE/m1" docker buildx create --name mybuilder docker buildx use mybuilder docker buildx inspect --bootstrap @@ -95,11 +95,11 @@ jobs: with: name: linux-arm64-binaries path: | - $GITHUB_WORKSPACE/subnet/target/aarch64-unknown-linux-gnu/release/* + $GITHUB_WORKSPACE/m1/target/aarch64-unknown-linux-gnu/release/* - name: Build binaries for Linux x86_64 run: | - cd "$GITHUB_WORKSPACE/subnet" + cd "$GITHUB_WORKSPACE/m1" docker buildx create --name mybuilder docker buildx use mybuilder docker buildx inspect --bootstrap @@ -110,11 +110,11 @@ jobs: with: name: linux-x86_64-binaries path: | - $GITHUB_WORKSPACE/subnet/target/x86_64-unknown-linux-gnu/release/* + $GITHUB_WORKSPACE/m1/target/x86_64-unknown-linux-gnu/release/* - name: Build binaries for Windows run: | - cd "$GITHUB_WORKSPACE/subnet" + cd "$GITHUB_WORKSPACE/m1" docker buildx create --name mybuilder docker buildx use mybuilder docker buildx inspect --bootstrap @@ -125,4 +125,4 @@ jobs: with: name: windows-binaries path: | - $GITHUB_WORKSPACE/subnet/target/x86_64-pc-windows-gnu/release/*.exe + $GITHUB_WORKSPACE/m1/target/x86_64-pc-windows-gnu/release/*.exe diff --git a/README.md b/README.md index af8a4dc8..1df135ee 100644 --- a/README.md +++ b/README.md @@ -3,13 +3,13 @@ # M1 -[![License](https://img.shields.io/badge/license-MIT-blue.svg)](https://opensource.org/licenses/MIT) 
+[![License](https://img.shields.io/badge/license-BSD-blue.svg)](https://opensource.org/license/bsd-3-clause/) [![Tests](https://img.shields.io/badge/tests-Passing-brightgreen)](#) [![Build Status](https://img.shields.io/badge/build-Passing-brightgreen)](#) [![Coverage](https://img.shields.io/codecov/c/github/username/project.svg)](https://codecov.io/gh/username/project) -[![Windows](https://img.shields.io/badge/Windows-Download-blue)](https://example.com/releases/windows) -[![macOS](https://img.shields.io/badge/macOS-Download-blue)](https://example.com/releases/macos) -[![Linux](https://img.shields.io/badge/Linux-Download-blue)](https://example.com/releases/linux) +[![Windows](https://img.shields.io/badge/Windows-Download-blue)](https://github.com/movemntdev/m1/releases) +[![macOS](https://img.shields.io/badge/macOS-Download-blue)](https://github.com/movemntdev/m1/releases) +[![Linux](https://img.shields.io/badge/Linux-Download-blue)](https://github.com/movemntdev/m1/releases) **An L1 for Move VM built on Avalanche.** From 63237eb88a1e4391195cb6d67f93c12776768280 Mon Sep 17 00:00:00 2001 From: Liam Monninger Date: Thu, 27 Jul 2023 12:21:05 -0700 Subject: [PATCH 12/13] reduce caching. --- .github/workflows/coverage.yml | 18 ---- .github/workflows/release.yml | 18 ---- .github/workflows/test.yml | 18 ---- scripts/install.sh | 154 +++++++++++++++++++++++++++++++++ scripts/movementctl.sh | 105 ++++++++++++++++++++++ 5 files changed, 259 insertions(+), 54 deletions(-) create mode 100644 scripts/install.sh create mode 100644 scripts/movementctl.sh diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index 62d35740..208394e2 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -24,24 +24,6 @@ jobs: components: rustfmt override: true - - name: Cache cargo registry - uses: actions/cache@v2 - with: - path: ~/.cargo/registry - key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }} - - - name: Cache cargo index - uses: actions/cache@v2 - with: - path: ~/.cargo/git - key: ${{ runner.os }}-cargo-git-${{ hashFiles('**/Cargo.lock') }} - - - name: Cache cargo build - uses: actions/cache@v2 - with: - path: ./m1/target - key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }} - - name: Install kcov run: | sudo apt-get update diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index bea0a652..a06c52df 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -23,24 +23,6 @@ jobs: toolchain: nightly override: true - - name: Cache cargo registry - uses: actions/cache@v2 - with: - path: ~/.cargo/registry - key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }} - - - name: Cache cargo index - uses: actions/cache@v2 - with: - path: ~/.cargo/git - key: ${{ runner.os }}-cargo-git-${{ hashFiles('**/Cargo.lock') }} - - - name: Cache cargo build - uses: actions/cache@v2 - with: - path: ./m1/target - key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }} - - name: Install Dependencies run: | sudo apt-get update diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 2abc6d1d..e217ade8 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -22,24 +22,6 @@ jobs: profile: minimal toolchain: nightly - - name: Cache cargo registry - uses: actions/cache@v2 - with: - path: ~/.cargo/registry - key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }} - - - name: Cache cargo index - uses: actions/cache@v2 
- with: - path: ~/.cargo/git - key: ${{ runner.os }}-cargo-git-${{ hashFiles('**/Cargo.lock') }} - - - name: Cache cargo build - uses: actions/cache@v2 - with: - path: ./m1/target - key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }} - - name: Build and Test Subnet run: | cd ./m1 diff --git a/scripts/install.sh b/scripts/install.sh new file mode 100644 index 00000000..46a8985b --- /dev/null +++ b/scripts/install.sh @@ -0,0 +1,154 @@ +#!/bin/bash +################################################################################ +# This script is intended to be run on a portable environment. +# It assumes that Rust, Cargo, and developer dependencies are already installed. +# +# This script performs the following tasks: +# - ... +# +# Prerequisites: +# - Rust and Cargo installed +# - Developer dependencies installed +# +# Usage: +# - Run this script on the portable environment: +# $ ./scripts/install.sh +# +# Note: Make sure to review and customize the script variables and paths +# according to your specific requirements before running. +# +# Author: Liam Monninger +# Version: 1.0 +################################################################################ + +set -e + +# Set the URL for fetching movementctl +MOVEMENTCTL_URL="https://raw.githubusercontent.com/movemntdev/movement-hack/main/bin/movementctl.sh" + +# Set the AvalancheGo version +AVALANCHEGO_VERSION="v1.10.3" + +# Set the directory paths +AVALANCHEGO_DIR="$HOME/.avalanchego" +MOVEMENT_DIR="$HOME/.movement" +MOVEMENT_WORKSPACE="$MOVEMENT_DIR/workspace" +PLUGINS_DIR="$MOVEMENT_DIR/plugins" +BIN_DIR="$MOVEMENT_DIR/bin" + +# Create the necessary directories +mkdir -p "$AVALANCHEGO_DIR" "$MOVEMENT_DIR" "$PLUGINS_DIR" "$BIN_DIR" "$MOVEMENT_WORKSPACE" + +cd $MOVEMENT_WORKSPACE + +# Detect the OS +OS=$(uname -s) +case $OS in + Linux*) OS=linux-;; + Darwin*) OS=macos-;; + CYGWIN*) OS=windows-;; + *) echo "Unsupported OS: $OS"; exit 1;; +esac + +# Detect the architecture +ARCH=$(uname -m) +case $ARCH in + x86_64*) ARCH=amd64;; + aarch64*) ARCH=arm64;; + arm64*) ARCH="";; # Apple M1 + *) echo "Unsupported architecture: $ARCH"; exit 1;; +esac + +# Download and install avalanche-network-runner +curl -sSfL https://raw.githubusercontent.com/ava-labs/avalanche-network-runner/main/scripts/install.sh | sh -s + +# Add avalanche-network-runner binary to PATH +echo 'export PATH="$HOME/bin:$PATH"' >> "$HOME/.bashrc" + +# Reload the bash profile +source "$HOME/.bashrc" + +# Download and install AvalancheGo +if [ "$OS" == "linux-" ]; then + AVALANCHEGO_RELEASE_URL="https://github.com/ava-labs/avalanchego/releases/download/$AVALANCHEGO_VERSION/avalanchego-linux-$ARCH-$AVALANCHEGO_VERSION.tar.gz" + AVALANCHEGO_ARCHIVE="avalanchego-linux-$ARCH-$AVALANCHEGO_VERSION.tar.gz" + wget "$AVALANCHEGO_RELEASE_URL" -O "$AVALANCHEGO_ARCHIVE" + mkdir -p "$AVALANCHEGO_DIR" + tar xvf "$AVALANCHEGO_ARCHIVE" -C "$AVALANCHEGO_DIR" --strip-components=1 +elif [ "$OS" == "macos-" ]; then + AVALANCHEGO_RELEASE_URL="https://github.com/ava-labs/avalanchego/releases/download/$AVALANCHEGO_VERSION/avalanchego-macos-$AVALANCHEGO_VERSION.zip" + AVALANCHEGO_ARCHIVE="avalanchego-macos-$AVALANCHEGO_VERSION.zip" + wget "$AVALANCHEGO_RELEASE_URL" -O "$AVALANCHEGO_ARCHIVE" + mkdir -p "$AVALANCHEGO_DIR" + unzip "$AVALANCHEGO_ARCHIVE" -d "$AVALANCHEGO_DIR" +else + echo "Unsupported OS: $OS" + exit 1 +fi + +# Add AvalancheGo binary directory to PATH +echo 'export PATH="$HOME/.avalanchego:$PATH"' >> "$HOME/.bashrc" + +# Reload the bash profile +source "$HOME/.bashrc"
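+
+# Optional sanity check (a minimal sketch: it only prints warnings and relies
+# solely on the directories defined above, no extra tools or flags): surface a
+# failed download or extraction before the longer build steps below.
+ls "$AVALANCHEGO_DIR" >/dev/null || echo "warning: $AVALANCHEGO_DIR is missing or unreadable"
+command -v avalanche-network-runner >/dev/null || echo "warning: avalanche-network-runner is not yet on PATH"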
"$HOME/.bashrc" + +# Clone the subnet repository if not already cloned +if [ ! -d "$MOVEMENT_DIR/movement-subnet" ]; then + git clone https://github.com/movemntdev/movement-subnet "$MOVEMENT_DIR/movement-subnet" +fi + +# Set up the developer environment if not already set up +cd "$MOVEMENT_DIR/movement-subnet/vm/aptos-vm" +./script/dev_setup.sh + +# Build the subnet binary +cargo build --release -p subnet + +# Move the subnet binary to the plugin directory +mv "$MOVEMENT_DIR/movement-subnet/vm/aptos-vm/target/release/subnet" "$PLUGINS_DIR/subnet" + +# Symlink the subnet binary with its subnet ID +ln -s "$PLUGINS_DIR/qCP4kDnEWVorqyoUmcAtAmJybm8gXZzhHZ7pZibrJJEWECooU" "$PLUGINS_DIR/subnet" +ln -s "$AVALANCHEGO_DIR/plugins/qCP4kDnEWVorqyoUmcAtAmJybm8gXZzhHZ7pZibrJJEWECooU" "$PLUGINS_DIR/subnet" + +# Clone the movement repository if not already cloned +if [ ! -d "$MOVEMENT_DIR/movement-subnet" ]; then + git clone https://github.com/movemntdev/movement-subnet "$MOVEMENT_DIR/movement-subnet" +fi + +# Set up the developer environment if not already set up +cd "$MOVEMENT_DIR/movement-subnet/vm/aptos-vm" +./script/dev_setup.sh + +# Build the movement binary +cargo build --release -p movement + +# Move the movement binary to the appropriate directory +mv "$MOVEMENT_DIR/movement-subnet/vm/aptos-vm/target/release/movement" "$BIN_DIR" + +# Add movement binary directory to PATH +echo 'export PATH="$HOME/.movement/bin:$PATH"' >> "$HOME/.bashrc" + +# Reload the bash profile +source "$HOME/.bashrc" + +# Clone the subnet proxy repository if not already cloned +if [ ! -d "$MOVEMENT_DIR/subnet-request-proxy" ]; then + git clone https://github.com/movemntdev/subnet-request-proxy "$MOVEMENT_DIR/subnet-request-proxy" +fi + +# Download and install movementctl +curl -sSfL "$MOVEMENTCTL_URL" -o "$BIN_DIR/movementctl" +chmod +x "$BIN_DIR/movementctl" + +echo "movementctl installed successfully." + +# Add movement binary directory to PATH +echo 'export PATH="$HOME/.movement/bin:$PATH"' >> "$HOME/.bashrc" + +# Reload the bash profile +source "$HOME/.bashrc" + +# Clean up artifacts +cd $MOVEMENT_DIR +rm -rf $MOVEMENT_WORKSPACE \ No newline at end of file diff --git a/scripts/movementctl.sh b/scripts/movementctl.sh new file mode 100644 index 00000000..881782c2 --- /dev/null +++ b/scripts/movementctl.sh @@ -0,0 +1,105 @@ +#!/bin/bash + +################################################################################ +# Helper Script for Movement Control +# +# This script provides functions and commands for controlling the Movement +# environment. It includes functions to start and stop AvalancheGo, the +# avalanche-network-runner server, and the subnet-request-proxy Node.js server. +# +# Usage: movementctl [start/stop] [fuji/local/subnet-proxy] +# +# Author: Liam Monninger +# Version: 1.0 +################################################################################ + +PID_DIR="$HOME/.movement/pid" + +# Starts avalanchego with the specified network ID and subnet ID +function start_avalanchego() { + network_id="$1" + subnet_id="$2" + avalanchego --network-id="$network_id" --track-subnets "$subnet_id" & + echo $! >> "$PID_DIR/avalanchego.pid" +} + +# Starts the avalanche-network-runner server +function start_avalanche_network_runner() { + avalanche-network-runner server --log-level debug & + echo $! >> "$PID_DIR/avalanche_network_runner.pid" +} + +# Starts the subnet-request-proxy Node.js server +function start_subnet_proxy() { + cd "$HOME/.movement/subnet-request-proxy" + npm i + node app.js & + echo $! 
>> "$PID_DIR/subnet_proxy.pid" +} + +# Stops a process based on the provided PID file +function stop_process() { + local process_name="$1" + local pid_file="$PID_DIR/$process_name.pid" + + if [ -f "$pid_file" ]; then + while read -r pid; do + kill "$pid" || true + done < "$pid_file" + rm "$pid_file" + else + echo "No $process_name process found." + fi +} + +# Handle the start command +function start() { + case $1 in + fuji) + start_avalanchego "fuji" "qCP4kDnEWVorqyoUmcAtAmJybm8gXZzhHZ7pZibrJJEWECooU" + ;; + local) + start_avalanche_network_runner + ;; + subnet-proxy) + start_subnet_proxy + ;; + *) + echo "Invalid start command. Usage: movementctl start [fuji/local/subnet-proxy]" + exit 1 + ;; + esac +} + +# Handle the stop command +function stop() { + case $1 in + fuji) + stop_process "avalanchego" + ;; + local) + stop_process "avalanche_network_runner" + ;; + subnet-proxy) + stop_process "subnet_proxy" + ;; + *) + echo "Invalid stop command. Usage: movementctl stop [fuji/local/subnet-proxy]" + exit 1 + ;; + esac +} + +# Handle the provided command +case $1 in + start) + start "${@:2}" + ;; + stop) + stop "${@:2}" + ;; + *) + echo "Invalid command. Usage: movementctl [start/stop]" + exit 1 + ;; +esac \ No newline at end of file From 21fe19705a3e147909a14eee5e3c97e68dabd1a1 Mon Sep 17 00:00:00 2001 From: Liam Monninger Date: Thu, 27 Jul 2023 12:53:44 -0700 Subject: [PATCH 13/13] release. --- .github/workflows/release.yml | 10 +++++----- README.md | 4 +++- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index a06c52df..a152e062 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -37,7 +37,7 @@ jobs: - name: Build binaries run: | cd "$GITHUB_WORKSPACE/m1" - cargo build --release + RUSTFLAGS="--cfg tokio_unstable" cargo build --release - name: Archive binaries uses: actions/upload-artifact@v2 @@ -55,7 +55,7 @@ jobs: docker buildx create --name mybuilder docker buildx use mybuilder docker buildx inspect --bootstrap - cargo build --release --target=x86_64-apple-darwin + RUSTFLAGS="--cfg tokio_unstable" cargo build --release --target=x86_64-apple-darwin - name: Archive macOS binaries uses: actions/upload-artifact@v2 @@ -70,7 +70,7 @@ jobs: docker buildx create --name mybuilder docker buildx use mybuilder docker buildx inspect --bootstrap - cargo build --release --target=aarch64-unknown-linux-gnu + RUSTFLAGS="--cfg tokio_unstable" cargo build --release --target=aarch64-unknown-linux-gnu - name: Archive Linux arm64 binaries uses: actions/upload-artifact@v2 @@ -85,7 +85,7 @@ jobs: docker buildx create --name mybuilder docker buildx use mybuilder docker buildx inspect --bootstrap - cargo build --release --target=x86_64-unknown-linux-gnu + RUSTFLAGS="--cfg tokio_unstable" cargo build --release --target=x86_64-unknown-linux-gnu - name: Archive Linux x86_64 binaries uses: actions/upload-artifact@v2 @@ -100,7 +100,7 @@ jobs: docker buildx create --name mybuilder docker buildx use mybuilder docker buildx inspect --bootstrap - cargo build --release --target=x86_64-pc-windows-gnu + RUSTFLAGS="--cfg tokio_unstable" cargo build --release --target=x86_64-pc-windows-gnu - name: Archive Windows binaries uses: actions/upload-artifact@v2 diff --git a/README.md b/README.md index 051f5c63..0571fbf2 100644 --- a/README.md +++ b/README.md @@ -41,10 +41,12 @@ This repository contains the code and contributor documentation for M1. If you w ## Features Currently, M1 consists of... 
-- A testnet with bootstrap nodes at `https://seed1-node.movementlabs.xyz` +- A testnet with bootstrap nodes at [https://seed1-node.movementlabs.xyz](https://seed1-node.movementlabs.xyz). - An Aptos-compatible client called `movement`. - A fork of the Aptos framework. +M1 also has its own DEX, with a web client currently available at [https://movemnt-dex-client.vercel.app/](https://movemnt-dex-client.vercel.app/). + ## Installation See [docs.movementlabs.xyz](https://docs.movementlabs.xyz) for a more complete installation guide. We recommend working with our Docker containers or using our installer.
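+
+For example, a minimal install-and-run flow using the scripts in this repository might look like the following sketch. It assumes you run it from the root of a checkout, and that `scripts/install.sh` has put `movementctl`, `movement`, and AvalancheGo on your `PATH`; the `fuji`, `local`, and `subnet-proxy` targets come from `scripts/movementctl.sh`.
+
+```bash
+# One-time setup: installs AvalancheGo, builds the subnet plugin and the
+# movement CLI, and installs movementctl (see scripts/install.sh for the layout).
+./scripts/install.sh
+source "$HOME/.bashrc"         # pick up the PATH entries the installer appends
+mkdir -p "$HOME/.movement/pid" # movementctl records process IDs here
+
+# Run a node that tracks the M1 subnet on Fuji, plus the subnet request proxy.
+movementctl start fuji
+movementctl start subnet-proxy
+
+# Stop both again when you are done.
+movementctl stop subnet-proxy
+movementctl stop fuji
+```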