diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index bf7f2ad72a5df..dead3d91af38f 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -127,7 +127,7 @@ jobs: echo "PG_EXAMPLE_DATABASE_URL=postgres://postgres:root@localhost/diesel_example" >> $GITHUB_ENV - name: Install postgres (MacOS arm64) - if: ${{ matrix.os == 'macos-latest-xlarge' && env.gcloud_archive_exist == '' }} + if: ${{ matrix.os == 'macos-latest-xlarge' && env.s3_archive_exist == '' }} shell: bash env: PQ_LIB_DIR: "$(brew --prefix libpq)/lib" @@ -138,7 +138,7 @@ jobs: brew install postgresql - name: Remove unused apps (MacOS arm64) - if: ${{ matrix.os == 'macos-latest-xlarge' && env.gcloud_archive_exist == '' }} + if: ${{ matrix.os == 'macos-latest-xlarge' && env.s3_archive_exist == '' }} continue-on-error: true shell: bash run: | diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index bedf945dab32a..0f9a0679bb68d 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -156,6 +156,8 @@ jobs: with: ref: ${{ github.event.inputs.sui_repo_ref || github.ref }} - uses: taiki-e/install-action@nextest + - name: Add postgres to PATH + run: echo "/usr/lib/postgresql/14/bin" >> $GITHUB_PATH - name: Set Swap Space uses: pierotofy/set-swap-space@master with: @@ -179,6 +181,14 @@ jobs: - name: sui-execution run: | ./scripts/execution_layer.py generate-lib + - name: Install diesel CLI, and cache the binary + uses: baptiste0928/cargo-install@1cd874a5478fdca35d868ccc74640c5aabbb8f1b # pin@v3.0.0 + with: + crate: diesel_cli + locked: true + - name: Indexer schema + run: | + ./scripts/generate_indexer_schema.sh # Ensure there are no uncommitted changes in the repo after running tests - run: scripts/changed-files.sh shell: bash @@ -227,6 +237,8 @@ jobs: with: ref: ${{ github.event.inputs.sui_repo_ref || github.ref }} - uses: taiki-e/install-action@nextest + - name: Add postgres to PATH + run: echo "/usr/lib/postgresql/14/bin" >> $GITHUB_PATH - name: Set Swap Space uses: pierotofy/set-swap-space@master with: @@ -401,23 +413,3 @@ jobs: run: ./scripts/execution_layer.py cut for_ci_test - name: Check execution builds run: cargo build -p sui-execution - - graphql-rpc: - name: graphql-rpc - needs: diff - if: needs.diff.outputs.isRust == 'true' - timeout-minutes: 45 - runs-on: [ ubuntu-ghcloud ] - steps: - - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # Pin v4.1.1 - with: - ref: ${{ github.event.inputs.sui_repo_ref || github.ref }} - - uses: taiki-e/install-action@nextest - - name: Add postgres to PATH - run: echo "/usr/lib/postgresql/14/bin" >> $GITHUB_PATH - - name: tests-requiring-postgres - run: | - - # The tests in these packages have been converted to use a temporary, ephemoral postgres database and so can be run in parallel - cargo nextest run --profile ci --package sui-indexer --package sui-graphql-e2e-tests --package sui-cluster-test --package sui-graphql-rpc --features pg_integration - diff --git a/Cargo.lock b/Cargo.lock index 85d22bac7e4f0..6da0322745617 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -323,18 +323,6 @@ dependencies = [ "serde", ] -[[package]] -name = "ark-bls12-381" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c775f0d12169cba7aae4caeb547bb6a50781c7449a8aa53793827c9ec4abf488" -dependencies = [ - "ark-ec", - "ark-ff", - "ark-serialize", - "ark-std", -] - [[package]] name = "ark-bn254" version = "0.4.0" @@ -1858,6 +1846,19 @@ dependencies = [ "syn 1.0.107", ] 
+[[package]] +name = "bigdecimal" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d712318a27c7150326677b321a5fa91b55f6d9034ffd67f20319e147d40cee" +dependencies = [ + "autocfg", + "libm", + "num-bigint 0.4.4", + "num-integer", + "num-traits", +] + [[package]] name = "bimap" version = "0.6.3" @@ -2501,7 +2502,7 @@ dependencies = [ "anstream", "anstyle", "clap_lex", - "strsim 0.11.0", + "strsim 0.11.1", "terminal_size", ] @@ -2714,7 +2715,7 @@ dependencies = [ "hyper 1.4.1", "hyper-rustls 0.27.2", "hyper-util", - "itertools 0.10.5", + "itertools 0.13.0", "mockall", "mysten-common", "mysten-metrics", @@ -3656,8 +3657,6 @@ dependencies = [ "chrono", "diesel_derives", "itoa", - "pq-sys", - "r2d2", "serde_json", ] @@ -4166,9 +4165,9 @@ dependencies = [ [[package]] name = "env_logger" -version = "0.11.3" +version = "0.11.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38b35839ba51819680ba087cd351788c9a3c476841207e0b8cee0b04722343b9" +checksum = "e13fa619b91fb2381732789fc5de83b45675e882f66623b7d8cb4f643017018d" dependencies = [ "anstream", "anstyle", @@ -4623,7 +4622,7 @@ dependencies = [ [[package]] name = "fastcrypto" version = "0.1.8" -source = "git+https://github.com/MystenLabs/fastcrypto?rev=5f2c63266a065996d53f98156f0412782b468597#5f2c63266a065996d53f98156f0412782b468597" +source = "git+https://github.com/MystenLabs/fastcrypto?rev=3366c26a746b72707572c4f3f05915e20d5c16c2#3366c26a746b72707572c4f3f05915e20d5c16c2" dependencies = [ "aes", "aes-gcm", @@ -4677,7 +4676,7 @@ dependencies = [ [[package]] name = "fastcrypto-derive" version = "0.1.3" -source = "git+https://github.com/MystenLabs/fastcrypto?rev=5f2c63266a065996d53f98156f0412782b468597#5f2c63266a065996d53f98156f0412782b468597" +source = "git+https://github.com/MystenLabs/fastcrypto?rev=3366c26a746b72707572c4f3f05915e20d5c16c2#3366c26a746b72707572c4f3f05915e20d5c16c2" dependencies = [ "quote 1.0.35", "syn 1.0.107", @@ -4686,7 +4685,7 @@ dependencies = [ [[package]] name = "fastcrypto-tbls" version = "0.1.0" -source = "git+https://github.com/MystenLabs/fastcrypto?rev=5f2c63266a065996d53f98156f0412782b468597#5f2c63266a065996d53f98156f0412782b468597" +source = "git+https://github.com/MystenLabs/fastcrypto?rev=3366c26a746b72707572c4f3f05915e20d5c16c2#3366c26a746b72707572c4f3f05915e20d5c16c2" dependencies = [ "bcs", "digest 0.10.7", @@ -4705,7 +4704,7 @@ dependencies = [ [[package]] name = "fastcrypto-vdf" version = "0.1.0" -source = "git+https://github.com/MystenLabs/fastcrypto?rev=5f2c63266a065996d53f98156f0412782b468597#5f2c63266a065996d53f98156f0412782b468597" +source = "git+https://github.com/MystenLabs/fastcrypto?rev=3366c26a746b72707572c4f3f05915e20d5c16c2#3366c26a746b72707572c4f3f05915e20d5c16c2" dependencies = [ "bcs", "fastcrypto", @@ -4722,9 +4721,8 @@ dependencies = [ [[package]] name = "fastcrypto-zkp" version = "0.1.3" -source = "git+https://github.com/MystenLabs/fastcrypto?rev=5f2c63266a065996d53f98156f0412782b468597#5f2c63266a065996d53f98156f0412782b468597" +source = "git+https://github.com/MystenLabs/fastcrypto?rev=3366c26a746b72707572c4f3f05915e20d5c16c2#3366c26a746b72707572c4f3f05915e20d5c16c2" dependencies = [ - "ark-bls12-381", "ark-bn254", "ark-ec", "ark-ff", @@ -4732,7 +4730,7 @@ dependencies = [ "ark-relations", "ark-serialize", "ark-snark", - "blst", + "bcs", "byte-slice-cast", "derive_more", "fastcrypto", @@ -6694,9 +6692,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.21" +version = "0.4.22" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" +checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" dependencies = [ "serde", ] @@ -7129,6 +7127,7 @@ name = "move-bytecode-utils" version = "0.1.0" dependencies = [ "anyhow", + "indexmap 2.2.6", "move-binary-format", "move-core-types", "petgraph 0.5.1", @@ -7925,9 +7924,18 @@ checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" name = "mysten-common" version = "0.1.0" dependencies = [ + "anyhow", + "fastcrypto", "futures", + "mysten-metrics", "parking_lot 0.12.1", + "prometheus", + "reqwest 0.12.5", + "snap", + "sui-tls", + "sui-types", "tokio", + "tracing", ] [[package]] @@ -8200,7 +8208,7 @@ dependencies = [ "futures", "governor", "indexmap 2.2.6", - "itertools 0.10.5", + "itertools 0.13.0", "mockall", "mysten-common", "mysten-metrics", @@ -8264,7 +8272,7 @@ dependencies = [ "fastcrypto", "fdlimit", "indexmap 2.2.6", - "itertools 0.10.5", + "itertools 0.13.0", "mysten-metrics", "mysten-network", "narwhal-config", @@ -8346,7 +8354,7 @@ dependencies = [ "fastcrypto", "futures", "governor", - "itertools 0.10.5", + "itertools 0.13.0", "mysten-metrics", "mysten-network", "narwhal-config", @@ -8625,6 +8633,7 @@ dependencies = [ "num-integer", "num-traits", "rand 0.8.5", + "serde", ] [[package]] @@ -9805,15 +9814,6 @@ version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" -[[package]] -name = "pq-sys" -version = "0.4.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b845d6d8ec554f972a2c5298aad68953fd64e7441e846075450b44656a016d1" -dependencies = [ - "vcpkg", -] - [[package]] name = "precomputed-hash" version = "0.1.1" @@ -10274,9 +10274,9 @@ dependencies = [ [[package]] name = "quinn-proto" -version = "0.11.6" +version = "0.11.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba92fb39ec7ad06ca2582c0ca834dfeadcaf06ddfc8e635c80aa7e1c05315fdd" +checksum = "fadfaed2cd7f389d0161bb73eeb07b7b78f8691047a6f3e73caaeae55310a4a6" dependencies = [ "bytes", "rand 0.8.5", @@ -10319,17 +10319,6 @@ dependencies = [ "proc-macro2 1.0.78", ] -[[package]] -name = "r2d2" -version = "0.8.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51de85fb3fb6524929c8a2eb85e6b6d363de4e8c48f9e2c2eac4944abc181c93" -dependencies = [ - "log", - "parking_lot 0.12.1", - "scheduled-thread-pool", -] - [[package]] name = "radium" version = "0.6.2" @@ -11411,15 +11400,6 @@ dependencies = [ "windows-sys 0.42.0", ] -[[package]] -name = "scheduled-thread-pool" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "977a7519bff143a44f842fd07e80ad1329295bd71686457f18e496736f4bf9bf" -dependencies = [ - "parking_lot 0.12.1", -] - [[package]] name = "schemars" version = "0.8.21" @@ -11620,6 +11600,16 @@ dependencies = [ "serde_derive", ] +[[package]] +name = "serde-env" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d13536c0c431652192b75c7d5afa83dedae98f91d7e687ff30a009e9d15284fb" +dependencies = [ + "anyhow", + "serde", +] + [[package]] name = "serde-name" version = "0.2.1" @@ -12366,9 +12356,9 @@ checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" [[package]] name = "strsim" -version = "0.11.0" +version = "0.11.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ee073c9e4cd00e28217186dbe12796d692868f432bf2e97ee73bed0c56dfa01" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" [[package]] name = "strum" @@ -12822,7 +12812,7 @@ dependencies = [ "futures", "hdrhistogram", "indicatif", - "itertools 0.10.5", + "itertools 0.13.0", "move-core-types", "mysten-metrics", "prometheus", @@ -12879,6 +12869,7 @@ dependencies = [ "lru 0.10.0", "maplit", "move-core-types", + "mysten-common", "mysten-metrics", "num_enum 0.6.1", "once_cell", @@ -12889,7 +12880,6 @@ dependencies = [ "serde_json", "serde_with 3.9.0", "shared-crypto", - "snap", "sui-authority-aggregation", "sui-config", "sui-json-rpc-api", @@ -12897,7 +12887,6 @@ dependencies = [ "sui-keys", "sui-sdk 1.34.0", "sui-test-transaction-builder", - "sui-tls", "sui-types", "tap", "telemetry-subscribers", @@ -12965,6 +12954,7 @@ dependencies = [ "sui-types", "tap", "telemetry-subscribers", + "tempfile", "test-cluster", "tokio", "tracing", @@ -13069,7 +13059,7 @@ dependencies = [ "futures", "im", "indexmap 2.2.6", - "itertools 0.10.5", + "itertools 0.13.0", "jsonrpsee", "lru 0.10.0", "mockall", @@ -13223,6 +13213,41 @@ dependencies = [ "url", ] +[[package]] +name = "sui-deepbook-indexer" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "backoff", + "bcs", + "bigdecimal", + "bin-version", + "clap", + "diesel", + "diesel-async", + "futures", + "hex-literal 0.3.4", + "mysten-metrics", + "prometheus", + "serde", + "serde_yaml 0.8.26", + "sui-bridge", + "sui-config", + "sui-data-ingestion-core", + "sui-indexer-builder", + "sui-json-rpc-types", + "sui-sdk 1.34.0", + "sui-test-transaction-builder", + "sui-types", + "tap", + "telemetry-subscribers", + "tempfile", + "test-cluster", + "tokio", + "tracing", +] + [[package]] name = "sui-e2e-tests" version = "1.34.0" @@ -13510,7 +13535,7 @@ dependencies = [ "hyper 1.4.1", "im", "insta", - "itertools 0.10.5", + "itertools 0.13.0", "lru 0.10.0", "move-binary-format", "move-bytecode-utils", @@ -13602,7 +13627,7 @@ dependencies = [ "fastcrypto", "futures", "hex", - "itertools 0.10.5", + "itertools 0.13.0", "jsonrpsee", "move-binary-format", "move-bytecode-utils", @@ -13701,7 +13726,7 @@ dependencies = [ "http-body 0.4.5", "hyper 1.4.1", "indexmap 2.2.6", - "itertools 0.10.5", + "itertools 0.13.0", "jsonrpsee", "mockall", "move-binary-format", @@ -13804,7 +13829,7 @@ dependencies = [ "colored", "enum_dispatch", "fastcrypto", - "itertools 0.10.5", + "itertools 0.13.0", "json_to_table", "move-binary-format", "move-bytecode-utils", @@ -13852,19 +13877,23 @@ dependencies = [ "bcs", "bytes", "clap", + "env_logger", + "log", "move-binary-format", "move-core-types", + "object_store", + "reqwest 0.12.5", "serde", "serde_json", "serde_yaml 0.8.26", "sui-config", - "sui-json", "sui-json-rpc-types", "sui-package-resolver", "sui-rest-api", "sui-sdk 1.34.0", "sui-types", "tokio", + "url", ] [[package]] @@ -14121,7 +14150,6 @@ dependencies = [ "prometheus", "reqwest 0.12.5", "serde", - "snap", "sui-archival", "sui-config", "sui-core", @@ -14175,7 +14203,7 @@ name = "sui-open-rpc-macros" version = "0.1.0" dependencies = [ "derive-syn-parse", - "itertools 0.10.5", + "itertools 0.13.0", "proc-macro2 1.0.78", "quote 1.0.35", "syn 1.0.107", @@ -14288,6 +14316,7 @@ dependencies = [ "move-vm-config", "schemars", "serde", + "serde-env", "serde_with 3.9.0", "sui-protocol-config-macros", "tracing", @@ -14315,10 +14344,11 @@ dependencies = [ "clap", "const-str", "fastcrypto", + 
"futures", "hex", "hyper 1.4.1", "ipnetwork", - "itertools 0.10.5", + "itertools 0.13.0", "mime", "multiaddr", "mysten-metrics", @@ -14343,6 +14373,7 @@ dependencies = [ "tower", "tower-http", "tracing", + "url", ] [[package]] @@ -14403,7 +14434,7 @@ dependencies = [ "bcs", "diffy", "fastcrypto", - "itertools 0.10.5", + "itertools 0.13.0", "mime", "mysten-network", "openapiv3", @@ -14438,6 +14469,7 @@ dependencies = [ "fastcrypto", "futures", "hyper 1.4.1", + "move-cli", "move-core-types", "mysten-metrics", "once_cell", @@ -14476,7 +14508,7 @@ dependencies = [ "dashmap", "dirs 4.0.0", "futures", - "itertools 0.10.5", + "itertools 0.13.0", "serde", "serde_json", "shared-crypto", @@ -14752,7 +14784,7 @@ dependencies = [ "hyper-rustls 0.27.2", "indicatif", "integer-encoding", - "itertools 0.10.5", + "itertools 0.13.0", "lru 0.10.0", "moka", "move-binary-format", @@ -14933,7 +14965,7 @@ dependencies = [ "futures", "hex", "indicatif", - "itertools 0.10.5", + "itertools 0.13.0", "move-core-types", "num_cpus", "object_store", @@ -15063,7 +15095,7 @@ dependencies = [ "fastcrypto-zkp", "im", "indexmap 2.2.6", - "itertools 0.10.5", + "itertools 0.13.0", "jsonrpsee", "lru 0.10.0", "move-binary-format", @@ -15246,8 +15278,10 @@ dependencies = [ "dirs 4.0.0", "docker-api", "field_names", + "futures", "include_dir", "inquire", + "itertools 0.13.0", "once_cell", "open", "prettytable-rs", @@ -15260,6 +15294,7 @@ dependencies = [ "serde_yaml 0.8.26", "sha2 0.10.8", "spinners", + "strsim 0.11.1", "strum 0.24.1", "tabled", "tempfile", @@ -16544,7 +16579,7 @@ dependencies = [ "eyre", "fdlimit", "hdrhistogram", - "itertools 0.10.5", + "itertools 0.13.0", "msim", "once_cell", "ouroboros 0.17.2", @@ -16572,7 +16607,7 @@ dependencies = [ name = "typed-store-derive" version = "0.3.0" dependencies = [ - "itertools 0.10.5", + "itertools 0.13.0", "proc-macro2 1.0.78", "quote 1.0.35", "syn 1.0.107", diff --git a/Cargo.toml b/Cargo.toml index 463dc0a36e668..52630588b8668 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -99,6 +99,7 @@ members = [ "crates/sui-cost", "crates/sui-data-ingestion", "crates/sui-data-ingestion-core", + "crates/sui-deepbook-indexer", "crates/sui-e2e-tests", "crates/sui-enum-compat-util", "crates/sui-faucet", @@ -238,7 +239,10 @@ overflow-checks = true opt-level = 1 [workspace.lints.rust] -unexpected_cfgs = { level = "warn", check-cfg = ['cfg(msim)', 'cfg(fail_points)'] } +unexpected_cfgs = { level = "warn", check-cfg = [ + 'cfg(msim)', + 'cfg(fail_points)', +] } # Dependencies that should be kept in sync through the whole workspace [workspace.dependencies] @@ -347,7 +351,12 @@ http-body = "1" humantime = "2.1.0" hyper = "1" hyper-util = "0.1.6" -hyper-rustls = { version = "0.27", default-features = false, features = ["webpki-roots", "http2", "ring", "tls12"] } +hyper-rustls = { version = "0.27", default-features = false, features = [ + "webpki-roots", + "http2", + "ring", + "tls12", +] } im = "15" impl-trait-for-tuples = "0.2.0" indexmap = { version = "2.1.0", features = ["serde"] } @@ -356,7 +365,7 @@ inquire = "0.6.0" insta = { version = "1.21.1", features = ["redactions", "yaml", "json"] } integer-encoding = "3.0.1" ipnetwork = "0.20.0" -itertools = "0.10.5" +itertools = "0.13.0" jemalloc-ctl = "^0.5" jsonrpsee = { git = "https://github.com/wlmyng/jsonrpsee.git", rev = "b1b300784795f6a64d0fcdf8f03081a9bc38bde8", features = [ "server", @@ -408,7 +417,7 @@ proptest-derive = "0.3.0" prost = "0.13" prost-build = "0.13" protobuf = { version = "2.28", features = ["with-bytes"] } -quinn-proto = "0.11.6" 
+quinn-proto = "0.11.7" quote = "1.0.23" rand = "0.8.5" rayon = "1.5.3" @@ -431,7 +440,11 @@ rusoto_kms = { version = "0.48.0", default-features = false, features = [ russh = "0.38.0" russh-keys = "0.38.0" rust-version = "1.56.1" -rustls = { version = "0.23", default-features = false, features = ["std", "tls12", "ring"] } +rustls = { version = "0.23", default-features = false, features = [ + "std", + "tls12", + "ring", +] } rustls-pemfile = "2" rustversion = "1.0.9" rustyline = "9.1.2" @@ -439,6 +452,7 @@ rustyline-derive = "0.7.0" schemars = { version = "0.8.21", features = ["either"] } scopeguard = "1.1" serde = { version = "1.0.144", features = ["derive", "rc"] } +serde-env = "0.2.0" serde-name = "0.2.1" serde-reflection = "0.3.6" serde_json = { version = "1.0.95", features = ["preserve_order"] } @@ -472,7 +486,10 @@ thiserror = "1.0.40" tiny-bip39 = "1.0.0" tokio = "1.36.0" tokio-retry = "0.3" -tokio-rustls = { version = "0.26", default-features = false, features = ["tls12", "ring"] } +tokio-rustls = { version = "0.26", default-features = false, features = [ + "tls12", + "ring", +] } tokio-stream = { version = "0.1.14", features = ["sync", "net"] } tokio-util = "0.7.10" toml = { version = "0.7.4", features = ["preserve_order"] } @@ -555,10 +572,12 @@ move-abstract-interpreter = { path = "external-crates/move/crates/move-abstract- move-abstract-stack = { path = "external-crates/move/crates/move-abstract-stack" } move-analyzer = { path = "external-crates/move/crates/move-analyzer" } -fastcrypto = { git = "https://github.com/MystenLabs/fastcrypto", rev = "5f2c63266a065996d53f98156f0412782b468597" } -fastcrypto-tbls = { git = "https://github.com/MystenLabs/fastcrypto", rev = "5f2c63266a065996d53f98156f0412782b468597" } -fastcrypto-zkp = { git = "https://github.com/MystenLabs/fastcrypto", rev = "5f2c63266a065996d53f98156f0412782b468597", package = "fastcrypto-zkp" } -fastcrypto-vdf = { git = "https://github.com/MystenLabs/fastcrypto", rev = "5f2c63266a065996d53f98156f0412782b468597", features = ["experimental"] } +fastcrypto = { git = "https://github.com/MystenLabs/fastcrypto", rev = "3366c26a746b72707572c4f3f05915e20d5c16c2" } +fastcrypto-tbls = { git = "https://github.com/MystenLabs/fastcrypto", rev = "3366c26a746b72707572c4f3f05915e20d5c16c2" } +fastcrypto-zkp = { git = "https://github.com/MystenLabs/fastcrypto", rev = "3366c26a746b72707572c4f3f05915e20d5c16c2", package = "fastcrypto-zkp" } +fastcrypto-vdf = { git = "https://github.com/MystenLabs/fastcrypto", rev = "3366c26a746b72707572c4f3f05915e20d5c16c2", features = [ + "experimental", +] } passkey-types = { version = "0.2.0" } passkey-client = { version = "0.2.0" } passkey-authenticator = { version = "0.2.0" } @@ -572,7 +591,11 @@ anemo-cli = { git = "https://github.com/mystenlabs/anemo.git", rev = "e609f7697e anemo-tower = { git = "https://github.com/mystenlabs/anemo.git", rev = "e609f7697ed6169bf0760882a0b6c032a57e4f3b" } # core-types with json format for REST api -sui-sdk2 = { package = "sui-sdk", git = "https://github.com/mystenlabs/sui-rust-sdk.git", rev = "bd233b6879b917fb95e17f21927c198e7a60c924", features = ["hash", "serde", "schemars"] } +sui-sdk2 = { package = "sui-sdk", git = "https://github.com/mystenlabs/sui-rust-sdk.git", rev = "bd233b6879b917fb95e17f21927c198e7a60c924", features = [ + "hash", + "serde", + "schemars", +] } ### Workspace Members ### anemo-benchmark = { path = "crates/anemo-benchmark" } diff --git a/bridge/runbook/validator_runbook.md b/bridge/runbook/validator_runbook.md index 
e8723b838694d..c6af260b6c2a8 100644
--- a/bridge/runbook/validator_runbook.md
+++ b/bridge/runbook/validator_runbook.md
@@ -48,3 +48,94 @@ To double check your registered the correct metadata onchain, run
 ```
 sui-bridge-cli view-bridge-registration --sui-rpc-url {SUI_FULLNODE_URL}
 ```
+
+## Bridge Node
+
+### Bridge Node Hardware Requirements
+
+Suggested hardware requirements:
+* CPU: 6 physical cores
+* Memory: 16GB
+* Storage: 200GB
+* Network: 100Mbps
+
+### WAF Protection for Bridge Node
+
+To protect against DDoS attacks and other attempts to exhaust validator resources, rate-limit protection of the bridge server is required.
+Beyond protection, this gives node operators fine-grained control over the rate of requests they receive, and observability into those requests.
+
+The currently recommended rate limit is `50 requests/second per unique IP`.
+
+#### WAF Options
+
+You can use a managed cloud service, for example:
+* [Cloudflare WAF](https://www.cloudflare.com/en-ca/application-services/products/waf/)
+* [AWS WAF](https://aws.amazon.com/waf/)
+* [GCP Cloud Armor](https://cloud.google.com/security/products/armor)
+
+It is also possible to use an open source load balancer such as HAProxy for a simple, IP-based rate limit.
+A shortened example HAProxy config for this looks like:
+```
+frontend http-in
+    bind *:80
+    # Define an ACL to count requests per IP and block if over limit
+    acl too_many_requests src_http_req_rate() gt 50
+    # Track the request rate per IP
+    stick-table type ip size 1m expire 1m store http_req_rate(1s)
+    # Check request rate and deny if the limit is exceeded
+    http-request track-sc0 src
+    http-request deny if too_many_requests
+
+    default_backend bridgevalidator
+
+backend bridgevalidator
+    server bridgevalidator 0.0.0.0:9191
+```
+
+If you choose an open source load-balancing option, make sure to set up metrics collection and alerting for the service.
+
+### Bridge Node Config
+Use the `sui-bridge-cli` command to create a config template. If you want to run `BridgeClient` (see the following section), pass `--run-client` as a parameter.
+
+```
+sui-bridge-cli create-bridge-node-config-template {PATH}
+sui-bridge-cli create-bridge-node-config-template --run-client {PATH}
+```
+
+In the generated config (an example sketch follows this list):
+* `server-listen-port`: the port on which the Bridge Node listens to handle requests
+* `metrics-port`: the port for exporting Prometheus metrics
+* `bridge-authority-key-path`: the path to the Bridge Validator key, generated with the `sui-bridge-cli create-bridge-validator-key` command above
+* `run-client`: whether the Bridge Client should be enabled in the Bridge Node (more instructions below)
+* `approved-governance-actions`: a list of governance actions that you want to support
+* `sui:sui-rpc-url`: Sui RPC URL
+* `sui:sui-bridge-chain-id`: 0 for Sui Mainnet, 1 for Sui Testnet
+* `eth:eth-rpc-url`: Ethereum RPC URL
+* `eth:eth-bridge-proxy-address`: the proxy address for the Bridge Solidity contracts on Ethereum
+* `eth:eth-bridge-chain-id`: 10 for Ethereum Mainnet, 11 for Sepolia Testnet
+* `eth:eth-contracts-start-block-fallback`: the starting block the Bridge Node queries from the Ethereum full node. This should be the block where the Solidity contracts were deployed, or slightly before
+* `metrics:push-url`: the URL of the remote Sui metrics pipeline: `https://metrics-proxy.[testnet|mainnet].sui.io:8443/publish/metrics`
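+
+For orientation, here is a hypothetical, abridged sketch of what the generated config might look like. Every value below is a placeholder, not a recommendation; the authoritative field set and defaults come from the template generated above.
+
+```yaml
+# All values are placeholders -- replace with your deployment's settings.
+server-listen-port: 9191
+metrics-port: 9184
+bridge-authority-key-path: /path/to/bridge-authority-key
+run-client: false
+approved-governance-actions: []
+sui:
+  sui-rpc-url: https://fullnode.mainnet.sui.io:443
+  sui-bridge-chain-id: 0
+eth:
+  eth-rpc-url: https://eth-rpc.example.com       # placeholder Ethereum RPC endpoint
+  eth-bridge-proxy-address: "0x0000000000000000000000000000000000000000"  # placeholder
+  eth-bridge-chain-id: 10
+  eth-contracts-start-block-fallback: 1000000    # placeholder block number
+metrics:
+  push-url: https://metrics-proxy.mainnet.sui.io:8443/publish/metrics
+```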
+
+With `run-client: true`, these additional fields can be found in the generated config:
+* `db-path`: the path of the BridgeClient DB
+* `sui:bridge-client-key-path`: the file path of the Bridge Client key. This key can be generated with `sui-bridge-cli create-bridge-client-key` as shown above. When `run-client` is true but `sui:bridge-client-key-path` is not provided, the node defaults to using the Bridge Validator key to submit transactions on Sui. However, this is not recommended, for the sake of key separation.
+
+### Bridge Client
+`BridgeClient` orchestrates bridge transfer requests.
+* Running it is **optional** for a `BridgeNode`.
+* `BridgeClient` submits transactions on the Sui network. Thus, when it is enabled, a Sui account key with a sufficient SUI balance is needed.
+
+To enable the `bridge_client` feature on a `BridgeNode`, set the following parameters in `BridgeNodeConfig`:
+```yaml
+run-client: true
+db-path:
+sui:
+  bridge-client-key-path: # optional; when absent, bridge-authority-key-path is used as the keypair for BridgeClient
+```
+
+To create a `BridgeClient` keypair, run
+```
+sui-bridge-cli create-bridge-client-key
+```
+This prints the newly created Sui address. Fund this address with some SUI for operations.
diff --git a/consensus/config/src/parameters.rs b/consensus/config/src/parameters.rs
index 6df7da3cb21e0..b2fa514470602 100644
--- a/consensus/config/src/parameters.rs
+++ b/consensus/config/src/parameters.rs
@@ -38,6 +38,26 @@ pub struct Parameters {
     #[serde(default = "Parameters::default_max_blocks_per_fetch")]
     pub max_blocks_per_fetch: usize,
 
+    /// Time to wait during node start up until the node has synced the last proposed block via the
+    /// network peers. When set to `0` the sync mechanism is disabled. This property is meant to be
+    /// used for amnesia recovery.
+    #[serde(default = "Parameters::default_sync_last_known_own_block_timeout")]
+    pub sync_last_known_own_block_timeout: Duration,
+
+    /// Interval in milliseconds to probe highest received rounds of peers.
+    #[serde(default = "Parameters::default_round_prober_interval_ms")]
+    pub round_prober_interval_ms: u64,
+
+    /// Timeout in milliseconds for a round prober request.
+    #[serde(default = "Parameters::default_round_prober_request_timeout_ms")]
+    pub round_prober_request_timeout_ms: u64,
+
+    /// Proposing new blocks is stopped when the propagation delay is greater than this threshold.
+    /// Propagation delay is the difference between the round of the last proposed block and the
+    /// highest round from this authority that is received by all validators in a quorum.
+    #[serde(default = "Parameters::default_propagation_delay_stop_proposal_threshold")]
+    pub propagation_delay_stop_proposal_threshold: u32,
+
     /// The number of rounds of blocks to be kept in the Dag state cache per authority. The larger
     /// the number the more the blocks that will be kept in memory allowing minimising any potential
     /// disk access.
@@ -69,12 +89,6 @@ pub struct Parameters {
     /// Tonic network settings.
     #[serde(default = "TonicParameters::default")]
     pub tonic: TonicParameters,
-
-    /// Time to wait during node start up until the node has synced the last proposed block via the
-    /// network peers. When set to `0` the sync mechanism is disabled. This property is meant to be
-    /// used for amnesia recovery.
- #[serde(default = "Parameters::default_sync_last_known_own_block_timeout")] - pub sync_last_known_own_block_timeout: Duration, } impl Parameters { @@ -97,6 +111,38 @@ impl Parameters { Duration::from_millis(500) } + pub(crate) fn default_max_blocks_per_fetch() -> usize { + if cfg!(msim) { + // Exercise hitting blocks per fetch limit. + 10 + } else { + 1000 + } + } + + pub(crate) fn default_sync_last_known_own_block_timeout() -> Duration { + if cfg!(msim) { + Duration::from_millis(500) + } else { + // Here we prioritise liveness over the complete de-risking of block equivocation. 5 seconds + // in the majority of cases should be good enough for this given a healthy network. + Duration::from_secs(5) + } + } + + pub(crate) fn default_round_prober_interval_ms() -> u64 { + 5000 + } + + pub(crate) fn default_round_prober_request_timeout_ms() -> u64 { + 2000 + } + + pub(crate) fn default_propagation_delay_stop_proposal_threshold() -> u32 { + // Propagation delay is usually 0 round in production. + 20 + } + pub(crate) fn default_dag_state_cached_rounds() -> u32 { if cfg!(msim) { // Exercise reading blocks from store. @@ -119,30 +165,11 @@ impl Parameters { } } - pub(crate) fn default_max_blocks_per_fetch() -> usize { - if cfg!(msim) { - // Exercise hitting blocks per fetch limit. - 10 - } else { - 1000 - } - } - pub(crate) fn default_commit_sync_batches_ahead() -> usize { // This is set to be a multiple of default commit_sync_parallel_fetches to allow fetching ahead, // while keeping the total number of inflight fetches and unprocessed fetched commits limited. 80 } - - pub(crate) fn default_sync_last_known_own_block_timeout() -> Duration { - if cfg!(msim) { - Duration::from_millis(500) - } else { - // Here we prioritise liveness over the complete de-risking of block equivocation. 5 seconds - // in the majority of cases should be good enough for this given a healthy network. 
- Duration::from_secs(5) - } - } } impl Default for Parameters { @@ -152,10 +179,14 @@ impl Default for Parameters { leader_timeout: Parameters::default_leader_timeout(), min_round_delay: Parameters::default_min_round_delay(), max_forward_time_drift: Parameters::default_max_forward_time_drift(), - dag_state_cached_rounds: Parameters::default_dag_state_cached_rounds(), max_blocks_per_fetch: Parameters::default_max_blocks_per_fetch(), sync_last_known_own_block_timeout: Parameters::default_sync_last_known_own_block_timeout(), + round_prober_interval_ms: Parameters::default_round_prober_interval_ms(), + round_prober_request_timeout_ms: Parameters::default_round_prober_request_timeout_ms(), + propagation_delay_stop_proposal_threshold: + Parameters::default_propagation_delay_stop_proposal_threshold(), + dag_state_cached_rounds: Parameters::default_dag_state_cached_rounds(), commit_sync_parallel_fetches: Parameters::default_commit_sync_parallel_fetches(), commit_sync_batch_size: Parameters::default_commit_sync_batch_size(), commit_sync_batches_ahead: Parameters::default_commit_sync_batches_ahead(), diff --git a/consensus/config/tests/snapshots/parameters_test__parameters.snap b/consensus/config/tests/snapshots/parameters_test__parameters.snap index 60ed151ba6303..5d43553d982de 100644 --- a/consensus/config/tests/snapshots/parameters_test__parameters.snap +++ b/consensus/config/tests/snapshots/parameters_test__parameters.snap @@ -12,6 +12,12 @@ max_forward_time_drift: secs: 0 nanos: 500000000 max_blocks_per_fetch: 1000 +sync_last_known_own_block_timeout: + secs: 5 + nanos: 0 +round_prober_interval_ms: 5000 +round_prober_request_timeout_ms: 2000 +propagation_delay_stop_proposal_threshold: 20 dag_state_cached_rounds: 500 commit_sync_parallel_fetches: 20 commit_sync_batch_size: 100 @@ -25,6 +31,3 @@ tonic: connection_buffer_size: 33554432 excessive_message_size: 16777216 message_size_limit: 67108864 -sync_last_known_own_block_timeout: - secs: 5 - nanos: 0 diff --git a/consensus/core/build.rs b/consensus/core/build.rs index 4ea4252ac3d3a..27ba97349f00d 100644 --- a/consensus/core/build.rs +++ b/consensus/core/build.rs @@ -75,6 +75,15 @@ fn build_tonic_services(out_dir: &Path) { .server_streaming() .build(), ) + .method( + tonic_build::manual::Method::builder() + .name("get_latest_rounds") + .route_name("GetLatestRounds") + .input_type("crate::network::tonic_network::GetLatestRoundsRequest") + .output_type("crate::network::tonic_network::GetLatestRoundsResponse") + .codec_path(codec_path) + .build(), + ) .build(); tonic_build::manual::Builder::new() @@ -128,6 +137,15 @@ fn build_anemo_services(out_dir: &Path) { .codec_path(codec_path) .build(), ) + .method( + anemo_build::manual::Method::builder() + .name("get_latest_rounds") + .route_name("GetLatestRounds") + .request_type("crate::network::anemo_network::GetLatestRoundsRequest") + .response_type("crate::network::anemo_network::GetLatestRoundsResponse") + .codec_path(codec_path) + .build(), + ) .build(); anemo_build::manual::Builder::new() diff --git a/consensus/core/src/authority_node.rs b/consensus/core/src/authority_node.rs index f896dc1a0bd89..006d65f2e0cb4 100644 --- a/consensus/core/src/authority_node.rs +++ b/consensus/core/src/authority_node.rs @@ -28,11 +28,12 @@ use crate::{ anemo_network::AnemoManager, tonic_network::TonicManager, NetworkClient as _, NetworkManager, }, + round_prober::{RoundProber, RoundProberHandle}, storage::rocksdb_store::RocksDBStore, subscriber::Subscriber, synchronizer::{Synchronizer, SynchronizerHandle}, 
transaction::{TransactionClient, TransactionConsumer, TransactionVerifier}, - CommitConsumer, + CommitConsumer, CommitConsumerMonitor, }; /// ConsensusAuthority is used by Sui to manage the lifetime of AuthorityNode. @@ -111,6 +112,13 @@ impl ConsensusAuthority { } } + pub async fn replay_complete(&self) { + match self { + Self::WithAnemo(authority) => authority.replay_complete().await, + Self::WithTonic(authority) => authority.replay_complete().await, + } + } + #[cfg(test)] fn context(&self) -> &Arc { match self { @@ -136,7 +144,10 @@ where start_time: Instant, transaction_client: Arc, synchronizer: Arc, + commit_consumer_monitor: Arc, + commit_syncer_handle: CommitSyncerHandle, + round_prober_handle: Option, leader_timeout_handle: LeaderTimeoutTaskHandle, core_thread_handle: CoreThreadHandle, // Only one of broadcaster and subscriber gets created, depending on @@ -203,6 +214,9 @@ where let store_path = context.parameters.db_path.as_path().to_str().unwrap(); let store = Arc::new(RocksDBStore::new(store_path)); let dag_state = Arc::new(RwLock::new(DagState::new(context.clone(), store.clone()))); + + let highest_known_commit_at_startup = dag_state.read().last_commit_index(); + let sync_last_known_own_block = boot_counter == 0 && dag_state.read().highest_accepted_round() == 0 && !context @@ -235,6 +249,8 @@ where }; let commit_consumer_monitor = commit_consumer.monitor(); + commit_consumer_monitor + .set_highest_observed_commit_at_startup(highest_known_commit_at_startup); let commit_observer = CommitObserver::new( context.clone(), commit_consumer, @@ -259,7 +275,7 @@ where ); let (core_dispatcher, core_thread_handle) = - ChannelCoreThreadDispatcher::start(core, context.clone()); + ChannelCoreThreadDispatcher::start(context.clone(), &dag_state, core); let core_dispatcher = Arc::new(core_dispatcher); let leader_timeout_handle = LeaderTimeoutTask::start(core_dispatcher.clone(), &signals_receivers, context.clone()); @@ -280,13 +296,27 @@ where context.clone(), core_dispatcher.clone(), commit_vote_monitor.clone(), - commit_consumer_monitor, + commit_consumer_monitor.clone(), network_client.clone(), block_verifier.clone(), dag_state.clone(), ) .start(); + let round_prober_handle = if context.protocol_config.consensus_round_prober() { + Some( + RoundProber::new( + context.clone(), + core_dispatcher.clone(), + dag_state.clone(), + network_client.clone(), + ) + .start(), + ) + } else { + None + }; + let network_service = Arc::new(AuthorityService::new( context.clone(), block_verifier, @@ -328,6 +358,8 @@ where transaction_client: Arc::new(tx_client), synchronizer, commit_syncer_handle, + round_prober_handle, + commit_consumer_monitor, leader_timeout_handle, core_thread_handle, broadcaster, @@ -354,6 +386,9 @@ where ); }; self.commit_syncer_handle.stop().await; + if let Some(round_prober_handle) = self.round_prober_handle.take() { + round_prober_handle.stop().await; + } self.leader_timeout_handle.stop().await; // Shutdown Core to stop block productions and broadcast. // When using streaming, all subscribers to broadcasted blocks stop after this. 
@@ -377,6 +412,10 @@ where pub(crate) fn transaction_client(&self) -> Arc { self.transaction_client.clone() } + + pub(crate) async fn replay_complete(&self) { + self.commit_consumer_monitor.replay_complete().await; + } } #[cfg(test)] diff --git a/consensus/core/src/authority_service.rs b/consensus/core/src/authority_service.rs index 94fa311e01a6b..7e4b26ac99537 100644 --- a/consensus/core/src/authority_service.rs +++ b/consensus/core/src/authority_service.rs @@ -55,17 +55,10 @@ impl AuthorityService { dag_state: Arc>, store: Arc, ) -> Self { - // Set the subscribed peers by default to 0 - for (_, authority) in context.committee.authorities() { - context - .metrics - .node_metrics - .subscribed_peers - .with_label_values(&[authority.hostname.as_str()]) - .set(0); - } - - let subscription_counter = Arc::new(SubscriptionCounter::new(core_dispatcher.clone())); + let subscription_counter = Arc::new(SubscriptionCounter::new( + context.clone(), + core_dispatcher.clone(), + )); Self { context, block_verifier, @@ -254,7 +247,6 @@ impl NetworkService for AuthorityService { ); let broadcasted_blocks = BroadcastedBlockStream::new( - self.context.clone(), peer, self.rx_block_broadcaster.resubscribe(), self.subscription_counter.clone(), @@ -416,40 +408,96 @@ impl NetworkService for AuthorityService { Ok(result) } + + async fn handle_get_latest_rounds(&self, _peer: AuthorityIndex) -> ConsensusResult> { + fail_point_async!("consensus-rpc-response"); + + let mut highest_received_rounds = self.core_dispatcher.highest_received_rounds(); + // Own blocks do not go through the core dispatcher, so they need to be set separately. + highest_received_rounds[self.context.own_index] = self + .dag_state + .read() + .get_last_block_for_authority(self.context.own_index) + .round(); + + Ok(highest_received_rounds) + } +} + +struct Counter { + count: usize, + subscriptions_by_authority: Vec, } /// Atomically counts the number of active subscriptions to the block broadcast stream, /// and dispatch commands to core based on the changes. 
struct SubscriptionCounter { - counter: parking_lot::Mutex, + context: Arc, + counter: parking_lot::Mutex, dispatcher: Arc, } impl SubscriptionCounter { - fn new(dispatcher: Arc) -> Self { + fn new(context: Arc, dispatcher: Arc) -> Self { + // Set the subscribed peers by default to 0 + for (_, authority) in context.committee.authorities() { + context + .metrics + .node_metrics + .subscribed_peers + .with_label_values(&[authority.hostname.as_str()]) + .set(0); + } + Self { - counter: parking_lot::Mutex::new(0), + counter: parking_lot::Mutex::new(Counter { + count: 0, + subscriptions_by_authority: vec![0; context.committee.size()], + }), dispatcher, + context, } } - fn increment(&self) -> Result<(), ConsensusError> { + fn increment(&self, peer: AuthorityIndex) -> Result<(), ConsensusError> { let mut counter = self.counter.lock(); - *counter += 1; - if *counter == 1 { + counter.count += 1; + counter.subscriptions_by_authority[peer] += 1; + + let peer_hostname = &self.context.committee.authority(peer).hostname; + self.context + .metrics + .node_metrics + .subscribed_peers + .with_label_values(&[peer_hostname]) + .set(1); + + if counter.count == 1 { self.dispatcher - .set_consumer_availability(true) + .set_subscriber_exists(true) .map_err(|_| ConsensusError::Shutdown)?; } Ok(()) } - fn decrement(&self) -> Result<(), ConsensusError> { + fn decrement(&self, peer: AuthorityIndex) -> Result<(), ConsensusError> { let mut counter = self.counter.lock(); - *counter -= 1; - if *counter == 0 { + counter.count -= 1; + counter.subscriptions_by_authority[peer] -= 1; + + if counter.subscriptions_by_authority[peer] == 0 { + let peer_hostname = &self.context.committee.authority(peer).hostname; + self.context + .metrics + .node_metrics + .subscribed_peers + .with_label_values(&[peer_hostname]) + .set(0); + } + + if counter.count == 0 { self.dispatcher - .set_consumer_availability(false) + .set_subscriber_exists(false) .map_err(|_| ConsensusError::Shutdown)?; } Ok(()) @@ -463,7 +511,6 @@ type BroadcastedBlockStream = BroadcastStream; /// Adapted from `tokio_stream::wrappers::BroadcastStream`. The main difference is that /// this tolerates lags with only logging, without yielding errors. struct BroadcastStream { - context: Arc, peer: AuthorityIndex, // Stores the receiver across poll_next() calls. inner: ReusableBoxFuture< @@ -479,22 +526,17 @@ struct BroadcastStream { impl BroadcastStream { pub fn new( - context: Arc, peer: AuthorityIndex, rx: broadcast::Receiver, subscription_counter: Arc, ) -> Self { - let peer_hostname = &context.committee.authority(peer).hostname; - context - .metrics - .node_metrics - .subscribed_peers - .with_label_values(&[peer_hostname]) - .set(1); - // Failure can only be due to core shutdown. - let _ = subscription_counter.increment(); + if let Err(err) = subscription_counter.increment(peer) { + match err { + ConsensusError::Shutdown => {} + _ => panic!("Unexpected error: {err}"), + } + } Self { - context, peer, inner: ReusableBoxFuture::new(make_recv_future(rx)), subscription_counter, @@ -535,15 +577,12 @@ impl Stream for BroadcastStream { impl Drop for BroadcastStream { fn drop(&mut self) { - let peer_hostname = &self.context.committee.authority(self.peer).hostname; - self.context - .metrics - .node_metrics - .subscribed_peers - .with_label_values(&[peer_hostname]) - .set(0); - // Failure can only be due to core shutdown. 
- let _ = self.subscription_counter.decrement(); + if let Err(err) = self.subscription_counter.decrement(self.peer) { + match err { + ConsensusError::Shutdown => {} + _ => panic!("Unexpected error: {err}"), + } + } } } @@ -622,12 +661,21 @@ mod tests { Ok(Default::default()) } - fn set_consumer_availability(&self, _available: bool) -> Result<(), CoreError> { + fn set_subscriber_exists(&self, _exists: bool) -> Result<(), CoreError> { + todo!() + } + + fn set_propagation_delay(&self, _delay: Round) -> Result<(), CoreError> { todo!() } + fn set_last_known_proposed_round(&self, _round: Round) -> Result<(), CoreError> { todo!() } + + fn highest_received_rounds(&self) -> Vec { + todo!() + } } #[derive(Default)] @@ -682,6 +730,14 @@ mod tests { ) -> ConsensusResult> { unimplemented!("Unimplemented") } + + async fn get_latest_rounds( + &self, + _peer: AuthorityIndex, + _timeout: Duration, + ) -> ConsensusResult> { + unimplemented!("Unimplemented") + } } #[tokio::test(flavor = "current_thread", start_paused = true)] diff --git a/consensus/core/src/block.rs b/consensus/core/src/block.rs index de5c779125423..9327560348b67 100644 --- a/consensus/core/src/block.rs +++ b/consensus/core/src/block.rs @@ -50,16 +50,31 @@ impl Transaction { } } +/// Index of a transaction in a block. +pub type TransactionIndex = u16; + +/// Votes on transactions in a specific block. +/// Reject votes are explicit. The rest of transactions in the block receive implicit accept votes. +// TODO: look into making fields `pub`. +#[derive(Clone, Deserialize, Serialize)] +pub(crate) struct BlockTransactionVotes { + pub(crate) block_ref: BlockRef, + pub(crate) rejects: Vec, +} + /// A block includes references to previous round blocks and transactions that the authority /// considers valid. /// Well behaved authorities produce at most one block per round, but malicious authorities can /// equivocate. 
+#[allow(private_interfaces)] #[derive(Clone, Deserialize, Serialize)] #[enum_dispatch(BlockAPI)] pub enum Block { V1(BlockV1), + V2(BlockV2), } +#[allow(private_interfaces)] #[enum_dispatch] pub trait BlockAPI { fn epoch(&self) -> Epoch; @@ -70,15 +85,15 @@ pub trait BlockAPI { fn ancestors(&self) -> &[BlockRef]; fn transactions(&self) -> &[Transaction]; fn commit_votes(&self) -> &[CommitVote]; + fn transaction_votes(&self) -> &[BlockTransactionVotes]; fn misbehavior_reports(&self) -> &[MisbehaviorReport]; } #[derive(Clone, Default, Deserialize, Serialize)] -pub struct BlockV1 { +pub(crate) struct BlockV1 { epoch: Epoch, round: Round, author: AuthorityIndex, - // TODO: during verification ensure that timestamp_ms >= ancestors.timestamp timestamp_ms: BlockTimestampMs, ancestors: Vec, transactions: Vec, @@ -156,6 +171,106 @@ impl BlockAPI for BlockV1 { &self.commit_votes } + fn transaction_votes(&self) -> &[BlockTransactionVotes] { + &[] + } + + fn misbehavior_reports(&self) -> &[MisbehaviorReport] { + &self.misbehavior_reports + } +} + +#[derive(Clone, Default, Deserialize, Serialize)] +pub(crate) struct BlockV2 { + epoch: Epoch, + round: Round, + author: AuthorityIndex, + timestamp_ms: BlockTimestampMs, + ancestors: Vec, + transactions: Vec, + transaction_votes: Vec, + commit_votes: Vec, + misbehavior_reports: Vec, +} + +#[allow(unused)] +impl BlockV2 { + pub(crate) fn new( + epoch: Epoch, + round: Round, + author: AuthorityIndex, + timestamp_ms: BlockTimestampMs, + ancestors: Vec, + transactions: Vec, + commit_votes: Vec, + transaction_votes: Vec, + misbehavior_reports: Vec, + ) -> BlockV2 { + Self { + epoch, + round, + author, + timestamp_ms, + ancestors, + transactions, + commit_votes, + transaction_votes, + misbehavior_reports, + } + } + + fn genesis_block(epoch: Epoch, author: AuthorityIndex) -> Self { + Self { + epoch, + round: GENESIS_ROUND, + author, + timestamp_ms: 0, + ancestors: vec![], + transactions: vec![], + commit_votes: vec![], + transaction_votes: vec![], + misbehavior_reports: vec![], + } + } +} + +impl BlockAPI for BlockV2 { + fn epoch(&self) -> Epoch { + self.epoch + } + + fn round(&self) -> Round { + self.round + } + + fn author(&self) -> AuthorityIndex { + self.author + } + + fn slot(&self) -> Slot { + Slot::new(self.round, self.author) + } + + fn timestamp_ms(&self) -> BlockTimestampMs { + self.timestamp_ms + } + + fn ancestors(&self) -> &[BlockRef] { + &self.ancestors + } + + fn transactions(&self) -> &[Transaction] { + &self.transactions + } + + fn transaction_votes(&self) -> &[BlockTransactionVotes] { + &self.transaction_votes + } + + fn commit_votes(&self) -> &[CommitVote] { + &self.commit_votes + } + fn misbehavior_reports(&self) -> &[MisbehaviorReport] { &self.misbehavior_reports } diff --git a/consensus/core/src/block_manager.rs b/consensus/core/src/block_manager.rs index d381f3c9da121..ba4a6e740e063 100644 --- a/consensus/core/src/block_manager.rs +++ b/consensus/core/src/block_manager.rs @@ -229,11 +229,26 @@ impl BlockManager { .or_default() .insert(block_ref); + let ancestor_hostname = &self.context.committee.authority(ancestor.author).hostname; + self.context + .metrics + .node_metrics + .block_manager_missing_ancestors_by_authority + .with_label_values(&[ancestor_hostname]) + .inc(); + // Add the ancestor to the missing blocks set only if it doesn't already exist in the suspended blocks - meaning // that we already have its payload. 
if !self.suspended_blocks.contains_key(ancestor) { - self.missing_blocks.insert(*ancestor); ancestors_to_fetch.insert(*ancestor); + if self.missing_blocks.insert(*ancestor) { + self.context + .metrics + .node_metrics + .block_manager_missing_blocks_by_authority + .with_label_values(&[ancestor_hostname]) + .inc(); + } } } } diff --git a/consensus/core/src/block_verifier.rs b/consensus/core/src/block_verifier.rs index 0c0387d536112..8582e06a5dc56 100644 --- a/consensus/core/src/block_verifier.rs +++ b/consensus/core/src/block_verifier.rs @@ -227,7 +227,7 @@ mod test { use super::*; use crate::{ - block::{BlockDigest, BlockRef, TestBlock, Transaction}, + block::{BlockDigest, BlockRef, TestBlock, Transaction, TransactionIndex}, context::Context, transaction::{TransactionVerifier, ValidationError}, }; @@ -247,6 +247,10 @@ mod test { } Ok(()) } + + fn vote_batch(&self, _batch: &[&[u8]]) -> Vec { + vec![] + } } #[tokio::test] diff --git a/consensus/core/src/broadcaster.rs b/consensus/core/src/broadcaster.rs index ee0e0b9bb8758..6a3a26042ead9 100644 --- a/consensus/core/src/broadcaster.rs +++ b/consensus/core/src/broadcaster.rs @@ -274,6 +274,14 @@ mod test { ) -> ConsensusResult> { unimplemented!("Unimplemented") } + + async fn get_latest_rounds( + &self, + _peer: AuthorityIndex, + _timeout: Duration, + ) -> ConsensusResult> { + unimplemented!("Unimplemented") + } } #[tokio::test(flavor = "current_thread", start_paused = true)] diff --git a/consensus/core/src/commit.rs b/consensus/core/src/commit.rs index 406b482338f91..d40ee0a5a4ced 100644 --- a/consensus/core/src/commit.rs +++ b/consensus/core/src/commit.rs @@ -543,9 +543,22 @@ impl CommitRange { self.0.end.saturating_sub(1) } + pub(crate) fn extend_to(&mut self, other: CommitIndex) { + let new_end = other.saturating_add(1); + assert!(self.0.end <= new_end); + self.0 = self.0.start..new_end; + } + + pub(crate) fn size(&self) -> usize { + self.0 + .end + .checked_sub(self.0.start) + .expect("Range should never have end < start") as usize + } + /// Check whether the two ranges have the same size. pub(crate) fn is_equal_size(&self, other: &Self) -> bool { - self.0.end.wrapping_sub(self.0.start) == other.0.end.wrapping_sub(other.0.start) + self.size() == other.size() } /// Check if the provided range is sequentially after this range. 
@@ -669,15 +682,21 @@ mod tests { #[tokio::test] async fn test_commit_range() { telemetry_subscribers::init_for_testing(); - let range1 = CommitRange::new(1..=5); + let mut range1 = CommitRange::new(1..=5); let range2 = CommitRange::new(2..=6); let range3 = CommitRange::new(5..=10); let range4 = CommitRange::new(6..=10); let range5 = CommitRange::new(6..=9); + let range6 = CommitRange::new(1..=1); assert_eq!(range1.start(), 1); assert_eq!(range1.end(), 5); + // Test range size + assert_eq!(range1.size(), 5); + assert_eq!(range3.size(), 6); + assert_eq!(range6.size(), 1); + // Test next range check assert!(!range1.is_next_range(&range2)); assert!(!range1.is_next_range(&range3)); @@ -695,5 +714,16 @@ mod tests { assert!(range2 < range3); assert!(range3 < range4); assert!(range5 < range4); + + // Test extending range + range1.extend_to(10); + assert_eq!(range1.start(), 1); + assert_eq!(range1.end(), 10); + assert_eq!(range1.size(), 10); + + range1.extend_to(20); + assert_eq!(range1.start(), 1); + assert_eq!(range1.end(), 20); + assert_eq!(range1.size(), 20); } } diff --git a/consensus/core/src/commit_consumer.rs b/consensus/core/src/commit_consumer.rs index 47a96d80f0e2a..2d06208d03964 100644 --- a/consensus/core/src/commit_consumer.rs +++ b/consensus/core/src/commit_consumer.rs @@ -1,7 +1,8 @@ // Copyright (c) Mysten Labs, Inc. // SPDX-License-Identifier: Apache-2.0 -use std::sync::{atomic::AtomicU32, Arc}; +use std::sync::{Arc, RwLock}; +use tokio::sync::watch; use mysten_metrics::monitored_mpsc::UnboundedSender; @@ -38,24 +39,65 @@ impl CommitConsumer { } pub struct CommitConsumerMonitor { - highest_handled_commit: AtomicU32, + // highest commit that has been handled by Sui + highest_handled_commit: watch::Sender, + + // the highest commit found in local storage at startup + highest_observed_commit_at_startup: RwLock, } impl CommitConsumerMonitor { pub(crate) fn new(last_handled_commit: CommitIndex) -> Self { Self { - highest_handled_commit: AtomicU32::new(last_handled_commit), + highest_handled_commit: watch::Sender::new(last_handled_commit), + highest_observed_commit_at_startup: RwLock::new(0), } } pub(crate) fn highest_handled_commit(&self) -> CommitIndex { - self.highest_handled_commit - .load(std::sync::atomic::Ordering::Acquire) + *self.highest_handled_commit.borrow() } pub fn set_highest_handled_commit(&self, highest_handled_commit: CommitIndex) { self.highest_handled_commit - .store(highest_handled_commit, std::sync::atomic::Ordering::Release); + .send_replace(highest_handled_commit); + } + + pub fn highest_observed_commit_at_startup(&self) -> CommitIndex { + *self.highest_observed_commit_at_startup.read().unwrap() + } + + pub fn set_highest_observed_commit_at_startup( + &self, + highest_observed_commit_at_startup: CommitIndex, + ) { + let highest_handled_commit = self.highest_handled_commit(); + assert!( + highest_observed_commit_at_startup >= highest_handled_commit, + "we cannot have handled a commit that we do not know about: {} < {}", + highest_observed_commit_at_startup, + highest_handled_commit, + ); + + let mut commit = self.highest_observed_commit_at_startup.write().unwrap(); + + assert!( + *commit == 0, + "highest_known_commit_at_startup can only be set once" + ); + *commit = highest_observed_commit_at_startup; + } + + pub(crate) async fn replay_complete(&self) { + let highest_observed_commit_at_startup = self.highest_observed_commit_at_startup(); + let mut rx = self.highest_handled_commit.subscribe(); + loop { + let highest_handled = *rx.borrow_and_update(); + if 
highest_handled >= highest_observed_commit_at_startup { + return; + } + rx.changed().await.unwrap(); + } } } diff --git a/consensus/core/src/commit_syncer.rs b/consensus/core/src/commit_syncer.rs index f4ecf4fa2f951..c106d9445bec3 100644 --- a/consensus/core/src/commit_syncer.rs +++ b/consensus/core/src/commit_syncer.rs @@ -812,6 +812,14 @@ mod tests { ) -> ConsensusResult> { unimplemented!("Unimplemented") } + + async fn get_latest_rounds( + &self, + _peer: AuthorityIndex, + _timeout: Duration, + ) -> ConsensusResult> { + unimplemented!("Unimplemented") + } } #[tokio::test(flavor = "current_thread", start_paused = true)] diff --git a/consensus/core/src/core.rs b/consensus/core/src/core.rs index e8d5fa9276ad9..39b6f7b672121 100644 --- a/consensus/core/src/core.rs +++ b/consensus/core/src/core.rs @@ -56,8 +56,18 @@ pub(crate) struct Core { /// The block manager which is responsible for keeping track of the DAG dependencies when processing new blocks /// and accept them or suspend if we are missing their causal history block_manager: BlockManager, - /// Whether there are consumers waiting to consume blocks produced by the core. - consumer_availability: bool, + /// Whether there are subscribers waiting for new blocks proposed by this authority. + /// Core stops proposing new blocks when there is no subscriber, because new proposed blocks + /// will likely contain only stale info when they propagate to peers. + subscriber_exists: bool, + /// Estimated delay by round for propagating blocks to a quorum. + /// Because of the nature of TCP and block streaming, propagation delay is expected to be + /// 0 in most cases, even when the actual latency of broadcasting blocks is high. + /// When this value is higher than the `propagation_delay_stop_proposal_threshold`, + /// most likely this validator cannot broadcast blocks to the network at all. + /// Core stops proposing new blocks in this case. + propagation_delay: Round, + /// Used to make commit decisions for leader blocks in the dag. committer: UniversalCommitter, /// The last produced block @@ -95,7 +105,7 @@ impl Core { leader_schedule: Arc, transaction_consumer: TransactionConsumer, block_manager: BlockManager, - consumer_availability: bool, + subscriber_exists: bool, commit_observer: CommitObserver, signals: CoreSignals, block_signer: ProtocolKeyPair, @@ -150,7 +160,8 @@ impl Core { leader_schedule, transaction_consumer, block_manager, - consumer_availability, + subscriber_exists, + propagation_delay: 0, committer, commit_observer, signals, @@ -302,17 +313,6 @@ impl Core { Ok(None) } - /// Sets the min propose round for the proposer allowing to propose blocks only for round numbers - /// `> last_known_proposed_round`. At the moment is allowed to call the method only once leading to a panic - /// if attempt to do multiple times. - pub(crate) fn set_last_known_proposed_round(&mut self, round: Round) { - if self.last_known_proposed_round.is_some() { - panic!("Should not attempt to set the last known proposed round if that has been already set"); - } - self.last_known_proposed_round = Some(round); - info!("Set last known proposed round to {round}"); - } - // Attempts to create a new block, persist and propose it to all peers. // When force is true, ignore if leader from the last round exists among ancestors and if // the minimum round delay has passed. 
@@ -550,7 +550,17 @@ impl Core { tracing::info!( "Leader schedule change triggered at commit index {last_commit_index}" ); - self.leader_schedule.update_leader_schedule(&self.dag_state); + if self + .context + .protocol_config + .consensus_distributed_vote_scoring_strategy() + { + self.leader_schedule + .update_leader_schedule_v2(&self.dag_state); + } else { + self.leader_schedule + .update_leader_schedule_v1(&self.dag_state); + } commits_until_update = self .leader_schedule .commits_until_leader_schedule_update(self.dag_state.clone()); @@ -604,9 +614,18 @@ impl Core { // TODO: refcount subdags let subdags = self.commit_observer.handle_commit(sequenced_leaders)?; - self.dag_state - .write() - .add_unscored_committed_subdags(subdags.clone()); + if self + .context + .protocol_config + .consensus_distributed_vote_scoring_strategy() + { + self.dag_state.write().add_scoring_subdags(subdags.clone()); + } else { + // TODO: Remove when DistributedVoteScoring is enabled. + self.dag_state + .write() + .add_unscored_committed_subdags(subdags.clone()); + } committed_subdags.extend(subdags); } @@ -620,28 +639,76 @@ impl Core { } /// Sets if there is consumer available to consume blocks produced by the core. - pub(crate) fn set_consumer_availability(&mut self, allow: bool) { - info!("Block consumer availability set to: {allow}"); - self.consumer_availability = allow; + pub(crate) fn set_subscriber_exists(&mut self, exists: bool) { + info!("Block subscriber exists: {exists}"); + self.subscriber_exists = exists; + } + + /// Sets the delay by round for propagating blocks to a quorum. + pub(crate) fn set_propagation_delay(&mut self, delay: Round) { + info!("Propagation round delay set to: {delay}"); + self.propagation_delay = delay; + } + + /// Sets the min propose round for the proposer allowing to propose blocks only for round numbers + /// `> last_known_proposed_round`. At the moment is allowed to call the method only once leading to a panic + /// if attempt to do multiple times. + pub(crate) fn set_last_known_proposed_round(&mut self, round: Round) { + if self.last_known_proposed_round.is_some() { + panic!("Should not attempt to set the last known proposed round if that has been already set"); + } + self.last_known_proposed_round = Some(round); + info!("Last known proposed round set to {round}"); } /// Whether the core should propose new blocks. 
- fn should_propose(&self) -> bool { + pub(crate) fn should_propose(&self) -> bool { let clock_round = self.threshold_clock.get_round(); - let skip_proposing = if let Some(last_known_proposed_round) = self.last_known_proposed_round + let core_skipped_proposals = &self.context.metrics.node_metrics.core_skipped_proposals; + + if !self.subscriber_exists { + debug!("Skip proposing for round {clock_round}, no subscriber exists."); + core_skipped_proposals + .with_label_values(&["no_subscriber"]) + .inc(); + return false; + } + + if self.propagation_delay + > self + .context + .parameters + .propagation_delay_stop_proposal_threshold { - if clock_round <= last_known_proposed_round { - debug!("Skip proposing for round {clock_round} as last known proposed round is {last_known_proposed_round}"); - true - } else { - false - } - } else { + debug!( + "Skip proposing for round {clock_round}, high propagation delay {} > {}.", + self.propagation_delay, + self.context + .parameters + .propagation_delay_stop_proposal_threshold + ); + core_skipped_proposals + .with_label_values(&["high_propagation_delay"]) + .inc(); + return false; + } + + let Some(last_known_proposed_round) = self.last_known_proposed_round else { debug!("Skip proposing for round {clock_round}, last known proposed round has not been synced yet."); - true + core_skipped_proposals + .with_label_values(&["no_last_known_proposed_round"]) + .inc(); + return false; }; + if clock_round <= last_known_proposed_round { + debug!("Skip proposing for round {clock_round} as last known proposed round is {last_known_proposed_round}"); + core_skipped_proposals + .with_label_values(&["higher_last_known_proposed_round"]) + .inc(); + return false; + } - self.consumer_availability && !skip_proposing + true } /// Retrieves the next ancestors to propose to form a block at `clock_round` round. @@ -911,13 +978,13 @@ mod test { use tokio::time::sleep; use super::*; - use crate::test_dag_builder::DagBuilder; use crate::{ block::{genesis_blocks, TestBlock}, block_verifier::NoopBlockVerifier, commit::{CommitAPI as _, CommitRange}, leader_scoring::ReputationScores, storage::{mem_store::MemStore, Store, WriteBatch}, + test_dag_builder::DagBuilder, transaction::TransactionClient, CommitConsumer, CommitIndex, }; @@ -1540,7 +1607,7 @@ mod test { } #[tokio::test] - async fn test_core_set_consumer_availability() { + async fn test_core_set_subscriber_exists() { telemetry_subscribers::init_for_testing(); let (context, mut key_pairs) = Context::new_for_test(4); let context = Arc::new(context); @@ -1577,6 +1644,7 @@ mod test { leader_schedule, transaction_consumer, block_manager, + // Set to no subscriber exists initially. false, commit_observer, signals, @@ -1585,18 +1653,89 @@ mod test { false, ); - // No proposal during recovery. + // There is no proposal during recovery because there is no subscriber. assert_eq!( core.last_proposed_round(), GENESIS_ROUND, "No block should have been created other than genesis" ); - // No proposal even with forced proposing. + // There is no proposal even with forced proposing. assert!(core.try_propose(true).unwrap().is_none()); - // Update core when consumer is available. - core.set_consumer_availability(true); + // Let Core know subscriber exists. + core.set_subscriber_exists(true); + + // Proposing now would succeed. + assert!(core.try_propose(true).unwrap().is_some()); + } + + #[tokio::test] + async fn test_core_set_propagation_delay() { + // TODO: create helper to avoid the duplicated code here. 
+ telemetry_subscribers::init_for_testing(); + let (context, mut key_pairs) = Context::new_for_test(4); + let context = Arc::new(context); + let store = Arc::new(MemStore::new()); + let dag_state = Arc::new(RwLock::new(DagState::new(context.clone(), store.clone()))); + + let block_manager = BlockManager::new( + context.clone(), + dag_state.clone(), + Arc::new(NoopBlockVerifier), + ); + let leader_schedule = Arc::new(LeaderSchedule::from_store( + context.clone(), + dag_state.clone(), + )); + + let (_transaction_client, tx_receiver) = TransactionClient::new(context.clone()); + let transaction_consumer = TransactionConsumer::new(tx_receiver, context.clone()); + let (signals, signal_receivers) = CoreSignals::new(context.clone()); + // Need at least one subscriber to the block broadcast channel. + let _block_receiver = signal_receivers.block_broadcast_receiver(); + + let (sender, _receiver) = unbounded_channel("consensus_output"); + let commit_observer = CommitObserver::new( + context.clone(), + CommitConsumer::new(sender.clone(), 0), + dag_state.clone(), + store.clone(), + leader_schedule.clone(), + ); + + let mut core = Core::new( + context.clone(), + leader_schedule, + transaction_consumer, + block_manager, + // Set to no subscriber exists initially. + false, + commit_observer, + signals, + key_pairs.remove(context.own_index.value()).1, + dag_state.clone(), + false, + ); + + // There is no proposal during recovery because there is no subscriber. + assert_eq!( + core.last_proposed_round(), + GENESIS_ROUND, + "No block should have been created other than genesis" + ); + + // Use a large propagation delay to disable proposing. + core.set_propagation_delay(1000); + + // Make propagation delay the only reason for not proposing. + core.set_subscriber_exists(true); + + // There is no proposal even with forced proposing. + assert!(core.try_propose(true).unwrap().is_none()); + + // Let Core know there is no propagation delay. + core.set_propagation_delay(0); // Proposing now would succeed. assert!(core.try_propose(true).unwrap().is_some()); @@ -1672,6 +1811,131 @@ mod test { last_round_blocks = this_round_blocks; } + for core_fixture in cores { + // Check commits have been persisted to store + let last_commit = core_fixture + .store + .read_last_commit() + .unwrap() + .expect("last commit should be set"); + // There are 28 leader rounds with rounds completed up to and including + // round 29. Round 30 blocks will only include their own blocks, so the + // 28th leader will not be committed. + assert_eq!(last_commit.index(), 27); + let all_stored_commits = core_fixture + .store + .scan_commits((0..=CommitIndex::MAX).into()) + .unwrap(); + assert_eq!(all_stored_commits.len(), 27); + assert_eq!( + core_fixture + .core + .leader_schedule + .leader_swap_table + .read() + .bad_nodes + .len(), + 1 + ); + assert_eq!( + core_fixture + .core + .leader_schedule + .leader_swap_table + .read() + .good_nodes + .len(), + 1 + ); + let expected_reputation_scores = + ReputationScores::new((11..=20).into(), vec![29, 29, 29, 29]); + assert_eq!( + core_fixture + .core + .leader_schedule + .leader_swap_table + .read() + .reputation_scores, + expected_reputation_scores + ); + } + } + + // TODO: Remove this when DistributedVoteScoring is enabled. 
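// For reference on the assertions above: `ReputationScores::new(range, scores)` pairs the
// commit range the scores were computed over with one score per authority, indexed by
// `AuthorityIndex`. Reading the expected value as an inference from this test's setup (not a
// statement from the patch): over commits 11..=20, each of the four authorities accumulated
// 29 counted votes.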
+ #[tokio::test(flavor = "current_thread", start_paused = true)] + async fn test_leader_schedule_change_with_vote_scoring() { + telemetry_subscribers::init_for_testing(); + let default_params = Parameters::default(); + + let (mut context, _) = Context::new_for_test(4); + context + .protocol_config + .set_consensus_distributed_vote_scoring_strategy_for_testing(false); + + // create the cores and their signals for all the authorities + let mut cores = create_cores(context, vec![1, 1, 1, 1]); + + // Now iterate over a few rounds and ensure the corresponding signals are created while network advances + let mut last_round_blocks = Vec::new(); + for round in 1..=30 { + let mut this_round_blocks = Vec::new(); + + // Wait for min round delay to allow blocks to be proposed. + sleep(default_params.min_round_delay).await; + + for core_fixture in &mut cores { + // add the blocks from last round + // this will trigger a block creation for the round and a signal should be emitted + core_fixture + .core + .add_blocks(last_round_blocks.clone()) + .unwrap(); + + // A "new round" signal should be received given that all the blocks of previous round have been processed + let new_round = receive( + Duration::from_secs(1), + core_fixture.signal_receivers.new_round_receiver(), + ) + .await; + assert_eq!(new_round, round); + + // Check that a new block has been proposed. + let block = tokio::time::timeout( + Duration::from_secs(1), + core_fixture.block_receiver.recv(), + ) + .await + .unwrap() + .unwrap(); + assert_eq!(block.round(), round); + assert_eq!(block.author(), core_fixture.core.context.own_index); + + // append the new block to this round blocks + this_round_blocks.push(core_fixture.core.last_proposed_block().clone()); + + let block = core_fixture.core.last_proposed_block(); + + // ensure that produced block is referring to the blocks of last_round + assert_eq!( + block.ancestors().len(), + core_fixture.core.context.committee.size() + ); + for ancestor in block.ancestors() { + if block.round() > 1 { + // don't bother with round 1 block which just contains the genesis blocks. + assert!( + last_round_blocks + .iter() + .any(|block| block.reference() == *ancestor), + "Reference from previous round should be added" + ); + } + } + } + + last_round_blocks = this_round_blocks; + } + for core_fixture in cores { // Check commits have been persisted to store let last_commit = core_fixture @@ -1866,6 +2130,158 @@ mod test { // create the cores and their signals for all the authorities let mut cores = create_cores(context, vec![1, 1, 1, 1, 1, 1]); + // Now iterate over a few rounds and ensure the corresponding signals are created while network advances + let mut last_round_blocks = Vec::new(); + for round in 1..=33 { + let mut this_round_blocks = Vec::new(); + + // Wait for min round delay to allow blocks to be proposed. + sleep(default_params.min_round_delay).await; + + for core_fixture in &mut cores { + // add the blocks from last round + // this will trigger a block creation for the round and a signal should be emitted + core_fixture + .core + .add_blocks(last_round_blocks.clone()) + .unwrap(); + + // A "new round" signal should be received given that all the blocks of previous round have been processed + let new_round = receive( + Duration::from_secs(1), + core_fixture.signal_receivers.new_round_receiver(), + ) + .await; + assert_eq!(new_round, round); + + // Check that a new block has been proposed. 
+ let block = tokio::time::timeout( + Duration::from_secs(1), + core_fixture.block_receiver.recv(), + ) + .await + .unwrap() + .unwrap(); + assert_eq!(block.round(), round); + assert_eq!(block.author(), core_fixture.core.context.own_index); + + // append the new block to this round blocks + this_round_blocks.push(core_fixture.core.last_proposed_block().clone()); + + let block = core_fixture.core.last_proposed_block(); + + // ensure that produced block is referring to the blocks of last_round + assert_eq!( + block.ancestors().len(), + core_fixture.core.context.committee.size() + ); + for ancestor in block.ancestors() { + if block.round() > 1 { + // don't bother with round 1 block which just contains the genesis blocks. + assert!( + last_round_blocks + .iter() + .any(|block| block.reference() == *ancestor), + "Reference from previous round should be added" + ); + } + } + } + + last_round_blocks = this_round_blocks; + } + + for core_fixture in cores { + // Check commits have been persisted to store + let last_commit = core_fixture + .store + .read_last_commit() + .unwrap() + .expect("last commit should be set"); + // There are 31 leader rounds with rounds completed up to and including + // round 33. Round 33 blocks will only include their own blocks, so there + // should only be 30 commits. + // However on a leader schedule change boundary it is possible for a + // new leader to get selected for the same round if the leader elected + // gets swapped allowing for multiple leaders to be committed at a round. + // Meaning with multi leader per round explicitly set to 1 we will have 30, + // otherwise 31. + // NOTE: We used 31 leader rounds to specifically trigger the scenario + // where the leader schedule boundary occurred AND we had a swap to a new + // leader for the same round + let expected_commit_count = match num_leaders_per_round { + Some(1) => 30, + _ => 31, + }; + assert_eq!(last_commit.index(), expected_commit_count); + let all_stored_commits = core_fixture + .store + .scan_commits((0..=CommitIndex::MAX).into()) + .unwrap(); + assert_eq!(all_stored_commits.len(), expected_commit_count as usize); + assert_eq!( + core_fixture + .core + .leader_schedule + .leader_swap_table + .read() + .bad_nodes + .len(), + 1 + ); + assert_eq!( + core_fixture + .core + .leader_schedule + .leader_swap_table + .read() + .good_nodes + .len(), + 1 + ); + let expected_reputation_scores = + ReputationScores::new((21..=30).into(), vec![43, 43, 43, 43, 43, 43]); + assert_eq!( + core_fixture + .core + .leader_schedule + .leader_swap_table + .read() + .reputation_scores, + expected_reputation_scores + ); + } + } + + // TODO: Remove two tests below this when DistributedVoteScoring is enabled.
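// The core_thread.rs changes further below wire `set_subscriber_exists` and
// `set_propagation_delay` through tokio watch channels. A minimal, self-contained sketch of
// that pattern (hypothetical names, assumes a tokio runtime): the setter stores only the
// latest value, and the receiving loop wakes on changes rather than polling.
async fn watch_pattern_demo() {
    let (tx, mut rx) = tokio::sync::watch::channel(0u32);
    // Suppress a wakeup for the initial value, as the dispatcher does with mark_unchanged().
    rx.mark_unchanged();
    let core_loop = tokio::spawn(async move {
        while rx.changed().await.is_ok() {
            // Intermediate values may be skipped by design; only the latest one is observed.
            let delay = *rx.borrow();
            println!("propagation delay is now {delay}");
        }
    });
    tx.send(3).unwrap(); // Notify the loop of a new value.
    drop(tx); // Closing the sender ends the loop after the last value is seen.
    core_loop.await.unwrap();
}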
+ #[tokio::test(flavor = "current_thread", start_paused = true)] + async fn test_commit_on_leader_schedule_change_boundary_without_multileader_with_vote_scoring() + { + parameterized_test_commit_on_leader_schedule_change_boundary_with_vote_scoring(Some(1)) + .await; + } + + #[tokio::test(flavor = "current_thread", start_paused = true)] + async fn test_commit_on_leader_schedule_change_boundary_with_multileader_with_vote_scoring() { + parameterized_test_commit_on_leader_schedule_change_boundary_with_vote_scoring(None).await; + } + + async fn parameterized_test_commit_on_leader_schedule_change_boundary_with_vote_scoring( + num_leaders_per_round: Option, + ) { + telemetry_subscribers::init_for_testing(); + let default_params = Parameters::default(); + + let (mut context, _) = Context::new_for_test(6); + context + .protocol_config + .set_consensus_distributed_vote_scoring_strategy_for_testing(false); + context + .protocol_config + .set_mysticeti_num_leaders_per_round_for_testing(num_leaders_per_round); + // create the cores and their signals for all the authorities + let mut cores = create_cores(context, vec![1, 1, 1, 1, 1, 1]); + + // Now iterate over a few rounds and ensure the corresponding signals are created while network advances let mut last_round_blocks = Vec::new(); for round in 1..=63 { @@ -1940,7 +2356,7 @@ mod test { // However on a leader schedule change boundary it is possible for a // new leader to get selected for the same round if the leader elected // gets swapped allowing for multiple leaders to be committed at a round. // Meaning with multi leader per round explicitly set to 1 we will have 60, // otherwise 61. // NOTE: We used 61 leader rounds to specifically trigger the scenario // where the leader schedule boundary occurred AND we had a swap to a new diff --git a/consensus/core/src/core_thread.rs b/consensus/core/src/core_thread.rs index 8c39d6059746f..1e9ec7e925c7b 100644 --- a/consensus/core/src/core_thread.rs +++ b/consensus/core/src/core_thread.rs @@ -1,14 +1,21 @@ // Copyright (c) Mysten Labs, Inc. // SPDX-License-Identifier: Apache-2.0 -use std::{collections::BTreeSet, fmt::Debug, sync::Arc}; +use std::{ + collections::BTreeSet, + fmt::Debug, + sync::{ + atomic::{AtomicU32, Ordering}, + Arc, + }, +}; use async_trait::async_trait; use mysten_metrics::{ monitored_mpsc::{channel, Receiver, Sender, WeakSender}, monitored_scope, spawn_logged_monitored_task, }; -use parking_lot::Mutex; +use parking_lot::{Mutex, RwLock}; use thiserror::Error; use tokio::sync::{oneshot, watch}; use tracing::warn; @@ -18,7 +25,9 @@ use crate::{ context::Context, core::Core, core_thread::CoreError::Shutdown, + dag_state::DagState, error::{ConsensusError, ConsensusResult}, + BlockAPI as _, }; const CORE_THREAD_COMMANDS_CHANNEL_SIZE: usize = 2000; @@ -54,9 +63,15 @@ pub trait CoreThreadDispatcher: Sync + Send + 'static { - /// Informs the core whether consumer of produced blocks exists. + /// Informs the core whether a subscriber of produced blocks exists. /// This is only used by core to decide if it should propose new blocks. /// It is not a guarantee that produced blocks will be accepted by peers. - fn set_consumer_availability(&self, available: bool) -> Result<(), CoreError>; + fn set_subscriber_exists(&self, exists: bool) -> Result<(), CoreError>; + + /// Sets the estimated delay to propagate a block to a quorum of peers, in number of rounds.
+ fn set_propagation_delay(&self, delay: Round) -> Result<(), CoreError>; fn set_last_known_proposed_round(&self, round: Round) -> Result<(), CoreError>; + + /// Returns the highest round received for each authority by Core. + fn highest_received_rounds(&self) -> Vec<Round>; } pub(crate) struct CoreThreadHandle { @@ -75,7 +90,8 @@ impl CoreThreadHandle { struct CoreThread { core: Core, receiver: Receiver<CoreThreadCommand>, - rx_consumer_availability: watch::Receiver<bool>, + rx_subscriber_exists: watch::Receiver<bool>, + rx_propagation_delay: watch::Receiver<Round>, rx_last_known_proposed_round: watch::Receiver<Round>, context: Arc<Context>, } @@ -114,12 +130,24 @@ impl CoreThread { self.core.set_last_known_proposed_round(round); self.core.new_block(round + 1, true)?; } - _ = self.rx_consumer_availability.changed() => { - let _scope = monitored_scope("CoreThread::loop::set_consumer_availability"); - let available = *self.rx_consumer_availability.borrow(); - self.core.set_consumer_availability(available); - if available { - // If a consumer becomes available, try to produce a new block to ensure liveness, + _ = self.rx_subscriber_exists.changed() => { + let _scope = monitored_scope("CoreThread::loop::set_subscriber_exists"); + let should_propose_before = self.core.should_propose(); + let exists = *self.rx_subscriber_exists.borrow(); + self.core.set_subscriber_exists(exists); + if !should_propose_before && self.core.should_propose() { + // If core cannot propose before but can propose now, try to produce a new block to ensure liveness, + // because block proposal could have been skipped. + self.core.new_block(Round::MAX, true)?; + } + } + _ = self.rx_propagation_delay.changed() => { + let _scope = monitored_scope("CoreThread::loop::set_propagation_delay"); + let should_propose_before = self.core.should_propose(); + let delay = *self.rx_propagation_delay.borrow(); + self.core.set_propagation_delay(delay); + if !should_propose_before && self.core.should_propose() { + // If core cannot propose before but can propose now, try to produce a new block to ensure liveness, // because block proposal could have been skipped. self.core.new_block(Round::MAX, true)?; } @@ -135,22 +163,43 @@ impl CoreThread { pub(crate) struct ChannelCoreThreadDispatcher { context: Arc<Context>, sender: WeakSender<CoreThreadCommand>, - tx_consumer_availability: Arc<watch::Sender<bool>>, + tx_subscriber_exists: Arc<watch::Sender<bool>>, + tx_propagation_delay: Arc<watch::Sender<Round>>, tx_last_known_proposed_round: Arc<watch::Sender<Round>>, + highest_received_rounds: Arc<Vec<AtomicU32>>, } impl ChannelCoreThreadDispatcher { - pub(crate) fn start(core: Core, context: Arc<Context>) -> (Self, CoreThreadHandle) { + pub(crate) fn start( + context: Arc<Context>, + dag_state: &RwLock<DagState>, + core: Core, + ) -> (Self, CoreThreadHandle) { + // Initialize highest received rounds to last accepted rounds.
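// (Clarifying note, an inference rather than patch text: seeding these counters from DagState
// means rounds accepted before a restart still count toward peers' observed progress, and one
// AtomicU32 per authority lets `add_blocks` callers bump the maxima concurrently, without
// going through the core thread; the `fetch_max` further below keeps each entry monotonically
// non-decreasing.)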
+ let highest_received_rounds = { + let dag_state = dag_state.read(); + context + .committee + .authorities() + .map(|(index, _)| { + AtomicU32::new(dag_state.get_last_block_for_authority(index).round()) + }) + .collect() + }; + let (sender, receiver) = channel("consensus_core_commands", CORE_THREAD_COMMANDS_CHANNEL_SIZE); - let (tx_consumer_availability, mut rx_consumer_availability) = watch::channel(false); + let (tx_subscriber_exists, mut rx_subscriber_exists) = watch::channel(false); + let (tx_propagation_delay, mut rx_propagation_delay) = watch::channel(0); let (tx_last_known_proposed_round, mut rx_last_known_proposed_round) = watch::channel(0); - rx_consumer_availability.mark_unchanged(); + rx_subscriber_exists.mark_unchanged(); + rx_propagation_delay.mark_unchanged(); rx_last_known_proposed_round.mark_unchanged(); let core_thread = CoreThread { core, receiver, - rx_consumer_availability, + rx_subscriber_exists, + rx_propagation_delay, rx_last_known_proposed_round, context: context.clone(), }; @@ -171,8 +220,10 @@ impl ChannelCoreThreadDispatcher { let dispatcher = ChannelCoreThreadDispatcher { context, sender: sender.downgrade(), - tx_consumer_availability: Arc::new(tx_consumer_availability), + tx_subscriber_exists: Arc::new(tx_subscriber_exists), + tx_propagation_delay: Arc::new(tx_propagation_delay), tx_last_known_proposed_round: Arc::new(tx_last_known_proposed_round), + highest_received_rounds: Arc::new(highest_received_rounds), }; let handle = CoreThreadHandle { join_handle, @@ -200,6 +251,9 @@ impl CoreThreadDispatcher for ChannelCoreThreadDispatcher { &self, blocks: Vec, ) -> Result, CoreError> { + for block in &blocks { + self.highest_received_rounds[block.author()].fetch_max(block.round(), Ordering::AcqRel); + } let (sender, receiver) = oneshot::channel(); self.send(CoreThreadCommand::AddBlocks(blocks, sender)) .await; @@ -219,9 +273,15 @@ impl CoreThreadDispatcher for ChannelCoreThreadDispatcher { receiver.await.map_err(|e| Shutdown(e.to_string())) } - fn set_consumer_availability(&self, available: bool) -> Result<(), CoreError> { - self.tx_consumer_availability - .send(available) + fn set_subscriber_exists(&self, exists: bool) -> Result<(), CoreError> { + self.tx_subscriber_exists + .send(exists) + .map_err(|e| Shutdown(e.to_string())) + } + + fn set_propagation_delay(&self, delay: Round) -> Result<(), CoreError> { + self.tx_propagation_delay + .send(delay) .map_err(|e| Shutdown(e.to_string())) } @@ -230,6 +290,13 @@ impl CoreThreadDispatcher for ChannelCoreThreadDispatcher { .send(round) .map_err(|e| Shutdown(e.to_string())) } + + fn highest_received_rounds(&self) -> Vec { + self.highest_received_rounds + .iter() + .map(|round| round.load(Ordering::Relaxed)) + .collect() + } } // TODO: complete the Mock for thread dispatcher to be used from several tests @@ -282,7 +349,11 @@ impl CoreThreadDispatcher for MockCoreThreadDispatcher { Ok(result) } - fn set_consumer_availability(&self, _available: bool) -> Result<(), CoreError> { + fn set_subscriber_exists(&self, _exists: bool) -> Result<(), CoreError> { + todo!() + } + + fn set_propagation_delay(&self, _delay: Round) -> Result<(), CoreError> { todo!() } @@ -291,6 +362,10 @@ impl CoreThreadDispatcher for MockCoreThreadDispatcher { last_known_proposed_round.push(round); Ok(()) } + + fn highest_received_rounds(&self) -> Vec { + todo!() + } } #[cfg(test)] @@ -353,11 +428,12 @@ mod test { commit_observer, signals, key_pairs.remove(context.own_index.value()).1, - dag_state, + dag_state.clone(), false, ); - let (core_dispatcher, 
handle) = ChannelCoreThreadDispatcher::start(core, context); + let (core_dispatcher, handle) = + ChannelCoreThreadDispatcher::start(context, &dag_state, core); // Now create some clones of the dispatcher let dispatcher_1 = core_dispatcher.clone(); diff --git a/consensus/core/src/dag_state.rs b/consensus/core/src/dag_state.rs index aa7f3ca718f28..cb1112633287e 100644 --- a/consensus/core/src/dag_state.rs +++ b/consensus/core/src/dag_state.rs @@ -20,12 +20,13 @@ use crate::{ }, commit::{ load_committed_subdag_from_store, CommitAPI as _, CommitDigest, CommitIndex, CommitInfo, - CommitRef, CommitVote, CommittedSubDag, TrustedCommit, GENESIS_COMMIT_INDEX, + CommitRef, CommitVote, TrustedCommit, GENESIS_COMMIT_INDEX, }, context::Context, - leader_scoring::ReputationScores, + leader_scoring::{ReputationScores, ScoringSubdag}, stake_aggregator::{QuorumThreshold, StakeAggregator}, storage::{Store, WriteBatch}, + CommittedSubDag, }; /// DagState provides the API to write and read accepted blocks from the DAG. @@ -61,6 +62,11 @@ pub(crate) struct DagState { // Last committed rounds per authority. last_committed_rounds: Vec, + /// The committed subdags that have been scored but scores have not been used + /// for leader schedule yet. + scoring_subdag: ScoringSubdag, + + // TODO: Remove when DistributedVoteScoring is enabled. /// The list of committed subdags that have been sequenced by the universal /// committer but have yet to be used to calculate reputation scores for the /// next leader schedule. Until then we consider it as "unscored" subdags. @@ -101,8 +107,6 @@ impl DagState { .read_last_commit() .unwrap_or_else(|e| panic!("Failed to read from storage: {:?}", e)); - let mut unscored_committed_subdags = Vec::new(); - let commit_info = store .read_last_commit_info() .unwrap_or_else(|e| panic!("Failed to read from storage: {:?}", e)); @@ -115,6 +119,9 @@ impl DagState { (vec![0; num_authorities], GENESIS_COMMIT_INDEX + 1) }; + let mut unscored_committed_subdags = Vec::new(); + let mut scoring_subdag = ScoringSubdag::new(context.clone()); + if let Some(last_commit) = last_commit.as_ref() { store .scan_commits((commit_recovery_start_index..=last_commit.index()).into()) @@ -141,10 +148,17 @@ impl DagState { tracing::info!( "DagState was initialized with the following state: \ - {last_commit:?}; {last_committed_rounds:?}; {} unscored_committed_subdags;", + {last_commit:?}; {last_committed_rounds:?}; {} unscored committed subdags;", unscored_committed_subdags.len() ); + if context + .protocol_config + .consensus_distributed_vote_scoring_strategy() + { + scoring_subdag.add_subdags(std::mem::take(&mut unscored_committed_subdags)); + } + let mut state = Self { context, genesis, @@ -158,6 +172,7 @@ impl DagState { blocks_to_write: vec![], commits_to_write: vec![], commit_info_to_write: vec![], + scoring_subdag, unscored_committed_subdags, store, cached_rounds, @@ -637,6 +652,11 @@ impl DagState { // We empty the unscored committed subdags to calculate reputation scores. assert!(self.unscored_committed_subdags.is_empty()); + // We create an empty scoring subdag once reputation scores are calculated. + // Note: It is okay for this to not be gated by protocol config as the + // scoring_subdag should be empty in either case at this point. 
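// (Clarifying assumption: whichever scoring representation is active, its buffer is drained
// when the leader schedule updates, so both the legacy `unscored_committed_subdags` and the
// new `scoring_subdag` are expected to be empty here, just before commit info is written.)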
+ assert!(self.scoring_subdag.is_empty()); + let commit_info = CommitInfo { committed_rounds: self.last_committed_rounds.clone(), reputation_scores, @@ -820,6 +840,7 @@ impl DagState { .unwrap_or_else(|e| panic!("Failed to read from storage: {:?}", e)) } + // TODO: Remove four methods below this when DistributedVoteScoring is enabled. pub(crate) fn unscored_committed_subdags_count(&self) -> u64 { self.unscored_committed_subdags.len() as u64 } @@ -840,6 +861,34 @@ impl DagState { std::mem::take(&mut self.unscored_committed_subdags) } + pub(crate) fn add_scoring_subdags(&mut self, scoring_subdags: Vec) { + self.scoring_subdag.add_subdags(scoring_subdags); + } + + pub(crate) fn clear_scoring_subdag(&mut self) { + self.scoring_subdag.clear(); + } + + pub(crate) fn scoring_subdags_count(&self) -> usize { + self.scoring_subdag.scored_subdags_count() + } + + pub(crate) fn is_scoring_subdag_empty(&self) -> bool { + self.scoring_subdag.is_empty() + } + + pub(crate) fn calculate_scoring_subdag_scores(&self) -> ReputationScores { + self.scoring_subdag.calculate_scores() + } + + pub(crate) fn scoring_subdag_commit_range(&self) -> CommitIndex { + self.scoring_subdag + .commit_range + .as_ref() + .expect("commit range should exist for scoring subdag") + .end() + } + pub(crate) fn genesis_blocks(&self) -> Vec { self.genesis.values().cloned().collect() } @@ -1362,6 +1411,123 @@ mod test { assert_eq!(result, expected); } + // TODO: Remove when DistributedVoteScoring is enabled. + #[tokio::test] + async fn test_flush_and_recovery_with_unscored_subdag() { + telemetry_subscribers::init_for_testing(); + let num_authorities: u32 = 4; + let (mut context, _) = Context::new_for_test(num_authorities as usize); + context + .protocol_config + .set_consensus_distributed_vote_scoring_strategy_for_testing(false); + let context = Arc::new(context); + let store = Arc::new(MemStore::new()); + let mut dag_state = DagState::new(context.clone(), store.clone()); + + // Create test blocks and commits for round 1 ~ 10 + let num_rounds: u32 = 10; + let mut dag_builder = DagBuilder::new(context.clone()); + dag_builder.layers(1..=num_rounds).build(); + let mut commits = vec![]; + let leaders = dag_builder + .leader_blocks(1..=num_rounds) + .into_iter() + .flatten() + .collect::>(); + + let mut last_committed_rounds = vec![0; 4]; + for (idx, leader) in leaders.into_iter().enumerate() { + let commit_index = idx as u32 + 1; + let (subdag, commit) = dag_builder.get_sub_dag_and_commit( + leader.clone(), + last_committed_rounds.clone(), + commit_index, + ); + for block in subdag.blocks.iter() { + last_committed_rounds[block.author().value()] = + max(block.round(), last_committed_rounds[block.author().value()]); + } + commits.push(commit); + } + + // Add the blocks from first 5 rounds and first 5 commits to the dag state + let temp_commits = commits.split_off(5); + dag_state.accept_blocks(dag_builder.blocks(1..=5)); + for commit in commits.clone() { + dag_state.add_commit(commit); + } + + // Flush the dag state + dag_state.flush(); + + // Add the rest of the blocks and commits to the dag state + dag_state.accept_blocks(dag_builder.blocks(6..=num_rounds)); + for commit in temp_commits.clone() { + dag_state.add_commit(commit); + } + + // All blocks should be found in DagState. 
+ let all_blocks = dag_builder.blocks(6..=num_rounds); + let block_refs = all_blocks + .iter() + .map(|block| block.reference()) + .collect::<Vec<_>>(); + let result = dag_state + .get_blocks(&block_refs) + .into_iter() + .map(|b| b.unwrap()) + .collect::<Vec<_>>(); + assert_eq!(result, all_blocks); + + // Last commit index should be 10. + assert_eq!(dag_state.last_commit_index(), 10); + assert_eq!(dag_state.last_committed_rounds(), last_committed_rounds); + + // Destroy the dag state. + drop(dag_state); + + // Recover the state from the store + let dag_state = DagState::new(context.clone(), store.clone()); + + // Blocks of first 5 rounds should be found in DagState. + let blocks = dag_builder.blocks(1..=5); + let block_refs = blocks + .iter() + .map(|block| block.reference()) + .collect::<Vec<_>>(); + let result = dag_state + .get_blocks(&block_refs) + .into_iter() + .map(|b| b.unwrap()) + .collect::<Vec<_>>(); + assert_eq!(result, blocks); + + // Blocks above round 5 should not be in DagState, because they are not flushed. + let missing_blocks = dag_builder.blocks(6..=num_rounds); + let block_refs = missing_blocks + .iter() + .map(|block| block.reference()) + .collect::<Vec<_>>(); + let retrieved_blocks = dag_state + .get_blocks(&block_refs) + .into_iter() + .flatten() + .collect::<Vec<_>>(); + assert!(retrieved_blocks.is_empty()); + + // Last commit index should be 5. + assert_eq!(dag_state.last_commit_index(), 5); + + // These are the last_committed_rounds of the first 5 commits that were flushed + let expected_last_committed_rounds = vec![4, 5, 4, 4]; + assert_eq!( + dag_state.last_committed_rounds(), + expected_last_committed_rounds + ); + // Unscored subdags will be recovered based on the flushed commits and no commit info + assert_eq!(dag_state.unscored_committed_subdags_count(), 5); + } + #[tokio::test] async fn test_flush_and_recovery() { telemetry_subscribers::init_for_testing(); @@ -1472,7 +1638,7 @@ mod test { expected_last_committed_rounds ); // Unscored subdags will be recovered based on the flushed commits and no commit info - assert_eq!(dag_state.unscored_committed_subdags_count(), 5); + assert_eq!(dag_state.scoring_subdags_count(), 5); } #[tokio::test] diff --git a/consensus/core/src/leader_schedule.rs b/consensus/core/src/leader_schedule.rs index bbbb68fe03978..756cc9361d86b 100644 --- a/consensus/core/src/leader_schedule.rs +++ b/consensus/core/src/leader_schedule.rs @@ -16,10 +16,6 @@ use crate::{ context::Context, dag_state::DagState, leader_scoring::{ReputationScoreCalculator, ReputationScores}, - leader_scoring_strategy::{ - CertificateScoringStrategy, CertifiedVoteScoringStrategyV1, CertifiedVoteScoringStrategyV2, - ScoringStrategy, VoteScoringStrategy, - }, CommitIndex, Round, }; @@ -31,7 +27,6 @@ pub(crate) struct LeaderSchedule { pub leader_swap_table: Arc<RwLock<LeaderSwapTable>>, context: Arc<Context>, num_commits_per_schedule: u64, - scoring_strategy: Arc<dyn ScoringStrategy>, } impl LeaderSchedule { @@ -48,7 +43,6 @@ impl LeaderSchedule { context, num_commits_per_schedule: Self::CONSENSUS_COMMITS_PER_SCHEDULE, leader_swap_table: Arc::new(RwLock::new(leader_swap_table)), - scoring_strategy: Self::choose_scoring_strategy(), } } @@ -72,58 +66,110 @@ impl LeaderSchedule { }, ); - tracing::info!( + if context + .protocol_config + .consensus_distributed_vote_scoring_strategy() + { + tracing::info!( + "LeaderSchedule recovered using {leader_swap_table:?}. There are {} committed subdags scored in DagState.", + dag_state.read().scoring_subdags_count(), + ); + } else { + // TODO: Remove when DistributedVoteScoring is enabled.
+ tracing::info!( "LeaderSchedule recovered using {leader_swap_table:?}. There are {} pending unscored subdags in DagState.", dag_state.read().unscored_committed_subdags_count(), ); + } // create the schedule Self::new(context, leader_swap_table) } - // TODO: remove this once scoring strategy is finalized - fn choose_scoring_strategy() -> Arc { - if let Ok(scoring_strategy) = std::env::var("CONSENSUS_SCORING_STRATEGY") { - tracing::info!( - "Using scoring strategy {scoring_strategy} for ReputationScoreCalculator" - ); - let scoring_strategy: Arc = match scoring_strategy.as_str() { - "vote" => Arc::new(VoteScoringStrategy {}), - "certified_vote_v1" => Arc::new(CertifiedVoteScoringStrategyV1 {}), - "certified_vote_v2" => Arc::new(CertifiedVoteScoringStrategyV2 {}), - "certificate" => Arc::new(CertificateScoringStrategy {}), - _ => Arc::new(VoteScoringStrategy {}), - }; - scoring_strategy - } else { - tracing::info!("Using scoring strategy vote for ReputationScoreCalculator"); - Arc::new(VoteScoringStrategy {}) - } - } - pub(crate) fn commits_until_leader_schedule_update( &self, dag_state: Arc>, ) -> usize { - let unscored_committed_subdags_count = dag_state.read().unscored_committed_subdags_count(); + let subdag_count = if self + .context + .protocol_config + .consensus_distributed_vote_scoring_strategy() + { + dag_state.read().scoring_subdags_count() as u64 + } else { + // TODO: Remove when DistributedVoteScoring is enabled. + dag_state.read().unscored_committed_subdags_count() + }; + assert!( - unscored_committed_subdags_count <= self.num_commits_per_schedule, - "Unscored committed subdags count exceeds the number of commits per schedule" + subdag_count <= self.num_commits_per_schedule, + "Committed subdags count exceeds the number of commits per schedule" ); self.num_commits_per_schedule - .checked_sub(unscored_committed_subdags_count) + .checked_sub(subdag_count) .unwrap() as usize } - /// Checks whether the dag state unscored sub dags list is empty. If yes then that means that + /// Checks whether the dag state sub dags list is empty. If yes then that means that /// either (1) the system has just started and there is no unscored sub dag available (2) the /// schedule has updated - new scores have been calculated. Both cases we consider as valid cases /// where the schedule has been updated. pub(crate) fn leader_schedule_updated(&self, dag_state: &RwLock) -> bool { - dag_state.read().unscored_committed_subdags_count() == 0 + if self + .context + .protocol_config + .consensus_distributed_vote_scoring_strategy() + { + dag_state.read().is_scoring_subdag_empty() + } else { + // TODO: Remove when DistributedVoteScoring is enabled. 
+ dag_state.read().unscored_committed_subdags_count() == 0 + } + } + + pub(crate) fn update_leader_schedule_v2(&self, dag_state: &RwLock) { + let _s = self + .context + .metrics + .node_metrics + .scope_processing_time + .with_label_values(&["LeaderSchedule::update_leader_schedule"]) + .start_timer(); + + let (reputation_scores, last_commit_index) = { + let dag_state = dag_state.read(); + let reputation_scores = dag_state.calculate_scoring_subdag_scores(); + + let last_commit_index = dag_state.scoring_subdag_commit_range(); + + (reputation_scores, last_commit_index) + }; + + { + let mut dag_state = dag_state.write(); + // Clear scoring subdag as we have updated the leader schedule + dag_state.clear_scoring_subdag(); + // Buffer score and last commit rounds in dag state to be persisted later + dag_state.add_commit_info(reputation_scores.clone()); + } + + self.update_leader_swap_table(LeaderSwapTable::new( + self.context.clone(), + last_commit_index, + reputation_scores.clone(), + )); + + reputation_scores.update_metrics(self.context.clone()); + + self.context + .metrics + .node_metrics + .num_of_bad_nodes + .set(self.leader_swap_table.read().bad_nodes.len() as i64); } - pub(crate) fn update_leader_schedule(&self, dag_state: &RwLock) { + // TODO: Remove when DistributedVoteScoring is enabled. + pub(crate) fn update_leader_schedule_v1(&self, dag_state: &RwLock) { let _s = self .context .metrics @@ -142,12 +188,8 @@ impl LeaderSchedule { .scope_processing_time .with_label_values(&["ReputationScoreCalculator::calculate"]) .start_timer(); - let reputation_scores = ReputationScoreCalculator::new( - self.context.clone(), - &unscored_subdags, - self.scoring_strategy.as_ref(), - ) - .calculate(); + let reputation_scores = + ReputationScoreCalculator::new(self.context.clone(), &unscored_subdags).calculate(); drop(score_calculation_timer); reputation_scores.update_metrics(self.context.clone()); @@ -624,7 +666,7 @@ mod tests { ); // Leader Scoring & Schedule Change is disabled, unscored subdags should not be accumulated. 
- assert_eq!(0, dag_state.read().unscored_committed_subdags_count()); + assert_eq!(0, dag_state.read().scoring_subdags_count()); let leader_schedule = LeaderSchedule::from_store(context.clone(), dag_state.clone()); @@ -704,10 +746,10 @@ mod tests { last_committed_rounds, dag_state.read().last_committed_rounds() ); - let actual_unscored_subdags = dag_state.read().unscored_committed_subdags(); - assert_eq!(1, dag_state.read().unscored_committed_subdags_count()); - let actual_subdag = actual_unscored_subdags[0].clone(); - assert_eq!(*subdags.last().unwrap(), actual_subdag); + assert_eq!(1, dag_state.read().scoring_subdags_count()); + let recovered_scores = dag_state.read().calculate_scoring_subdag_scores(); + let expected_scores = ReputationScores::new((11..=11).into(), vec![0, 0, 0, 0]); + assert_eq!(recovered_scores, expected_scores); let leader_schedule = LeaderSchedule::from_store(context.clone(), dag_state.clone()); @@ -747,7 +789,7 @@ mod tests { expected_last_committed_rounds, dag_state.read().last_committed_rounds() ); - assert_eq!(0, dag_state.read().unscored_committed_subdags_count()); + assert_eq!(0, dag_state.read().scoring_subdags_count()); let leader_schedule = LeaderSchedule::from_store(context.clone(), dag_state.clone()); @@ -771,7 +813,7 @@ mod tests { let mut dag_builder = DagBuilder::new(context.clone()); dag_builder.layers(1..=2).build(); - let mut expected_unscored_subdags = vec![]; + let mut expected_scored_subdags = vec![]; let mut expected_commits = vec![]; let leaders = dag_builder .leader_blocks(1..=2) @@ -794,7 +836,7 @@ mod tests { max(block.round(), last_committed_rounds[block.author().value()]); } expected_commits.push(commit); - expected_unscored_subdags.push(subdag); + expected_scored_subdags.push(subdag); } // The CommitInfo for the first 2 commits are written to store. 
10 commits @@ -817,15 +859,13 @@ mod tests { last_committed_rounds, dag_state.read().last_committed_rounds() ); - let actual_unscored_subdags = dag_state.read().unscored_committed_subdags(); assert_eq!( - expected_unscored_subdags.len() as u64, - dag_state.read().unscored_committed_subdags_count() + expected_scored_subdags.len(), + dag_state.read().scoring_subdags_count() ); - for (idx, expected_subdag) in expected_unscored_subdags.into_iter().enumerate() { - let actual_subdag = actual_unscored_subdags[idx].clone(); - assert_eq!(expected_subdag, actual_subdag); - } + let recovered_scores = dag_state.read().calculate_scoring_subdag_scores(); + let expected_scores = ReputationScores::new((1..=2).into(), vec![0, 0, 0, 0]); + assert_eq!(recovered_scores, expected_scores); let leader_schedule = LeaderSchedule::from_store(context.clone(), dag_state.clone()); @@ -852,9 +892,7 @@ mod tests { CommitRef::new(1, CommitDigest::MIN), vec![], )]; - dag_state - .write() - .add_unscored_committed_subdags(unscored_subdags); + dag_state.write().add_scoring_subdags(unscored_subdags); let commits_until_leader_schedule_update = leader_schedule.commits_until_leader_schedule_update(dag_state.clone()); @@ -953,7 +991,7 @@ mod tests { let mut dag_state_write = dag_state.write(); dag_state_write.set_last_commit(last_commit); - dag_state_write.add_unscored_committed_subdags(unscored_subdags); + dag_state_write.add_scoring_subdags(unscored_subdags); drop(dag_state_write); assert_eq!( @@ -961,7 +999,7 @@ mod tests { AuthorityIndex::new_for_test(0) ); - leader_schedule.update_leader_schedule(&dag_state); + leader_schedule.update_leader_schedule_v2(&dag_state); let leader_swap_table = leader_schedule.leader_swap_table.read(); assert_eq!(leader_swap_table.good_nodes.len(), 1); @@ -1155,4 +1193,480 @@ mod tests { // Update leader from old swap table to new invalid swap table leader_schedule.update_leader_swap_table(leader_swap_table.clone()); } + + // TODO: Remove all tests below this when DistributedVoteScoring is enabled. + #[tokio::test] + async fn test_leader_schedule_from_store_with_no_scores_with_vote_scoring() { + telemetry_subscribers::init_for_testing(); + let mut context = Context::new_for_test(4).0; + context + .protocol_config + .set_consensus_distributed_vote_scoring_strategy_for_testing(false); + context + .protocol_config + .set_mysticeti_leader_scoring_and_schedule_for_testing(false); + let context = Arc::new(context); + let store = Arc::new(MemStore::new()); + + let leader_timestamp = context.clock.timestamp_utc_ms(); + let blocks = vec![ + VerifiedBlock::new_for_test( + TestBlock::new(10, 2) + .set_timestamp_ms(leader_timestamp) + .build(), + ), + VerifiedBlock::new_for_test(TestBlock::new(9, 0).build()), + VerifiedBlock::new_for_test(TestBlock::new(9, 2).build()), + VerifiedBlock::new_for_test(TestBlock::new(9, 3).build()), + ]; + + let leader = blocks[0].clone(); + let leader_ref = leader.reference(); + let last_commit_index = 10; + let last_commit = TrustedCommit::new_for_test( + last_commit_index, + CommitDigest::MIN, + leader_timestamp, + leader_ref, + blocks + .iter() + .map(|block| block.reference()) + .collect::>(), + ); + + // The CommitInfo for the first 10 commits are written to store. 
This is the + // info that LeaderSchedule will be recovered from + let committed_rounds = vec![9, 9, 10, 9]; + let commit_ref = CommitRef::new(10, CommitDigest::MIN); + let commit_info = CommitInfo { + reputation_scores: ReputationScores::default(), + committed_rounds, + }; + + store + .write( + WriteBatch::default() + .commit_info(vec![(commit_ref, commit_info)]) + .blocks(blocks) + .commits(vec![last_commit]), + ) + .unwrap(); + + // CommitIndex '11' will be written to store. This should result in the cached + // last_committed_rounds & unscored subdags in DagState to be updated with the + // latest commit information on recovery. + let leader_timestamp = context.clock.timestamp_utc_ms(); + let blocks = vec![ + VerifiedBlock::new_for_test( + TestBlock::new(11, 3) + .set_timestamp_ms(leader_timestamp) + .build(), + ), + VerifiedBlock::new_for_test(TestBlock::new(10, 0).build()), + VerifiedBlock::new_for_test(TestBlock::new(10, 1).build()), + VerifiedBlock::new_for_test(TestBlock::new(10, 3).build()), + ]; + + let leader = blocks[0].clone(); + let leader_ref = leader.reference(); + let last_commit_index = 11; + let expected_last_committed_rounds = vec![10, 10, 10, 11]; + let last_commit = TrustedCommit::new_for_test( + last_commit_index, + CommitDigest::MIN, + leader_timestamp, + leader_ref, + blocks + .iter() + .map(|block| block.reference()) + .collect::>(), + ); + store + .write( + WriteBatch::default() + .blocks(blocks) + .commits(vec![last_commit]), + ) + .unwrap(); + + let dag_state = Arc::new(RwLock::new(DagState::new(context.clone(), store))); + + // Check that DagState recovery from stored CommitInfo worked correctly + assert_eq!( + expected_last_committed_rounds, + dag_state.read().last_committed_rounds() + ); + + // Leader Scoring & Schedule Change is disabled, unscored subdags should not be accumulated. + assert_eq!(0, dag_state.read().unscored_committed_subdags_count()); + + let leader_schedule = LeaderSchedule::from_store(context.clone(), dag_state.clone()); + + // Check that LeaderSchedule recovery from stored CommitInfo worked correctly + let leader_swap_table = leader_schedule.leader_swap_table.read(); + assert_eq!(leader_swap_table.good_nodes.len(), 0); + assert_eq!(leader_swap_table.bad_nodes.len(), 0); + } + + #[tokio::test] + async fn test_leader_schedule_from_store_with_vote_scoring() { + telemetry_subscribers::init_for_testing(); + let mut context = Context::new_for_test(4).0; + context + .protocol_config + .set_consensus_distributed_vote_scoring_strategy_for_testing(false); + context + .protocol_config + .set_consensus_bad_nodes_stake_threshold_for_testing(33); + let context = Arc::new(context); + let store = Arc::new(MemStore::new()); + + // Populate fully connected test blocks for round 0 ~ 11, authorities 0 ~ 3. 
+ let mut dag_builder = DagBuilder::new(context.clone()); + dag_builder.layers(1..=11).build(); + let mut subdags = vec![]; + let mut expected_commits = vec![]; + let leaders = dag_builder + .leader_blocks(1..=11) + .into_iter() + .flatten() + .collect::>(); + let mut blocks_to_write = vec![]; + + let mut last_committed_rounds = vec![0; 4]; + for (idx, leader) in leaders.into_iter().enumerate() { + let commit_index = idx as u32 + 1; + let (sub_dag, commit) = dag_builder.get_sub_dag_and_commit( + leader.clone(), + last_committed_rounds.clone(), + commit_index, + ); + for block in sub_dag.blocks.iter() { + blocks_to_write.push(block.clone()); + last_committed_rounds[block.author().value()] = + max(block.round(), last_committed_rounds[block.author().value()]); + } + + expected_commits.push(commit); + subdags.push(sub_dag); + } + + // The CommitInfo for the first 10 commits are written to store. This is the + // info that LeaderSchedule will be recovered from + let commit_range = (1..=10).into(); + let reputation_scores = ReputationScores::new(commit_range, vec![4, 1, 1, 3]); + let committed_rounds = vec![9, 9, 10, 9]; + let commit_ref = expected_commits[9].reference(); + let commit_info = CommitInfo { + reputation_scores, + committed_rounds, + }; + + // CommitIndex '11' will be written to store. This should result in the cached + // last_committed_rounds & unscored subdags in DagState to be updated with the + // latest commit information on recovery. + store + .write( + WriteBatch::default() + .commit_info(vec![(commit_ref, commit_info)]) + .blocks(blocks_to_write) + .commits(expected_commits), + ) + .unwrap(); + + let dag_state = Arc::new(RwLock::new(DagState::new(context.clone(), store))); + + // Check that DagState recovery from stored CommitInfo worked correctly + assert_eq!( + last_committed_rounds, + dag_state.read().last_committed_rounds() + ); + let actual_unscored_subdags = dag_state.read().unscored_committed_subdags(); + assert_eq!(1, dag_state.read().unscored_committed_subdags_count()); + let actual_subdag = actual_unscored_subdags[0].clone(); + assert_eq!(*subdags.last().unwrap(), actual_subdag); + + let leader_schedule = LeaderSchedule::from_store(context.clone(), dag_state.clone()); + + // Check that LeaderSchedule recovery from stored CommitInfo worked correctly + let leader_swap_table = leader_schedule.leader_swap_table.read(); + assert_eq!(leader_swap_table.good_nodes.len(), 1); + assert_eq!( + leader_swap_table.good_nodes[0].0, + AuthorityIndex::new_for_test(0) + ); + assert_eq!(leader_swap_table.bad_nodes.len(), 1); + assert!( + leader_swap_table + .bad_nodes + .contains_key(&AuthorityIndex::new_for_test(2)), + "{:?}", + leader_swap_table.bad_nodes + ); + } + + #[tokio::test] + async fn test_leader_schedule_from_store_no_commits_with_vote_scoring() { + telemetry_subscribers::init_for_testing(); + let mut context = Context::new_for_test(4).0; + context + .protocol_config + .set_consensus_distributed_vote_scoring_strategy_for_testing(false); + context + .protocol_config + .set_consensus_bad_nodes_stake_threshold_for_testing(33); + let context = Arc::new(context); + let store = Arc::new(MemStore::new()); + + let dag_state = Arc::new(RwLock::new(DagState::new(context.clone(), store))); + + let expected_last_committed_rounds = vec![0, 0, 0, 0]; + + // Check that DagState recovery from stored CommitInfo worked correctly + assert_eq!( + expected_last_committed_rounds, + dag_state.read().last_committed_rounds() + ); + assert_eq!(0, 
dag_state.read().unscored_committed_subdags_count()); + + let leader_schedule = LeaderSchedule::from_store(context.clone(), dag_state.clone()); + + // Check that LeaderSchedule recovery from stored CommitInfo worked correctly + let leader_swap_table = leader_schedule.leader_swap_table.read(); + assert_eq!(leader_swap_table.good_nodes.len(), 0); + assert_eq!(leader_swap_table.bad_nodes.len(), 0); + } + + #[tokio::test] + async fn test_leader_schedule_from_store_no_commit_info_with_vote_scoring() { + telemetry_subscribers::init_for_testing(); + let mut context = Context::new_for_test(4).0; + context + .protocol_config + .set_consensus_distributed_vote_scoring_strategy_for_testing(false); + context + .protocol_config + .set_consensus_bad_nodes_stake_threshold_for_testing(33); + let context = Arc::new(context); + let store = Arc::new(MemStore::new()); + + // Populate fully connected test blocks for round 0 ~ 2, authorities 0 ~ 3. + let mut dag_builder = DagBuilder::new(context.clone()); + dag_builder.layers(1..=2).build(); + + let mut expected_unscored_subdags = vec![]; + let mut expected_commits = vec![]; + let leaders = dag_builder + .leader_blocks(1..=2) + .into_iter() + .flatten() + .collect::>(); + let mut blocks_to_write = vec![]; + + let mut last_committed_rounds = vec![0; 4]; + for (idx, leader) in leaders.into_iter().enumerate() { + let commit_index = idx as u32 + 1; + let (subdag, commit) = dag_builder.get_sub_dag_and_commit( + leader.clone(), + last_committed_rounds.clone(), + commit_index, + ); + for block in subdag.blocks.iter() { + blocks_to_write.push(block.clone()); + last_committed_rounds[block.author().value()] = + max(block.round(), last_committed_rounds[block.author().value()]); + } + expected_commits.push(commit); + expected_unscored_subdags.push(subdag); + } + + // The CommitInfo for the first 2 commits are written to store. 10 commits + // would have been required for a leader schedule update so at this point + // no commit info should have been persisted and no leader schedule should + // be recovered. However dag state should have properly recovered the + // unscored subdags & last committed rounds. 
+ store + .write( + WriteBatch::default() + .blocks(blocks_to_write) + .commits(expected_commits), + ) + .unwrap(); + + let dag_state = Arc::new(RwLock::new(DagState::new(context.clone(), store))); + + // Check that DagState recovery from stored CommitInfo worked correctly + assert_eq!( + last_committed_rounds, + dag_state.read().last_committed_rounds() + ); + let actual_unscored_subdags = dag_state.read().unscored_committed_subdags(); + assert_eq!( + expected_unscored_subdags.len() as u64, + dag_state.read().unscored_committed_subdags_count() + ); + for (idx, expected_subdag) in expected_unscored_subdags.into_iter().enumerate() { + let actual_subdag = actual_unscored_subdags[idx].clone(); + assert_eq!(expected_subdag, actual_subdag); + } + + let leader_schedule = LeaderSchedule::from_store(context.clone(), dag_state.clone()); + + // Check that LeaderSchedule recovery from stored CommitInfo worked correctly + let leader_swap_table = leader_schedule.leader_swap_table.read(); + assert_eq!(leader_swap_table.good_nodes.len(), 0); + assert_eq!(leader_swap_table.bad_nodes.len(), 0); + } + + #[tokio::test] + async fn test_leader_schedule_commits_until_leader_schedule_update_with_vote_scoring() { + telemetry_subscribers::init_for_testing(); + let mut context = Context::new_for_test(4).0; + context + .protocol_config + .set_consensus_distributed_vote_scoring_strategy_for_testing(false); + let context = Arc::new(context); + let leader_schedule = LeaderSchedule::new(context.clone(), LeaderSwapTable::default()); + + let dag_state = Arc::new(RwLock::new(DagState::new( + context.clone(), + Arc::new(MemStore::new()), + ))); + let unscored_subdags = vec![CommittedSubDag::new( + BlockRef::new(1, AuthorityIndex::ZERO, BlockDigest::MIN), + vec![], + context.clock.timestamp_utc_ms(), + CommitRef::new(1, CommitDigest::MIN), + vec![], + )]; + dag_state + .write() + .add_unscored_committed_subdags(unscored_subdags); + + let commits_until_leader_schedule_update = + leader_schedule.commits_until_leader_schedule_update(dag_state.clone()); + assert_eq!(commits_until_leader_schedule_update, 299); + } + + // TODO: Remove when DistributedVoteScoring is enabled. + #[tokio::test] + async fn test_leader_schedule_update_leader_schedule_with_vote_scoring() { + telemetry_subscribers::init_for_testing(); + let mut context = Context::new_for_test(4).0; + context + .protocol_config + .set_consensus_distributed_vote_scoring_strategy_for_testing(false); + context + .protocol_config + .set_consensus_bad_nodes_stake_threshold_for_testing(33); + let context = Arc::new(context); + let leader_schedule = Arc::new(LeaderSchedule::new( + context.clone(), + LeaderSwapTable::default(), + )); + let dag_state = Arc::new(RwLock::new(DagState::new( + context.clone(), + Arc::new(MemStore::new()), + ))); + + // Populate fully connected test blocks for round 0 ~ 4, authorities 0 ~ 3. 
+ let max_round: u32 = 4; + let num_authorities: u32 = 4; + + let mut blocks = Vec::new(); + let (genesis_references, genesis): (Vec<_>, Vec<_>) = context + .committee + .authorities() + .map(|index| { + let author_idx = index.0.value() as u32; + let block = TestBlock::new(0, author_idx).build(); + VerifiedBlock::new_for_test(block) + }) + .map(|block| (block.reference(), block)) + .unzip(); + blocks.extend(genesis); + + let mut ancestors = genesis_references; + let mut leader = None; + for round in 1..=max_round { + let mut new_ancestors = vec![]; + for author in 0..num_authorities { + let base_ts = round as BlockTimestampMs * 1000; + let block = VerifiedBlock::new_for_test( + TestBlock::new(round, author) + .set_timestamp_ms(base_ts + (author + round) as u64) + .set_ancestors(ancestors.clone()) + .build(), + ); + new_ancestors.push(block.reference()); + + // Simulate referenced block which was part of another committed + // subdag. + if round == 3 && author == 0 { + tracing::info!("Skipping {block} in committed subdags blocks"); + continue; + } + + blocks.push(block.clone()); + + // only write one block for the final round, which is the leader + // of the committed subdag. + if round == max_round { + leader = Some(block.clone()); + break; + } + } + ancestors = new_ancestors; + } + + let leader_block = leader.unwrap(); + let leader_ref = leader_block.reference(); + let commit_index = 1; + + let last_commit = TrustedCommit::new_for_test( + commit_index, + CommitDigest::MIN, + context.clock.timestamp_utc_ms(), + leader_ref, + blocks + .iter() + .map(|block| block.reference()) + .collect::>(), + ); + + let unscored_subdags = vec![CommittedSubDag::new( + leader_ref, + blocks, + context.clock.timestamp_utc_ms(), + last_commit.reference(), + vec![], + )]; + + let mut dag_state_write = dag_state.write(); + dag_state_write.set_last_commit(last_commit); + dag_state_write.add_unscored_committed_subdags(unscored_subdags); + drop(dag_state_write); + + assert_eq!( + leader_schedule.elect_leader(4, 0), + AuthorityIndex::new_for_test(0) + ); + + leader_schedule.update_leader_schedule_v1(&dag_state); + + let leader_swap_table = leader_schedule.leader_swap_table.read(); + assert_eq!(leader_swap_table.good_nodes.len(), 1); + assert_eq!( + leader_swap_table.good_nodes[0].0, + AuthorityIndex::new_for_test(2) + ); + assert_eq!(leader_swap_table.bad_nodes.len(), 1); + assert!(leader_swap_table + .bad_nodes + .contains_key(&AuthorityIndex::new_for_test(0))); + assert_eq!( + leader_schedule.elect_leader(4, 0), + AuthorityIndex::new_for_test(2) + ); + } } diff --git a/consensus/core/src/leader_scoring.rs b/consensus/core/src/leader_scoring.rs index 380003debd37d..c7b151607cd01 100644 --- a/consensus/core/src/leader_scoring.rs +++ b/consensus/core/src/leader_scoring.rs @@ -2,7 +2,7 @@ // SPDX-License-Identifier: Apache-2.0 use std::{ - collections::{BTreeMap, HashMap}, + collections::{BTreeMap, HashSet}, fmt::Debug, ops::Bound::{Excluded, Included}, sync::Arc, @@ -12,15 +12,14 @@ use consensus_config::AuthorityIndex; use serde::{Deserialize, Serialize}; use crate::{ - block::{BlockAPI, BlockDigest, BlockRef, Slot, VerifiedBlock}, + block::{BlockAPI, BlockDigest, BlockRef, Slot}, commit::{CommitRange, CommittedSubDag}, context::Context, - leader_scoring_strategy::ScoringStrategy, stake_aggregator::{QuorumThreshold, StakeAggregator}, - Round, + Round, VerifiedBlock, }; -pub(crate) struct ReputationScoreCalculator<'a> { +pub(crate) struct ReputationScoreCalculator { // The range of commits that these scores are 
calculated from.
     pub(crate) commit_range: CommitRange,
     // The scores per authority. Vec index is the `AuthorityIndex`.
@@ -31,19 +30,10 @@ pub(crate) struct ReputationScoreCalculator<'a> {
     // the subdags are then combined into one `UnscoredSubdag` so that we can
     // calculate the scores for the leaders in this subdag.
     unscored_subdag: UnscoredSubdag,
-    // There are multiple scoring strategies that can be used to calculate the scores
-    // and the `ReputationScoreCalculator` is responsible for applying the strategy.
-    // For now this is dynamic while we are experimenting but eventually we can
-    // replace this with the final strategy that works best.
-    scoring_strategy: &'a dyn ScoringStrategy,
 }
 
-impl<'a> ReputationScoreCalculator<'a> {
-    pub(crate) fn new(
-        context: Arc<Context>,
-        unscored_subdags: &[CommittedSubDag],
-        scoring_strategy: &'a dyn ScoringStrategy,
-    ) -> Self {
+impl ReputationScoreCalculator {
+    pub(crate) fn new(context: Arc<Context>, unscored_subdags: &[CommittedSubDag]) -> Self {
         let num_authorities = context.committee.size();
         let scores_per_authority = vec![0_u64; num_authorities];
 
@@ -58,7 +48,6 @@ impl<'a> ReputationScoreCalculator<'a> {
         Self {
             unscored_subdag,
             commit_range,
-            scoring_strategy,
             scores_per_authority,
         }
     }
@@ -68,10 +57,7 @@ impl<'a> ReputationScoreCalculator<'a> {
         for leader in leaders {
             let leader_slot = Slot::from(leader);
             tracing::trace!("Calculating score for leader {leader_slot}");
-            self.add_scores(
-                self.scoring_strategy
-                    .calculate_scores_for_leader(&self.unscored_subdag, leader_slot),
-            );
+            self.add_scores(self.calculate_scores_for_leader(&self.unscored_subdag, leader_slot));
         }
 
         ReputationScores::new(self.commit_range.clone(), self.scores_per_authority.clone())
@@ -84,6 +70,47 @@ impl<'a> ReputationScoreCalculator<'a> {
             self.scores_per_authority[authority_idx] += *score;
         }
     }
+
+    // VoteScoringStrategy
+    // This scoring strategy will give one point to any votes for the leader.
+    fn calculate_scores_for_leader(&self, subdag: &UnscoredSubdag, leader_slot: Slot) -> Vec<u64> {
+        let num_authorities = subdag.context.committee.size();
+        let mut scores_per_authority = vec![0_u64; num_authorities];
+
+        let leader_blocks = subdag.get_blocks_at_slot(leader_slot);
+
+        if leader_blocks.is_empty() {
+            tracing::trace!("[{}] No block for leader slot {leader_slot} in this set of unscored committed subdags, skip scoring", subdag.context.own_index);
+            return scores_per_authority;
+        }
+
+        // At this point we are guaranteed that there is only one leader per slot
+        // because we are operating on committed subdags.
+        assert!(leader_blocks.len() == 1);
+
+        let leader_block = leader_blocks.first().unwrap();
+
+        let voting_round = leader_slot.round + 1;
+        let voting_blocks = subdag.get_blocks_at_round(voting_round);
+        for potential_vote in voting_blocks {
+            // TODO: use the decided leader as input instead of leader slot. If the leader was skipped,
+            // votes to skip should be included in the score as well.
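+            // A vote is a block in the round right after the leader's whose strong
+            // (previous-round) ancestry supports the leader block, as checked by
+            // `is_vote` below.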
+            if subdag.is_vote(&potential_vote, leader_block) {
+                let authority = potential_vote.author();
+                tracing::trace!(
+                    "Found a vote {} for leader {leader_block} from authority {authority}",
+                    potential_vote.reference()
+                );
+                tracing::trace!(
+                    "[{}] scores +1 reputation for {authority}!",
+                    subdag.context.own_index
+                );
+                scores_per_authority[authority] += 1;
+            }
+        }
+
+        scores_per_authority
+    }
 }
 
 #[derive(Clone, Default, Debug, Serialize, Deserialize, PartialEq, Eq)]
@@ -138,6 +165,182 @@ impl ReputationScores {
     }
 }
 
+/// ScoringSubdag represents the scoring votes in a collection of subdags across
+/// multiple commits.
+/// These subdags are "scoring" for the purposes of leader schedule change. As
+/// new subdags are added, the DAG is traversed and votes for leaders are recorded
+/// and scored along with stake. On a leader schedule change, finalized reputation
+/// scores will be calculated based on the votes & stake collected in this struct.
+pub(crate) struct ScoringSubdag {
+    pub(crate) context: Arc<Context>,
+    pub(crate) commit_range: Option<CommitRange>,
+    // Only includes committed leaders for now.
+    // TODO: Include skipped leaders as well
+    pub(crate) leaders: HashSet<BlockRef>,
+    // A map of votes to the stake of strongly linked blocks that include that vote.
+    // Note: Including the stake aggregator so that we can quickly check if it exceeds
+    // the quorum threshold and only include those scores for certain scoring strategies.
+    pub(crate) votes: BTreeMap<BlockRef, StakeAggregator<QuorumThreshold>>,
+}
+
+impl ScoringSubdag {
+    pub(crate) fn new(context: Arc<Context>) -> Self {
+        Self {
+            context,
+            commit_range: None,
+            leaders: HashSet::new(),
+            votes: BTreeMap::new(),
+        }
+    }
+
+    pub(crate) fn add_subdags(&mut self, committed_subdags: Vec<CommittedSubDag>) {
+        let _s = self
+            .context
+            .metrics
+            .node_metrics
+            .scope_processing_time
+            .with_label_values(&["ScoringSubdag::add_unscored_committed_subdags"])
+            .start_timer();
+        for subdag in committed_subdags {
+            // If the commit range is not set, then set it to the range of the first
+            // committed subdag index.
+            if self.commit_range.is_none() {
+                self.commit_range = Some(CommitRange::new(
+                    subdag.commit_ref.index..=subdag.commit_ref.index,
+                ));
+            } else {
+                let commit_range = self.commit_range.as_mut().unwrap();
+                commit_range.extend_to(subdag.commit_ref.index);
+            }
+
+            // Add the committed leader to the list of leaders we will be scoring.
+            tracing::trace!("Adding new committed leader {} for scoring", subdag.leader);
+            self.leaders.insert(subdag.leader);
+
+            // Check each block in the subdag. Blocks are in order, so we traverse the
+            // oldest blocks first.
+            for block in subdag.blocks {
+                for ancestor in block.ancestors() {
+                    // Weak links may point to blocks with lower round numbers
+                    // than strong links.
+                    if ancestor.round != block.round().saturating_sub(1) {
+                        continue;
+                    }
+
+                    // If a block's strongly linked ancestor is in `leaders`, then
+                    // it's a vote for that leader.
+                    if self.leaders.contains(ancestor) {
+                        // There should never be duplicate references to blocks
+                        // with strong linked ancestors to leader.
+                        tracing::trace!(
+                            "Found a vote {} for leader {ancestor} from authority {}",
+                            block.reference(),
+                            block.author()
+                        );
+                        assert!(self
+                            .votes
+                            .insert(block.reference(), StakeAggregator::new())
+                            .is_none(), "Vote {block} already exists. Duplicate vote found for leader {ancestor}");
+                    }
+
+                    if let Some(stake) = self.votes.get_mut(ancestor) {
+                        // Vote is strongly linked to a future block, so we
+                        // consider this a distributed vote.
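+                        // The vote's aggregator accumulates the stake of every later
+                        // block that strongly links to it; the scoring strategies
+                        // below read these totals when computing reputation.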
+                        tracing::trace!(
+                            "Found a distributed vote {ancestor} from authority {}",
+                            ancestor.author
+                        );
+                        stake.add(block.author(), &self.context.committee);
+                    }
+                }
+            }
+        }
+    }
+
+    // Iterate through the votes and calculate scores for each authority based on
+    // the scoring strategy that is used (Vote or CertifiedVote).
+    pub(crate) fn calculate_scores(&self) -> ReputationScores {
+        let _s = self
+            .context
+            .metrics
+            .node_metrics
+            .scope_processing_time
+            .with_label_values(&["ScoringSubdag::calculate_scores"])
+            .start_timer();
+
+        let scores_per_authority = self.score_distributed_votes();
+
+        // TODO: Normalize scores
+        ReputationScores::new(
+            self.commit_range
+                .clone()
+                .expect("CommitRange should be set if calculate_scores is called."),
+            scores_per_authority,
+        )
+    }
+
+    /// This scoring strategy aims to give scores based on overall vote distribution.
+    /// Instead of giving only one point for each vote that is included in 2f+1
+    /// blocks, we give a score equal to the amount of stake of all blocks that
+    /// included the vote.
+    fn score_distributed_votes(&self) -> Vec<u64> {
+        let num_authorities = self.context.committee.size();
+        let mut scores_per_authority = vec![0_u64; num_authorities];
+
+        for (vote, stake_agg) in self.votes.iter() {
+            let authority = vote.author;
+            let stake = stake_agg.stake();
+            tracing::trace!(
+                "[{}] scores +{stake} reputation for {authority}!",
+                self.context.own_index,
+            );
+            scores_per_authority[authority.value()] += stake;
+        }
+        scores_per_authority
+    }
+
+    /// This scoring strategy gives points equal to the amount of stake in blocks
+    /// that include the authority's vote, if that total stake exceeds the quorum
+    /// threshold (2f+1). We consider this a certified vote.
+    // TODO: This will be used for ancestor selection
+    #[allow(unused)]
+    fn score_certified_votes(&self) -> Vec<u64> {
+        let num_authorities = self.context.committee.size();
+        let mut scores_per_authority = vec![0_u64; num_authorities];
+
+        for (vote, stake_agg) in self.votes.iter() {
+            let authority = vote.author;
+            if stake_agg.reached_threshold(&self.context.committee) {
+                let stake = stake_agg.stake();
+                tracing::trace!(
+                    "[{}] scores +{stake} reputation for {authority}!",
+                    self.context.own_index,
+                );
+                scores_per_authority[authority.value()] += stake;
+            }
+        }
+        scores_per_authority
+    }
+
+    pub(crate) fn scored_subdags_count(&self) -> usize {
+        if let Some(commit_range) = &self.commit_range {
+            commit_range.size()
+        } else {
+            0
+        }
+    }
+
+    pub(crate) fn is_empty(&self) -> bool {
+        self.leaders.is_empty() && self.votes.is_empty() && self.commit_range.is_none()
+    }
+
+    pub(crate) fn clear(&mut self) {
+        self.leaders.clear();
+        self.votes.clear();
+        self.commit_range = None;
+    }
+}
+
 /// UnscoredSubdag represents a collection of subdags across multiple commits.
 /// These subdags are considered unscored for the purposes of leader schedule
 /// change.
On a leader schedule change, reputation scores will be calculated @@ -239,44 +442,6 @@ impl UnscoredSubdag { self.find_supported_leader_block(leader_slot, potential_vote) == Some(reference) } - pub(crate) fn is_certificate( - &self, - potential_certificate: &VerifiedBlock, - leader_block: &VerifiedBlock, - all_votes: &mut HashMap, - ) -> bool { - let mut votes_stake_aggregator = StakeAggregator::::new(); - for reference in potential_certificate.ancestors() { - let is_vote = if let Some(is_vote) = all_votes.get(reference) { - *is_vote - } else if let Some(potential_vote) = self.get_block(reference) { - let is_vote = self.is_vote(&potential_vote, leader_block); - all_votes.insert(*reference, is_vote); - is_vote - } else { - tracing::trace!( - "Potential vote not found in unscored committed subdags: {:?}", - reference - ); - false - }; - - if is_vote { - tracing::trace!("{reference} is a vote for {leader_block}"); - if votes_stake_aggregator.add(reference.author, &self.context.committee) { - tracing::trace!( - "{potential_certificate} is a certificate for leader {leader_block}" - ); - return true; - } - } else { - tracing::trace!("{reference} is not a vote for {leader_block}",); - } - } - tracing::trace!("{potential_certificate} is not a certificate for leader {leader_block}"); - false - } - pub(crate) fn get_blocks_at_slot(&self, slot: Slot) -> Vec { let mut blocks = vec![]; for (_block_ref, block) in self.blocks.range(( @@ -313,8 +478,7 @@ mod tests { use std::cmp::max; use super::*; - use crate::commit::{CommitDigest, CommitRef}; - use crate::{leader_scoring_strategy::VoteScoringStrategy, test_dag_builder::DagBuilder}; + use crate::{test_dag_builder::DagBuilder, CommitDigest, CommitRef}; #[tokio::test] async fn test_reputation_scores_authorities_by_score() { @@ -368,6 +532,99 @@ mod tests { ); } + #[tokio::test] + async fn test_scoring_subdag() { + telemetry_subscribers::init_for_testing(); + let context = Arc::new(Context::new_for_test(4).0); + + // Populate fully connected test blocks for round 0 ~ 3, authorities 0 ~ 3. + let mut dag_builder = DagBuilder::new(context.clone()); + dag_builder.layers(1..=3).build(); + // Build round 4 but with just the leader block + dag_builder + .layer(4) + .authorities(vec![ + AuthorityIndex::new_for_test(1), + AuthorityIndex::new_for_test(2), + AuthorityIndex::new_for_test(3), + ]) + .skip_block() + .build(); + + let leaders = dag_builder + .leader_blocks(1..=4) + .into_iter() + .flatten() + .collect::>(); + + let mut scoring_subdag = ScoringSubdag::new(context.clone()); + let mut last_committed_rounds = vec![0; 4]; + for (idx, leader) in leaders.into_iter().enumerate() { + let commit_index = idx as u32 + 1; + let (subdag, _commit) = dag_builder.get_sub_dag_and_commit( + leader, + last_committed_rounds.clone(), + commit_index, + ); + for block in subdag.blocks.iter() { + last_committed_rounds[block.author().value()] = + max(block.round(), last_committed_rounds[block.author().value()]); + } + scoring_subdag.add_subdags(vec![subdag]); + } + + let scores = scoring_subdag.calculate_scores(); + assert_eq!(scores.scores_per_authority, vec![5, 5, 5, 5]); + assert_eq!(scores.commit_range, (1..=4).into()); + } + + #[tokio::test] + async fn test_certified_vote_scoring_subdag() { + telemetry_subscribers::init_for_testing(); + let context = Arc::new(Context::new_for_test(4).0); + + // Populate fully connected test blocks for round 0 ~ 3, authorities 0 ~ 3. 
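+        // Sanity of the [5, 5, 5, 5] assertion below, under distributed-vote scoring:
+        // each authority's round-2 vote (for the round-1 leader) is strongly linked
+        // by all four round-3 blocks (+4 stake), and its round-3 vote (for the
+        // round-2 leader) only by the single round-4 leader block (+1 stake).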
+ let mut dag_builder = DagBuilder::new(context.clone()); + dag_builder.layers(1..=3).build(); + // Build round 4 but with just the leader block + dag_builder + .layer(4) + .authorities(vec![ + AuthorityIndex::new_for_test(1), + AuthorityIndex::new_for_test(2), + AuthorityIndex::new_for_test(3), + ]) + .skip_block() + .build(); + + let leaders = dag_builder + .leader_blocks(1..=4) + .into_iter() + .flatten() + .collect::>(); + + let mut scoring_subdag = ScoringSubdag::new(context.clone()); + let mut last_committed_rounds = vec![0; 4]; + for (idx, leader) in leaders.into_iter().enumerate() { + let commit_index = idx as u32 + 1; + let (subdag, _commit) = dag_builder.get_sub_dag_and_commit( + leader, + last_committed_rounds.clone(), + commit_index, + ); + for block in subdag.blocks.iter() { + last_committed_rounds[block.author().value()] = + max(block.round(), last_committed_rounds[block.author().value()]); + } + scoring_subdag.add_subdags(vec![subdag]); + } + + let scores_per_authority = scoring_subdag.score_certified_votes(); + assert_eq!(scores_per_authority, vec![4, 4, 4, 4]); + assert_eq!(scoring_subdag.commit_range.unwrap(), (1..=4).into()); + } + + // TODO: Remove all tests below this when DistributedVoteScoring is enabled. #[tokio::test] async fn test_reputation_score_calculator() { telemetry_subscribers::init_for_testing(); @@ -408,9 +665,7 @@ mod tests { } unscored_subdags.push(subdag); } - let scoring_strategy = VoteScoringStrategy {}; - let mut calculator = - ReputationScoreCalculator::new(context.clone(), &unscored_subdags, &scoring_strategy); + let mut calculator = ReputationScoreCalculator::new(context.clone(), &unscored_subdags); let scores = calculator.calculate(); assert_eq!(scores.scores_per_authority, vec![3, 2, 2, 2]); assert_eq!(scores.commit_range, (1..=4).into()); @@ -423,9 +678,7 @@ mod tests { let context = Arc::new(Context::new_for_test(4).0); let unscored_subdags = vec![]; - let scoring_strategy = VoteScoringStrategy {}; - let mut calculator = - ReputationScoreCalculator::new(context.clone(), &unscored_subdags, &scoring_strategy); + let mut calculator = ReputationScoreCalculator::new(context.clone(), &unscored_subdags); calculator.calculate(); } @@ -443,9 +696,7 @@ mod tests { CommitRef::new(1, CommitDigest::MIN), vec![], )]; - let scoring_strategy = VoteScoringStrategy {}; - let mut calculator = - ReputationScoreCalculator::new(context.clone(), &unscored_subdags, &scoring_strategy); + let mut calculator = ReputationScoreCalculator::new(context.clone(), &unscored_subdags); calculator.calculate(); } @@ -498,9 +749,7 @@ mod tests { unscored_subdags.push(subdag); } - let scoring_strategy = VoteScoringStrategy {}; - let mut calculator = - ReputationScoreCalculator::new(context.clone(), &unscored_subdags, &scoring_strategy); + let mut calculator = ReputationScoreCalculator::new(context.clone(), &unscored_subdags); let scores = calculator.calculate(); assert_eq!(scores.scores_per_authority, vec![3, 2, 2, 2]); assert_eq!(scores.commit_range, (1..=4).into()); diff --git a/consensus/core/src/leader_scoring_strategy.rs b/consensus/core/src/leader_scoring_strategy.rs deleted file mode 100644 index 640e3667cd71c..0000000000000 --- a/consensus/core/src/leader_scoring_strategy.rs +++ /dev/null @@ -1,373 +0,0 @@ -// Copyright (c) Mysten Labs, Inc. 
-// SPDX-License-Identifier: Apache-2.0 - -use std::{collections::HashMap, ops::Range}; - -use crate::{ - block::{BlockAPI, BlockRef, Slot}, - commit::DEFAULT_WAVE_LENGTH, - leader_scoring::UnscoredSubdag, - stake_aggregator::{QuorumThreshold, StakeAggregator}, -}; - -#[allow(unused)] -pub(crate) trait ScoringStrategy: Send + Sync { - fn calculate_scores_for_leader(&self, subdag: &UnscoredSubdag, leader_slot: Slot) -> Vec; - - // Based on the scoring strategy there is a minimum number of rounds required - // for the scores to be calculated. This method allows that to be set by the - // scoring strategy. - fn leader_scoring_round_range(&self, min_round: u32, max_round: u32) -> Range; -} - -/// This scoring strategy is like `CertifiedVoteScoringStrategyV1` but instead of -/// only giving one point for each vote that is included in 2f+1 certificates. We -/// give a score equal to the amount of stake of all certificates that included -/// the vote. -pub(crate) struct CertifiedVoteScoringStrategyV2 {} - -impl ScoringStrategy for CertifiedVoteScoringStrategyV2 { - fn calculate_scores_for_leader(&self, subdag: &UnscoredSubdag, leader_slot: Slot) -> Vec { - let num_authorities = subdag.context.committee.size(); - let mut scores_per_authority = vec![0_u64; num_authorities]; - - let decision_round = leader_slot.round + DEFAULT_WAVE_LENGTH - 1; - - let leader_blocks = subdag.get_blocks_at_slot(leader_slot); - - if leader_blocks.is_empty() { - tracing::trace!("[{}] No block for leader slot {leader_slot} in this set of unscored committed subdags, skip scoring", subdag.context.own_index); - return scores_per_authority; - } - - // At this point we are guaranteed that there is only one leader per slot - // because we are operating on committed subdags. - assert!(leader_blocks.len() == 1); - - let leader_block = leader_blocks.first().unwrap(); - - let decision_blocks = subdag.get_blocks_at_round(decision_round); - - let mut all_votes: HashMap)> = - HashMap::new(); - for potential_cert in decision_blocks { - let authority = potential_cert.reference().author; - for reference in potential_cert.ancestors() { - if let Some((is_vote, stake_agg)) = all_votes.get_mut(reference) { - if *is_vote { - stake_agg.add(authority, &subdag.context.committee); - } - } else if let Some(potential_vote) = subdag.get_block(reference) { - let is_vote = subdag.is_vote(&potential_vote, leader_block); - let mut stake_agg = StakeAggregator::::new(); - stake_agg.add(authority, &subdag.context.committee); - all_votes.insert(*reference, (is_vote, stake_agg)); - } else { - tracing::trace!( - "Potential vote not found in unscored committed subdags: {:?}", - reference - ); - }; - } - } - - for (vote_ref, (is_vote, stake_agg)) in all_votes { - if is_vote { - let authority = vote_ref.author; - tracing::trace!( - "Found a certified vote {vote_ref} for leader {leader_block} from authority {authority}" - ); - tracing::trace!( - "[{}] scores +{} reputation for {authority}!", - subdag.context.own_index, - stake_agg.stake() - ); - scores_per_authority[authority] += stake_agg.stake(); - } - } - - scores_per_authority - } - - fn leader_scoring_round_range(&self, min_round: u32, max_round: u32) -> Range { - // To be able to calculate scores using certified votes we require +1 round - // for the votes on the leader and +1 round for the certificates of those votes. 
- assert!(min_round < max_round - 1); - min_round..max_round.saturating_sub(1) - } -} - -/// This scoring strategy gives one point for each authority vote that is included -/// in 2f+1 certificates. We are calling this a certified vote. -pub(crate) struct CertifiedVoteScoringStrategyV1 {} - -impl ScoringStrategy for CertifiedVoteScoringStrategyV1 { - fn calculate_scores_for_leader(&self, subdag: &UnscoredSubdag, leader_slot: Slot) -> Vec { - let num_authorities = subdag.context.committee.size(); - let mut scores_per_authority = vec![0_u64; num_authorities]; - - let decision_round = leader_slot.round + DEFAULT_WAVE_LENGTH - 1; - - let leader_blocks = subdag.get_blocks_at_slot(leader_slot); - - if leader_blocks.is_empty() { - tracing::trace!("[{}] No block for leader slot {leader_slot} in this set of unscored committed subdags, skip scoring", subdag.context.own_index); - return scores_per_authority; - } - - // At this point we are guaranteed that there is only one leader per slot - // because we are operating on committed subdags. - assert!(leader_blocks.len() == 1); - - let leader_block = leader_blocks.first().unwrap(); - - let decision_blocks = subdag.get_blocks_at_round(decision_round); - - let mut all_votes: HashMap)> = - HashMap::new(); - for potential_cert in decision_blocks { - let authority = potential_cert.reference().author; - for reference in potential_cert.ancestors() { - if let Some((is_vote, stake_agg)) = all_votes.get_mut(reference) { - if *is_vote { - stake_agg.add(authority, &subdag.context.committee); - } - } else if let Some(potential_vote) = subdag.get_block(reference) { - let is_vote = subdag.is_vote(&potential_vote, leader_block); - let mut stake_agg = StakeAggregator::::new(); - stake_agg.add(authority, &subdag.context.committee); - all_votes.insert(*reference, (is_vote, stake_agg)); - } else { - tracing::trace!( - "Potential vote not found in unscored committed subdags: {:?}", - reference - ); - }; - } - } - - for (vote_ref, (is_vote, stake_agg)) in all_votes { - if is_vote && stake_agg.reached_threshold(&subdag.context.committee) { - let authority = vote_ref.author; - tracing::trace!( - "Found a certified vote {vote_ref} for leader {leader_block} from authority {authority}" - ); - tracing::trace!( - "[{}] scores +1 reputation for {authority}!", - subdag.context.own_index - ); - scores_per_authority[authority] += 1; - } - } - - scores_per_authority - } - - fn leader_scoring_round_range(&self, min_round: u32, max_round: u32) -> Range { - // To be able to calculate scores using certified votes we require +1 round - // for the votes on the leader and +1 round for the certificates of those votes. - assert!(min_round < max_round - 1); - min_round..max_round.saturating_sub(1) - } -} - -// This scoring strategy will give one point to any votes for the leader. -pub(crate) struct VoteScoringStrategy {} - -impl ScoringStrategy for VoteScoringStrategy { - fn calculate_scores_for_leader(&self, subdag: &UnscoredSubdag, leader_slot: Slot) -> Vec { - let num_authorities = subdag.context.committee.size(); - let mut scores_per_authority = vec![0_u64; num_authorities]; - - let leader_blocks = subdag.get_blocks_at_slot(leader_slot); - - if leader_blocks.is_empty() { - tracing::trace!("[{}] No block for leader slot {leader_slot} in this set of unscored committed subdags, skip scoring", subdag.context.own_index); - return scores_per_authority; - } - - // At this point we are guaranteed that there is only one leader per slot - // because we are operating on committed subdags. 
- assert!(leader_blocks.len() == 1); - - let leader_block = leader_blocks.first().unwrap(); - - let voting_round = leader_slot.round + 1; - let voting_blocks = subdag.get_blocks_at_round(voting_round); - for potential_vote in voting_blocks { - // TODO: use the decided leader as input instead of leader slot. If the leader was skipped, - // votes to skip should be included in the score as well. - if subdag.is_vote(&potential_vote, leader_block) { - let authority = potential_vote.author(); - tracing::trace!( - "Found a vote {} for leader {leader_block} from authority {authority}", - potential_vote.reference() - ); - tracing::trace!( - "[{}] scores +1 reputation for {authority}!", - subdag.context.own_index - ); - scores_per_authority[authority] += 1; - } - } - - scores_per_authority - } - - fn leader_scoring_round_range(&self, min_round: u32, max_round: u32) -> Range { - // To be able to calculate scores using votes we require +1 round - // for the votes on the leader. - assert!(min_round < max_round); - min_round..max_round - } -} - -// This scoring strategy will give one point to any certificates for the leader. -pub(crate) struct CertificateScoringStrategy {} - -impl ScoringStrategy for CertificateScoringStrategy { - fn calculate_scores_for_leader(&self, subdag: &UnscoredSubdag, leader_slot: Slot) -> Vec { - let num_authorities = subdag.context.committee.size(); - let mut scores_per_authority = vec![0_u64; num_authorities]; - - let decision_round = leader_slot.round + DEFAULT_WAVE_LENGTH - 1; - - let leader_blocks = subdag.get_blocks_at_slot(leader_slot); - - if leader_blocks.is_empty() { - tracing::trace!("[{}] No block for leader slot {leader_slot} in this set of unscored committed subdags, skip scoring", subdag.context.own_index); - return scores_per_authority; - } - - // At this point we are guaranteed that there is only one leader per slot - // because we are operating on committed subdags. - assert!(leader_blocks.len() == 1); - - let leader_block = leader_blocks.first().unwrap(); - - let decision_blocks = subdag.get_blocks_at_round(decision_round); - let mut all_votes = HashMap::new(); - for potential_cert in decision_blocks { - let authority = potential_cert.reference().author; - if subdag.is_certificate(&potential_cert, leader_block, &mut all_votes) { - tracing::trace!( - "Found a certificate {} for leader {leader_block} from authority {authority}", - potential_cert.reference() - ); - tracing::trace!( - "[{}] scores +1 reputation for {authority}!", - subdag.context.own_index - ); - scores_per_authority[authority] += 1; - } - } - - scores_per_authority - } - - fn leader_scoring_round_range(&self, min_round: u32, max_round: u32) -> Range { - // To be able to calculate scores using certificates we require +1 round - // for the votes on the leader and +1 round for the certificates of those votes. 
- assert!(min_round < max_round - 1); - min_round..max_round.saturating_sub(1) - } -} - -#[cfg(test)] -mod tests { - use std::{cmp::max, sync::Arc}; - - use consensus_config::AuthorityIndex; - - use super::*; - use crate::{ - commit::CommittedSubDag, context::Context, leader_scoring::ReputationScoreCalculator, - test_dag_builder::DagBuilder, - }; - - #[tokio::test] - async fn test_certificate_scoring_strategy() { - let (context, unscored_subdags) = basic_setup(); - let scoring_strategy = CertificateScoringStrategy {}; - let mut calculator = - ReputationScoreCalculator::new(context.clone(), &unscored_subdags, &scoring_strategy); - let scores = calculator.calculate(); - assert_eq!(scores.scores_per_authority, vec![2, 1, 1, 1]); - assert_eq!(scores.commit_range, (1..=4).into()); - } - - #[tokio::test] - async fn test_vote_scoring_strategy() { - let (context, unscored_subdags) = basic_setup(); - let scoring_strategy = VoteScoringStrategy {}; - let mut calculator = - ReputationScoreCalculator::new(context.clone(), &unscored_subdags, &scoring_strategy); - let scores = calculator.calculate(); - assert_eq!(scores.scores_per_authority, vec![3, 2, 2, 2]); - assert_eq!(scores.commit_range, (1..=4).into()); - } - - #[tokio::test] - async fn test_certified_vote_scoring_strategy_v1() { - let (context, unscored_subdags) = basic_setup(); - let scoring_strategy = CertifiedVoteScoringStrategyV1 {}; - let mut calculator = - ReputationScoreCalculator::new(context.clone(), &unscored_subdags, &scoring_strategy); - let scores = calculator.calculate(); - assert_eq!(scores.scores_per_authority, vec![1, 1, 1, 1]); - assert_eq!(scores.commit_range, (1..=4).into()); - } - - #[tokio::test] - async fn test_certified_vote_scoring_strategy_v2() { - let (context, unscored_subdags) = basic_setup(); - let scoring_strategy = CertifiedVoteScoringStrategyV2 {}; - let mut calculator = - ReputationScoreCalculator::new(context.clone(), &unscored_subdags, &scoring_strategy); - let scores = calculator.calculate(); - assert_eq!(scores.scores_per_authority, vec![5, 5, 5, 5]); - assert_eq!(scores.commit_range, (1..=4).into()); - } - - fn basic_setup() -> (Arc, Vec) { - telemetry_subscribers::init_for_testing(); - let context = Arc::new(Context::new_for_test(4).0); - - // Populate fully connected test blocks for round 0 ~ 3, authorities 0 ~ 3. 
- let mut dag_builder = DagBuilder::new(context.clone()); - dag_builder.layers(1..=3).build(); - // Build round 4 but with just the leader block - dag_builder - .layer(4) - .authorities(vec![ - AuthorityIndex::new_for_test(1), - AuthorityIndex::new_for_test(2), - AuthorityIndex::new_for_test(3), - ]) - .skip_block() - .build(); - - let leaders = dag_builder - .leader_blocks(1..=4) - .into_iter() - .flatten() - .collect::>(); - - let mut unscored_subdags = vec![]; - let mut last_committed_rounds = vec![0; 4]; - for (idx, leader) in leaders.into_iter().enumerate() { - let commit_index = idx as u32 + 1; - let (subdag, _commit) = dag_builder.get_sub_dag_and_commit( - leader, - last_committed_rounds.clone(), - commit_index, - ); - for block in subdag.blocks.iter() { - last_committed_rounds[block.author().value()] = - max(block.round(), last_committed_rounds[block.author().value()]); - } - unscored_subdags.push(subdag); - } - (context, unscored_subdags) - } -} diff --git a/consensus/core/src/leader_timeout.rs b/consensus/core/src/leader_timeout.rs index d79b23ef9b7e0..4c2c15b1bbc55 100644 --- a/consensus/core/src/leader_timeout.rs +++ b/consensus/core/src/leader_timeout.rs @@ -167,12 +167,21 @@ mod tests { todo!() } - fn set_consumer_availability(&self, _available: bool) -> Result<(), CoreError> { + fn set_subscriber_exists(&self, _exists: bool) -> Result<(), CoreError> { todo!() } + + fn set_propagation_delay(&self, _delay: Round) -> Result<(), CoreError> { + todo!() + } + fn set_last_known_proposed_round(&self, _round: Round) -> Result<(), CoreError> { todo!() } + + fn highest_received_rounds(&self) -> Vec { + todo!() + } } #[tokio::test(flavor = "current_thread", start_paused = true)] diff --git a/consensus/core/src/lib.rs b/consensus/core/src/lib.rs index 4c3cc17ca965c..b7597853dbd2a 100644 --- a/consensus/core/src/lib.rs +++ b/consensus/core/src/lib.rs @@ -20,7 +20,6 @@ mod dag_state; mod error; mod leader_schedule; mod leader_scoring; -mod leader_scoring_strategy; mod leader_timeout; mod linearizer; mod metrics; @@ -36,6 +35,7 @@ mod universal_committer; #[cfg(test)] #[path = "tests/randomized_tests.rs"] mod randomized_tests; +mod round_prober; #[cfg(test)] mod test_dag; #[cfg(test)] @@ -45,7 +45,7 @@ mod test_dag_parser; /// Exported consensus API. pub use authority_node::ConsensusAuthority; -pub use block::{BlockAPI, Round}; +pub use block::{BlockAPI, Round, TransactionIndex}; pub use commit::{CommitDigest, CommitIndex, CommitRef, CommittedSubDag}; pub use commit_consumer::{CommitConsumer, CommitConsumerMonitor}; pub use transaction::{ClientError, TransactionClient, TransactionVerifier, ValidationError}; diff --git a/consensus/core/src/linearizer.rs b/consensus/core/src/linearizer.rs index 1267a6e579116..f9fffd6e659f5 100644 --- a/consensus/core/src/linearizer.rs +++ b/consensus/core/src/linearizer.rs @@ -6,12 +6,11 @@ use std::{collections::HashSet, sync::Arc}; use consensus_config::AuthorityIndex; use parking_lot::RwLock; -use crate::commit::sort_sub_dag_blocks; -use crate::leader_schedule::LeaderSchedule; use crate::{ block::{BlockAPI, VerifiedBlock}, - commit::{Commit, CommittedSubDag, TrustedCommit}, + commit::{sort_sub_dag_blocks, Commit, CommittedSubDag, TrustedCommit}, dag_state::DagState, + leader_schedule::LeaderSchedule, }; /// Expand a committed sequence of leader into a sequence of sub-dags. 
@@ -262,11 +261,80 @@ mod tests { // Create some commits let commits = linearizer.handle_commit(leaders.clone()); + // Write them in DagState + dag_state.write().add_scoring_subdags(commits); + + // Now update the leader schedule + leader_schedule.update_leader_schedule_v2(&dag_state); + + assert!( + leader_schedule.leader_schedule_updated(&dag_state), + "Leader schedule should have been updated" + ); + + // Try to commit now the rest of the 10 leaders + let leaders = dag_builder + .leader_blocks(11..=20) + .into_iter() + .map(Option::unwrap) + .collect::>(); + + // Now on the commits only the first one should contain the updated scores, the other should be empty + let commits = linearizer.handle_commit(leaders.clone()); + assert_eq!(commits.len(), 10); + let scores = vec![ + (AuthorityIndex::new_for_test(1), 29), + (AuthorityIndex::new_for_test(0), 29), + (AuthorityIndex::new_for_test(3), 29), + (AuthorityIndex::new_for_test(2), 29), + ]; + assert_eq!(commits[0].reputation_scores_desc, scores); + + for commit in commits.into_iter().skip(1) { + assert_eq!(commit.reputation_scores_desc, vec![]); + } + } + + // TODO: Remove when DistributedVoteScoring is enabled. + #[tokio::test] + async fn test_handle_commit_with_schedule_update_with_unscored_subdags() { + telemetry_subscribers::init_for_testing(); + let num_authorities = 4; + let context = Arc::new(Context::new_for_test(num_authorities).0); + let dag_state = Arc::new(RwLock::new(DagState::new( + context.clone(), + Arc::new(MemStore::new()), + ))); + const NUM_OF_COMMITS_PER_SCHEDULE: u64 = 10; + let leader_schedule = Arc::new( + LeaderSchedule::new(context.clone(), LeaderSwapTable::default()) + .with_num_commits_per_schedule(NUM_OF_COMMITS_PER_SCHEDULE), + ); + let mut linearizer = Linearizer::new(dag_state.clone(), leader_schedule.clone()); + + // Populate fully connected test blocks for round 0 ~ 20, authorities 0 ~ 3. 
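+        // Mirrors the distributed-vote test above: with NUM_OF_COMMITS_PER_SCHEDULE
+        // set to 10, committing the first 10 leaders triggers a schedule update, so
+        // only the first commit afterwards should carry reputation scores.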
+ let num_rounds: u32 = 20; + let mut dag_builder = DagBuilder::new(context.clone()); + dag_builder + .layers(1..=num_rounds) + .build() + .persist_layers(dag_state.clone()); + + // Take the first 10 leaders + let leaders = dag_builder + .leader_blocks(1..=10) + .into_iter() + .map(Option::unwrap) + .collect::>(); + + // Create some commits + let commits = linearizer.handle_commit(leaders.clone()); + // Write them in DagState dag_state.write().add_unscored_committed_subdags(commits); // Now update the leader schedule - leader_schedule.update_leader_schedule(&dag_state); + leader_schedule.update_leader_schedule_v1(&dag_state); assert!( leader_schedule.leader_schedule_updated(&dag_state), diff --git a/consensus/core/src/metrics.rs b/consensus/core/src/metrics.rs index 8d9f60b179b6b..25a1a1013f2e4 100644 --- a/consensus/core/src/metrics.rs +++ b/consensus/core/src/metrics.rs @@ -113,6 +113,7 @@ pub(crate) struct NodeMetrics { pub(crate) core_add_blocks_batch_size: Histogram, pub(crate) core_lock_dequeued: IntCounter, pub(crate) core_lock_enqueued: IntCounter, + pub(crate) core_skipped_proposals: IntCounterVec, pub(crate) highest_accepted_authority_round: IntGaugeVec, pub(crate) highest_accepted_round: IntGauge, pub(crate) accepted_blocks: IntCounterVec, @@ -123,6 +124,7 @@ pub(crate) struct NodeMetrics { pub(crate) fetch_blocks_scheduler_inflight: IntGauge, pub(crate) fetch_blocks_scheduler_skipped: IntCounterVec, pub(crate) synchronizer_fetched_blocks_by_peer: IntCounterVec, + pub(crate) synchronizer_missing_blocks_by_authority: IntCounterVec, pub(crate) synchronizer_fetched_blocks_by_authority: IntCounterVec, pub(crate) invalid_blocks: IntCounterVec, pub(crate) rejected_blocks: IntCounterVec, @@ -151,6 +153,8 @@ pub(crate) struct NodeMetrics { pub(crate) block_manager_suspended_blocks: IntGauge, pub(crate) block_manager_missing_ancestors: IntGauge, pub(crate) block_manager_missing_blocks: IntGauge, + pub(crate) block_manager_missing_blocks_by_authority: IntCounterVec, + pub(crate) block_manager_missing_ancestors_by_authority: IntCounterVec, pub(crate) threshold_clock_round: IntGauge, pub(crate) subscriber_connection_attempts: IntCounterVec, pub(crate) subscriber_connections: IntGaugeVec, @@ -168,6 +172,10 @@ pub(crate) struct NodeMetrics { pub(crate) commit_sync_fetch_loop_latency: Histogram, pub(crate) commit_sync_fetch_once_latency: Histogram, pub(crate) commit_sync_fetch_once_errors: IntCounterVec, + pub(crate) round_prober_quorum_round_gaps: IntGaugeVec, + pub(crate) round_prober_propagation_delays: Histogram, + pub(crate) round_prober_last_propagation_delay: IntGauge, + pub(crate) round_prober_request_errors: IntCounter, pub(crate) uptime: Histogram, } @@ -269,10 +277,16 @@ impl NodeMetrics { "Number of enqueued core requests", registry, ).unwrap(), + core_skipped_proposals: register_int_counter_vec_with_registry!( + "core_skipped_proposals", + "Number of proposals skipped in the Core, per reason", + &["reason"], + registry, + ).unwrap(), highest_accepted_authority_round: register_int_gauge_vec_with_registry!( "highest_accepted_authority_round", - "The highest round where a block has been accepted by author. Resets on restart.", - &["author"], + "The highest round where a block has been accepted per authority. 
Resets on restart.", + &["authority"], registry, ).unwrap(), highest_accepted_round: register_int_gauge_with_registry!( @@ -330,6 +344,12 @@ impl NodeMetrics { &["authority", "type"], registry, ).unwrap(), + synchronizer_missing_blocks_by_authority: register_int_counter_vec_with_registry!( + "synchronizer_missing_blocks_by_authority", + "Number of missing blocks per block author, as observed by the synchronizer during periodic sync.", + &["authority"], + registry, + ).unwrap(), last_known_own_block_round: register_int_gauge_with_registry!( "last_known_own_block_round", "The highest round of our own block as this has been synced from peers during an amnesia recovery", @@ -481,6 +501,18 @@ impl NodeMetrics { "The number of blocks missing content tracked in the block manager", registry, ).unwrap(), + block_manager_missing_blocks_by_authority: register_int_counter_vec_with_registry!( + "block_manager_missing_blocks_by_authority", + "The number of new missing blocks by block authority", + &["authority"], + registry, + ).unwrap(), + block_manager_missing_ancestors_by_authority: register_int_counter_vec_with_registry!( + "block_manager_missing_ancestors_by_authority", + "The number of missing ancestors by ancestor authority across received blocks", + &["authority"], + registry, + ).unwrap(), threshold_clock_round: register_int_gauge_with_registry!( "threshold_clock_round", "The current threshold clock round. We only advance to a new round when a quorum of parents have been synced.", @@ -572,6 +604,28 @@ impl NodeMetrics { &["error"], registry ).unwrap(), + round_prober_quorum_round_gaps: register_int_gauge_vec_with_registry!( + "round_prober_quorum_round_gaps", + "Round gaps among peers for blocks proposed from each authority", + &["authority"], + registry + ).unwrap(), + round_prober_propagation_delays: register_histogram_with_registry!( + "round_prober_propagation_delays", + "Round gaps between the last proposed block round and the lower bound of own quorum round", + NUM_BUCKETS.to_vec(), + registry + ).unwrap(), + round_prober_last_propagation_delay: register_int_gauge_with_registry!( + "round_prober_last_propagation_delay", + "Most recent propagation delay observed by RoundProber", + registry + ).unwrap(), + round_prober_request_errors: register_int_counter_with_registry!( + "round_prober_request_errors", + "Number of timeouts when probing against peers", + registry + ).unwrap(), uptime: register_histogram_with_registry!( "uptime", "Total node uptime", diff --git a/consensus/core/src/network/anemo_network.rs b/consensus/core/src/network/anemo_network.rs index 6a156691cd375..9ffbad76857d4 100644 --- a/consensus/core/src/network/anemo_network.rs +++ b/consensus/core/src/network/anemo_network.rs @@ -233,6 +233,29 @@ impl NetworkClient for AnemoClient { let body = response.into_body(); Ok(body.blocks) } + + async fn get_latest_rounds( + &self, + peer: AuthorityIndex, + timeout: Duration, + ) -> ConsensusResult> { + let mut client = self.get_client(peer, timeout).await?; + let request = GetLatestRoundsRequest {}; + let response = client + .get_latest_rounds(anemo::Request::new(request).with_timeout(timeout)) + .await + .map_err(|e: Status| { + if e.status() == StatusCode::RequestTimeout { + ConsensusError::NetworkRequestTimeout(format!( + "get_latest_rounds timeout: {e:?}" + )) + } else { + ConsensusError::NetworkRequest(format!("get_latest_rounds failed: {e:?}")) + } + })?; + let body = response.into_body(); + Ok(body.highest_received) + } } /// Proxies Anemo requests to NetworkService with 
actual handler implementation. @@ -267,7 +290,7 @@ impl ConsensusRpc for AnemoServiceProxy { "peer_id not found", )); }; - let index = self.peer_map.get(peer_id).ok_or_else(|| { + let index = *self.peer_map.get(peer_id).ok_or_else(|| { anemo::rpc::Status::new_with_message( anemo::types::response::StatusCode::BadRequest, "peer not found", @@ -275,7 +298,7 @@ impl ConsensusRpc for AnemoServiceProxy { })?; let block = request.into_body().block; self.service - .handle_send_block(*index, block) + .handle_send_block(index, block) .await .map_err(|e| { anemo::rpc::Status::new_with_message( @@ -296,7 +319,7 @@ impl ConsensusRpc for AnemoServiceProxy { "peer_id not found", )); }; - let index = self.peer_map.get(peer_id).ok_or_else(|| { + let index = *self.peer_map.get(peer_id).ok_or_else(|| { anemo::rpc::Status::new_with_message( anemo::types::response::StatusCode::BadRequest, "peer not found", @@ -319,7 +342,7 @@ impl ConsensusRpc for AnemoServiceProxy { let blocks = self .service - .handle_fetch_blocks(*index, block_refs, highest_accepted_rounds) + .handle_fetch_blocks(index, block_refs, highest_accepted_rounds) .await .map_err(|e| { anemo::rpc::Status::new_with_message( @@ -340,7 +363,7 @@ impl ConsensusRpc for AnemoServiceProxy { "peer_id not found", )); }; - let index = self.peer_map.get(peer_id).ok_or_else(|| { + let index = *self.peer_map.get(peer_id).ok_or_else(|| { anemo::rpc::Status::new_with_message( anemo::types::response::StatusCode::BadRequest, "peer not found", @@ -349,7 +372,7 @@ impl ConsensusRpc for AnemoServiceProxy { let request = request.into_body(); let (commits, certifier_blocks) = self .service - .handle_fetch_commits(*index, (request.start..=request.end).into()) + .handle_fetch_commits(index, (request.start..=request.end).into()) .await .map_err(|e| { anemo::rpc::Status::new_with_message( @@ -381,7 +404,7 @@ impl ConsensusRpc for AnemoServiceProxy { "peer_id not found", )); }; - let index = self.peer_map.get(peer_id).ok_or_else(|| { + let index = *self.peer_map.get(peer_id).ok_or_else(|| { anemo::rpc::Status::new_with_message( anemo::types::response::StatusCode::BadRequest, "peer not found", @@ -390,7 +413,7 @@ impl ConsensusRpc for AnemoServiceProxy { let body = request.into_body(); let blocks = self .service - .handle_fetch_latest_blocks(*index, body.authorities) + .handle_fetch_latest_blocks(index, body.authorities) .await .map_err(|e| { anemo::rpc::Status::new_with_message( @@ -400,6 +423,35 @@ impl ConsensusRpc for AnemoServiceProxy { })?; Ok(Response::new(FetchLatestBlocksResponse { blocks })) } + + async fn get_latest_rounds( + &self, + request: anemo::Request, + ) -> Result, anemo::rpc::Status> { + let Some(peer_id) = request.peer_id() else { + return Err(anemo::rpc::Status::new_with_message( + anemo::types::response::StatusCode::BadRequest, + "peer_id not found", + )); + }; + let index = *self.peer_map.get(peer_id).ok_or_else(|| { + anemo::rpc::Status::new_with_message( + anemo::types::response::StatusCode::BadRequest, + "peer not found", + ) + })?; + let highest_received = self + .service + .handle_get_latest_rounds(index) + .await + .map_err(|e| { + anemo::rpc::Status::new_with_message( + anemo::types::response::StatusCode::InternalServerError, + format!("{e}"), + ) + })?; + Ok(Response::new(GetLatestRoundsResponse { highest_received })) + } } /// Manages the lifecycle of Anemo network. Typical usage during initialization: @@ -725,3 +777,12 @@ pub(crate) struct FetchLatestBlocksResponse { // Serialized SignedBlocks. 
blocks: Vec, } + +#[derive(Clone, Serialize, Deserialize)] +pub(crate) struct GetLatestRoundsRequest {} + +#[derive(Clone, Serialize, Deserialize)] +pub(crate) struct GetLatestRoundsResponse { + // Highest received round per authority. + highest_received: Vec, +} diff --git a/consensus/core/src/network/mod.rs b/consensus/core/src/network/mod.rs index 673adbc963e0a..a533222353204 100644 --- a/consensus/core/src/network/mod.rs +++ b/consensus/core/src/network/mod.rs @@ -114,6 +114,13 @@ pub(crate) trait NetworkClient: Send + Sync + Sized + 'static { authorities: Vec, timeout: Duration, ) -> ConsensusResult>; + + /// Gets the latest received rounds of all authorities from the peer. + async fn get_latest_rounds( + &self, + peer: AuthorityIndex, + timeout: Duration, + ) -> ConsensusResult>; } /// Network service for handling requests from peers. @@ -157,6 +164,9 @@ pub(crate) trait NetworkService: Send + Sync + 'static { peer: AuthorityIndex, authorities: Vec, ) -> ConsensusResult>; + + /// Handles the request to get the latest received rounds of all authorities. + async fn handle_get_latest_rounds(&self, peer: AuthorityIndex) -> ConsensusResult>; } /// An `AuthorityNode` holds a `NetworkManager` until shutdown. diff --git a/consensus/core/src/network/test_network.rs b/consensus/core/src/network/test_network.rs index ad0e6cfd25b22..f736b01ac27cd 100644 --- a/consensus/core/src/network/test_network.rs +++ b/consensus/core/src/network/test_network.rs @@ -91,4 +91,8 @@ impl NetworkService for Mutex { ) -> ConsensusResult> { unimplemented!("Unimplemented") } + + async fn handle_get_latest_rounds(&self, _peer: AuthorityIndex) -> ConsensusResult> { + unimplemented!("Unimplemented") + } } diff --git a/consensus/core/src/network/tonic_network.rs b/consensus/core/src/network/tonic_network.rs index 656813aca6f18..701f93ef33e83 100644 --- a/consensus/core/src/network/tonic_network.rs +++ b/consensus/core/src/network/tonic_network.rs @@ -320,6 +320,20 @@ impl NetworkClient for TonicClient { } Ok(blocks) } + + async fn get_latest_rounds( + &self, + peer: AuthorityIndex, + timeout: Duration, + ) -> ConsensusResult> { + let mut client = self.get_client(peer, timeout).await?; + let mut request = Request::new(GetLatestRoundsRequest {}); + request.set_timeout(timeout); + let response = client.get_latest_rounds(request).await.map_err(|e| { + ConsensusError::NetworkRequest(format!("get_latest_rounds failed: {e:?}")) + })?; + Ok(response.into_inner().highest_received) + } } // Tonic channel wrapped with layers. @@ -616,6 +630,25 @@ impl ConsensusService for TonicServiceProxy { let stream = iter(responses); Ok(Response::new(stream)) } + + async fn get_latest_rounds( + &self, + request: Request, + ) -> Result, tonic::Status> { + let Some(peer_index) = request + .extensions() + .get::() + .map(|p| p.authority_index) + else { + return Err(tonic::Status::internal("PeerInfo not found")); + }; + let highest_received = self + .service + .handle_get_latest_rounds(peer_index) + .await + .map_err(|e| tonic::Status::internal(format!("{e:?}")))?; + Ok(Response::new(GetLatestRoundsResponse { highest_received })) + } } /// Manages the lifecycle of Tonic network client and service. Typical usage during initialization: @@ -1143,6 +1176,16 @@ pub(crate) struct FetchLatestBlocksResponse { blocks: Vec, } +#[derive(Clone, prost::Message)] +pub(crate) struct GetLatestRoundsRequest {} + +#[derive(Clone, prost::Message)] +pub(crate) struct GetLatestRoundsResponse { + // Highest received round per authority. 
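+    // Index corresponds to the authority's position in the committee, matching the
+    // committee-indexed round vectors used throughout the prober.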
+ #[prost(uint32, repeated, tag = "1")] + highest_received: Vec, +} + fn chunk_blocks(blocks: Vec, chunk_limit: usize) -> Vec> { let mut chunks = vec![]; let mut chunk = vec![]; diff --git a/consensus/core/src/round_prober.rs b/consensus/core/src/round_prober.rs new file mode 100644 index 0000000000000..ea29b6345d7c3 --- /dev/null +++ b/consensus/core/src/round_prober.rs @@ -0,0 +1,540 @@ +// Copyright (c) Mysten Labs, Inc. +// SPDX-License-Identifier: Apache-2.0 + +//! RoundProber periodically checks each peer for the latest rounds they received from others. +//! This provides insight into how effectively each authority's blocks are propagated across +//! the network. +//! +//! Unlike inferring accepted rounds from the DAG of each block, RoundProber has the benefit that +//! it remains active even when peers are not proposing. This makes it essential for determining +//! when to disable optimizations that improve DAG quality but may compromise liveness. +//! +//! RoundProber's data source is the `highest_received_rounds` tracked by the CoreThreadDispatcher. +//! These rounds are updated after blocks are verified but before checking for dependencies. +//! This should make the values more indicative of how well authorities propagate blocks, and less +//! influenced by the quality of ancestors in the proposed blocks. + +use std::{sync::Arc, time::Duration}; + +use consensus_config::{AuthorityIndex, Committee}; +use futures::stream::{FuturesUnordered, StreamExt as _}; +use mysten_common::sync::notify_once::NotifyOnce; +use mysten_metrics::monitored_scope; +use parking_lot::RwLock; +use tokio::{task::JoinHandle, time::MissedTickBehavior}; + +use crate::{ + context::Context, core_thread::CoreThreadDispatcher, dag_state::DagState, + network::NetworkClient, BlockAPI as _, Round, +}; + +/// A [`QuorumRound`] is a round range [low, high]. It is computed from +/// highest received rounds of an authority reported by all authorities. +/// The bounds represent: +/// - the highest round lower or equal to rounds from a quorum (low) +/// - the lowest round higher or equal to rounds from a quorum (high) +/// +/// [`QuorumRound`] is useful because: +/// - [low, high] range is BFT, always between the lowest and highest rounds +/// of honest validators, with < validity threshold of malicious stake. +/// - It provides signals about how well blocks from an authority propagates +/// in the network. If low bound for an authority is lower than its last +/// proposed round, the last proposed block has not propagated to a quorum. +/// If a new block is proposed from the authority, it will not get accepted +/// immediately by a quorum. +pub(crate) type QuorumRound = (Round, Round); + +// Handle to control the RoundProber loop and read latest round gaps. +pub(crate) struct RoundProberHandle { + prober_task: JoinHandle<()>, + shutdown_notify: Arc, +} + +impl RoundProberHandle { + pub(crate) async fn stop(self) { + let _ = self.shutdown_notify.notify(); + // Do not abort prober task, which waits for requests to be cancelled. 
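+        // Awaiting the handle (instead of aborting) lets in-flight requests wind
+        // down, and re-raises any panic from the prober task via resume_unwind
+        // below rather than silently dropping it.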
+ if let Err(e) = self.prober_task.await { + if e.is_panic() { + std::panic::resume_unwind(e.into_panic()); + } + } + } +} + +pub(crate) struct RoundProber { + context: Arc, + core_thread_dispatcher: Arc, + dag_state: Arc>, + network_client: Arc, + shutdown_notify: Arc, +} + +impl RoundProber { + pub(crate) fn new( + context: Arc, + core_thread_dispatcher: Arc, + dag_state: Arc>, + network_client: Arc, + ) -> Self { + Self { + context, + core_thread_dispatcher, + dag_state, + network_client, + shutdown_notify: Arc::new(NotifyOnce::new()), + } + } + + pub(crate) fn start(self) -> RoundProberHandle { + let shutdown_notify = self.shutdown_notify.clone(); + let loop_shutdown_notify = shutdown_notify.clone(); + let prober_task = tokio::spawn(async move { + // With 200 validators, this would result in 200 * 4 * 200 / 2 = 80KB of additional + // bandwidth usage per sec. We can consider using adaptive intervals, for example + // 10s by default but reduced to 2s when the propagation delay is higher. + let mut interval = tokio::time::interval(Duration::from_millis( + self.context.parameters.round_prober_interval_ms, + )); + interval.set_missed_tick_behavior(MissedTickBehavior::Delay); + loop { + tokio::select! { + _ = interval.tick() => { + self.probe().await; + } + _ = loop_shutdown_notify.wait() => { + break; + } + } + } + }); + RoundProberHandle { + prober_task, + shutdown_notify, + } + } + + // Probes each peer for the latest rounds they received from others. + // Returns the quorum round for each authority, and the propagation delay + // of own blocks. + pub(crate) async fn probe(&self) -> (Vec, Round) { + let _scope = monitored_scope("RoundProber"); + + let request_timeout = + Duration::from_millis(self.context.parameters.round_prober_request_timeout_ms); + let own_index = self.context.own_index; + let last_proposed_round = self + .dag_state + .read() + .get_last_block_for_authority(own_index) + .round(); + + let mut requests = FuturesUnordered::new(); + for (peer, _) in self.context.committee.authorities() { + if peer == own_index { + continue; + } + let network_client = self.network_client.clone(); + requests.push(async move { + let result = tokio::time::timeout( + request_timeout, + network_client.get_latest_rounds(peer, request_timeout), + ) + .await; + (peer, result) + }); + } + + let mut highest_received_rounds = + vec![vec![0; self.context.committee.size()]; self.context.committee.size()]; + highest_received_rounds[own_index] = self.core_thread_dispatcher.highest_received_rounds(); + highest_received_rounds[own_index][own_index] = last_proposed_round; + loop { + tokio::select! { + result = requests.next() => { + let Some((peer, result)) = result else { + break; + }; + match result { + Ok(Ok(rounds)) => { + if rounds.len() == self.context.committee.size() { + highest_received_rounds[peer] = rounds; + } else { + self.context.metrics.node_metrics.round_prober_request_errors.inc(); + tracing::warn!("Received invalid number of rounds from peer {}", peer); + } + }, + // When a request fails, the highest received rounds from that authority will be 0 + // for the subsequent computations. + // For propagation delay, this behavior is desirable because the computed delay + // increases as this authority has more difficulty communicating with peers. Logic + // triggered by high delay should usually be triggered with frequent probing failures + // as well. 
+                        // For quorum rounds computed for a peer, this means the values should be used for
+                        // positive signals (peer A can propagate its blocks well) rather than negative signals
+                        // (peer A cannot propagate its blocks well): it can be difficult to distinguish between
+                        // own probing failures and actual propagation issues.
+                        Ok(Err(err)) => {
+                            self.context.metrics.node_metrics.round_prober_request_errors.inc();
+                            tracing::warn!("Failed to get latest rounds from peer {}: {:?}", peer, err);
+                        },
+                        Err(_) => {
+                            self.context.metrics.node_metrics.round_prober_request_errors.inc();
+                            tracing::warn!("Timeout while getting latest rounds from peer {}", peer);
+                        },
+                    }
+                }
+                _ = self.shutdown_notify.wait() => {
+                    break;
+                }
+            }
+        }
+
+        let quorum_rounds: Vec<_> = self
+            .context
+            .committee
+            .authorities()
+            .map(|(peer, _)| {
+                compute_quorum_round(&self.context.committee, peer, &highest_received_rounds)
+            })
+            .collect();
+        for ((low, high), (_, authority)) in quorum_rounds
+            .iter()
+            .zip(self.context.committee.authorities())
+        {
+            self.context
+                .metrics
+                .node_metrics
+                .round_prober_quorum_round_gaps
+                .with_label_values(&[&authority.hostname])
+                .set((high - low) as i64);
+        }
+        // TODO: consider using own quorum round gap to control proposing in addition to
+        // propagation delay. For now they seem to be about the same.
+
+        // It is possible more blocks arrive at a quorum of peers before the get_latest_rounds
+        // requests arrive.
+        // Using the lower bound to increase sensitivity about block propagation issues
+        // that can reduce round rate.
+        // Because of the nature of TCP and block streaming, propagation delay is expected to be
+        // 0 in most cases, even when the actual latency of broadcasting blocks is high.
+        let propagation_delay = last_proposed_round.saturating_sub(quorum_rounds[own_index].0);
+        self.context
+            .metrics
+            .node_metrics
+            .round_prober_propagation_delays
+            .observe(propagation_delay as f64);
+        self.context
+            .metrics
+            .node_metrics
+            .round_prober_last_propagation_delay
+            .set(propagation_delay as i64);
+        if let Err(e) = self
+            .core_thread_dispatcher
+            .set_propagation_delay(propagation_delay)
+        {
+            tracing::warn!(
+                "Failed to set propagation delay {propagation_delay} on Core: {:?}",
+                e
+            );
+        }
+
+        (quorum_rounds, propagation_delay)
+    }
+}
+
+/// For the peer specified with target_index, compute and return its [`QuorumRound`].
+fn compute_quorum_round(
+    committee: &Committee,
+    target_index: AuthorityIndex,
+    highest_received_rounds: &[Vec<Round>],
+) -> QuorumRound {
+    let mut rounds_with_stake = highest_received_rounds
+        .iter()
+        .zip(committee.authorities())
+        .map(|(rounds, (_, authority))| (rounds[target_index], authority.stake))
+        .collect::<Vec<_>>();
+    rounds_with_stake.sort();
+
+    // Forward iteration and stopping at the validity threshold would produce the same result
+    // currently, with fault tolerance of f out of 3f+1 total stake. But it is not semantically
+    // correct, and will provide an incorrect value when fault tolerance and validity threshold
+    // are different.
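+    // Worked example with 4 equal-stake authorities (quorum threshold = 3): if the
+    // target's reported rounds are [10, 5, 0, 3], the sorted list is [0, 3, 5, 10].
+    // Scanning from the highest round, accumulated stake first reaches the quorum at
+    // round 3 => low = 3; scanning from the lowest, it first reaches the quorum at
+    // round 5 => high = 5, i.e. the QuorumRound is (3, 5).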
+    let mut total_stake = 0;
+    let mut low = 0;
+    for (round, stake) in rounds_with_stake.iter().rev() {
+        let reached_quorum_before = total_stake >= committee.quorum_threshold();
+        total_stake += stake;
+        if !reached_quorum_before && total_stake >= committee.quorum_threshold() {
+            low = *round;
+            break;
+        }
+    }
+
+    let mut total_stake = 0;
+    let mut high = 0;
+    for (round, stake) in rounds_with_stake.iter() {
+        let reached_quorum_before = total_stake >= committee.quorum_threshold();
+        total_stake += stake;
+        if !reached_quorum_before && total_stake >= committee.quorum_threshold() {
+            high = *round;
+            break;
+        }
+    }
+
+    (low, high)
+}
+
+#[cfg(test)]
+mod test {
+    use std::{collections::BTreeSet, sync::Arc, time::Duration};
+
+    use async_trait::async_trait;
+    use bytes::Bytes;
+    use consensus_config::AuthorityIndex;
+    use parking_lot::{Mutex, RwLock};
+
+    use crate::{
+        block::BlockRef,
+        commit::CommitRange,
+        context::Context,
+        core_thread::{CoreError, CoreThreadDispatcher},
+        dag_state::DagState,
+        error::{ConsensusError, ConsensusResult},
+        network::{BlockStream, NetworkClient},
+        round_prober::{compute_quorum_round, RoundProber},
+        storage::mem_store::MemStore,
+        Round, TestBlock, VerifiedBlock,
+    };
+
+    struct FakeThreadDispatcher {
+        highest_received_rounds: Vec<Round>,
+        propagation_delay: Mutex<Round>,
+    }
+
+    impl FakeThreadDispatcher {
+        fn new(highest_received_rounds: Vec<Round>) -> Self {
+            Self {
+                highest_received_rounds,
+                propagation_delay: Mutex::new(0),
+            }
+        }
+
+        fn propagation_delay(&self) -> Round {
+            *self.propagation_delay.lock()
+        }
+    }
+
+    #[async_trait]
+    impl CoreThreadDispatcher for FakeThreadDispatcher {
+        async fn add_blocks(
+            &self,
+            _blocks: Vec<VerifiedBlock>,
+        ) -> Result<BTreeSet<BlockRef>, CoreError> {
+            unimplemented!()
+        }
+
+        async fn new_block(&self, _round: Round, _force: bool) -> Result<(), CoreError> {
+            unimplemented!()
+        }
+
+        async fn get_missing_blocks(&self) -> Result<BTreeSet<BlockRef>, CoreError> {
+            unimplemented!()
+        }
+
+        fn set_subscriber_exists(&self, _exists: bool) -> Result<(), CoreError> {
+            unimplemented!()
+        }
+
+        fn set_propagation_delay(&self, delay: Round) -> Result<(), CoreError> {
+            let mut propagation_delay = self.propagation_delay.lock();
+            *propagation_delay = delay;
+            Ok(())
+        }
+
+        fn set_last_known_proposed_round(&self, _round: Round) -> Result<(), CoreError> {
+            unimplemented!()
+        }
+
+        fn highest_received_rounds(&self) -> Vec<Round> {
+            self.highest_received_rounds.clone()
+        }
+    }
+
+    struct FakeNetworkClient {
+        highest_received_rounds: Vec<Vec<Round>>,
+    }
+
+    impl FakeNetworkClient {
+        fn new(highest_received_rounds: Vec<Vec<Round>>) -> Self {
+            Self {
+                highest_received_rounds,
+            }
+        }
+    }
+
+    #[async_trait]
+    impl NetworkClient for FakeNetworkClient {
+        const SUPPORT_STREAMING: bool = true;
+
+        async fn send_block(
+            &self,
+            _peer: AuthorityIndex,
+            _serialized_block: &VerifiedBlock,
+            _timeout: Duration,
+        ) -> ConsensusResult<()> {
+            unimplemented!("Unimplemented")
+        }
+
+        async fn subscribe_blocks(
+            &self,
+            _peer: AuthorityIndex,
+            _last_received: Round,
+            _timeout: Duration,
+        ) -> ConsensusResult<BlockStream> {
+            unimplemented!("Unimplemented")
+        }
+
+        async fn fetch_blocks(
+            &self,
+            _peer: AuthorityIndex,
+            _block_refs: Vec<BlockRef>,
+            _highest_accepted_rounds: Vec<Round>,
+            _timeout: Duration,
+        ) -> ConsensusResult<Vec<Bytes>> {
+            unimplemented!("Unimplemented")
+        }
+
+        async fn fetch_commits(
+            &self,
+            _peer: AuthorityIndex,
+            _commit_range: CommitRange,
+            _timeout: Duration,
+        ) -> ConsensusResult<(Vec<Bytes>, Vec<Bytes>)> {
+            unimplemented!("Unimplemented")
+        }
+
+        async fn fetch_latest_blocks(
+            &self,
+            _peer:
AuthorityIndex,
+            _authorities: Vec<AuthorityIndex>,
+            _timeout: Duration,
+        ) -> ConsensusResult<Vec<Bytes>> {
+            unimplemented!("Unimplemented")
+        }
+
+        async fn get_latest_rounds(
+            &self,
+            peer: AuthorityIndex,
+            _timeout: Duration,
+        ) -> ConsensusResult<Vec<Round>> {
+            let rounds = self.highest_received_rounds[peer].clone();
+            if rounds.is_empty() {
+                Err(ConsensusError::NetworkRequestTimeout("test".to_string()))
+            } else {
+                Ok(rounds)
+            }
+        }
+    }
+
+    #[tokio::test]
+    async fn test_round_prober() {
+        const NUM_AUTHORITIES: usize = 7;
+        let context = Arc::new(Context::new_for_test(NUM_AUTHORITIES).0);
+        let core_thread_dispatcher = Arc::new(FakeThreadDispatcher::new(vec![
+            110, 120, 130, 140, 150, 160, 170,
+        ]));
+        let store = Arc::new(MemStore::new());
+        let dag_state = Arc::new(RwLock::new(DagState::new(context.clone(), store)));
+        // Have some peers return an error or an incorrect number of rounds.
+        let network_client = Arc::new(FakeNetworkClient::new(vec![
+            vec![],
+            vec![109, 121, 131, 0, 151, 161, 171],
+            vec![101, 0, 103, 104, 105, 166, 107],
+            vec![],
+            vec![100, 102, 133, 0, 155, 106, 177],
+            vec![105, 115, 103, 0, 125, 126, 127],
+            vec![10, 20, 30, 40, 50, 60],
+        ]));
+        let prober = RoundProber::new(
+            context.clone(),
+            core_thread_dispatcher.clone(),
+            dag_state.clone(),
+            network_client.clone(),
+        );
+
+        // Fake the last proposed round to be 110.
+        let block = VerifiedBlock::new_for_test(TestBlock::new(110, 0).build());
+        dag_state.write().accept_block(block);
+
+        // Compute quorum rounds and propagation delay based on last proposed round = 110,
+        // and highest received rounds:
+        // 110, 120, 130, 140, 150, 160, 170,
+        // 109, 121, 131,   0, 151, 161, 171,
+        // 101,   0, 103, 104, 105, 166, 107,
+        //   0,   0,   0,   0,   0,   0,   0,
+        // 100, 102, 133,   0, 155, 106, 177,
+        // 105, 115, 103,   0, 125, 126, 127,
+        //   0,   0,   0,   0,   0,   0,   0,
+
+        let (quorum_rounds, propagation_delay) = prober.probe().await;
+
+        assert_eq!(
+            quorum_rounds,
+            vec![
+                (100, 105),
+                (0, 115),
+                (103, 130),
+                (0, 0),
+                (105, 150),
+                (106, 160),
+                (107, 170)
+            ]
+        );
+
+        // Propagation delay: last proposed round 110 - own quorum round low 100 = 10.
+        assert_eq!(propagation_delay, 10);
+        assert_eq!(core_thread_dispatcher.propagation_delay(), 10);
+    }
+
+    #[tokio::test]
+    async fn test_compute_quorum_round() {
+        let (context, _) = Context::new_for_test(4);
+
+        // Observe latest rounds from peers.
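+        // With four equally staked authorities, the quorum threshold is 3 of 4 stake.
+        // E.g. for authority 0, the peers report rounds [10, 5, 0, 3]: a quorum has
+        // seen it reach at least round 3, and a quorum has seen it reach at most
+        // round 5, so its quorum round is (3, 5).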
+        let highest_received_rounds = vec![
+            vec![10, 11, 12, 13],
+            vec![5, 2, 7, 4],
+            vec![0, 0, 0, 0],
+            vec![3, 4, 5, 6],
+        ];
+
+        let round = compute_quorum_round(
+            &context.committee,
+            AuthorityIndex::new_for_test(0),
+            &highest_received_rounds,
+        );
+        assert_eq!(round, (3, 5));
+
+        let round = compute_quorum_round(
+            &context.committee,
+            AuthorityIndex::new_for_test(1),
+            &highest_received_rounds,
+        );
+        assert_eq!(round, (2, 4));
+
+        let round = compute_quorum_round(
+            &context.committee,
+            AuthorityIndex::new_for_test(2),
+            &highest_received_rounds,
+        );
+        assert_eq!(round, (5, 7));
+
+        let round = compute_quorum_round(
+            &context.committee,
+            AuthorityIndex::new_for_test(3),
+            &highest_received_rounds,
+        );
+        assert_eq!(round, (4, 6));
+    }
+}
diff --git a/consensus/core/src/subscriber.rs b/consensus/core/src/subscriber.rs
index 7b350debf313c..f43fe20b4581d 100644
--- a/consensus/core/src/subscriber.rs
+++ b/consensus/core/src/subscriber.rs
@@ -293,6 +293,14 @@ mod test {
         ) -> ConsensusResult<Vec<Bytes>> {
             unimplemented!("Unimplemented")
         }
+
+        async fn get_latest_rounds(
+            &self,
+            _peer: AuthorityIndex,
+            _timeout: Duration,
+        ) -> ConsensusResult<Vec<Round>> {
+            unimplemented!("Unimplemented")
+        }
     }
 
     #[tokio::test(flavor = "current_thread", start_paused = true)]
diff --git a/consensus/core/src/synchronizer.rs b/consensus/core/src/synchronizer.rs
index 09193b7d06742..cf97dc8b40559 100644
--- a/consensus/core/src/synchronizer.rs
+++ b/consensus/core/src/synchronizer.rs
@@ -16,8 +16,7 @@ use mysten_metrics::{
     monitored_scope,
 };
 use parking_lot::{Mutex, RwLock};
-#[cfg(not(test))]
-use rand::{prelude::SliceRandom, rngs::ThreadRng};
+use rand::{prelude::SliceRandom as _, rngs::ThreadRng};
 use sui_macros::fail_point_async;
 use tap::TapFallible;
 use tokio::{
@@ -862,6 +861,7 @@ impl<C: NetworkClient, V: BlockVerifier, D: CoreThreadDispatcher> Synchronizer<C, V, D>
+        let mut missing_blocks_per_authority = vec![0; context.committee.size()];
+        for block in &missing_blocks {
+            missing_blocks_per_authority[block.author] += 1;
+        }
+        for (missing, (_, authority)) in missing_blocks_per_authority
+            .into_iter()
+            .zip(context.committee.authorities())
+        {
+            context
+                .metrics
+                .node_metrics
+                .synchronizer_missing_blocks_by_authority
+                .with_label_values(&[&authority.hostname])
+                .inc_by(missing as u64);
+        }
-        #[allow(unused_mut)]
         let mut peers = context
             .committee
             .authorities()
@@ -922,11 +935,9 @@ impl<C: NetworkClient, V: BlockVerifier, D: CoreThreadDispatcher> Synchronizer<C, V, D>
             .collect::<Vec<_>>();
         // TODO: probably inject the RNG to allow unit testing - this is a workaround for now.
-        cfg_if::cfg_if! {
-            if #[cfg(not(test))] {
-                // Shuffle the peers
-                peers.shuffle(&mut ThreadRng::default());
-            }
+        if cfg!(not(test)) {
+            // Shuffle the peers
+            peers.shuffle(&mut ThreadRng::default());
         }
 
         let mut peers = peers.into_iter();
@@ -1170,6 +1181,14 @@ mod tests {
             Ok(serialised)
         }
+
+        async fn get_latest_rounds(
+            &self,
+            _peer: AuthorityIndex,
+            _timeout: Duration,
+        ) -> ConsensusResult<Vec<Round>> {
+            unimplemented!("Unimplemented")
+        }
     }
 
     #[test]
diff --git a/consensus/core/src/transaction.rs b/consensus/core/src/transaction.rs
index f0977a5453788..fcf9903bbbd2d 100644
--- a/consensus/core/src/transaction.rs
+++ b/consensus/core/src/transaction.rs
@@ -10,7 +10,7 @@ use tokio::sync::oneshot;
 use tracing::{error, warn};
 
 use crate::{
-    block::{BlockRef, Transaction},
+    block::{BlockRef, Transaction, TransactionIndex},
     context::Context,
 };
 
@@ -211,8 +211,15 @@ impl TransactionClient {
 
 /// `TransactionVerifier` implementation is supplied by Sui to validate transactions in a block,
 /// before acceptance of the block.
 pub trait TransactionVerifier: Send + Sync + 'static {
-    /// Determines if this batch can be voted on
+    /// Determines if this batch of transactions is valid.
+    /// Fails if any one of the transactions is invalid.
     fn verify_batch(&self, batch: &[&[u8]]) -> Result<(), ValidationError>;
+
+    /// Returns indices of transactions to reject.
+    /// Currently only uncertified user transactions can be rejected.
+    /// The rest of the transactions are implicitly voted to be accepted.
+    // TODO: add rejection reasons, add VoteError and wrap the return in Result<>.
+    fn vote_batch(&self, batch: &[&[u8]]) -> Vec<TransactionIndex>;
 }
 
 #[derive(Debug, Error)]
@@ -229,6 +236,10 @@ impl TransactionVerifier for NoopTransactionVerifier {
     fn verify_batch(&self, _batch: &[&[u8]]) -> Result<(), ValidationError> {
         Ok(())
     }
+
+    fn vote_batch(&self, _batch: &[&[u8]]) -> Vec<TransactionIndex> {
+        vec![]
+    }
 }
 
 #[cfg(test)]
diff --git a/crates/mysten-common/Cargo.toml b/crates/mysten-common/Cargo.toml
index 0c8951b79adf2..22c5b718ea598 100644
--- a/crates/mysten-common/Cargo.toml
+++ b/crates/mysten-common/Cargo.toml
@@ -7,6 +7,15 @@ edition = "2021"
 publish = false
 
 [dependencies]
+snap.workspace = true
 tokio.workspace = true
 futures.workspace = true
 parking_lot.workspace = true
+reqwest.workspace = true
+fastcrypto.workspace = true
+mysten-metrics.workspace = true
+sui-tls.workspace = true
+sui-types.workspace = true
+tracing.workspace = true
+prometheus.workspace = true
+anyhow.workspace = true
diff --git a/crates/mysten-common/src/lib.rs b/crates/mysten-common/src/lib.rs
index 3c4df66b8a4a0..7ed00de71fbc4 100644
--- a/crates/mysten-common/src/lib.rs
+++ b/crates/mysten-common/src/lib.rs
@@ -1,4 +1,5 @@
 // Copyright (c) Mysten Labs, Inc.
 // SPDX-License-Identifier: Apache-2.0
 
+pub mod metrics;
 pub mod sync;
diff --git a/crates/mysten-common/src/metrics.rs b/crates/mysten-common/src/metrics.rs
new file mode 100644
index 0000000000000..05043de3c9f90
--- /dev/null
+++ b/crates/mysten-common/src/metrics.rs
@@ -0,0 +1,97 @@
+// Copyright (c) Mysten Labs, Inc.
+// SPDX-License-Identifier: Apache-2.0
+
+use mysten_metrics::RegistryService;
+use prometheus::Encoder;
+use std::time::{SystemTime, UNIX_EPOCH};
+use tracing::{debug, error, info};
+
+pub struct MetricsPushClient {
+    certificate: std::sync::Arc<sui_tls::SelfSignedCertificate>,
+    client: reqwest::Client,
+}
+
+impl MetricsPushClient {
+    pub fn new(metrics_key: sui_types::crypto::NetworkKeyPair) -> Self {
+        use fastcrypto::traits::KeyPair;
+        let certificate = std::sync::Arc::new(sui_tls::SelfSignedCertificate::new(
+            metrics_key.private(),
+            sui_tls::SUI_VALIDATOR_SERVER_NAME,
+        ));
+        let identity = certificate.reqwest_identity();
+        let client = reqwest::Client::builder()
+            .identity(identity)
+            .build()
+            .unwrap();
+
+        Self {
+            certificate,
+            client,
+        }
+    }
+
+    pub fn certificate(&self) -> &sui_tls::SelfSignedCertificate {
+        &self.certificate
+    }
+
+    pub fn client(&self) -> &reqwest::Client {
+        &self.client
+    }
+}
+
+pub async fn push_metrics(
+    client: &MetricsPushClient,
+    url: &reqwest::Url,
+    registry: &RegistryService,
+) -> Result<(), anyhow::Error> {
+    info!(push_url =% url, "pushing metrics to remote");
+
+    // `now` represents a collection timestamp for all of the metrics we send to the proxy.
+    let now = SystemTime::now()
+        .duration_since(UNIX_EPOCH)
+        .unwrap()
+        .as_millis() as i64;
+
+    let mut metric_families = registry.gather_all();
+    for mf in metric_families.iter_mut() {
+        for m in mf.mut_metric() {
+            m.set_timestamp_ms(now);
+        }
+    }
+
+    let mut buf: Vec<u8> = vec![];
+    let encoder = prometheus::ProtobufEncoder::new();
+    encoder.encode(&metric_families, &mut buf)?;
+
+    let mut s = snap::raw::Encoder::new();
+    let compressed = s.compress_vec(&buf).map_err(|err| {
+        error!("unable to snappy encode; {err}");
+        err
+    })?;
+
+    let response = client
+        .client()
+        .post(url.to_owned())
+        .header(reqwest::header::CONTENT_ENCODING, "snappy")
+        .header(reqwest::header::CONTENT_TYPE, prometheus::PROTOBUF_FORMAT)
+        .body(compressed)
+        .send()
+        .await?;
+
+    if !response.status().is_success() {
+        let status = response.status();
+        let body = match response.text().await {
+            Ok(body) => body,
+            Err(error) => format!("couldn't decode response body; {error}"),
+        };
+        return Err(anyhow::anyhow!(
+            "metrics push failed: [{}]:{}",
+            status,
+            body
+        ));
+    }
+
+    debug!("successfully pushed metrics to {url}");
+
+    Ok(())
+}
diff --git a/crates/sui-adapter-transactional-tests/tests/entry_points/large_enum.exp b/crates/sui-adapter-transactional-tests/tests/entry_points/large_enum.exp
index 4c3435ef46f66..5e576af8aa286 100644
--- a/crates/sui-adapter-transactional-tests/tests/entry_points/large_enum.exp
+++ b/crates/sui-adapter-transactional-tests/tests/entry_points/large_enum.exp
@@ -18,5 +18,5 @@ gas summary: computation_cost: 1000000, storage_cost: 988000, storage_rebate: 0
 task 3, lines 40-41:
 //# programmable --sender A
 //> test::m::x3()
-Error: Transaction Effects Status: Move Bytecode Verification Error. Please run the Bytecode Verifier for more information.
-Execution Error: ExecutionError: ExecutionError { inner: ExecutionErrorInner { kind: VMVerificationOrDeserializationError, source: Some(VMError { major_status: TOO_MANY_TYPE_NODES, sub_status: None, message: None, exec_state: None, location: Undefined, indices: [], offsets: [] }), command: Some(0) } } +mutated: object(0,0) +gas summary: computation_cost: 1000000, storage_cost: 988000, storage_rebate: 978120, non_refundable_storage_fee: 9880 diff --git a/crates/sui-adapter-transactional-tests/tests/entry_points/large_enum_v58.exp b/crates/sui-adapter-transactional-tests/tests/entry_points/large_enum_v58.exp new file mode 100644 index 0000000000000..4c3435ef46f66 --- /dev/null +++ b/crates/sui-adapter-transactional-tests/tests/entry_points/large_enum_v58.exp @@ -0,0 +1,22 @@ +processed 4 tasks + +init: +A: object(0,0) + +task 1, lines 8-35: +//# publish +created: object(1,0) +mutated: object(0,1) +gas summary: computation_cost: 1000000, storage_cost: 6452400, storage_rebate: 0, non_refundable_storage_fee: 0 + +task 2, lines 37-38: +//# programmable --sender A +//> test::m::x1() +mutated: object(0,0) +gas summary: computation_cost: 1000000, storage_cost: 988000, storage_rebate: 0, non_refundable_storage_fee: 0 + +task 3, lines 40-41: +//# programmable --sender A +//> test::m::x3() +Error: Transaction Effects Status: Move Bytecode Verification Error. Please run the Bytecode Verifier for more information. +Execution Error: ExecutionError: ExecutionError { inner: ExecutionErrorInner { kind: VMVerificationOrDeserializationError, source: Some(VMError { major_status: TOO_MANY_TYPE_NODES, sub_status: None, message: None, exec_state: None, location: Undefined, indices: [], offsets: [] }), command: Some(0) } } diff --git a/crates/sui-adapter-transactional-tests/tests/entry_points/large_enum_v58.move b/crates/sui-adapter-transactional-tests/tests/entry_points/large_enum_v58.move new file mode 100644 index 0000000000000..b266f17ae52c0 --- /dev/null +++ b/crates/sui-adapter-transactional-tests/tests/entry_points/large_enum_v58.move @@ -0,0 +1,41 @@ +// Copyright (c) Mysten Labs, Inc. 
+// SPDX-License-Identifier: Apache-2.0
+
+// tests error after serializing a large enum return value
+
+//# init --addresses test=0x0 --accounts A --protocol-version 58
+
+//# publish
+
+module test::m {
+
+public enum X1 has drop {
+    Big1(u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8),
+}
+
+public enum X2 has drop {
+    V1(X1, X1, X1),
+    V2(X1, X1, X1),
+    V3(X1, X1, X1),
+}
+
+public enum X3 has drop {
+    X2(X2, X2, X2),
+    U64(u64),
+}
+
+entry fun x1(): X1 {
+    X1::Big1(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0)
+}
+
+entry fun x3(): X3 {
+    X3::U64(0)
+}
+
+}
+
+//# programmable --sender A
+//> test::m::x1()
+
+//# programmable --sender A
+//> test::m::x3()
diff --git a/crates/sui-benchmark/src/workloads/adversarial.rs b/crates/sui-benchmark/src/workloads/adversarial.rs
index 970e98baf9d86..f4d871d79bf40 100644
--- a/crates/sui-benchmark/src/workloads/adversarial.rs
+++ b/crates/sui-benchmark/src/workloads/adversarial.rs
@@ -15,7 +15,6 @@ use crate::ProgrammableTransactionBuilder;
 use crate::{convert_move_call_args, BenchMoveCallArg, ExecutionEffects, ValidatorProxy};
 use anyhow::anyhow;
 use async_trait::async_trait;
-use itertools::Itertools;
 use move_core_types::identifier::Identifier;
 use rand::distributions::{Distribution, Standard};
 use rand::Rng;
@@ -144,7 +143,7 @@ impl FromStr for AdversarialPayloadCfg {
         if !re.is_match(s) {
             return Err(anyhow!("invalid load config"));
         };
-        let toks = s.split('-').collect_vec();
+        let toks = s.split('-').collect::<Vec<_>>();
 
         let payload_type = AdversarialPayloadType::from_str(toks[0])?;
         let load_factor = toks[1].parse::<f32>().unwrap();
diff --git a/crates/sui-bridge-cli/src/main.rs b/crates/sui-bridge-cli/src/main.rs
index 9cc8a29015adc..0919dfe6838e8 100644
--- a/crates/sui-bridge-cli/src/main.rs
+++ b/crates/sui-bridge-cli/src/main.rs
@@ -15,6 +15,7 @@ use std::time::Duration;
 use sui_bridge::client::bridge_authority_aggregator::BridgeAuthorityAggregator;
 use sui_bridge::crypto::{BridgeAuthorityPublicKey, BridgeAuthorityPublicKeyBytes};
 use sui_bridge::eth_transaction_builder::build_eth_transaction;
+use sui_bridge::metrics::BridgeMetrics;
 use sui_bridge::sui_client::SuiClient;
 use sui_bridge::sui_transaction_builder::build_sui_transaction;
 use sui_bridge::types::BridgeActionType;
@@ -80,7 +81,9 @@ async fn main() -> anyhow::Result<()> {
             println!("Chain ID: {:?}", chain_id);
             let config = BridgeCliConfig::load(config_path).expect("Couldn't load BridgeCliConfig");
             let config = LoadedBridgeCliConfig::load(config).await?;
-            let sui_bridge_client = SuiClient::<SuiSdkClient>::new(&config.sui_rpc_url).await?;
+            let metrics = Arc::new(BridgeMetrics::new_for_testing());
+            let sui_bridge_client =
+                SuiClient::<SuiSdkClient>::new(&config.sui_rpc_url, metrics).await?;
 
             let (sui_key, sui_address, gas_object_ref) = config
                 .get_sui_account_info()
@@ -273,7 +276,8 @@ async fn main() -> anyhow::Result<()> {
         }
 
         BridgeCommand::ViewBridgeRegistration { sui_rpc_url } => {
-            let sui_bridge_client = SuiClient::<SuiSdkClient>::new(&sui_rpc_url).await?;
+            let metrics = Arc::new(BridgeMetrics::new_for_testing());
+            let sui_bridge_client = SuiClient::<SuiSdkClient>::new(&sui_rpc_url, metrics).await?;
             let bridge_summary = sui_bridge_client
                 .get_bridge_summary()
                 .await
@@ -358,7 +362,8 @@ async fn main() -> anyhow::Result<()> {
             hex,
             ping,
         } => {
-            let sui_bridge_client = SuiClient::<SuiSdkClient>::new(&sui_rpc_url).await?;
+            let metrics = Arc::new(BridgeMetrics::new_for_testing());
+            let sui_bridge_client = SuiClient::<SuiSdkClient>::new(&sui_rpc_url, metrics).await?;
             let bridge_summary = sui_bridge_client
                 .get_bridge_summary()
                 .await
@@ -504,7 +509,9 @@ async fn
main() -> anyhow::Result<()> {
         BridgeCommand::Client { config_path, cmd } => {
             let config = BridgeCliConfig::load(config_path).expect("Couldn't load BridgeCliConfig");
             let config = LoadedBridgeCliConfig::load(config).await?;
-            let sui_bridge_client = SuiClient::<SuiSdkClient>::new(&config.sui_rpc_url).await?;
+            let metrics = Arc::new(BridgeMetrics::new_for_testing());
+            let sui_bridge_client =
+                SuiClient::<SuiSdkClient>::new(&config.sui_rpc_url, metrics).await?;
             cmd.handle(&config, sui_bridge_client).await?;
             return Ok(());
         }
diff --git a/crates/sui-bridge-indexer/Cargo.toml b/crates/sui-bridge-indexer/Cargo.toml
index 3b625b1378aa3..46dd6ff414e84 100644
--- a/crates/sui-bridge-indexer/Cargo.toml
+++ b/crates/sui-bridge-indexer/Cargo.toml
@@ -32,6 +32,7 @@ telemetry-subscribers.workspace = true
 tracing.workspace = true
 backoff.workspace = true
 sui-config.workspace = true
+tempfile.workspace = true
 sui-indexer-builder.workspace = true
 
 [dev-dependencies]
diff --git a/crates/sui-bridge-indexer/src/config.rs b/crates/sui-bridge-indexer/src/config.rs
index ac4d56d47ddcb..58b742642b9a8 100644
--- a/crates/sui-bridge-indexer/src/config.rs
+++ b/crates/sui-bridge-indexer/src/config.rs
@@ -8,7 +8,8 @@ use std::env;
 #[derive(Debug, Clone, Deserialize, Serialize)]
 pub struct IndexerConfig {
     pub remote_store_url: String,
-    pub checkpoints_path: String,
+    /// Only provide this if you use a colocated FN
+    pub checkpoints_path: Option<String>,
     pub sui_rpc_url: String,
     pub eth_rpc_url: String,
@@ -24,6 +25,9 @@ pub struct IndexerConfig {
     pub eth_sui_bridge_contract_address: String,
     pub metric_port: u16,
+
+    /// A temporary flag to disable the eth indexer to test mainnet before eth contracts are deployed.
+    pub disable_eth: Option<bool>,
 }
 
 impl sui_config::Config for IndexerConfig {}
diff --git a/crates/sui-bridge-indexer/src/eth_bridge_indexer.rs b/crates/sui-bridge-indexer/src/eth_bridge_indexer.rs
index 7b00e3eeaa877..99b5e0ca2a56a 100644
--- a/crates/sui-bridge-indexer/src/eth_bridge_indexer.rs
+++ b/crates/sui-bridge-indexer/src/eth_bridge_indexer.rs
@@ -1,8 +1,7 @@
 // Copyright (c) Mysten Labs, Inc.
 // SPDX-License-Identifier: Apache-2.0
 
-use std::collections::{HashMap, HashSet};
-use std::str::FromStr;
+use std::collections::HashMap;
 use std::sync::Arc;
 use std::time::Duration;
 
@@ -14,8 +13,10 @@ use ethers::types::{Address as EthAddress, Block, Filter, H256};
 use prometheus::{IntCounterVec, IntGaugeVec};
 use sui_bridge::error::BridgeError;
 use sui_bridge::eth_client::EthClient;
+use sui_bridge::eth_syncer::EthSyncer;
 use sui_bridge::metered_eth_provider::MeteredEthHttpProvier;
 use sui_bridge::retry_with_max_elapsed_time;
+use sui_indexer_builder::Task;
 use tokio::task::JoinHandle;
 use tracing::info;
 
@@ -31,11 +32,18 @@ use crate::{
     BridgeDataSource, ProcessedTxnData, TokenTransfer, TokenTransferData, TokenTransferStatus,
 };
 
-type RawEthData = (RawEthLog, Block<H256>, Transaction);
+pub struct RawEthData {
+    log: RawEthLog,
+    block: Block<H256>,
+    transaction: Transaction,
+    is_finalized: bool,
+}
+// Max log query range
+const MAX_LOG_QUERY_RANGE: u64 = 1000;
 pub struct EthSubscriptionDatasource {
-    bridge_address: EthAddress,
     eth_client: Arc<EthClient<MeteredEthHttpProvier>>,
+    addresses: Vec<EthAddress>,
     eth_ws_url: String,
     indexer_metrics: BridgeIndexerMetrics,
     genesis_block: u64,
 }
 
 impl EthSubscriptionDatasource {
     pub async fn new(
-        eth_sui_bridge_contract_address: String,
+        eth_sui_bridge_contract_addresses: Vec<EthAddress>,
         eth_client: Arc<EthClient<MeteredEthHttpProvier>>,
         eth_ws_url: String,
         indexer_metrics: BridgeIndexerMetrics,
         genesis_block: u64,
     ) -> Result<Self, Error> {
-        let bridge_address = EthAddress::from_str(&eth_sui_bridge_contract_address)?;
         Ok(Self {
-            bridge_address,
+            addresses: eth_sui_bridge_contract_addresses,
            eth_client,
            eth_ws_url,
            indexer_metrics,
@@ -63,21 +70,20 @@ impl EthSubscriptionDatasource {
 impl Datasource<RawEthData> for EthSubscriptionDatasource {
     async fn start_data_retrieval(
         &self,
-        starting_checkpoint: u64,
-        target_checkpoint: u64,
+        task: Task,
         data_sender: DataSender<RawEthData>,
     ) -> Result<JoinHandle<Result<(), Error>>, Error> {
         let filter = Filter::new()
-            .address(self.bridge_address)
-            .from_block(starting_checkpoint)
-            .to_block(target_checkpoint);
+            .address(self.addresses.clone())
+            .from_block(task.start_checkpoint)
+            .to_block(task.target_checkpoint);
 
         let eth_ws_url = self.eth_ws_url.clone();
-        let indexer_metrics: BridgeIndexerMetrics = self.indexer_metrics.clone();
 
         let handle = spawn_monitored_task!(async move {
             let eth_ws_client = Provider::<Ws>::connect(&eth_ws_url).await?;
+            // TODO: enable a shared cache for blocks that can be used by both the subscription and finalized sync
             let mut cached_blocks: HashMap<u64, Block<H256>> = HashMap::new();
 
             let mut stream = eth_ws_client.subscribe_logs(&filter).await?;
@@ -123,12 +129,16 @@ impl Datasource<RawEthData> for EthSubscriptionDatasource {
                 };
 
                 data_sender
-                    .send((block_number, vec![(raw_log, block, transaction)]))
+                    .send((
+                        block_number,
+                        vec![RawEthData {
+                            log: raw_log,
+                            block,
+                            transaction,
+                            is_finalized: false,
+                        }],
+                    ))
                     .await?;
-
-                indexer_metrics
-                    .latest_committed_eth_block
-                    .set(block_number as i64);
             }
 
             Ok::<_, Error>(())
@@ -148,58 +158,47 @@ impl Datasource<RawEthData> for EthSubscriptionDatasource {
     }
 
     fn get_tasks_remaining_checkpoints_metric(&self) -> &IntGaugeVec {
-        &self.indexer_metrics.tasks_remaining_checkpoints
+        &self.indexer_metrics.backfill_tasks_remaining_checkpoints
     }
 
     fn get_tasks_processed_checkpoints_metric(&self) -> &IntCounterVec {
         &self.indexer_metrics.tasks_processed_checkpoints
     }
-
-    fn get_live_task_checkpoint_metric(&self) -> &IntGaugeVec {
-        &self.indexer_metrics.live_task_current_checkpoint
-    }
 }
 
-pub struct EthSyncDatasource {
-    bridge_address: EthAddress,
+pub struct
EthFinalizedSyncDatasource {
+    bridge_addresses: Vec<EthAddress>,
     eth_http_url: String,
     eth_client: Arc<EthClient<MeteredEthHttpProvier>>,
     indexer_metrics: BridgeIndexerMetrics,
+    bridge_metrics: Arc<BridgeMetrics>,
     genesis_block: u64,
 }
 
-impl EthSyncDatasource {
+impl EthFinalizedSyncDatasource {
     pub async fn new(
-        eth_sui_bridge_contract_address: String,
+        eth_sui_bridge_contract_addresses: Vec<EthAddress>,
+        eth_client: Arc<EthClient<MeteredEthHttpProvier>>,
         eth_http_url: String,
         indexer_metrics: BridgeIndexerMetrics,
         bridge_metrics: Arc<BridgeMetrics>,
         genesis_block: u64,
     ) -> Result<Self, Error> {
-        let bridge_address = EthAddress::from_str(&eth_sui_bridge_contract_address)?;
-        let eth_client: Arc<EthClient<MeteredEthHttpProvier>> = Arc::new(
-            EthClient::<MeteredEthHttpProvier>::new(
-                &eth_http_url,
-                HashSet::from_iter(vec![]), // dummy
-                bridge_metrics.clone(),
-            )
-            .await?,
-        );
         Ok(Self {
-            bridge_address,
-            eth_client,
+            bridge_addresses: eth_sui_bridge_contract_addresses,
             eth_http_url,
+            eth_client,
             indexer_metrics,
+            bridge_metrics,
             genesis_block,
         })
     }
 }
 
 #[async_trait]
-impl Datasource<RawEthData> for EthSyncDatasource {
+impl Datasource<RawEthData> for EthFinalizedSyncDatasource {
     async fn start_data_retrieval(
         &self,
-        starting_checkpoint: u64,
-        target_checkpoint: u64,
+        task: Task,
         data_sender: DataSender<RawEthData>,
     ) -> Result<JoinHandle<Result<(), Error>>, Error> {
         let provider = Arc::new(
@@ -207,63 +206,33 @@ impl Datasource<RawEthData> for EthSyncDatasource {
                 .interval(std::time::Duration::from_millis(2000)),
         );
 
-        let bridge_address = self.bridge_address;
-        let indexer_metrics: BridgeIndexerMetrics = self.indexer_metrics.clone();
+        let bridge_addresses = self.bridge_addresses.clone();
         let client = self.eth_client.clone();
         let provider = provider.clone();
+        let bridge_metrics = self.bridge_metrics.clone();
 
         let handle = spawn_monitored_task!(async move {
-            let mut cached_blocks: HashMap<u64, Block<H256>> = HashMap::new();
-
-            let Ok(Ok(logs)) = retry_with_max_elapsed_time!(
-                client.get_raw_events_in_range(
-                    bridge_address,
-                    starting_checkpoint,
-                    target_checkpoint
-                ),
-                Duration::from_secs(30000)
-            ) else {
-                panic!("Unable to get logs from provider");
-            };
-
-            let mut data = Vec::new();
-            let mut first_block = 0;
-
-            for log in logs {
-                let block = if let Some(cached_block) = cached_blocks.get(&log.block_number) {
-                    cached_block.clone()
-                } else {
-                    let Ok(Ok(Some(block))) = retry_with_max_elapsed_time!(
-                        provider.get_block(log.block_number),
-                        Duration::from_secs(30000)
-                    ) else {
-                        panic!("Unable to get block from provider");
-                    };
-
-                    cached_blocks.insert(log.block_number, block.clone());
-                    block
-                };
-
-                if first_block == 0 {
-                    first_block = log.block_number;
-                }
-
-                let Ok(Ok(Some(transaction))) = retry_with_max_elapsed_time!(
-                    provider.get_transaction(log.tx_hash),
-                    Duration::from_secs(30000)
-                ) else {
-                    panic!("Unable to get transaction from provider");
-                };
-
-                data.push((log, block, transaction));
+            if task.is_live_task {
+                retrieve_and_process_live_finalized_logs(
+                    client,
+                    provider,
+                    bridge_addresses,
+                    task.start_checkpoint,
+                    data_sender,
+                    bridge_metrics,
+                )
+                .await?;
+            } else {
+                retrieve_and_process_log_range(
+                    client,
+                    provider,
+                    bridge_addresses,
+                    task.start_checkpoint,
+                    task.target_checkpoint,
+                    data_sender,
+                )
+                .await?;
             }
-
-            data_sender.send((target_checkpoint, data)).await?;
-
-            indexer_metrics
-                .last_synced_eth_block
-                .set(first_block as i64);
-
             Ok::<_, Error>(())
         });
 
@@ -282,16 +251,141 @@ impl Datasource<RawEthData> for EthSyncDatasource {
     }
 
     fn get_tasks_remaining_checkpoints_metric(&self) -> &IntGaugeVec {
-        &self.indexer_metrics.tasks_remaining_checkpoints
+        &self.indexer_metrics.backfill_tasks_remaining_checkpoints
     }
 
     fn get_tasks_processed_checkpoints_metric(&self) -> &IntCounterVec {
         &self.indexer_metrics.tasks_processed_checkpoints
     }
+}
+
+async fn retrieve_and_process_live_finalized_logs(
+    client: Arc<EthClient<MeteredEthHttpProvier>>,
+    provider: Arc<Provider<Http>>,
+    addresses: Vec<EthAddress>,
+    starting_checkpoint: u64,
+    data_sender: DataSender<RawEthData>,
+    bridge_metrics: Arc<BridgeMetrics>,
+) -> Result<(), Error> {
+    let eth_contracts_to_watch = HashMap::from_iter(
+        addresses
+            .iter()
+            .map(|address| (*address, starting_checkpoint)),
+    );
+
+    let (_, mut eth_events_rx, _) = EthSyncer::new(client.clone(), eth_contracts_to_watch)
+        .run(bridge_metrics.clone())
+        .await
+        .expect("Failed to start eth syncer");
+
+    // Forward received events to the data sender.
+    while let Some((_, block, logs)) = eth_events_rx.recv().await {
+        let raw_logs: Vec<RawEthLog> = logs
+            .into_iter()
+            .map(|log| RawEthLog {
+                block_number: block,
+                tx_hash: log.tx_hash,
+                log: log.log,
+            })
+            .collect();
+
+        process_logs(raw_logs, provider.clone(), data_sender.clone(), block, true)
+            .await
+            .expect("Failed to process logs");
+    }
+
+    panic!("Eth finalized syncer live task stopped unexpectedly");
+}
 
-    fn get_live_task_checkpoint_metric(&self) -> &IntGaugeVec {
-        &self.indexer_metrics.live_task_current_checkpoint
+async fn retrieve_and_process_log_range(
+    client: Arc<EthClient<MeteredEthHttpProvier>>,
+    provider: Arc<Provider<Http>>,
+    addresses: Vec<EthAddress>,
+    starting_checkpoint: u64,
+    target_checkpoint: u64,
+    data_sender: DataSender<RawEthData>,
+) -> Result<(), Error> {
+    let mut all_logs = Vec::new();
+    let mut current_start = starting_checkpoint;
+
+    while current_start <= target_checkpoint {
+        // Calculate the end of the current chunk
+        let current_end = (current_start + MAX_LOG_QUERY_RANGE - 1).min(target_checkpoint);
+
+        // Retry the request for the current chunk
+        let Ok(Ok(logs)) = retry_with_max_elapsed_time!(
+            client.get_raw_events_in_range(addresses.clone(), current_start, current_end),
+            Duration::from_secs(30000)
+        ) else {
+            panic!(
+                "Unable to get logs from provider for range {} to {}",
+                current_start, current_end
+            );
+        };
+
+        // Add the logs from this chunk to the total
+        all_logs.extend(logs);
+
+        // Update the start for the next chunk
+        current_start = current_end + 1;
     }
+
+    process_logs(
+        all_logs,
+        provider.clone(),
+        data_sender.clone(),
+        target_checkpoint,
+        true,
+    )
+    .await?;
+
+    Ok::<_, Error>(())
+}
+
+async fn process_logs(
+    logs: Vec<RawEthLog>,
+    provider: Arc<Provider<Http>>,
+    data_sender: DataSender<RawEthData>,
+    target_checkpoint: u64,
+    is_finalized: bool,
+) -> Result<(), Error> {
+    let mut data = Vec::new();
+    let mut cached_blocks: HashMap<u64, Block<H256>> = HashMap::new();
+
+    for log in logs {
+        let block = if let Some(cached_block) = cached_blocks.get(&log.block_number) {
+            cached_block.clone()
+        } else {
+            // TODO: add block query parallelism
+            let Ok(Ok(Some(block))) = retry_with_max_elapsed_time!(
+                provider.get_block(log.block_number),
+                Duration::from_secs(30000)
+            ) else {
+                panic!("Unable to get block from provider");
+            };
+
+            cached_blocks.insert(log.block_number, block.clone());
+            block
+        };
+
+        let Ok(Ok(Some(transaction))) = retry_with_max_elapsed_time!(
+            provider.get_transaction(log.tx_hash),
+            Duration::from_secs(30000)
+        ) else {
+            panic!("Unable to get transaction from provider");
+        };
+
+        data.push(RawEthData {
+            log,
+            block,
+            transaction,
+            is_finalized,
+        });
+    }
+
+    data_sender.send((target_checkpoint, data)).await?;
+
+    Ok::<_, Error>(())
 }
 
 #[derive(Clone)]
@@ -299,10 +393,15 @@ pub struct EthDataMapper {
     pub metrics: BridgeIndexerMetrics,
 }
 
-impl<E: EthLog> DataMapper<(E, Block<H256>, Transaction), ProcessedTxnData> for EthDataMapper {
+impl DataMapper<RawEthData, ProcessedTxnData> for EthDataMapper {
     fn map(
         &self,
-        (log, block, transaction): (E, Block<H256>,
Transaction),
+        RawEthData {
+            log,
+            block,
+            transaction,
+            is_finalized,
+        }: RawEthData,
     ) -> Result<Vec<ProcessedTxnData>, Error> {
         let eth_bridge_event = EthBridgeEvent::try_from_log(log.log());
         if eth_bridge_event.is_none() {
@@ -328,12 +427,14 @@ impl DataMapper<(E, Block<H256>, Transaction), ProcessedTxnData> fo
                     status: TokenTransferStatus::Deposited,
                     gas_usage: gas.as_u64() as i64,
                     data_source: BridgeDataSource::Eth,
+                    is_finalized,
                     data: Some(TokenTransferData {
                         sender_address: bridge_event.sender_address.as_bytes().to_vec(),
                         destination_chain: bridge_event.destination_chain_id,
                         recipient_address: bridge_event.recipient_address.to_vec(),
                         token_id: bridge_event.token_id,
                         amount: bridge_event.sui_adjusted_amount,
+                        is_finalized,
                     }),
                 })
             }
@@ -351,6 +452,7 @@ impl DataMapper<(E, Block<H256>, Transaction), ProcessedTxnData> fo
                     gas_usage: gas.as_u64() as i64,
                     data_source: BridgeDataSource::Eth,
                     data: None,
+                    is_finalized,
                 })
             }
             EthSuiBridgeEvents::PausedFilter(_)
diff --git a/crates/sui-bridge-indexer/src/lib.rs b/crates/sui-bridge-indexer/src/lib.rs
index b86dd5883a93e..d74f194f653c2 100644
--- a/crates/sui-bridge-indexer/src/lib.rs
+++ b/crates/sui-bridge-indexer/src/lib.rs
@@ -49,6 +49,7 @@ pub struct TokenTransfer {
     gas_usage: i64,
     data_source: BridgeDataSource,
     data: Option<TokenTransferData>,
+    is_finalized: bool,
 }
 
 #[derive(Clone)]
@@ -58,6 +59,7 @@ pub struct TokenTransferData {
     recipient_address: Vec<u8>,
     token_id: u8,
     amount: u64,
+    is_finalized: bool,
 }
 
 impl TokenTransfer {
@@ -72,6 +74,7 @@ impl TokenTransfer {
             status: self.status.to_string(),
             gas_usage: self.gas_usage,
             data_source: self.data_source.to_string(),
+            is_finalized: self.is_finalized,
         }
     }
 
@@ -87,6 +90,7 @@ impl TokenTransfer {
             recipient_address: data.recipient_address.clone(),
             token_id: data.token_id as i32,
             amount: data.amount as i64,
+            is_finalized: data.is_finalized,
         })
     }
 }
diff --git a/crates/sui-bridge-indexer/src/main.rs b/crates/sui-bridge-indexer/src/main.rs
index 3ad53b1d02d31..56e6740e27050 100644
--- a/crates/sui-bridge-indexer/src/main.rs
+++ b/crates/sui-bridge-indexer/src/main.rs
@@ -3,22 +3,25 @@
 use anyhow::Result;
 use clap::*;
+use ethers::types::Address as EthAddress;
 use std::collections::HashSet;
 use std::env;
 use std::net::IpAddr;
 use std::net::{Ipv4Addr, SocketAddr};
 use std::path::PathBuf;
+use std::str::FromStr;
 use std::sync::Arc;
+use sui_bridge::eth_client::EthClient;
+use sui_bridge::metered_eth_provider::MeteredEthHttpProvier;
+use sui_bridge_indexer::eth_bridge_indexer::EthFinalizedSyncDatasource;
 use sui_bridge_indexer::eth_bridge_indexer::EthSubscriptionDatasource;
-use sui_bridge_indexer::eth_bridge_indexer::EthSyncDatasource;
 use tokio::task::JoinHandle;
 use tracing::info;
 
 use mysten_metrics::metered_channel::channel;
 use mysten_metrics::spawn_logged_monitored_task;
 use mysten_metrics::start_prometheus_server;
-use sui_bridge::eth_client::EthClient;
-use sui_bridge::metered_eth_provider::MeteredEthHttpProvier;
+
 use sui_bridge::metrics::BridgeMetrics;
 use sui_bridge_indexer::config::IndexerConfig;
 use sui_bridge_indexer::eth_bridge_indexer::EthDataMapper;
@@ -32,6 +35,9 @@ use sui_bridge_indexer::sui_transaction_queries::start_sui_tx_polling_task;
 use sui_config::Config;
 use sui_data_ingestion_core::DataIngestionMetrics;
 use sui_indexer_builder::indexer_builder::{BackfillStrategy, IndexerBuilder};
+use sui_indexer_builder::progress::{
+    OutOfOrderSaveAfterDurationPolicy, ProgressSavingPolicy, SaveAfterDurationPolicy,
+};
 use sui_sdk::SuiClientBuilder;
 
 #[derive(Parser, Clone, Debug)]
@@ -72,7 +78,20 @@ async fn main() -> Result<()> {
     let bridge_metrics = Arc::new(BridgeMetrics::new(&registry));
 
     let db_url = config.db_url.clone();
-    let datastore = PgBridgePersistent::new(get_connection_pool(db_url.clone()).await);
+    let datastore = PgBridgePersistent::new(
+        get_connection_pool(db_url.clone()).await,
+        ProgressSavingPolicy::SaveAfterDuration(SaveAfterDurationPolicy::new(
+            tokio::time::Duration::from_secs(30),
+        )),
+        indexer_meterics.clone(),
+    );
+    let datastore_with_out_of_order_source = PgBridgePersistent::new(
+        get_connection_pool(db_url.clone()).await,
+        ProgressSavingPolicy::OutOfOrderSaveAfterDuration(OutOfOrderSaveAfterDurationPolicy::new(
+            tokio::time::Duration::from_secs(30),
+        )),
+        indexer_meterics.clone(),
+    );
 
     let eth_client: Arc<EthClient<MeteredEthHttpProvier>> = Arc::new(
         EthClient::<MeteredEthHttpProvier>::new(
@@ -83,48 +102,61 @@ async fn main() -> Result<()> {
         .await?,
     );
 
-    // Start the eth subscription indexer
-    let eth_subscription_datasource = EthSubscriptionDatasource::new(
-        config.eth_sui_bridge_contract_address.clone(),
-        eth_client.clone(),
-        config.eth_ws_url.clone(),
-        indexer_meterics.clone(),
-        config.eth_bridge_genesis_block,
-    )
-    .await?;
-    let eth_subscription_indexer = IndexerBuilder::new(
-        "EthBridgeSubscriptionIndexer",
-        eth_subscription_datasource,
-        EthDataMapper {
-            metrics: indexer_meterics.clone(),
-        },
-        datastore.clone(),
-    )
-    .with_backfill_strategy(BackfillStrategy::Disabled)
-    .build();
-    let subscription_indexer_fut = spawn_logged_monitored_task!(eth_subscription_indexer.start());
+    let mut tasks = vec![];
+    if Some(true) == config.disable_eth {
+        info!("Eth indexer is disabled");
+    } else {
+        // Start the eth subscription indexer
+        let bridge_addresses = vec![EthAddress::from_str(
+            &config.eth_sui_bridge_contract_address,
+        )?];
 
-    // Start the eth sync data source
-    let eth_sync_datasource = EthSyncDatasource::new(
-        config.eth_sui_bridge_contract_address.clone(),
-        config.eth_rpc_url.clone(),
-        indexer_meterics.clone(),
-        bridge_metrics.clone(),
-        config.eth_bridge_genesis_block,
-    )
-    .await?;
-    let eth_sync_indexer = IndexerBuilder::new(
-        "EthBridgeSyncIndexer",
-        eth_sync_datasource,
-        EthDataMapper {
-            metrics: indexer_meterics.clone(),
-        },
-        datastore.clone(),
-    )
-    .with_backfill_strategy(BackfillStrategy::Partitioned { task_size: 1000 })
-    .disable_live_task()
-    .build();
-    let sync_indexer_fut = spawn_logged_monitored_task!(eth_sync_indexer.start());
+        // Start the eth subscription indexer
+        let eth_subscription_datasource = EthSubscriptionDatasource::new(
+            bridge_addresses.clone(),
+            eth_client.clone(),
+            config.eth_ws_url.clone(),
+            indexer_meterics.clone(),
+            config.eth_bridge_genesis_block,
+        )
+        .await?;
+        let eth_subscription_indexer = IndexerBuilder::new(
+            "EthBridgeSubscriptionIndexer",
+            eth_subscription_datasource,
+            EthDataMapper {
+                metrics: indexer_meterics.clone(),
+            },
+            datastore.clone(),
+        )
+        .with_backfill_strategy(BackfillStrategy::Disabled)
+        .build();
+        tasks.push(spawn_logged_monitored_task!(
+            eth_subscription_indexer.start()
+        ));
+
+        // Start the eth sync data source
+        let eth_sync_datasource = EthFinalizedSyncDatasource::new(
+            bridge_addresses.clone(),
+            eth_client.clone(),
+            config.eth_rpc_url.clone(),
+            indexer_meterics.clone(),
+            bridge_metrics.clone(),
+            config.eth_bridge_genesis_block,
+        )
+        .await?;
+
+        let eth_sync_indexer = IndexerBuilder::new(
+            "EthBridgeFinalizedSyncIndexer",
+            eth_sync_datasource,
+            EthDataMapper {
+                metrics: indexer_meterics.clone(),
+            },
+            datastore,
+        )
+        .with_backfill_strategy(BackfillStrategy::Partitioned { task_size: 1000 })
+        .build();
+
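+        // The finalized sync indexer uses the in-order `datastore` above, while the
+        // Sui indexer below uses `datastore_with_out_of_order_source`, whose policy
+        // fills gaps in out-of-order checkpoints before persisting progress.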
+        tasks.push(spawn_logged_monitored_task!(eth_sync_indexer.start()));
+    }
 
     let sui_client = Arc::new(
         SuiClientBuilder::default()
@@ -135,7 +167,10 @@ async fn main() -> Result<()> {
         config.remote_store_url,
         sui_client,
         config.concurrency as usize,
-        config.checkpoints_path.clone().into(),
+        config
+            .checkpoints_path
+            .map(|p| p.into())
+            .unwrap_or(tempfile::tempdir()?.into_path()),
         config.sui_bridge_genesis_checkpoint,
         ingestion_metrics.clone(),
         indexer_meterics.clone(),
@@ -146,15 +181,14 @@ async fn main() -> Result<()> {
         SuiBridgeDataMapper {
             metrics: indexer_meterics.clone(),
         },
-        datastore,
+        datastore_with_out_of_order_source,
     )
     .build();
-    indexer.start().await?;
+    tasks.push(spawn_logged_monitored_task!(indexer.start()));
 
-    // These tasks should not finish
-    subscription_indexer_fut.await.unwrap().unwrap();
-    sync_indexer_fut.await.unwrap().unwrap();
-    Ok(())
+    // Wait for tasks in `tasks` to finish. Return when any one of them returns an error.
+    futures::future::try_join_all(tasks).await?;
+    unreachable!("Indexer tasks finished unexpectedly");
 }
 
 #[allow(unused)]
diff --git a/crates/sui-bridge-indexer/src/metrics.rs b/crates/sui-bridge-indexer/src/metrics.rs
index 9ea85514e20d3..b24387ac0b828 100644
--- a/crates/sui-bridge-indexer/src/metrics.rs
+++ b/crates/sui-bridge-indexer/src/metrics.rs
@@ -19,11 +19,9 @@ pub struct BridgeIndexerMetrics {
     pub(crate) total_eth_token_transfer_claimed: IntCounter,
     pub(crate) total_eth_bridge_txn_other: IntCounter,
     pub(crate) last_committed_sui_checkpoint: IntGauge,
-    pub(crate) latest_committed_eth_block: IntGauge,
-    pub(crate) last_synced_eth_block: IntGauge,
-    pub(crate) tasks_remaining_checkpoints: IntGaugeVec,
+    pub(crate) backfill_tasks_remaining_checkpoints: IntGaugeVec,
     pub(crate) tasks_processed_checkpoints: IntCounterVec,
-    pub(crate) live_task_current_checkpoint: IntGaugeVec,
+    pub(crate) tasks_current_checkpoints: IntGaugeVec,
 }
 
 impl BridgeIndexerMetrics {
@@ -89,21 +87,9 @@ impl BridgeIndexerMetrics {
                 registry,
             )
             .unwrap(),
-            latest_committed_eth_block: register_int_gauge_with_registry!(
-                "bridge_indexer_last_committed_eth_block",
-                "The latest eth block that indexer committed to DB",
-                registry,
-            )
-            .unwrap(),
-            last_synced_eth_block: register_int_gauge_with_registry!(
-                "bridge_indexer_last_synced_eth_block",
-                "The last eth block that indexer committed to DB",
-                registry,
-            )
-            .unwrap(),
-            tasks_remaining_checkpoints: register_int_gauge_vec_with_registry!(
-                "bridge_indexer_tasks_remaining_checkpoints",
-                "The remaining checkpoints for each task",
+            backfill_tasks_remaining_checkpoints: register_int_gauge_vec_with_registry!(
+                "bridge_indexer_backfill_tasks_remaining_checkpoints",
+                "The remaining checkpoints for the currently running backfill task",
                 &["task_name"],
                 registry,
             )
@@ -111,14 +97,14 @@ impl BridgeIndexerMetrics {
             tasks_processed_checkpoints: register_int_counter_vec_with_registry!(
                 "bridge_indexer_tasks_processed_checkpoints",
                 "Total processed checkpoints for each task",
-                &["task_name"],
+                &["task_name", "task_type"],
                 registry,
             )
             .unwrap(),
-            live_task_current_checkpoint: register_int_gauge_vec_with_registry!(
-                "bridge_indexer_live_task_current_checkpoint",
-                "Current checkpoint of live task",
-                &["task_name"],
+            tasks_current_checkpoints: register_int_gauge_vec_with_registry!(
+                "bridge_indexer_tasks_current_checkpoints",
+                "Current checkpoint for each task",
+                &["task_name", "task_type"],
                 registry,
             )
             .unwrap(),
diff --git
a/crates/sui-bridge-indexer/src/migrations/2024-09-11-010632_add_finalized_to_token_transfer/down.sql b/crates/sui-bridge-indexer/src/migrations/2024-09-11-010632_add_finalized_to_token_transfer/down.sql
new file mode 100644
index 0000000000000..1b0c79ca6d0e2
--- /dev/null
+++ b/crates/sui-bridge-indexer/src/migrations/2024-09-11-010632_add_finalized_to_token_transfer/down.sql
@@ -0,0 +1,2 @@
+ALTER TABLE token_transfer DROP COLUMN is_finalized;
+ALTER TABLE token_transfer_data DROP COLUMN is_finalized;
\ No newline at end of file
diff --git a/crates/sui-bridge-indexer/src/migrations/2024-09-11-010632_add_finalized_to_token_transfer/up.sql b/crates/sui-bridge-indexer/src/migrations/2024-09-11-010632_add_finalized_to_token_transfer/up.sql
new file mode 100644
index 0000000000000..cc32d80292ab1
--- /dev/null
+++ b/crates/sui-bridge-indexer/src/migrations/2024-09-11-010632_add_finalized_to_token_transfer/up.sql
@@ -0,0 +1,2 @@
+ALTER TABLE token_transfer ADD COLUMN is_finalized BOOLEAN DEFAULT false;
+ALTER TABLE token_transfer_data ADD COLUMN is_finalized BOOLEAN DEFAULT false;
\ No newline at end of file
diff --git a/crates/sui-bridge-indexer/src/models.rs b/crates/sui-bridge-indexer/src/models.rs
index 90435cd60c154..011dc238576ac 100644
--- a/crates/sui-bridge-indexer/src/models.rs
+++ b/crates/sui-bridge-indexer/src/models.rs
@@ -4,7 +4,7 @@
 use diesel::data_types::PgTimestamp;
 use diesel::{Identifiable, Insertable, Queryable, Selectable};
 
-use sui_indexer_builder::Task;
+use sui_indexer_builder::{Task, LIVE_TASK_TARGET_CHECKPOINT};
 
 use crate::schema::{
     progress_store, sui_error_transactions, sui_progress_store, token_transfer,
     token_transfer_data,
@@ -23,10 +23,11 @@ impl From<ProgressStore> for Task {
     fn from(value: ProgressStore) -> Self {
         Self {
             task_name: value.task_name,
-            checkpoint: value.checkpoint as u64,
+            start_checkpoint: value.checkpoint as u64,
             target_checkpoint: value.target_checkpoint as u64,
             // Ok to unwrap, timestamp is defaulted to now() in database
             timestamp: value.timestamp.expect("Timestamp not set").0 as u64,
+            is_live_task: value.target_checkpoint == LIVE_TASK_TARGET_CHECKPOINT,
         }
     }
 }
@@ -50,6 +51,7 @@ pub struct TokenTransfer {
     pub txn_sender: Vec<u8>,
     pub gas_usage: i64,
     pub data_source: String,
+    pub is_finalized: bool,
 }
 
 #[derive(Queryable, Selectable, Insertable, Identifiable, Debug)]
@@ -65,6 +67,7 @@ pub struct TokenTransferData {
     pub recipient_address: Vec<u8>,
     pub token_id: i32,
     pub amount: i64,
+    pub is_finalized: bool,
 }
 
 #[derive(Queryable, Selectable, Insertable, Identifiable, Debug)]
diff --git a/crates/sui-bridge-indexer/src/postgres_manager.rs b/crates/sui-bridge-indexer/src/postgres_manager.rs
index 9cf4680dc523e..1751bf8e9b15b 100644
--- a/crates/sui-bridge-indexer/src/postgres_manager.rs
+++ b/crates/sui-bridge-indexer/src/postgres_manager.rs
@@ -2,12 +2,11 @@
 // SPDX-License-Identifier: Apache-2.0
 
 use crate::models::SuiProgressStore;
-use crate::models::TokenTransfer as DBTokenTransfer;
 use crate::schema::sui_progress_store::txn_digest;
 use crate::schema::{sui_error_transactions, token_transfer_data};
 use crate::{schema, schema::token_transfer, ProcessedTxnData};
-use diesel::result::Error;
-use diesel::BoolExpressionMethods;
+use diesel::query_dsl::methods::FilterDsl;
+use diesel::upsert::excluded;
 use diesel::{ExpressionMethods, OptionalExtension, QueryDsl, SelectableHelper};
 use diesel_async::pooled_connection::bb8::Pool;
 use diesel_async::pooled_connection::AsyncDieselConnectionManager;
@@ -59,12 +58,54 @@ pub async fn write(pool: &PgPool, token_txns: Vec<ProcessedTxnData>) ->
Result<(), anyhow::Error> {
             async move {
                 diesel::insert_into(token_transfer_data::table)
                     .values(&data)
-                    .on_conflict_do_nothing()
+                    .on_conflict((
+                        schema::token_transfer_data::dsl::chain_id,
+                        schema::token_transfer_data::dsl::nonce,
+                    ))
+                    .do_update()
+                    .set((
+                        token_transfer_data::txn_hash.eq(excluded(token_transfer_data::txn_hash)),
+                        token_transfer_data::chain_id.eq(excluded(token_transfer_data::chain_id)),
+                        token_transfer_data::nonce.eq(excluded(token_transfer_data::nonce)),
+                        token_transfer_data::block_height
+                            .eq(excluded(token_transfer_data::block_height)),
+                        token_transfer_data::timestamp_ms
+                            .eq(excluded(token_transfer_data::timestamp_ms)),
+                        token_transfer_data::sender_address
+                            .eq(excluded(token_transfer_data::sender_address)),
+                        token_transfer_data::destination_chain
+                            .eq(excluded(token_transfer_data::destination_chain)),
+                        token_transfer_data::recipient_address
+                            .eq(excluded(token_transfer_data::recipient_address)),
+                        token_transfer_data::token_id.eq(excluded(token_transfer_data::token_id)),
+                        token_transfer_data::amount.eq(excluded(token_transfer_data::amount)),
+                        token_transfer_data::is_finalized
+                            .eq(excluded(token_transfer_data::is_finalized)),
+                    ))
+                    .filter(token_transfer_data::is_finalized.eq(false))
                     .execute(conn)
                     .await?;
                 diesel::insert_into(token_transfer::table)
                     .values(&transfers)
-                    .on_conflict_do_nothing()
+                    .on_conflict((
+                        schema::token_transfer::dsl::chain_id,
+                        schema::token_transfer::dsl::nonce,
+                        schema::token_transfer::dsl::status,
+                    ))
+                    .do_update()
+                    .set((
+                        token_transfer::txn_hash.eq(excluded(token_transfer::txn_hash)),
+                        token_transfer::chain_id.eq(excluded(token_transfer::chain_id)),
+                        token_transfer::nonce.eq(excluded(token_transfer::nonce)),
+                        token_transfer::status.eq(excluded(token_transfer::status)),
+                        token_transfer::block_height.eq(excluded(token_transfer::block_height)),
+                        token_transfer::timestamp_ms.eq(excluded(token_transfer::timestamp_ms)),
+                        token_transfer::txn_sender.eq(excluded(token_transfer::txn_sender)),
+                        token_transfer::gas_usage.eq(excluded(token_transfer::gas_usage)),
+                        token_transfer::data_source.eq(excluded(token_transfer::data_source)),
+                        token_transfer::is_finalized.eq(excluded(token_transfer::is_finalized)),
+                    ))
+                    .filter(token_transfer::is_finalized.eq(false))
                     .execute(conn)
                     .await?;
                 diesel::insert_into(sui_error_transactions::table)
@@ -111,28 +152,3 @@ pub async fn read_sui_progress_store(pool: &PgPool) -> anyhow::Result<Option<TransactionDigest>> {
         Ok(None),
     }
 }
-
-pub async fn get_latest_eth_token_transfer(
-    pool: &PgPool,
-    finalized: bool,
-) -> Result<Option<DBTokenTransfer>, Error> {
-    use crate::schema::token_transfer::dsl::*;
-
-    let connection = &mut pool.get().await.unwrap();
-
-    if finalized {
-        token_transfer
-            .filter(data_source.eq("ETH").and(status.eq("Deposited")))
-            .order(block_height.desc())
-            .first::<DBTokenTransfer>(connection)
-            .await
-            .optional()
-    } else {
-        token_transfer
-            .filter(status.eq("DepositedUnfinalized"))
-            .order(block_height.desc())
-            .first::<DBTokenTransfer>(connection)
-            .await
-            .optional()
-    }
-}
diff --git a/crates/sui-bridge-indexer/src/schema.rs b/crates/sui-bridge-indexer/src/schema.rs
index 8a9e7c02951e8..c0bb997a465b8 100644
--- a/crates/sui-bridge-indexer/src/schema.rs
+++ b/crates/sui-bridge-indexer/src/schema.rs
@@ -39,6 +39,7 @@ diesel::table! {
     txn_sender -> Bytea,
     gas_usage -> Int8,
     data_source -> Text,
+    is_finalized -> Bool,
 }
}
 
@@ -54,6 +55,7 @@ diesel::table!
{
     recipient_address -> Bytea,
     token_id -> Int4,
     amount -> Int8,
+    is_finalized -> Bool,
 }
}
diff --git a/crates/sui-bridge-indexer/src/storage.rs b/crates/sui-bridge-indexer/src/storage.rs
index c39d2079d8f65..d37c4a32f6df2 100644
--- a/crates/sui-bridge-indexer/src/storage.rs
+++ b/crates/sui-bridge-indexer/src/storage.rs
@@ -1,36 +1,46 @@
 // Copyright (c) Mysten Labs, Inc.
 // SPDX-License-Identifier: Apache-2.0
 
-#![allow(dead_code)] // TODO: remove in next PR where integration of ProgressSavingPolicy is done
-
-use std::collections::{HashMap, HashSet};
-use std::sync::{Arc, Mutex};
-
 use anyhow::{anyhow, Error};
 use async_trait::async_trait;
 use diesel::dsl::now;
+use diesel::query_dsl::methods::FilterDsl;
+use diesel::upsert::excluded;
 use diesel::{ExpressionMethods, TextExpressionMethods};
 use diesel::{OptionalExtension, QueryDsl, SelectableHelper};
 use diesel_async::scoped_futures::ScopedFutureExt;
 use diesel_async::AsyncConnection;
 use diesel_async::RunQueryDsl;
 
+use crate::metrics::BridgeIndexerMetrics;
 use crate::postgres_manager::PgPool;
 use crate::schema::progress_store::{columns, dsl};
 use crate::schema::{sui_error_transactions, token_transfer, token_transfer_data};
 use crate::{models, schema, ProcessedTxnData};
 use sui_indexer_builder::indexer_builder::{IndexerProgressStore, Persistent};
-use sui_indexer_builder::Task;
+use sui_indexer_builder::{
+    progress::ProgressSavingPolicy, Task, Tasks, LIVE_TASK_TARGET_CHECKPOINT,
+};
 
 /// Persistent layer impl
 #[derive(Clone)]
 pub struct PgBridgePersistent {
     pool: PgPool,
+    save_progress_policy: ProgressSavingPolicy,
+    indexer_metrics: BridgeIndexerMetrics,
 }
 
 impl PgBridgePersistent {
-    pub fn new(pool: PgPool) -> Self {
-        Self { pool }
+    pub fn new(
+        pool: PgPool,
+        save_progress_policy: ProgressSavingPolicy,
+        indexer_metrics: BridgeIndexerMetrics,
+    ) -> Self {
+        Self {
+            pool,
+            save_progress_policy,
+            indexer_metrics,
+        }
     }
 }
 
@@ -49,14 +59,71 @@ impl Persistent<ProcessedTxnData> for PgBridgePersistent {
             ProcessedTxnData::TokenTransfer(t) => {
                 diesel::insert_into(token_transfer::table)
                     .values(&t.to_db())
-                    .on_conflict_do_nothing()
+                    .on_conflict((
+                        token_transfer::dsl::chain_id,
+                        token_transfer::dsl::nonce,
+                        token_transfer::dsl::status,
+                    ))
+                    .do_update()
+                    .set((
+                        token_transfer::chain_id
+                            .eq(excluded(token_transfer::chain_id)),
+                        token_transfer::nonce.eq(excluded(token_transfer::nonce)),
+                        token_transfer::status.eq(excluded(token_transfer::status)),
+                        token_transfer::block_height
+                            .eq(excluded(token_transfer::block_height)),
+                        token_transfer::timestamp_ms
+                            .eq(excluded(token_transfer::timestamp_ms)),
+                        token_transfer::txn_hash
+                            .eq(excluded(token_transfer::txn_hash)),
+                        token_transfer::txn_sender
+                            .eq(excluded(token_transfer::txn_sender)),
+                        token_transfer::gas_usage
+                            .eq(excluded(token_transfer::gas_usage)),
+                        token_transfer::data_source
+                            .eq(excluded(token_transfer::data_source)),
+                        token_transfer::is_finalized
+                            .eq(excluded(token_transfer::is_finalized)),
+                    ))
+                    .filter(token_transfer::is_finalized.eq(false))
                     .execute(conn)
                     .await?;
 
                 if let Some(d) = t.to_data_maybe() {
                     diesel::insert_into(token_transfer_data::table)
                         .values(&d)
-                        .on_conflict_do_nothing()
+                        .on_conflict((
+                            token_transfer_data::dsl::chain_id,
+                            token_transfer_data::dsl::nonce,
+                        ))
+                        .do_update()
+                        .set((
+                            token_transfer_data::chain_id
+                                .eq(excluded(token_transfer_data::chain_id)),
+                            token_transfer_data::nonce
+                                .eq(excluded(token_transfer_data::nonce)),
+                            token_transfer_data::block_height
+                                .eq(excluded(token_transfer_data::block_height)),
+                            token_transfer_data::timestamp_ms
+                                .eq(excluded(token_transfer_data::timestamp_ms)),
+                            token_transfer_data::txn_hash
+                                .eq(excluded(token_transfer_data::txn_hash)),
+                            token_transfer_data::sender_address
+                                .eq(excluded(token_transfer_data::sender_address)),
+                            token_transfer_data::destination_chain.eq(excluded(
+                                token_transfer_data::destination_chain,
+                            )),
+                            token_transfer_data::recipient_address.eq(excluded(
+                                token_transfer_data::recipient_address,
+                            )),
+                            token_transfer_data::token_id
+                                .eq(excluded(token_transfer_data::token_id)),
+                            token_transfer_data::amount
+                                .eq(excluded(token_transfer_data::amount)),
+                            token_transfer_data::is_finalized
+                                .eq(excluded(token_transfer_data::is_finalized)),
+                        ))
+                        .filter(token_transfer_data::is_finalized.eq(false))
                         .execute(conn)
                         .await?;
                 }
@@ -95,41 +162,59 @@ impl IndexerProgressStore for PgBridgePersistent {
 
     async fn save_progress(
         &mut self,
-        task_name: String,
-        checkpoint_number: u64,
-    ) -> anyhow::Result<()> {
-        let mut conn = self.pool.get().await?;
-        diesel::insert_into(schema::progress_store::table)
-            .values(&models::ProgressStore {
-                task_name,
-                checkpoint: checkpoint_number as i64,
-                // Target checkpoint and timestamp will only be written for new entries
-                target_checkpoint: i64::MAX,
-                // Timestamp is defaulted to current time in DB if None
-                timestamp: None,
-            })
-            .on_conflict(dsl::task_name)
-            .do_update()
-            .set((
-                columns::checkpoint.eq(checkpoint_number as i64),
-                columns::timestamp.eq(now),
-            ))
-            .execute(&mut conn)
-            .await?;
-        Ok(())
+        task: &Task,
+        checkpoint_numbers: &[u64],
+    ) -> anyhow::Result<Option<u64>> {
+        if checkpoint_numbers.is_empty() {
+            return Ok(None);
+        }
+        let task_name = task.task_name.clone();
+        let task_name_prefix = task.name_prefix();
+        let task_type_label = task.type_str();
+        if let Some(checkpoint_to_save) = self
+            .save_progress_policy
+            .cache_progress(task, checkpoint_numbers)
+        {
+            let mut conn = self.pool.get().await?;
+            diesel::insert_into(schema::progress_store::table)
+                .values(&models::ProgressStore {
+                    task_name: task_name.clone(),
+                    checkpoint: checkpoint_to_save as i64,
+                    // Target checkpoint and timestamp will only be written for new entries
+                    target_checkpoint: i64::MAX,
+                    // Timestamp is defaulted to current time in DB if None
+                    timestamp: None,
+                })
+                .on_conflict(dsl::task_name)
+                .do_update()
+                .set((
+                    columns::checkpoint.eq(checkpoint_to_save as i64),
+                    columns::timestamp.eq(now),
+                ))
+                .execute(&mut conn)
+                .await?;
+            self.indexer_metrics
+                .tasks_current_checkpoints
+                .with_label_values(&[task_name_prefix, task_type_label])
+                .set(checkpoint_to_save as i64);
+            return Ok(Some(checkpoint_to_save));
+        }
+        Ok(None)
     }
 
-    async fn get_ongoing_tasks(&self, prefix: &str) -> Result<Vec<Task>, anyhow::Error> {
+    async fn get_ongoing_tasks(&self, prefix: &str) -> Result<Tasks, anyhow::Error> {
         let mut conn = self.pool.get().await?;
         // get all unfinished tasks
-        let cp: Vec<models::ProgressStore> = dsl::progress_store
+        let cp: Vec<models::ProgressStore> =
             // TODO: using `like` could be error prone; change the progress store schema to store the task name properly.
-            .filter(columns::task_name.like(format!("{prefix} - %")))
-            .filter(columns::checkpoint.lt(columns::target_checkpoint))
+            QueryDsl::filter(
+                QueryDsl::filter(dsl::progress_store, columns::task_name.like(format!("{prefix} - %"))),
+                columns::checkpoint.lt(columns::target_checkpoint))
             .order_by(columns::target_checkpoint.desc())
            .load(&mut conn)
            .await?;
-        Ok(cp.into_iter().map(|d| d.into()).collect())
+        let tasks = cp.into_iter().map(|d| d.into()).collect();
+        Ok(Tasks::new(tasks)?)
     }
 
     async fn get_largest_backfill_task_target_checkpoint(
@@ -137,11 +222,11 @@ impl IndexerProgressStore for PgBridgePersistent {
         &self,
         prefix: &str,
     ) -> Result<Option<u64>, Error> {
         let mut conn = self.pool.get().await?;
-        let cp: Option<i64> = dsl::progress_store
-            .select(columns::target_checkpoint)
+        let cp: Option<i64> =
             // TODO: using `like` could be error prone; change the progress store schema to store the task name properly.
-            .filter(columns::task_name.like(format!("{prefix} - %")))
-            .filter(columns::target_checkpoint.ne(i64::MAX))
+            QueryDsl::filter(QueryDsl::filter(dsl::progress_store
+                .select(columns::target_checkpoint), columns::task_name.like(format!("{prefix} - %"))),
+                columns::target_checkpoint.ne(i64::MAX))
             .order_by(columns::target_checkpoint.desc())
             .first::<i64>(&mut conn)
             .await
@@ -149,6 +234,8 @@ impl IndexerProgressStore for PgBridgePersistent {
         Ok(cp.map(|c| c as u64))
     }
 
+    /// Register a new task to the progress store with a start checkpoint and target checkpoint.
+    /// Usually used for backfill tasks.
     async fn register_task(
         &mut self,
         task_name: String,
@@ -169,251 +256,39 @@ impl IndexerProgressStore for PgBridgePersistent {
         Ok(())
     }
 
-    async fn update_task(&mut self, task: Task) -> Result<(), anyhow::Error> {
+    /// Register a live task to the progress store with a start checkpoint.
+    async fn register_live_task(
+        &mut self,
+        task_name: String,
+        start_checkpoint: u64,
+    ) -> Result<(), anyhow::Error> {
         let mut conn = self.pool.get().await?;
-        diesel::update(dsl::progress_store.filter(columns::task_name.eq(task.task_name)))
-            .set((
-                columns::checkpoint.eq(task.checkpoint as i64),
-                columns::target_checkpoint.eq(task.target_checkpoint as i64),
-                columns::timestamp.eq(now),
-            ))
+        diesel::insert_into(schema::progress_store::table)
+            .values(models::ProgressStore {
+                task_name,
+                checkpoint: start_checkpoint as i64,
+                target_checkpoint: LIVE_TASK_TARGET_CHECKPOINT,
+                // Timestamp is defaulted to current time in DB if None
+                timestamp: None,
+            })
             .execute(&mut conn)
             .await?;
         Ok(())
     }
-}
-
-#[derive(Debug, Clone)]
-pub enum ProgressSavingPolicy {
-    SaveAfterDuration(SaveAfterDurationPolicy),
-    OutOfOrderSaveAfterDuration(OutOfOrderSaveAfterDurationPolicy),
-}
-
-#[derive(Debug, Clone)]
-pub struct SaveAfterDurationPolicy {
-    duration: tokio::time::Duration,
-    last_save_time: Arc<Mutex<HashMap<String, Option<tokio::time::Instant>>>>,
-}
-
-impl SaveAfterDurationPolicy {
-    pub fn new(duration: tokio::time::Duration) -> Self {
-        Self {
-            duration,
-            last_save_time: Arc::new(Mutex::new(HashMap::new())),
-        }
-    }
-}
-
-#[derive(Debug, Clone)]
-pub struct OutOfOrderSaveAfterDurationPolicy {
-    duration: tokio::time::Duration,
-    last_save_time: Arc<Mutex<HashMap<String, Option<tokio::time::Instant>>>>,
-    seen: Arc<Mutex<HashMap<String, HashSet<u64>>>>,
-    next_to_fill: Arc<Mutex<HashMap<String, Option<u64>>>>,
-}
-
-impl OutOfOrderSaveAfterDurationPolicy {
-    pub fn new(duration: tokio::time::Duration) -> Self {
-        Self {
-            duration,
-            last_save_time: Arc::new(Mutex::new(HashMap::new())),
-            seen: Arc::new(Mutex::new(HashMap::new())),
-            next_to_fill: Arc::new(Mutex::new(HashMap::new())),
-        }
-    }
-}
-
-impl ProgressSavingPolicy {
-    /// If returns Some(progress), it means we should save the progress to DB.
-    fn cache_progress(
-        &mut self,
-        task_name: String,
-        heights: &[u64],
-        start_height: u64,
-        target_height: u64,
-    ) -> Option<u64> {
-        match self {
-            ProgressSavingPolicy::SaveAfterDuration(policy) => {
-                let height = *heights.iter().max().unwrap();
-                let mut last_save_time_guard = policy.last_save_time.lock().unwrap();
-                let last_save_time = last_save_time_guard.entry(task_name).or_insert(None);
-                if height >= target_height {
-                    *last_save_time = Some(tokio::time::Instant::now());
-                    return Some(height);
-                }
-                if let Some(v) = last_save_time {
-                    if v.elapsed() >= policy.duration {
-                        *last_save_time = Some(tokio::time::Instant::now());
-                        Some(height)
-                    } else {
-                        None
-                    }
-                } else {
-                    // update `last_save_time` to now but don't actually save progress
-                    *last_save_time = Some(tokio::time::Instant::now());
-                    None
-                }
-            }
-            ProgressSavingPolicy::OutOfOrderSaveAfterDuration(policy) => {
-                let mut next_to_fill = {
-                    let mut next_to_fill_guard = policy.next_to_fill.lock().unwrap();
-                    (*next_to_fill_guard
-                        .entry(task_name.clone())
-                        .or_insert(Some(start_height)))
-                    .unwrap()
-                };
-                let old_next_to_fill = next_to_fill;
-                {
-                    let mut seen_guard = policy.seen.lock().unwrap();
-                    let seen = seen_guard
-                        .entry(task_name.clone())
-                        .or_insert(HashSet::new());
-                    seen.extend(heights.iter().cloned());
-                    while seen.remove(&next_to_fill) {
-                        next_to_fill += 1;
-                    }
-                }
-                // We made some progress in filling gaps
-                if old_next_to_fill != next_to_fill {
-                    policy
-                        .next_to_fill
-                        .lock()
-                        .unwrap()
-                        .insert(task_name.clone(), Some(next_to_fill));
-                }
-
-                let mut last_save_time_guard = policy.last_save_time.lock().unwrap();
-                let last_save_time = last_save_time_guard
-                    .entry(task_name.clone())
-                    .or_insert(None);
-                // If we have reached the target height, we always save
-                if next_to_fill > target_height {
-                    *last_save_time = Some(tokio::time::Instant::now());
-                    return Some(next_to_fill - 1);
-                }
-                // Regardless of whether we made progress, we should save if we have waited long enough
-                if let Some(v) = last_save_time {
-                    if v.elapsed() >= policy.duration && next_to_fill > start_height {
-                        *last_save_time = Some(tokio::time::Instant::now());
-                        Some(next_to_fill - 1)
-                    } else {
-                        None
-                    }
-                } else {
-                    // update `last_save_time` to now but don't actually save progress
-                    *last_save_time = Some(tokio::time::Instant::now());
-                    None
-                }
-            }
-        }
-    }
-}
-
-#[cfg(test)]
-mod tests {
-
-    use super::*;
-
-    #[tokio::test]
-    async fn test_save_after_duration_policy() {
-        let duration = tokio::time::Duration::from_millis(100);
-        let mut policy =
-            ProgressSavingPolicy::SaveAfterDuration(SaveAfterDurationPolicy::new(duration));
-        assert_eq!(
-            policy.cache_progress("task1".to_string(), &[1], 0, 100),
-            None
-        );
-        tokio::time::sleep(duration).await;
-        assert_eq!(
-            policy.cache_progress("task1".to_string(), &[2], 0, 100),
-            Some(2)
-        );
-        tokio::time::sleep(duration).await;
-        assert_eq!(
-            policy.cache_progress("task1".to_string(), &[3], 0, 100),
-            Some(3)
-        );
-
-        assert_eq!(
-            policy.cache_progress("task2".to_string(), &[4], 0, 100),
-            None
-        );
-        tokio::time::sleep(duration).await;
-        assert_eq!(
-            policy.cache_progress("task2".to_string(), &[5, 6], 0, 100),
-            Some(6)
-        );
-        tokio::time::sleep(duration).await;
-        assert_eq!(
-            policy.cache_progress("task2".to_string(), &[8, 7], 0, 100),
-            Some(8)
-        );
-    }
-
-    #[tokio::test]
-    async fn test_out_of_order_save_after_duration_policy() {
-        let duration = tokio::time::Duration::from_millis(100);
-        let mut policy = ProgressSavingPolicy::OutOfOrderSaveAfterDuration(
OutOfOrderSaveAfterDurationPolicy::new(duration), - ); - - assert_eq!( - policy.cache_progress("task1".to_string(), &[0], 0, 100), - None - ); - tokio::time::sleep(duration).await; - assert_eq!( - policy.cache_progress("task1".to_string(), &[1], 0, 100), - Some(1) - ); - assert_eq!( - policy.cache_progress("task1".to_string(), &[3], 0, 100), - None - ); - tokio::time::sleep(duration).await; - assert_eq!( - policy.cache_progress("task1".to_string(), &[4], 0, 100), - Some(1) - ); - tokio::time::sleep(duration).await; - assert_eq!( - policy.cache_progress("task1".to_string(), &[2], 0, 100), - Some(4) - ); - - assert_eq!( - policy.cache_progress("task2".to_string(), &[0], 0, 100), - None - ); - tokio::time::sleep(duration).await; - assert_eq!( - policy.cache_progress("task2".to_string(), &[1], 0, 100), - Some(1) - ); - tokio::time::sleep(duration).await; - assert_eq!( - policy.cache_progress("task2".to_string(), &[2], 0, 100), - Some(2) - ); - assert_eq!( - policy.cache_progress("task2".to_string(), &[3], 0, 100), - None - ); - tokio::time::sleep(duration).await; - assert_eq!( - policy.cache_progress("task2".to_string(), &[4], 0, 100), - Some(4) - ); - - assert_eq!( - policy.cache_progress("task2".to_string(), &[6, 7, 8], 0, 100), - None - ); - tokio::time::sleep(duration).await; - assert_eq!( - policy.cache_progress("task2".to_string(), &[5, 9], 0, 100), - Some(9) - ); + async fn update_task(&mut self, task: Task) -> Result<(), anyhow::Error> { + let mut conn = self.pool.get().await?; + diesel::update(QueryDsl::filter( + dsl::progress_store, + columns::task_name.eq(task.task_name), + )) + .set(( + columns::checkpoint.eq(task.start_checkpoint as i64), + columns::target_checkpoint.eq(task.target_checkpoint as i64), + columns::timestamp.eq(now), + )) + .execute(&mut conn) + .await?; + Ok(()) } } diff --git a/crates/sui-bridge-indexer/src/sui_bridge_indexer.rs b/crates/sui-bridge-indexer/src/sui_bridge_indexer.rs index f455665b6392a..a6e50444f2124 100644 --- a/crates/sui-bridge-indexer/src/sui_bridge_indexer.rs +++ b/crates/sui-bridge-indexer/src/sui_bridge_indexer.rs @@ -99,12 +99,14 @@ fn process_sui_event( status: TokenTransferStatus::Deposited, gas_usage: tx.effects.gas_cost_summary().net_gas_usage(), data_source: BridgeDataSource::Sui, + is_finalized: true, data: Some(TokenTransferData { destination_chain: move_event.target_chain, sender_address: move_event.sender_address.clone(), recipient_address: move_event.target_address.clone(), token_id: move_event.token_type, amount: move_event.amount_sui_adjusted, + is_finalized: true, }), })) } @@ -123,6 +125,7 @@ fn process_sui_event( gas_usage: tx.effects.gas_cost_summary().net_gas_usage(), data_source: BridgeDataSource::Sui, data: None, + is_finalized: true, })) } "TokenTransferClaimed" => { @@ -140,6 +143,7 @@ fn process_sui_event( gas_usage: tx.effects.gas_cost_summary().net_gas_usage(), data_source: BridgeDataSource::Sui, data: None, + is_finalized: true, })) } _ => { diff --git a/crates/sui-bridge-indexer/src/sui_datasource.rs b/crates/sui-bridge-indexer/src/sui_datasource.rs index a9d5de0fac774..5afd9950ddd83 100644 --- a/crates/sui-bridge-indexer/src/sui_datasource.rs +++ b/crates/sui-bridge-indexer/src/sui_datasource.rs @@ -12,8 +12,8 @@ use sui_data_ingestion_core::{ DataIngestionMetrics, IndexerExecutor, ProgressStore, ReaderOptions, Worker, WorkerPool, }; use sui_indexer_builder::indexer_builder::{DataSender, Datasource}; +use sui_indexer_builder::Task; use sui_sdk::SuiClient; -use sui_types::base_types::TransactionDigest; use 
sui_types::full_checkpoint_content::CheckpointData as SuiCheckpointData; use sui_types::full_checkpoint_content::CheckpointTransaction; use sui_types::messages_checkpoint::CheckpointSequenceNumber; @@ -23,6 +23,9 @@ use tokio::task::JoinHandle; use crate::metrics::BridgeIndexerMetrics; +const BACKFILL_TASK_INGESTION_READER_BATCH_SIZE: usize = 300; +const LIVE_TASK_INGESTION_READER_BATCH_SIZE: usize = 10; + pub struct SuiCheckpointDatasource { remote_store_url: String, sui_client: Arc<SuiClient>, @@ -58,23 +61,32 @@ impl SuiCheckpointDatasource { impl Datasource for SuiCheckpointDatasource { async fn start_data_retrieval( &self, - starting_checkpoint: u64, - target_checkpoint: u64, + task: Task, data_sender: DataSender, ) -> Result<Vec<JoinHandle<()>>, Error> { let (exit_sender, exit_receiver) = oneshot::channel(); let progress_store = PerTaskInMemProgressStore { - current_checkpoint: starting_checkpoint, - exit_checkpoint: target_checkpoint, + current_checkpoint: task.start_checkpoint, + exit_checkpoint: task.target_checkpoint, exit_sender: Some(exit_sender), }; + // The max concurrency of checkpoints to fetch at the same time for the ingestion framework + let ingestion_reader_batch_size = if task.is_live_task { + // Live task uses a smaller number to be cost effective + LIVE_TASK_INGESTION_READER_BATCH_SIZE + } else { + std::env::var("BACKFILL_TASK_INGESTION_READER_BATCH_SIZE") + .unwrap_or(BACKFILL_TASK_INGESTION_READER_BATCH_SIZE.to_string()) + .parse::<usize>() + .unwrap() + }; + tracing::info!( + "Starting Sui checkpoint data retrieval with batch size {}", + ingestion_reader_batch_size + ); let mut executor = IndexerExecutor::new(progress_store, 1, self.metrics.clone()); let worker = IndexerWorker::new(data_sender); - let worker_pool = WorkerPool::new( - worker, - TransactionDigest::random().to_string(), - self.concurrency, - ); + let worker_pool = WorkerPool::new(worker, task.task_name.clone(), self.concurrency); executor.register(worker_pool).await?; let checkpoint_path = self.checkpoint_path.clone(); let remote_store_url = self.remote_store_url.clone(); @@ -84,7 +96,10 @@ impl Datasource for SuiCheckpointDatasource { checkpoint_path, Some(remote_store_url), vec![], // optional remote store access options - ReaderOptions::default(), + ReaderOptions { + batch_size: ingestion_reader_batch_size, + ..Default::default() + }, exit_receiver, ) .await?; @@ -105,16 +120,12 @@ impl Datasource for SuiCheckpointDatasource { } fn get_tasks_remaining_checkpoints_metric(&self) -> &IntGaugeVec { - &self.indexer_metrics.tasks_remaining_checkpoints + &self.indexer_metrics.backfill_tasks_remaining_checkpoints } fn get_tasks_processed_checkpoints_metric(&self) -> &IntCounterVec { &self.indexer_metrics.tasks_processed_checkpoints } - - fn get_live_task_checkpoint_metric(&self) -> &IntGaugeVec { - &self.indexer_metrics.live_task_current_checkpoint - } } struct PerTaskInMemProgressStore { @@ -134,11 +145,19 @@ impl ProgressStore for PerTaskInMemProgressStore { async fn save( &mut self, - _task_name: String, + task_name: String, checkpoint_number: CheckpointSequenceNumber, ) -> anyhow::Result<()> { if checkpoint_number >= self.exit_checkpoint { + tracing::info!( + task_name, + checkpoint_number, + exit_checkpoint = self.exit_checkpoint, + "Task completed, sending exit signal" + ); + // `exit_sender` may be `None` if we have already sent the exit signal. if let Some(sender) = self.exit_sender.take() { + // Ignore the error if the receiver has already been dropped.
let _ = sender.send(()); } } diff --git a/crates/sui-bridge-indexer/src/sui_transaction_handler.rs b/crates/sui-bridge-indexer/src/sui_transaction_handler.rs index 4ce9a8a57d60e..92a4f1c9ebdf0 100644 --- a/crates/sui-bridge-indexer/src/sui_transaction_handler.rs +++ b/crates/sui-bridge-indexer/src/sui_transaction_handler.rs @@ -105,12 +105,14 @@ pub fn into_token_transfers( status: TokenTransferStatus::Deposited, gas_usage: effects.gas_cost_summary().net_gas_usage(), data_source: BridgeDataSource::Sui, + is_finalized: true, data: Some(TokenTransferData { destination_chain: move_event.target_chain, sender_address: move_event.sender_address.clone(), recipient_address: move_event.target_address.clone(), token_id: move_event.token_type, amount: move_event.amount_sui_adjusted, + is_finalized: true, }), })); } @@ -129,6 +131,7 @@ pub fn into_token_transfers( gas_usage: effects.gas_cost_summary().net_gas_usage(), data_source: BridgeDataSource::Sui, data: None, + is_finalized: true, })); } "TokenTransferClaimed" => { @@ -146,6 +149,7 @@ pub fn into_token_transfers( gas_usage: effects.gas_cost_summary().net_gas_usage(), data_source: BridgeDataSource::Sui, data: None, + is_finalized: true, })); } _ => { diff --git a/crates/sui-bridge/Cargo.toml b/crates/sui-bridge/Cargo.toml index ff500f371c180..700d4979f9b6f 100644 --- a/crates/sui-bridge/Cargo.toml +++ b/crates/sui-bridge/Cargo.toml @@ -8,7 +8,6 @@ edition = "2021" [dependencies] ethers = "2.0" -snap = "1.1.0" tokio = { workspace = true, features = ["full"] } sui-types.workspace = true sui-authority-aggregation.workspace = true @@ -26,7 +25,6 @@ mysten-metrics.workspace = true sui-sdk.workspace = true sui-keys.workspace = true sui-config.workspace = true -sui-tls.workspace = true clap.workspace = true tracing.workspace = true bin-version.workspace = true @@ -46,6 +44,7 @@ rand.workspace = true lru.workspace = true shared-crypto.workspace = true backoff.workspace = true +mysten-common.workspace = true enum_dispatch.workspace = true sui-json-rpc-api.workspace = true sui-test-transaction-builder.workspace = true diff --git a/crates/sui-bridge/src/config.rs b/crates/sui-bridge/src/config.rs index d437409d0797b..065b8c0aeb6c1 100644 --- a/crates/sui-bridge/src/config.rs +++ b/crates/sui-bridge/src/config.rs @@ -158,7 +158,8 @@ impl BridgeNodeConfig { // we do this check here instead of `prepare_for_sui` below because // that is only called when `run_client` is true. 
- let sui_client = Arc::new(SuiClient::<SuiSdkClient>::new(&self.sui.sui_rpc_url).await?); + let sui_client = + Arc::new(SuiClient::<SuiSdkClient>::new(&self.sui.sui_rpc_url, metrics.clone()).await?); let bridge_committee = sui_client .get_bridge_committee() .await diff --git a/crates/sui-bridge/src/e2e_tests/test_utils.rs b/crates/sui-bridge/src/e2e_tests/test_utils.rs index 1494fd14e155d..ae6ef6a94a0ac 100644 --- a/crates/sui-bridge/src/e2e_tests/test_utils.rs +++ b/crates/sui-bridge/src/e2e_tests/test_utils.rs @@ -7,6 +7,7 @@ use crate::config::default_ed25519_key_pair; use crate::crypto::BridgeAuthorityKeyPair; use crate::crypto::BridgeAuthorityPublicKeyBytes; use crate::events::*; +use crate::metrics::BridgeMetrics; use crate::server::BridgeNodePublicMetadata; use crate::types::BridgeAction; use crate::utils::get_eth_signer_client; @@ -152,6 +153,7 @@ impl BridgeTestClusterBuilder { pub async fn build(self) -> BridgeTestCluster { init_all_struct_tags(); std::env::set_var("__TEST_ONLY_CONSENSUS_USE_LONG_MIN_ROUND_DELAY", "1"); + let metrics = Arc::new(BridgeMetrics::new_for_testing()); let mut bridge_keys = vec![]; let mut bridge_keys_copy = vec![]; for _ in 0..self.num_validators { @@ -177,7 +179,7 @@ impl BridgeTestClusterBuilder { .await, ); } - let bridge_client = SuiBridgeClient::new(&test_cluster.fullnode_handle.rpc_url) + let bridge_client = SuiBridgeClient::new(&test_cluster.fullnode_handle.rpc_url, metrics) .await .unwrap(); info!( @@ -836,6 +838,9 @@ impl TempDir { impl Drop for TempDir { fn drop(&mut self) { - fs::remove_dir_all(&self.path).unwrap(); + // Use eprintln! here in case logging is not initialized + if let Err(e) = fs::remove_dir_all(&self.path) { + eprintln!("Failed to remove temp dir: {:?}", e); + } } } }
diff --git a/crates/sui-bridge/src/eth_client.rs b/crates/sui-bridge/src/eth_client.rs index df48b3cd88b4a..dfe6993e1b760 100644 --- a/crates/sui-bridge/src/eth_client.rs +++ b/crates/sui-bridge/src/eth_client.rs @@ -177,14 +177,14 @@ where // for chunking the query. pub async fn get_raw_events_in_range( &self, - address: ethers::types::Address, + addresses: Vec<ethers::types::Address>, start_block: u64, end_block: u64, ) -> BridgeResult<Vec<RawEthLog>> { let filter = Filter::new() .from_block(start_block) .to_block(end_block) - .address(address); + .address(addresses.clone()); let logs = self .provider .get_logs(&filter) @@ -197,11 +197,11 @@ where e ) })?; - // Safeguard check that all events are emitted from requested contract address + // Safeguard check that all events are emitted from requested contract addresses logs.into_iter().map( |log| { - if log.address != address { - return Err(BridgeError::ProviderError(format!("Provider returns logs from different contract address (expected: {:?}): {:?}", address, log))); + if !addresses.contains(&log.address) { + return Err(BridgeError::ProviderError(format!("Provider returns logs from different contract address (expected: {:?}): {:?}", addresses, log))); } Ok(RawEthLog { block_number: log.block_number.ok_or(BridgeError::ProviderError("Provider returns log without block_number".into()))?.as_u64(),
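(Editor's note: the change above collapses one eth_getLogs query per contract into a single query over all bridge contracts. A sketch of the filter construction under the ethers 2.x API used here; the addresses are placeholders:

```rust
use ethers::types::{Address, Filter};

/// Sketch: one filter covering several contracts, as the updated
/// get_raw_events_in_range builds. ethers accepts a Vec via ValueOrArray,
/// so a single RPC round trip serves every address.
fn build_filter(addresses: Vec<Address>, start_block: u64, end_block: u64) -> Filter {
    Filter::new()
        .from_block(start_block)
        .to_block(end_block)
        .address(addresses)
}

fn main() {
    // Placeholder addresses for illustration only.
    let addrs = vec![Address::zero(), Address::repeat_byte(0x11)];
    let _filter = build_filter(addrs, 100, 200);
}
```

The safeguard check then has to switch from an equality test to a contains() test, as the diff shows.)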
diff --git a/crates/sui-bridge/src/metrics.rs b/crates/sui-bridge/src/metrics.rs index 2ec82d5723099..f38945ca15118 100644 --- a/crates/sui-bridge/src/metrics.rs +++ b/crates/sui-bridge/src/metrics.rs @@ -2,16 +2,16 @@ // SPDX-License-Identifier: Apache-2.0 use crate::config::MetricsConfig; +use mysten_common::metrics::{push_metrics, MetricsPushClient}; use mysten_metrics::RegistryService; use prometheus::{ register_histogram_vec_with_registry, register_int_counter_vec_with_registry, register_int_counter_with_registry, register_int_gauge_vec_with_registry, - register_int_gauge_with_registry, Encoder, HistogramVec, IntCounter, IntCounterVec, IntGauge, + register_int_gauge_with_registry, HistogramVec, IntCounter, IntCounterVec, IntGauge, IntGaugeVec, Registry, }; -use std::time::{Duration, SystemTime, UNIX_EPOCH}; +use std::time::Duration; use sui_types::crypto::NetworkKeyPair; -use tracing::error; const FINE_GRAINED_LATENCY_SEC_BUCKETS: &[f64] = &[ 0.001, 0.005, 0.01, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.45, 0.5, 0.6, 0.7, 0.8, 0.9, @@ -20,39 +20,6 @@ const FINE_GRAINED_LATENCY_SEC_BUCKETS: &[f64] = &[ 200., 250., 300., 350., 400., ]; -pub struct MetricsPushClient { - certificate: std::sync::Arc<sui_tls::SelfSignedCertificate>, - client: reqwest::Client, -} - -impl MetricsPushClient { - pub fn new(metrics_key: sui_types::crypto::NetworkKeyPair) -> Self { - use fastcrypto::traits::KeyPair; - let certificate = std::sync::Arc::new(sui_tls::SelfSignedCertificate::new( - metrics_key.private(), - sui_tls::SUI_VALIDATOR_SERVER_NAME, - )); - let identity = certificate.reqwest_identity(); - let client = reqwest::Client::builder() - .identity(identity) - .build() - .unwrap(); - - Self { - certificate, - client, - } - } - - pub fn certificate(&self) -> &sui_tls::SelfSignedCertificate { - &self.certificate - } - - pub fn client(&self) -> &reqwest::Client { - &self.client - } -} - /// Starts a task to periodically push metrics to a configured endpoint if a metrics push endpoint /// is configured. pub fn start_metrics_push_task( @@ -80,62 +47,6 @@ pub fn start_metrics_push_task( let mut client = MetricsPushClient::new(metrics_key_pair.copy()); - // TODO (johnm) split this out into mysten-common - async fn push_metrics( - client: &MetricsPushClient, - url: &reqwest::Url, - registry: &RegistryService, - ) -> Result<(), anyhow::Error> { - // now represents a collection timestamp for all of the metrics we send to the proxy - let now = SystemTime::now() - .duration_since(UNIX_EPOCH) - .unwrap() - .as_millis() as i64; - - let mut metric_families = registry.gather_all(); - for mf in metric_families.iter_mut() { - for m in mf.mut_metric() { - m.set_timestamp_ms(now); - } - } - - let mut buf: Vec<u8> = vec![]; - let encoder = prometheus::ProtobufEncoder::new(); - encoder.encode(&metric_families, &mut buf)?; - - let mut s = snap::raw::Encoder::new(); - let compressed = s.compress_vec(&buf).map_err(|err| { - error!("unable to snappy encode; {err}"); - err - })?; - - let response = client - .client() - .post(url.to_owned()) - .header(reqwest::header::CONTENT_ENCODING, "snappy") - .header(reqwest::header::CONTENT_TYPE, prometheus::PROTOBUF_FORMAT) - .body(compressed) - .send() - .await?; - - if !response.status().is_success() { - let status = response.status(); - let body = match response.text().await { - Ok(body) => body, - Err(error) => format!("couldn't decode response body; {error}"), - }; - return Err(anyhow::anyhow!( - "metrics push failed: [{}]:{}", - status, - body - )); - } - - tracing::debug!("successfully pushed metrics to {url}"); - - Ok(()) - } - tokio::spawn(async move { tracing::info!(push_url =% url, interval =? interval, "Started Metrics Push Service"); @@ -190,6 +101,9 @@ pub struct BridgeMetrics { pub(crate) eth_rpc_queries_latency: HistogramVec, pub(crate) gas_coin_balance: IntGauge, + + pub(crate) sui_rpc_errors: IntCounterVec, + pub(crate) observed_governance_actions: IntCounterVec, } impl BridgeMetrics { @@ -372,6 +286,20 @@ impl BridgeMetrics { registry, ) .unwrap(), + sui_rpc_errors: register_int_counter_vec_with_registry!( + "bridge_sui_rpc_errors", + "Total number of errors from sui RPC, by RPC method", + &["method"], + registry, + ) + .unwrap(), + observed_governance_actions: register_int_counter_vec_with_registry!( + "bridge_observed_governance_actions", + "Total number of observed governance actions", + &["action_type", "chain_id"], + registry, + ) + .unwrap(), } }
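(Editor's note: a self-contained sketch of how a labeled counter such as the new bridge_sui_rpc_errors behaves, using the same prometheus macro as above; the label value is illustrative:

```rust
use prometheus::{register_int_counter_vec_with_registry, Registry};

fn main() {
    let registry = Registry::new();
    // One counter family; each distinct "method" label gets its own series.
    let rpc_errors = register_int_counter_vec_with_registry!(
        "bridge_sui_rpc_errors",
        "Total number of errors from sui RPC, by RPC method",
        &["method"],
        registry,
    )
    .unwrap();

    // Incremented when a retry loop gives up, as in sui_client.rs below.
    rpc_errors
        .with_label_values(&["get_reference_gas_price"])
        .inc();
    assert_eq!(
        rpc_errors.with_label_values(&["get_reference_gas_price"]).get(),
        1
    );
}
```
)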
diff --git a/crates/sui-bridge/src/monitor.rs b/crates/sui-bridge/src/monitor.rs index 12affd68d0aa7..09cb914c93a11 100644 --- a/crates/sui-bridge/src/monitor.rs +++ b/crates/sui-bridge/src/monitor.rs @@ -7,6 +7,7 @@ use crate::client::bridge_authority_aggregator::BridgeAuthorityAggregator; use crate::crypto::BridgeAuthorityPublicKeyBytes; use crate::events::{BlocklistValidatorEvent, CommitteeMemberUrlUpdateEvent}; use crate::events::{EmergencyOpEvent, SuiBridgeEvent}; +use crate::metrics::BridgeMetrics; use crate::retry_with_max_elapsed_time; use crate::sui_client::{SuiClient, SuiClientInner}; use crate::types::{BridgeCommittee, IsBridgePaused}; @@ -25,6 +26,7 @@ pub struct BridgeMonitor { bridge_auth_agg: Arc<ArcSwap<BridgeAuthorityAggregator>>, bridge_paused_watch_tx: tokio::sync::watch::Sender<IsBridgePaused>, sui_token_type_tags: Arc<ArcSwap<HashMap<u8, TypeTag>>>, + bridge_metrics: Arc<BridgeMetrics>, } impl<C> BridgeMonitor<C> @@ -37,6 +39,7 @@ where bridge_auth_agg: Arc<ArcSwap<BridgeAuthorityAggregator>>, bridge_paused_watch_tx: tokio::sync::watch::Sender<IsBridgePaused>, sui_token_type_tags: Arc<ArcSwap<HashMap<u8, TypeTag>>>, + bridge_metrics: Arc<BridgeMetrics>, ) -> Self { Self { sui_client, @@ -44,6 +47,7 @@ where bridge_auth_agg, bridge_paused_watch_tx, sui_token_type_tags, + bridge_metrics, } } @@ -55,6 +59,7 @@ where bridge_auth_agg, bridge_paused_watch_tx, sui_token_type_tags, + bridge_metrics, } = self; let mut latest_token_config = (*sui_token_type_tags.load().clone()).clone(); @@ -66,11 +71,15 @@ where SuiBridgeEvent::TokenTransferAlreadyApproved(_) => (), SuiBridgeEvent::TokenTransferAlreadyClaimed(_) => (), SuiBridgeEvent::TokenTransferLimitExceed(_) => { - // TODO + // TODO do we want to do anything here? } SuiBridgeEvent::EmergencyOpEvent(event) => { info!("Received EmergencyOpEvent: {:?}", event); + bridge_metrics + .observed_governance_actions + .with_label_values(&["emergency_op", "sui"]) + .inc(); let is_paused = get_latest_bridge_pause_status_with_emergency_event( sui_client.clone(), event, @@ -101,6 +110,10 @@ where SuiBridgeEvent::BlocklistValidatorEvent(event) => { info!("Received BlocklistValidatorEvent: {:?}", event); + bridge_metrics + .observed_governance_actions + .with_label_values(&["blocklist_validator", "sui"]) + .inc(); let new_committee = get_latest_bridge_committee_with_blocklist_event( sui_client.clone(), event, @@ -116,6 +129,11 @@ where SuiBridgeEvent::TokenRegistrationEvent(_) => (), SuiBridgeEvent::NewTokenEvent(event) => { + info!("Received NewTokenEvent: {:?}", event); + bridge_metrics + .observed_governance_actions + .with_label_values(&["new_token", "sui"]) + .inc(); if let std::collections::hash_map::Entry::Vacant(entry) = // We only add new tokens but not remove so it's ok to just insert latest_token_config.entry(event.token_id) @@ -128,7 +146,13 @@ where } } - SuiBridgeEvent::UpdateTokenPriceEvent(_) => (), + SuiBridgeEvent::UpdateTokenPriceEvent(event) => { + info!("Received UpdateTokenPriceEvent: {:?}", event); + bridge_metrics + .observed_governance_actions + .with_label_values(&["update_token_price", "sui"]) + .inc(); + } } } @@ -725,6 +749,7 @@ mod tests { bridge_pause_tx, _bridge_pause_rx, mut authorities, + bridge_metrics, ) = setup(); let old_committee = BridgeCommittee::new(authorities.clone()).unwrap(); let agg = Arc::new(ArcSwap::new(Arc::new(BridgeAuthorityAggregator::new( @@ -738,6 +763,7 @@ mod tests { agg.clone(), bridge_pause_tx, sui_token_type_tags, + bridge_metrics, ) .run(), ); @@ -779,6 +805,7 @@ mod tests { bridge_pause_tx, _bridge_pause_rx, mut authorities, + bridge_metrics, ) = setup(); let old_committee = BridgeCommittee::new(authorities.clone()).unwrap(); let agg = Arc::new(ArcSwap::new(Arc::new(BridgeAuthorityAggregator::new( @@ -792,6 +819,7 @@ mod tests { agg.clone(), bridge_pause_tx, sui_token_type_tags, + bridge_metrics, ) .run(), ); @@ -831,6 +859,7 @@ mod tests { bridge_pause_tx, bridge_pause_rx, authorities, + bridge_metrics, ) = setup(); let event = EmergencyOpEvent { frozen: !*bridge_pause_tx.borrow(), // toggle the bridge pause status @@ -847,6 +876,7 @@ mod tests { agg.clone(), bridge_pause_tx, sui_token_type_tags, + bridge_metrics, ) .run(), ); @@ -872,6 +902,7 @@ mod tests { bridge_pause_tx, _bridge_pause_rx, authorities, + bridge_metrics, ) = setup(); let event = NewTokenEvent { token_id: 255, @@ -893,6 +924,7 @@ mod tests { agg.clone(), bridge_pause_tx, sui_token_type_tags_clone, + bridge_metrics, ) .run(), ); @@ -920,11 +952,13 @@ mod tests { tokio::sync::watch::Sender<IsBridgePaused>, tokio::sync::watch::Receiver<IsBridgePaused>, Vec<BridgeAuthority>, + Arc<BridgeMetrics>, ) { telemetry_subscribers::init_for_testing(); let registry = Registry::new(); mysten_metrics::init_metrics(&registry); init_all_struct_tags(); + let bridge_metrics = Arc::new(BridgeMetrics::new_for_testing()); let sui_client_mock = SuiMockClient::default(); let sui_client = Arc::new(SuiClient::new_for_testing(sui_client_mock.clone())); @@ -950,6 +984,7 @@ mod tests { bridge_pause_tx, bridge_pause_rx, authorities, + bridge_metrics, ) } }
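(Editor's note: the pause plumbing the monitor drives is a tokio watch channel; readers always observe the latest value. A minimal sketch, with a plain bool standing in for the IsBridgePaused flag:

```rust
use tokio::sync::watch;

#[tokio::main]
async fn main() {
    // false = bridge not paused.
    let (pause_tx, pause_rx) = watch::channel(false);
    // The monitor flips the flag when it sees an EmergencyOpEvent...
    pause_tx.send(true).expect("at least one receiver is alive");
    // ...and any component holding the receiver sees the new value.
    assert!(*pause_rx.borrow());
}
```
)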
diff --git a/crates/sui-bridge/src/node.rs b/crates/sui-bridge/src/node.rs index 8393a66029213..517aa496d3331 100644 --- a/crates/sui-bridge/src/node.rs +++ b/crates/sui-bridge/src/node.rs @@ -146,6 +146,7 @@ async fn start_client_components( bridge_auth_agg.clone(), bridge_pause_tx, sui_token_type_tags, + metrics.clone(), ); all_handles.push(spawn_logged_monitored_task!(monitor.run()));

diff --git a/crates/sui-bridge/src/sui_client.rs b/crates/sui-bridge/src/sui_client.rs index 56ce77050554c..34cecb6947913 100644 --- a/crates/sui-bridge/src/sui_client.rs +++ b/crates/sui-bridge/src/sui_client.rs @@ -8,6 +8,7 @@ use fastcrypto::traits::ToFromBytes; use serde::de::DeserializeOwned; use std::collections::HashMap; use std::str::from_utf8; +use std::sync::Arc; use std::time::Duration; use sui_json_rpc_api::BridgeReadApiClient; use sui_json_rpc_types::DevInspectResults; @@ -49,6 +50,7 @@ use tracing::{error, warn}; use crate::crypto::BridgeAuthorityPublicKey; use crate::error::{BridgeError, BridgeResult}; use crate::events::SuiBridgeEvent; +use crate::metrics::BridgeMetrics; use crate::retry_with_max_elapsed_time; use crate::types::BridgeActionStatus; use crate::types::ParsedTokenTransferMessage; @@ -56,19 +58,23 @@ use crate::types::{BridgeAction, BridgeAuthority, BridgeCommittee}; pub struct SuiClient<P> { inner: P, + bridge_metrics: Arc<BridgeMetrics>, } pub type SuiBridgeClient = SuiClient<SuiSdkClient>; impl SuiBridgeClient { - pub async fn new(rpc_url: &str) -> anyhow::Result<Self> { + pub async fn new(rpc_url: &str, bridge_metrics: Arc<BridgeMetrics>) -> anyhow::Result<Self> { let inner = SuiClientBuilder::default() .build(rpc_url) .await .map_err(|e| { anyhow!("Can't establish connection with Sui Rpc {rpc_url}. Error: {e}") })?; - let self_ = Self { inner }; + let self_ = Self { + inner, + bridge_metrics, + }; self_.describe().await?; Ok(self_) } @@ -83,7 +89,10 @@ where P: SuiClientInner, { pub fn new_for_testing(inner: P) -> Self { - Self { inner } + Self { + inner, + bridge_metrics: Arc::new(BridgeMetrics::new_for_testing()), + } } // TODO assert chain identifier @@ -265,7 +274,10 @@ where self.inner.get_reference_gas_price(), Duration::from_secs(30) ) else { - // TODO: add metrics and fire alert + self.bridge_metrics + .sui_rpc_errors + .with_label_values(&["get_reference_gas_price"]) + .inc(); error!("Failed to get reference gas price"); continue; }; @@ -296,7 +308,10 @@ where ), Duration::from_secs(30) ) else { - // TODO: add metrics and fire alert + self.bridge_metrics + .sui_rpc_errors + .with_label_values(&["get_token_transfer_action_onchain_status"]) + .inc(); error!( source_chain_id, seq_number, "Failed to get token transfer action onchain status" ); @@ -322,7 +337,10 @@ where ), Duration::from_secs(30) ) else { - // TODO: add metrics and fire alert + self.bridge_metrics + .sui_rpc_errors + .with_label_values(&["get_token_transfer_action_onchain_signatures"]) + .inc(); error!( source_chain_id, seq_number, "Failed to get token transfer action onchain signatures" ); @@ -773,7 +791,8 @@ mod tests { .build_with_bridge(bridge_keys, true) .await; - let sui_client = SuiClient::new(&test_cluster.fullnode_handle.rpc_url) + let bridge_metrics = Arc::new(BridgeMetrics::new_for_testing()); + let sui_client = SuiClient::new(&test_cluster.fullnode_handle.rpc_url, bridge_metrics) .await .unwrap(); let bridge_authority_keys = test_cluster.bridge_authority_keys.take().unwrap(); diff --git a/crates/sui-bridge/src/sui_transaction_builder.rs b/crates/sui-bridge/src/sui_transaction_builder.rs index 8e17cd60701ff..ff188d55927d8 100644 --- a/crates/sui-bridge/src/sui_transaction_builder.rs +++ b/crates/sui-bridge/src/sui_transaction_builder.rs @@ -617,6 +617,7 @@ pub fn build_committee_update_url_transaction( #[cfg(test)] mod tests { use crate::crypto::BridgeAuthorityKeyPair; + use crate::metrics::BridgeMetrics; use crate::sui_client::SuiClient; use crate::types::BridgeAction; use crate::types::EmergencyAction; @@ -632,6 +633,7 @@ mod tests { }; use ethers::types::Address as EthAddress; use std::collections::HashMap; + use std::sync::Arc; use sui_types::bridge::{BridgeChainId, TOKEN_ID_BTC, TOKEN_ID_USDC}; use sui_types::crypto::get_key_pair; use sui_types::crypto::ToFromBytes; @@ -650,7 +652,8 @@ mod tests { .build_with_bridge(bridge_keys, true) .await; - let sui_client = SuiClient::new(&test_cluster.fullnode_handle.rpc_url) + let metrics = Arc::new(BridgeMetrics::new_for_testing()); + let sui_client = SuiClient::new(&test_cluster.fullnode_handle.rpc_url, metrics) .await .unwrap(); let bridge_authority_keys = test_cluster.bridge_authority_keys.take().unwrap(); @@ -727,7 +730,8 @@ mod tests { .with_num_validators(num_valdiator) .build_with_bridge(bridge_keys, true) .await; - let sui_client = SuiClient::new(&test_cluster.fullnode_handle.rpc_url) + let metrics = Arc::new(BridgeMetrics::new_for_testing()); + let sui_client =
SuiClient::new(&test_cluster.fullnode_handle.rpc_url, metrics) .await .unwrap(); let bridge_authority_keys = test_cluster.bridge_authority_keys.take().unwrap(); @@ -796,7 +800,8 @@ mod tests { .with_protocol_version((BRIDGE_ENABLE_PROTOCOL_VERSION).into()) .build_with_bridge(bridge_keys, true) .await; - let sui_client = SuiClient::new(&test_cluster.fullnode_handle.rpc_url) + let metrics = Arc::new(BridgeMetrics::new_for_testing()); + let sui_client = SuiClient::new(&test_cluster.fullnode_handle.rpc_url, metrics) .await .unwrap(); let bridge_authority_keys = test_cluster.bridge_authority_keys.take().unwrap(); @@ -884,7 +889,8 @@ mod tests { .with_protocol_version((BRIDGE_ENABLE_PROTOCOL_VERSION).into()) .build_with_bridge(bridge_keys, true) .await; - let sui_client = SuiClient::new(&test_cluster.fullnode_handle.rpc_url) + let metrics = Arc::new(BridgeMetrics::new_for_testing()); + let sui_client = SuiClient::new(&test_cluster.fullnode_handle.rpc_url, metrics) .await .unwrap(); let bridge_authority_keys = test_cluster.bridge_authority_keys.take().unwrap(); @@ -953,7 +959,8 @@ mod tests { .with_protocol_version((BRIDGE_ENABLE_PROTOCOL_VERSION).into()) .build_with_bridge(bridge_keys, true) .await; - let sui_client = SuiClient::new(&test_cluster.fullnode_handle.rpc_url) + let metrics = Arc::new(BridgeMetrics::new_for_testing()); + let sui_client = SuiClient::new(&test_cluster.fullnode_handle.rpc_url, metrics) .await .unwrap(); let bridge_authority_keys = test_cluster.bridge_authority_keys.take().unwrap(); diff --git a/crates/sui-bridge/src/utils.rs b/crates/sui-bridge/src/utils.rs index 11508554a54e8..3bc787842b906 100644 --- a/crates/sui-bridge/src/utils.rs +++ b/crates/sui-bridge/src/utils.rs @@ -4,7 +4,9 @@ use crate::abi::{ EthBridgeCommittee, EthBridgeConfig, EthBridgeLimiter, EthBridgeVault, EthSuiBridge, }; -use crate::config::{default_ed25519_key_pair, BridgeNodeConfig, EthConfig, SuiConfig}; +use crate::config::{ + default_ed25519_key_pair, BridgeNodeConfig, EthConfig, MetricsConfig, SuiConfig, +}; use crate::crypto::BridgeAuthorityKeyPair; use crate::crypto::BridgeAuthorityPublicKeyBytes; use crate::server::APPLICATION_JSON; @@ -194,7 +196,10 @@ pub fn generate_bridge_node_config_and_write_to_file( run_client, db_path: None, metrics_key_pair: default_ed25519_key_pair(), - metrics: None, + metrics: Some(MetricsConfig { + push_interval_seconds: None, // use default value + push_url: "metrics_proxy_url".to_string(), + }), }; if run_client { config.sui.bridge_client_key_path = Some(PathBuf::from("/path/to/your/bridge_client_key")); diff --git a/crates/sui-cluster-test/Cargo.toml b/crates/sui-cluster-test/Cargo.toml index 0b38efc9bc6bb..93bf428007108 100644 --- a/crates/sui-cluster-test/Cargo.toml +++ b/crates/sui-cluster-test/Cargo.toml @@ -42,7 +42,3 @@ telemetry-subscribers.workspace = true test-cluster.workspace = true move-core-types.workspace = true - -[features] -default = [] -pg_integration = [] diff --git a/crates/sui-cluster-test/tests/local_cluster_test.rs b/crates/sui-cluster-test/tests/local_cluster_test.rs index a5bdc47236ef3..0a9ea1353013c 100644 --- a/crates/sui-cluster-test/tests/local_cluster_test.rs +++ b/crates/sui-cluster-test/tests/local_cluster_test.rs @@ -10,7 +10,6 @@ async fn cluster_test() { ClusterTest::run(ClusterTestOpt::new_local()).await; } -#[cfg(feature = "pg_integration")] #[tokio::test] async fn test_sui_cluster() { use reqwest::StatusCode; diff --git a/crates/sui-config/src/node.rs b/crates/sui-config/src/node.rs index 976ceb6e23e6c..08ea30d786a25 
100644 --- a/crates/sui-config/src/node.rs +++ b/crates/sui-config/src/node.rs @@ -270,6 +270,10 @@ pub fn default_zklogin_oauth_providers() -> BTreeMap<Chain, BTreeSet<String>> { "Microsoft".to_string(), "KarrierOne".to_string(), "Credenza3".to_string(), + "Playtron".to_string(), + "Threedos".to_string(), + "Onefc".to_string(), + "FanTV".to_string(), "AwsTenant-region:us-east-1-tenant_id:us-east-1_LPSLCkC3A".to_string(), // test tenant in mysten aws "AwsTenant-region:us-east-1-tenant_id:us-east-1_qPsZxYqd8".to_string(), // ambrus, external partner ]); diff --git a/crates/sui-core/src/authority.rs b/crates/sui-core/src/authority.rs index daf157c740dad..1e626d50c5897 100644 --- a/crates/sui-core/src/authority.rs +++ b/crates/sui-core/src/authority.rs @@ -961,7 +961,7 @@ impl AuthorityState { let cache_reader = self.get_transaction_cache_reader().clone(); let epoch_store = epoch_store.clone(); spawn_monitored_task!(epoch_store.within_alive_epoch( - validator_tx_finalizer.track_signed_tx(cache_reader, tx) + validator_tx_finalizer.track_signed_tx(cache_reader, &epoch_store, tx) )); } } @@ -4381,7 +4381,7 @@ impl AuthorityState { desired_upgrades.sort(); desired_upgrades .into_iter() - .group_by(|(packages, _authority)| packages.clone()) + .chunk_by(|(packages, _authority)| packages.clone()) .into_iter() .find_map(|(packages, group)| { // should have been filtered out earlier. @@ -4468,7 +4468,7 @@ impl AuthorityState { desired_upgrades.sort(); desired_upgrades .into_iter() - .group_by(|(digest, packages, _authority)| (*digest, packages.clone())) + .chunk_by(|(digest, packages, _authority)| (*digest, packages.clone())) .into_iter() .find_map(|((digest, packages), group)| { // should have been filtered out earlier.
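(Editor's note: the group_by to chunk_by renames above track itertools 0.13, where group_by was deprecated in favor of chunk_by. Semantics are unchanged: it groups consecutive runs of equal keys, which is why the callers sort first. A small sketch:

```rust
use itertools::Itertools;

fn main() {
    // Pre-sorted input, mirroring the desired_upgrades.sort() above.
    let votes = vec![("pkg-a", 1), ("pkg-a", 2), ("pkg-b", 3)];
    let chunks = votes.into_iter().chunk_by(|(pkg, _)| *pkg);
    let grouped: Vec<(&str, Vec<i32>)> = chunks
        .into_iter()
        .map(|(pkg, group)| (pkg, group.map(|(_, w)| w).collect()))
        .collect();
    assert_eq!(grouped, vec![("pkg-a", vec![1, 2]), ("pkg-b", vec![3])]);
}
```
)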
diff --git a/crates/sui-core/src/authority/authority_per_epoch_store.rs b/crates/sui-core/src/authority/authority_per_epoch_store.rs index a5e71a43beb53..8e38a41242e84 100644 --- a/crates/sui-core/src/authority/authority_per_epoch_store.rs +++ b/crates/sui-core/src/authority/authority_per_epoch_store.rs @@ -339,7 +339,7 @@ pub struct AuthorityPerEpochStore { /// We need to keep track of those in order to know when to send EndOfPublish message. /// Lock ordering: this is a 'leaf' lock, no other locks should be acquired in the scope of this lock /// In particular, this lock is always acquired after taking read or write lock on reconfig state - pending_consensus_certificates: Mutex<HashSet<TransactionDigest>>, + pending_consensus_certificates: RwLock<HashSet<TransactionDigest>>, /// MutexTable for transaction locks (prevent concurrent execution of same transaction) mutex_table: MutexTable<TransactionDigest>, @@ -876,7 +876,7 @@ impl AuthorityPerEpochStore { running_root_notify_read: NotifyRead::new(), executed_digests_notify_read: NotifyRead::new(), end_of_publish: Mutex::new(end_of_publish), - pending_consensus_certificates: Mutex::new(pending_consensus_certificates), + pending_consensus_certificates: RwLock::new(pending_consensus_certificates), mutex_table: MutexTable::new(MUTEX_TABLE_SIZE), epoch_open_time: current_time, epoch_close_time: Default::default(), @@ -1889,7 +1889,7 @@ impl AuthorityPerEpochStore { "Reconfiguration state should allow accepting user transactions" ); self.pending_consensus_certificates - .lock() + .write() .insert(*cert.digest()); } } @@ -1906,22 +1906,28 @@ impl AuthorityPerEpochStore { // TODO: lock once for all remove() calls. for key in keys { if let ConsensusTransactionKey::Certificate(cert) = key { - self.pending_consensus_certificates.lock().remove(cert); + self.pending_consensus_certificates.write().remove(cert); } } Ok(()) } pub fn pending_consensus_certificates_count(&self) -> usize { - self.pending_consensus_certificates.lock().len() + self.pending_consensus_certificates.read().len() } pub fn pending_consensus_certificates_empty(&self) -> bool { - self.pending_consensus_certificates.lock().is_empty() + self.pending_consensus_certificates.read().is_empty() } pub fn pending_consensus_certificates(&self) -> HashSet<TransactionDigest> { - self.pending_consensus_certificates.lock().clone() + self.pending_consensus_certificates.read().clone() + } + + pub fn is_pending_consensus_certificate(&self, tx_digest: &TransactionDigest) -> bool { + self.pending_consensus_certificates + .read() + .contains(tx_digest) } pub fn deferred_transactions_empty(&self) -> bool {
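(Editor's note: pending_consensus_certificates moves from a Mutex to a RwLock because the hot paths, including the new is_pending_consensus_certificate check used by the validator tx finalizer below, only read the set. A sketch of the pattern with parking_lot, whose guards are the unwrap-free read()/write() calls seen above; u64 stands in for TransactionDigest:

```rust
use parking_lot::RwLock;
use std::collections::HashSet;

struct PendingCerts {
    pending: RwLock<HashSet<u64>>,
}

impl PendingCerts {
    fn insert(&self, digest: u64) {
        // Writers still serialize behind the exclusive lock.
        self.pending.write().insert(digest);
    }

    fn is_pending(&self, digest: &u64) -> bool {
        // Many readers can hold this guard concurrently.
        self.pending.read().contains(digest)
    }
}

fn main() {
    let certs = PendingCerts { pending: RwLock::new(HashSet::new()) };
    certs.insert(42);
    assert!(certs.is_pending(&42));
}
```
)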
diff --git a/crates/sui-core/src/authority_server.rs b/crates/sui-core/src/authority_server.rs index ee927304682c7..abe16af083743 100644 --- a/crates/sui-core/src/authority_server.rs +++ b/crates/sui-core/src/authority_server.rs @@ -841,7 +841,7 @@ impl ValidatorService { fp_ensure!( !epoch_store.is_any_tx_certs_consensus_message_processed(certificates.iter())?, SuiError::UserInputError { - error: UserInputError::CeritificateAlreadyProcessed + error: UserInputError::CertificateAlreadyProcessed } .into() ); diff --git a/crates/sui-core/src/consensus_manager/mysticeti_manager.rs b/crates/sui-core/src/consensus_manager/mysticeti_manager.rs index 22a8d38075083..93612791e90f3 100644 --- a/crates/sui-core/src/consensus_manager/mysticeti_manager.rs +++ b/crates/sui-core/src/consensus_manager/mysticeti_manager.rs @@ -178,8 +178,8 @@ impl ConsensusManagerTrait for MysticetiManager { let registry_id = self.registry_service.add(registry.clone()); - self.authority - .swap(Some(Arc::new((authority, registry_id)))); + let registered_authority = Arc::new((authority, registry_id)); + self.authority.swap(Some(registered_authority.clone())); // Initialize the client to send transactions to this Mysticeti instance. self.client.set(client); @@ -188,6 +188,9 @@ impl ConsensusManagerTrait for MysticetiManager { let handler = MysticetiConsensusHandler::new(consensus_handler, commit_receiver, monitor); let mut consensus_handler = self.consensus_handler.lock().await; *consensus_handler = Some(handler); + + // Wait until all locally available commits have been processed + registered_authority.0.replay_complete().await; } async fn shutdown(&self) { diff --git a/crates/sui-core/src/consensus_validator.rs b/crates/sui-core/src/consensus_validator.rs index b0a061e9a93b2..9c2609e36d971 100644 --- a/crates/sui-core/src/consensus_validator.rs +++ b/crates/sui-core/src/consensus_validator.rs @@ -3,7 +3,7 @@ use std::sync::Arc; -use consensus_core::{TransactionVerifier, ValidationError}; +use consensus_core::{TransactionIndex, TransactionVerifier, ValidationError}; use eyre::WrapErr; use fastcrypto_tbls::dkg; use mysten_metrics::monitored_scope; @@ -152,6 +152,10 @@ impl TransactionVerifier for SuiTxValidator { self.validate_transactions(txs) .map_err(|e| ValidationError::InvalidTransaction(e.to_string())) } + + fn vote_batch(&self, _batch: &[&[u8]]) -> Vec<TransactionIndex> { + vec![] + } } pub struct SuiTxValidatorMetrics { diff --git a/crates/sui-core/src/quorum_driver/mod.rs b/crates/sui-core/src/quorum_driver/mod.rs index 94ffdfdbf6771..69923bac8c26e 100644 --- a/crates/sui-core/src/quorum_driver/mod.rs +++ b/crates/sui-core/src/quorum_driver/mod.rs @@ -354,8 +354,7 @@ where ); Err(Some(QuorumDriverError::ObjectsDoubleUsed { conflicting_txes: conflicting_tx_digests, - retried_tx: None, - retried_tx_success: None, + retried_tx_status: None, })) } @@ -450,8 +449,7 @@ where ); Err(Some(QuorumDriverError::ObjectsDoubleUsed { conflicting_txes: conflicting_tx_digests, - retried_tx: None, - retried_tx_success: None, + retried_tx_status: None, })) } Ok(success) => { @@ -468,8 +466,7 @@ where } Err(Some(QuorumDriverError::ObjectsDoubleUsed { conflicting_txes: conflicting_tx_digests, - retried_tx: Some(conflicting_tx_digest), - retried_tx_success: Some(success), + retried_tx_status: Some((conflicting_tx_digest, success)), })) } } diff --git a/crates/sui-core/src/quorum_driver/tests.rs b/crates/sui-core/src/quorum_driver/tests.rs index 049fc9d0a8c9e..3c721d4667ef8 100644 --- a/crates/sui-core/src/quorum_driver/tests.rs +++ b/crates/sui-core/src/quorum_driver/tests.rs @@ -296,12 +296,10 @@ async fn test_quorum_driver_object_locked() -> Result<(), anyhow::Error> { // double spend and this is a fatal error. if let Err(QuorumDriverError::ObjectsDoubleUsed { conflicting_txes, - retried_tx, - retried_tx_success, + retried_tx_status, }) = res { - assert_eq!(retried_tx, None); - assert_eq!(retried_tx_success, None); + assert_eq!(retried_tx_status, None); assert_eq!(conflicting_txes.len(), 1); assert_eq!(conflicting_txes.iter().next().unwrap().0, tx.digest()); } else { @@ -338,12 +336,10 @@ async fn test_quorum_driver_object_locked() -> Result<(), anyhow::Error> { // Aggregator gets three bad responses, and tries tx, which should succeed.
if let Err(QuorumDriverError::ObjectsDoubleUsed { conflicting_txes, - retried_tx, - retried_tx_success, + retried_tx_status, }) = res { - assert_eq!(retried_tx, Some(*tx.digest())); - assert_eq!(retried_tx_success, Some(true)); + assert_eq!(retried_tx_status, Some((*tx.digest(), true))); assert_eq!(conflicting_txes.len(), 1); assert_eq!(conflicting_txes.iter().next().unwrap().0, tx.digest()); } else { @@ -403,12 +399,10 @@ async fn test_quorum_driver_object_locked() -> Result<(), anyhow::Error> { if let Err(QuorumDriverError::ObjectsDoubleUsed { conflicting_txes, - retried_tx, - retried_tx_success, + retried_tx_status, }) = res { - assert_eq!(retried_tx, None); - assert_eq!(retried_tx_success, None); + assert_eq!(retried_tx_status, None); assert_eq!(conflicting_txes.len(), 2); let tx_stake = conflicting_txes.get(tx.digest()).unwrap().1; assert!(tx_stake == 2500 || tx_stake == 5000); @@ -444,12 +438,10 @@ async fn test_quorum_driver_object_locked() -> Result<(), anyhow::Error> { if let Err(QuorumDriverError::ObjectsDoubleUsed { conflicting_txes, - retried_tx, - retried_tx_success, + retried_tx_status, }) = res { - assert_eq!(retried_tx, None); - assert_eq!(retried_tx_success, None); + assert_eq!(retried_tx_status, None); assert_eq!(conflicting_txes.len(), 1); assert_eq!(conflicting_txes.get(tx.digest()).unwrap().1, 5000); } else { @@ -515,12 +507,10 @@ async fn test_quorum_driver_object_locked() -> Result<(), anyhow::Error> { if let Err(QuorumDriverError::ObjectsDoubleUsed { conflicting_txes, - retried_tx, - retried_tx_success, + retried_tx_status, }) = res { - assert_eq!(retried_tx, None); - assert_eq!(retried_tx_success, None); + assert_eq!(retried_tx_status, None); assert!(conflicting_txes.len() == 3 || conflicting_txes.len() == 2); assert!(conflicting_txes .iter() diff --git a/crates/sui-core/src/rest_index.rs b/crates/sui-core/src/rest_index.rs index ade98f8ada4dd..7e16553172beb 100644 --- a/crates/sui-core/src/rest_index.rs +++ b/crates/sui-core/src/rest_index.rs @@ -452,7 +452,9 @@ impl IndexStoreTables { } Owner::ObjectOwner(parent) => { if let Some(field_info) = - try_create_dynamic_field_info(object, resolver)? 
+ try_create_dynamic_field_info(object, resolver) + .ok() + .flatten() { let field_key = DynamicFieldKey::new(*parent, object.id()); diff --git a/crates/sui-core/src/unit_tests/subscription_handler_tests.rs b/crates/sui-core/src/unit_tests/subscription_handler_tests.rs index 67b2e78b3110a..1e67cf8cb34a6 100644 --- a/crates/sui-core/src/unit_tests/subscription_handler_tests.rs +++ b/crates/sui-core/src/unit_tests/subscription_handler_tests.rs @@ -85,11 +85,11 @@ impl TestEvent { fn layout() -> MoveStructLayout { MoveStructLayout { type_: Self::type_(), - fields: vec![ + fields: Box::new(vec![ MoveFieldLayout::new(ident_str!("creator").to_owned(), MoveTypeLayout::Address), MoveFieldLayout::new( ident_str!("name").to_owned(), - MoveTypeLayout::Struct(UTF8String::layout()), + MoveTypeLayout::Struct(Box::new(UTF8String::layout())), ), MoveFieldLayout::new( ident_str!("data").to_owned(), @@ -97,9 +97,11 @@ impl TestEvent { ), MoveFieldLayout::new( ident_str!("coins").to_owned(), - MoveTypeLayout::Vector(Box::new(MoveTypeLayout::Struct(GasCoin::layout()))), + MoveTypeLayout::Vector(Box::new(MoveTypeLayout::Struct(Box::new( + GasCoin::layout(), + )))), ), - ], + ]), } } } @@ -131,10 +133,10 @@ impl UTF8String { fn layout() -> MoveStructLayout { MoveStructLayout { type_: Self::type_(), - fields: vec![MoveFieldLayout::new( + fields: Box::new(vec![MoveFieldLayout::new( ident_str!("bytes").to_owned(), MoveTypeLayout::Vector(Box::new(MoveTypeLayout::U8)), - )], + )]), } } } diff --git a/crates/sui-core/src/unit_tests/transaction_tests.rs b/crates/sui-core/src/unit_tests/transaction_tests.rs index 6861e0e9b610f..053e494af838f 100644 --- a/crates/sui-core/src/unit_tests/transaction_tests.rs +++ b/crates/sui-core/src/unit_tests/transaction_tests.rs @@ -2149,7 +2149,7 @@ async fn test_handle_soft_bundle_certificates_errors() { assert_matches!( response.unwrap_err(), SuiError::UserInputError { - error: UserInputError::CeritificateAlreadyProcessed { .. }, + error: UserInputError::CertificateAlreadyProcessed { .. }, } ); } diff --git a/crates/sui-core/src/validator_tx_finalizer.rs b/crates/sui-core/src/validator_tx_finalizer.rs index fde6c12e224eb..2129fd147b571 100644 --- a/crates/sui-core/src/validator_tx_finalizer.rs +++ b/crates/sui-core/src/validator_tx_finalizer.rs @@ -1,6 +1,7 @@ // Copyright (c) Mysten Labs, Inc. 
// SPDX-License-Identifier: Apache-2.0 +use crate::authority::authority_per_epoch_store::AuthorityPerEpochStore; use crate::authority_aggregator::AuthorityAggregator; use crate::authority_client::AuthorityAPI; use crate::execution_cache::TransactionCacheRead; @@ -177,11 +178,15 @@ where pub async fn track_signed_tx( &self, cache_read: Arc<dyn TransactionCacheRead>, + epoch_store: &Arc<AuthorityPerEpochStore>, tx: VerifiedSignedTransaction, ) { let tx_digest = *tx.digest(); trace!(?tx_digest, "Tracking signed transaction"); - match self.delay_and_finalize_tx(cache_read, tx).await { + match self + .delay_and_finalize_tx(cache_read, epoch_store, tx) + .await + { Ok(did_run) => { if did_run { debug!(?tx_digest, "Transaction finalized"); @@ -196,6 +201,7 @@ where async fn delay_and_finalize_tx( &self, cache_read: Arc<dyn TransactionCacheRead>, + epoch_store: &Arc<AuthorityPerEpochStore>, tx: VerifiedSignedTransaction, ) -> anyhow::Result<bool> { let tx_digest = *tx.digest(); @@ -213,6 +219,14 @@ where } } + if epoch_store.is_pending_consensus_certificate(&tx_digest) { + trace!( + ?tx_digest, + "Transaction has been submitted to consensus, no need to help drive finality" + ); + return Ok(false); + } + self.metrics .validator_tx_finalizer_attempt_delay .observe(tx_finalization_delay.as_secs_f64()); @@ -419,9 +433,12 @@ mod tests { let signed_tx = create_tx(&clients, &states[0], sender, &keypair, gas_object_id).await; let tx_digest = *signed_tx.digest(); let cache_read = states[0].get_transaction_cache_reader().clone(); + let epoch_store = states[0].epoch_store_for_testing(); let metrics = finalizer1.metrics.clone(); let handle = tokio::spawn(async move { - finalizer1.track_signed_tx(cache_read, signed_tx).await; + finalizer1 + .track_signed_tx(cache_read, &epoch_store, signed_tx) + .await; }); handle.await.unwrap(); check_quorum_execution(&auth_agg.load(), &clients, &tx_digest, true); @@ -452,7 +469,7 @@ mod tests { let metrics = finalizer1.metrics.clone(); let handle = tokio::spawn(async move { let _ = epoch_store - .within_alive_epoch(finalizer1.track_signed_tx(cache_read, signed_tx)) + .within_alive_epoch(finalizer1.track_signed_tx(cache_read, &epoch_store, signed_tx)) .await; }); states[0].reconfigure_for_testing().await; @@ -499,12 +516,13 @@ mod tests { let signed_tx = create_tx(&clients, &states[0], sender, &keypair, gas_object_id).await; let tx_digest = *signed_tx.digest(); let cache_read = states[0].get_transaction_cache_reader().clone(); + let epoch_store = states[0].epoch_store_for_testing(); let metrics = finalizer1.metrics.clone(); let signed_tx_clone = signed_tx.clone(); let handle = tokio::spawn(async move { finalizer1 - .track_signed_tx(cache_read, signed_tx_clone) + .track_signed_tx(cache_read, &epoch_store, signed_tx_clone) .await; }); auth_agg @@ -537,6 +555,7 @@ mod tests { let signed_tx = create_tx(&clients, &states[0], sender, &keypair, gas_object_id).await; let tx_digest = *signed_tx.digest(); let cache_read = states[0].get_transaction_cache_reader().clone(); + let epoch_store = states[0].epoch_store_for_testing(); for client in clients.values() { client.inject_fault.store(true, Relaxed); } @@ -545,7 +564,7 @@ mod tests { let signed_tx_clone = signed_tx.clone(); let handle = tokio::spawn(async move { finalizer1 - .track_signed_tx(cache_read, signed_tx_clone) + .track_signed_tx(cache_read, &epoch_store, signed_tx_clone) .await; }); handle.await.unwrap();
diff --git a/crates/sui-data-ingestion-core/src/reader.rs b/crates/sui-data-ingestion-core/src/reader.rs index db25ed6d85fac..59a6e79260f21 100644 --- a/crates/sui-data-ingestion-core/src/reader.rs +++ b/crates/sui-data-ingestion-core/src/reader.rs @@ -27,10 +27,9 @@ use tokio::sync::oneshot; use tokio::time::timeout; use tracing::{debug, error, info}; -/// Implements a checkpoint reader that monitors a local directory. -/// Designed for setups where the indexer daemon is colocated with FN. -/// This implementation is push-based and utilizes the inotify API. pub struct CheckpointReader { + /// Used to read from a local directory when running with a colocated FN. + /// When fetching from a remote store, a temp dir can be passed in and it will be a no-op path: PathBuf, remote_store_url: Option<String>, remote_store_options: Vec<(String, String)>, diff --git a/crates/sui-deepbook-indexer/Cargo.toml b/crates/sui-deepbook-indexer/Cargo.toml new file mode 100644 index 0000000000000..b32a1769c6c9d --- /dev/null +++ b/crates/sui-deepbook-indexer/Cargo.toml @@ -0,0 +1,45 @@ +[package] +name = "sui-deepbook-indexer" +version = "0.1.0" +authors = ["Mysten Labs <build@mystenlabs.com>"] +license = "Apache-2.0" +publish = false +edition = "2021" + +[dependencies] +serde.workspace = true +tap.workspace = true +diesel = { workspace = true, features = ["serde_json"] } +diesel-async = { workspace = true, features = ["bb8", "postgres"] } +tokio = { workspace = true, features = ["full"] } +anyhow.workspace = true +futures.workspace = true +async-trait.workspace = true +bcs.workspace = true +bin-version.workspace = true +clap.workspace = true +mysten-metrics.workspace = true +prometheus.workspace = true +serde_yaml.workspace = true +sui-bridge.workspace = true +sui-sdk.workspace = true +sui-json-rpc-types.workspace = true +sui-data-ingestion-core.workspace = true +sui-types.workspace = true +telemetry-subscribers.workspace = true +tracing.workspace = true +backoff.workspace = true +sui-config.workspace = true +sui-indexer-builder.workspace = true +tempfile.workspace = true +bigdecimal = "0.4.0" + +[dev-dependencies] +sui-types = { workspace = true, features = ["test-utils"] } +sui-test-transaction-builder.workspace = true +test-cluster.workspace = true +hex-literal = "0.3.4" + +[[bin]] +name = "deepbook-indexer" +path = "src/main.rs" diff --git a/crates/sui-deepbook-indexer/config.yaml b/crates/sui-deepbook-indexer/config.yaml new file mode 100644 index 0000000000000..e5a3b12686932 --- /dev/null +++ b/crates/sui-deepbook-indexer/config.yaml @@ -0,0 +1,22 @@ +# config.yaml format: + +# URL of the remote store +# remote_store_url: +# Database connection URL +# db_url: +# Path to the checkpoints +# checkpoints_path: +# DeepBook package ID +# deepbook_package_id: +# DeepBook genesis checkpoint +# deepbook_genesis_checkpoint: +# Number of concurrent operations +# concurrency: 1 +# Client metric URL +# metric_url: +# Client metric port +# metric_port: +# Checkpoint size of each backfill worker; use 432000 for 1 worker per day, assuming 5 checkpoints per second +# back_fill_lot_size: +# Optional starting checkpoint for the realtime ingestion task +# resume_from_checkpoint: \ No newline at end of file diff --git a/crates/sui-deepbook-indexer/diesel.toml b/crates/sui-deepbook-indexer/diesel.toml new file mode 100644 index 0000000000000..061dc413ebe1f --- /dev/null +++ b/crates/sui-deepbook-indexer/diesel.toml @@ -0,0 +1,9 @@ +# For documentation on how to configure this file, +# see https://diesel.rs/guides/configuring-diesel-cli + +[print_schema] +file = "src/schema.rs" +#custom_type_derives = ["diesel::query_builder::QueryId"] + +[migrations_directory] +dir = "src/migrations"
diff --git a/crates/sui-deepbook-indexer/src/config.rs b/crates/sui-deepbook-indexer/src/config.rs new file mode 100644 index 0000000000000..363d722451061 --- /dev/null +++ b/crates/sui-deepbook-indexer/src/config.rs @@ -0,0 +1,27 @@ +// Copyright (c) Mysten Labs, Inc. +// SPDX-License-Identifier: Apache-2.0 + +use serde::{Deserialize, Serialize}; +use std::env; + +/// Config as loaded from `config.yaml`. +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct IndexerConfig { + pub remote_store_url: String, + #[serde(default = "default_db_url")] + pub db_url: String, + /// Only provide this if you use a colocated FN + pub checkpoints_path: Option<String>, + pub sui_rpc_url: String, + pub deepbook_package_id: String, + pub deepbook_genesis_checkpoint: u64, + pub concurrency: u64, + pub metric_port: u16, + pub resume_from_checkpoint: Option<u64>, +} + +impl sui_config::Config for IndexerConfig {} + +pub fn default_db_url() -> String { + env::var("DB_URL").expect("db_url must be set in config or via the $DB_URL env var") +}
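(Editor's note: because IndexerConfig implements sui_config::Config, it picks up that trait's YAML loader; main.rs below calls it the same way. A usage sketch; the path is a placeholder, and DB_URL must be exported if db_url is absent from the file:

```rust
use sui_config::Config;
use sui_deepbook_indexer::config::IndexerConfig;

fn main() -> anyhow::Result<()> {
    // Reads and deserializes the YAML file shown in config.yaml above.
    let config = IndexerConfig::load("config.yaml")?;
    println!("indexing package {}", config.deepbook_package_id);
    Ok(())
}
```
)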
diff --git a/crates/sui-deepbook-indexer/src/events.rs b/crates/sui-deepbook-indexer/src/events.rs new file mode 100644 index 0000000000000..026bc27d57ff5 --- /dev/null +++ b/crates/sui-deepbook-indexer/src/events.rs @@ -0,0 +1,131 @@ +// Copyright (c) Mysten Labs, Inc. +// SPDX-License-Identifier: Apache-2.0 + +use serde::{Deserialize, Serialize}; +use sui_types::base_types::{ObjectID, SuiAddress}; + +#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, Clone)] +pub struct MoveOrderFilledEvent { + pub pool_id: ObjectID, + pub maker_order_id: u128, + pub taker_order_id: u128, + pub maker_client_order_id: u64, + pub taker_client_order_id: u64, + pub price: u64, + pub taker_is_bid: bool, + pub taker_fee: u64, + pub maker_fee: u64, + pub base_quantity: u64, + pub quote_quantity: u64, + pub maker_balance_manager_id: ObjectID, + pub taker_balance_manager_id: ObjectID, + pub timestamp: u64, +} + +#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, Clone)] +pub struct MoveOrderCanceledEvent { + pub balance_manager_id: ObjectID, + pub pool_id: ObjectID, + pub order_id: u128, + pub client_order_id: u64, + pub trader: SuiAddress, + pub price: u64, + pub is_bid: bool, + pub original_quantity: u64, + pub base_asset_quantity_canceled: u64, + pub timestamp: u64, +} + +#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, Clone)] +pub struct MoveOrderModifiedEvent { + pub balance_manager_id: ObjectID, + pub pool_id: ObjectID, + pub order_id: u128, + pub client_order_id: u64, + pub trader: SuiAddress, + pub price: u64, + pub is_bid: bool, + pub new_quantity: u64, + pub timestamp: u64, +} + +#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, Clone)] +pub struct MoveOrderPlacedEvent { + pub balance_manager_id: ObjectID, + pub pool_id: ObjectID, + pub order_id: u128, + pub client_order_id: u64, + pub trader: SuiAddress, + pub price: u64, + pub is_bid: bool, + pub placed_quantity: u64, + pub expire_timestamp: u64, +} + +#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, Clone)] +pub struct MovePriceAddedEvent { + pub conversion_rate: u64, + pub timestamp: u64, + pub is_base_conversion: bool, + pub reference_pool: ObjectID, + pub target_pool: ObjectID, +} + +#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, Clone)] +pub struct MoveFlashLoanBorrowedEvent { + pub pool_id: ObjectID, + pub borrow_quantity: u64, + pub type_name: String, +} + +#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, Clone)] +pub struct MoveBalanceEvent { + pub balance_manager_id: ObjectID, + pub asset: String, + pub amount: u64, + pub deposit: bool, +} + +#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, Clone)] +pub struct MoveTradeParamsUpdateEvent { + pub taker_fee: u64, + pub maker_fee: u64, + pub stake_required: u64, +} + +#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, Clone)] +pub struct MoveStakeEvent { + pub pool_id: ObjectID, + pub balance_manager_id: ObjectID, + pub epoch: u64, + pub amount: u64, + pub stake: bool, +} + +#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, Clone)] +pub struct MoveProposalEvent { + pub pool_id: ObjectID, + pub balance_manager_id: ObjectID, + pub epoch: u64, + pub taker_fee: u64, + pub maker_fee: u64, + pub stake_required: u64, +} + +#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, Clone)] +pub struct MoveVoteEvent { + pub pool_id: ObjectID, + pub balance_manager_id: ObjectID, + pub epoch: u64, + pub from_proposal_id: Option<ObjectID>, + pub to_proposal_id: ObjectID, + pub stake: u64, +} + +#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, Clone)] +pub struct MoveRebateEvent { + pub pool_id: ObjectID, + pub balance_manager_id: ObjectID, + pub epoch: u64, + pub claim_amount: u64, +} diff --git a/crates/sui-deepbook-indexer/src/lib.rs b/crates/sui-deepbook-indexer/src/lib.rs new file mode 100644 index 0000000000000..3c461811dc991 --- /dev/null +++ b/crates/sui-deepbook-indexer/src/lib.rs @@ -0,0 +1,13 @@ +// Copyright (c) Mysten Labs, Inc. +// SPDX-License-Identifier: Apache-2.0 + +pub mod config; +pub mod events; +pub mod metrics; +pub mod models; +pub mod postgres_manager; +pub mod schema; +pub mod types; + +pub mod sui_datasource; +pub mod sui_deepbook_indexer;
diff --git a/crates/sui-deepbook-indexer/src/main.rs b/crates/sui-deepbook-indexer/src/main.rs new file mode 100644 index 0000000000000..3ef803c77a5b4 --- /dev/null +++ b/crates/sui-deepbook-indexer/src/main.rs @@ -0,0 +1,101 @@ +// Copyright (c) Mysten Labs, Inc. +// SPDX-License-Identifier: Apache-2.0 + +use anyhow::Result; +use clap::*; +use mysten_metrics::start_prometheus_server; +use std::env; +use std::net::IpAddr; +use std::net::{Ipv4Addr, SocketAddr}; +use std::path::PathBuf; +use std::sync::Arc; +use sui_config::Config; +use sui_data_ingestion_core::DataIngestionMetrics; +use sui_deepbook_indexer::config::IndexerConfig; +use sui_deepbook_indexer::metrics::DeepBookIndexerMetrics; +use sui_deepbook_indexer::postgres_manager::get_connection_pool; +use sui_deepbook_indexer::sui_datasource::SuiCheckpointDatasource; +use sui_deepbook_indexer::sui_deepbook_indexer::PgDeepbookPersistent; +use sui_deepbook_indexer::sui_deepbook_indexer::SuiDeepBookDataMapper; +use sui_indexer_builder::indexer_builder::IndexerBuilder; +use sui_indexer_builder::progress::{OutOfOrderSaveAfterDurationPolicy, ProgressSavingPolicy}; +use sui_sdk::SuiClientBuilder; +use sui_types::base_types::ObjectID; +use tracing::info; + +#[derive(Parser, Clone, Debug)] +struct Args { + /// Path to a yaml config + #[clap(long, short)] + config_path: Option<PathBuf>, +} + +#[tokio::main] +async fn main() -> Result<()> { + let _guard = telemetry_subscribers::TelemetryConfig::new() + .with_env() + .init(); + + let args = Args::parse(); + + // load config + let config_path = if let Some(path) = args.config_path { + path + } else { + env::current_dir() + .expect("Couldn't get current directory") + .join("config.yaml") + }; + let config = IndexerConfig::load(&config_path)?; + + // Init metrics server + let metrics_address = + SocketAddr::new(IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)), config.metric_port); + let registry_service = start_prometheus_server(metrics_address); + let registry = registry_service.default_registry(); + mysten_metrics::init_metrics(&registry); + info!("Metrics server started at port {}", config.metric_port); + + let indexer_metrics = DeepBookIndexerMetrics::new(&registry); + let ingestion_metrics = DataIngestionMetrics::new(&registry); + + let db_url = config.db_url.clone(); + let datastore = PgDeepbookPersistent::new( + get_connection_pool(db_url.clone()).await, + ProgressSavingPolicy::OutOfOrderSaveAfterDuration(OutOfOrderSaveAfterDurationPolicy::new( + tokio::time::Duration::from_secs(30), + )), + ); + + let sui_client = Arc::new( + SuiClientBuilder::default() + .build(config.sui_rpc_url.clone()) + .await?, + ); + let sui_checkpoint_datasource = SuiCheckpointDatasource::new( + config.remote_store_url, + sui_client, + config.concurrency as usize, + config + .checkpoints_path + .map(|p| p.into()) + .unwrap_or(tempfile::tempdir()?.into_path()), + config.deepbook_genesis_checkpoint, + ingestion_metrics.clone(), + indexer_metrics.clone(), + ); + let indexer = IndexerBuilder::new( + "SuiDeepBookIndexer", + sui_checkpoint_datasource, + SuiDeepBookDataMapper { + metrics: indexer_metrics.clone(), + package_id: ObjectID::from_hex_literal(&config.deepbook_package_id.clone()) + .unwrap_or_else(|err| panic!("Failed to parse deepbook package ID: {}", err)), + }, + datastore, + ) + .build(); + indexer.start().await?; + + Ok(()) +} diff --git a/crates/sui-deepbook-indexer/src/metrics.rs b/crates/sui-deepbook-indexer/src/metrics.rs new file mode 100644 index 0000000000000..4a88807cd7d3f --- /dev/null +++ b/crates/sui-deepbook-indexer/src/metrics.rs @@ -0,0 +1,46 @@ +// Copyright (c) Mysten Labs, Inc.
+// SPDX-License-Identifier: Apache-2.0
+
+use prometheus::{
+    register_int_counter_vec_with_registry, register_int_counter_with_registry,
+    register_int_gauge_vec_with_registry, IntCounter, IntCounterVec, IntGaugeVec, Registry,
+};
+
+#[derive(Clone, Debug)]
+pub struct DeepBookIndexerMetrics {
+    pub(crate) total_deepbook_transactions: IntCounter,
+    pub(crate) backfill_tasks_remaining_checkpoints: IntGaugeVec,
+    pub(crate) tasks_processed_checkpoints: IntCounterVec,
+}
+
+impl DeepBookIndexerMetrics {
+    pub fn new(registry: &Registry) -> Self {
+        Self {
+            total_deepbook_transactions: register_int_counter_with_registry!(
+                "deepbook_indexer_total_deepbook_transactions",
+                "Total number of deepbook transactions",
+                registry,
+            )
+            .unwrap(),
+            backfill_tasks_remaining_checkpoints: register_int_gauge_vec_with_registry!(
+                "deepbook_indexer_backfill_tasks_remaining_checkpoints",
+                "The remaining checkpoints for the currently running backfill task",
+                &["task_name"],
+                registry,
+            )
+            .unwrap(),
+            tasks_processed_checkpoints: register_int_counter_vec_with_registry!(
+                "deepbook_indexer_tasks_processed_checkpoints",
+                "Total processed checkpoints for each task",
+                &["task_name", "task_type"],
+                registry,
+            )
+            .unwrap(),
+        }
+    }
+
+    pub fn new_for_testing() -> Self {
+        let registry = Registry::new();
+        Self::new(&registry)
+    }
+}
diff --git a/crates/sui-deepbook-indexer/src/migrations/00000000000000_diesel_initial_setup/down.sql b/crates/sui-deepbook-indexer/src/migrations/00000000000000_diesel_initial_setup/down.sql
new file mode 100644
index 0000000000000..454dcfecb7003
--- /dev/null
+++ b/crates/sui-deepbook-indexer/src/migrations/00000000000000_diesel_initial_setup/down.sql
@@ -0,0 +1,13 @@
+-- This file should undo anything in `up.sql`
+DROP TABLE IF EXISTS order_updates;
+DROP TABLE IF EXISTS order_fills;
+DROP TABLE IF EXISTS flashloans;
+DROP TABLE IF EXISTS pool_prices;
+DROP TABLE IF EXISTS balances;
+DROP TABLE IF EXISTS trade_params_update;
+DROP TABLE IF EXISTS stakes;
+DROP TABLE IF EXISTS proposals;
+DROP TABLE IF EXISTS votes;
+DROP TABLE IF EXISTS rebates;
+DROP TABLE IF EXISTS sui_error_transactions;
+DROP TABLE IF EXISTS progress_store;
diff --git a/crates/sui-deepbook-indexer/src/migrations/00000000000000_diesel_initial_setup/up.sql b/crates/sui-deepbook-indexer/src/migrations/00000000000000_diesel_initial_setup/up.sql
new file mode 100644
index 0000000000000..cfd3fe851df94
--- /dev/null
+++ b/crates/sui-deepbook-indexer/src/migrations/00000000000000_diesel_initial_setup/up.sql
@@ -0,0 +1,181 @@
+-- Your SQL goes here
+
+CREATE TABLE IF NOT EXISTS order_updates
+(
+    id SERIAL PRIMARY KEY,
+    digest TEXT NOT NULL,
+    sender TEXT NOT NULL,
+    checkpoint BIGINT NOT NULL,
+    timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
+    package TEXT NOT NULL,
+    status TEXT NOT NULL,
+    pool_id TEXT NOT NULL,
+    order_id TEXT NOT NULL,
+    client_order_id BIGINT NOT NULL,
+    price BIGINT NOT NULL,
+    is_bid BOOLEAN NOT NULL,
+    original_quantity BIGINT NOT NULL,
+    quantity BIGINT NOT NULL,
+    onchain_timestamp BIGINT NOT NULL,
+    balance_manager_id TEXT NOT NULL,
+    trader TEXT NOT NULL
+);
+
+CREATE TABLE IF NOT EXISTS order_fills
+(
+    id SERIAL PRIMARY KEY,
+    digest TEXT NOT NULL,
+    sender TEXT NOT NULL,
+    checkpoint BIGINT NOT NULL,
+    timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
+    package TEXT NOT NULL,
+    pool_id TEXT NOT NULL,
+    maker_order_id TEXT NOT NULL,
+    taker_order_id TEXT NOT NULL,
+    maker_client_order_id BIGINT NOT NULL,
+    taker_client_order_id BIGINT NOT NULL,
+    price BIGINT NOT NULL,
+    taker_fee BIGINT NOT NULL,
+    maker_fee BIGINT NOT NULL,
+    taker_is_bid BOOLEAN NOT NULL,
+    base_quantity BIGINT NOT NULL,
+    quote_quantity BIGINT NOT NULL,
+    maker_balance_manager_id TEXT
NOT NULL, + taker_balance_manager_id TEXT NOT NULL, + onchain_timestamp BIGINT NOT NULL +); + +CREATE TABLE IF NOT EXISTS flashloans +( + id SERIAL PRIMARY KEY, + digest TEXT NOT NULL, + sender TEXT NOT NULL, + checkpoint BIGINT NOT NULL, + timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL, + package TEXT NOT NULL, + borrow BOOLEAN NOT NULL, + pool_id TEXT NOT NULL, + borrow_quantity BIGINT NOT NULL, + type_name TEXT NOT NULL +); + +CREATE TABLE IF NOT EXISTS pool_prices +( + id SERIAL PRIMARY KEY, + digest TEXT NOT NULL, + sender TEXT NOT NULL, + checkpoint BIGINT NOT NULL, + timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL, + package TEXT NOT NULL, + target_pool TEXT NOT NULL, + reference_pool TEXT NOT NULL, + conversion_rate BIGINT NOT NULL +); + +CREATE TABLE IF NOT EXISTS balances +( + id SERIAL PRIMARY KEY, + digest TEXT NOT NULL, + sender TEXT NOT NULL, + checkpoint BIGINT NOT NULL, + timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL, + package TEXT NOT NULL, + balance_manager_id TEXT NOT NULL, + asset TEXT NOT NULL, + amount BIGINT NOT NULL, + deposit BOOLEAN NOT NULL +); + +CREATE TABLE IF NOT EXISTS trade_params_update +( + id SERIAL PRIMARY KEY, + digest TEXT NOT NULL, + sender TEXT NOT NULL, + checkpoint BIGINT NOT NULL, + timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL, + package TEXT NOT NULL, + pool_id TEXT NOT NULL, + taker_fee BIGINT NOT NULL, + maker_fee BIGINT NOT NULL, + stake_required BIGINT NOT NULL +); + +CREATE TABLE IF NOT EXISTS stakes +( + id SERIAL PRIMARY KEY, + digest TEXT NOT NULL, + sender TEXT NOT NULL, + checkpoint BIGINT NOT NULL, + timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL, + package TEXT NOT NULL, + pool_id TEXT NOT NULL, + balance_manager_id TEXT NOT NULL, + epoch BIGINT NOT NULL, + amount BIGINT NOT NULL, + stake BOOLEAN NOT NULL +); + +CREATE TABLE IF NOT EXISTS proposals +( + id SERIAL PRIMARY KEY, + digest TEXT NOT NULL, + sender TEXT NOT NULL, + checkpoint BIGINT NOT NULL, + timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL, + package TEXT NOT NULL, + pool_id TEXT NOT NULL, + balance_manager_id TEXT NOT NULL, + epoch BIGINT NOT NULL, + taker_fee BIGINT NOT NULL, + maker_fee BIGINT NOT NULL, + stake_required BIGINT NOT NULL +); + +CREATE TABLE IF NOT EXISTS votes +( + id SERIAL PRIMARY KEY, + digest TEXT NOT NULL, + sender TEXT NOT NULL, + checkpoint BIGINT NOT NULL, + timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL, + package TEXT NOT NULL, + pool_id TEXT NOT NULL, + balance_manager_id TEXT NOT NULL, + epoch BIGINT NOT NULL, + from_proposal_id TEXT, + to_proposal_id TEXT NOT NULL, + stake BIGINT NOT NULL +); + +CREATE TABLE IF NOT EXISTS rebates +( + id SERIAL PRIMARY KEY, + digest TEXT NOT NULL, + sender TEXT NOT NULL, + checkpoint BIGINT NOT NULL, + timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL, + package TEXT NOT NULL, + pool_id TEXT NOT NULL, + balance_manager_id TEXT NOT NULL, + epoch BIGINT NOT NULL, + claim_amount BIGINT NOT NULL +); + +CREATE TABLE IF NOT EXISTS progress_store +( + task_name TEXT PRIMARY KEY, + checkpoint BIGINT NOT NULL, + target_checkpoint BIGINT DEFAULT 9223372036854775807 NOT NULL, + timestamp TIMESTAMP DEFAULT now() +); + +CREATE TABLE IF NOT EXISTS sui_error_transactions +( + id SERIAL PRIMARY KEY, + txn_digest TEXT NOT NULL, + sender_address TEXT NOT NULL, + timestamp_ms BIGINT NOT NULL, + failure_status TEXT NOT NULL, + package TEXT NOT NULL, + cmd_idx BIGINT +); \ No newline at end of file diff --git a/crates/sui-deepbook-indexer/src/models.rs 
b/crates/sui-deepbook-indexer/src/models.rs new file mode 100644 index 0000000000000..f556b31e89a0c --- /dev/null +++ b/crates/sui-deepbook-indexer/src/models.rs @@ -0,0 +1,195 @@ +// Copyright (c) Mysten Labs, Inc. +// SPDX-License-Identifier: Apache-2.0 + +use diesel::data_types::PgTimestamp; +use diesel::{Identifiable, Insertable, Queryable, Selectable}; + +use sui_indexer_builder::{Task, LIVE_TASK_TARGET_CHECKPOINT}; + +use crate::schema::{ + balances, flashloans, order_fills, order_updates, pool_prices, progress_store, proposals, + rebates, stakes, sui_error_transactions, trade_params_update, votes, +}; + +#[derive(Queryable, Selectable, Insertable, Identifiable, Debug)] +#[diesel(table_name = order_updates, primary_key(digest))] +pub struct OrderUpdate { + pub digest: String, + pub sender: String, + pub checkpoint: i64, + pub package: String, + pub status: String, + pub pool_id: String, + pub order_id: String, // u128 + pub client_order_id: i64, + pub price: i64, + pub is_bid: bool, + pub original_quantity: i64, + pub quantity: i64, + pub onchain_timestamp: i64, + pub trader: String, + pub balance_manager_id: String, +} + +#[derive(Queryable, Selectable, Insertable, Identifiable, Debug)] +#[diesel(table_name = order_fills, primary_key(digest))] +pub struct OrderFill { + pub digest: String, + pub sender: String, + pub checkpoint: i64, + pub package: String, + pub pool_id: String, + pub maker_order_id: String, // u128 + pub taker_order_id: String, // u128 + pub maker_client_order_id: i64, + pub taker_client_order_id: i64, + pub price: i64, + pub taker_fee: i64, + pub maker_fee: i64, + pub taker_is_bid: bool, + pub base_quantity: i64, + pub quote_quantity: i64, + pub maker_balance_manager_id: String, + pub taker_balance_manager_id: String, + pub onchain_timestamp: i64, +} + +#[derive(Queryable, Selectable, Insertable, Identifiable, Debug)] +#[diesel(table_name = flashloans, primary_key(digest))] +pub struct Flashloan { + pub digest: String, + pub sender: String, + pub checkpoint: i64, + pub package: String, + pub pool_id: String, + pub borrow_quantity: i64, + pub borrow: bool, + pub type_name: String, +} + +#[derive(Queryable, Selectable, Insertable, Identifiable, Debug)] +#[diesel(table_name = pool_prices, primary_key(digest))] +pub struct PoolPrice { + pub digest: String, + pub sender: String, + pub checkpoint: i64, + pub package: String, + pub target_pool: String, + pub reference_pool: String, + pub conversion_rate: i64, +} + +#[derive(Queryable, Selectable, Insertable, Identifiable, Debug)] +#[diesel(table_name = balances, primary_key(digest))] +pub struct Balances { + pub digest: String, + pub sender: String, + pub checkpoint: i64, + pub package: String, + pub balance_manager_id: String, + pub asset: String, + pub amount: i64, + pub deposit: bool, +} + +#[derive(Queryable, Selectable, Insertable, Identifiable, Debug)] +#[diesel(table_name = proposals, primary_key(digest))] +pub struct Proposals { + pub digest: String, + pub sender: String, + pub checkpoint: i64, + pub package: String, + pub balance_manager_id: String, + pub epoch: i64, + pub taker_fee: i64, + pub maker_fee: i64, + pub stake_required: i64, +} + +#[derive(Queryable, Selectable, Insertable, Identifiable, Debug)] +#[diesel(table_name = rebates, primary_key(digest))] +pub struct Rebates { + pub digest: String, + pub sender: String, + pub checkpoint: i64, + pub package: String, + pub pool_id: String, + pub balance_manager_id: String, + pub epoch: i64, + pub claim_amount: i64, +} + +#[derive(Queryable, Selectable, 
Insertable, Identifiable, Debug)]
+#[diesel(table_name = stakes, primary_key(digest))]
+pub struct Stakes {
+    pub digest: String,
+    pub sender: String,
+    pub checkpoint: i64,
+    pub package: String,
+    pub pool_id: String,
+    pub balance_manager_id: String,
+    pub epoch: i64,
+    pub amount: i64,
+    pub stake: bool,
+}
+
+#[derive(Queryable, Selectable, Insertable, Identifiable, Debug)]
+#[diesel(table_name = trade_params_update, primary_key(digest))]
+pub struct TradeParamsUpdate {
+    pub digest: String,
+    pub sender: String,
+    pub checkpoint: i64,
+    pub package: String,
+    pub pool_id: String,
+    pub taker_fee: i64,
+    pub maker_fee: i64,
+    pub stake_required: i64,
+}
+
+#[derive(Queryable, Selectable, Insertable, Identifiable, Debug)]
+#[diesel(table_name = votes, primary_key(digest))]
+pub struct Votes {
+    pub digest: String,
+    pub sender: String,
+    pub checkpoint: i64,
+    pub package: String,
+    pub pool_id: String,
+    pub balance_manager_id: String,
+    pub epoch: i64,
+    pub from_proposal_id: Option<String>,
+    pub to_proposal_id: String,
+    pub stake: i64,
+}
+
+#[derive(Queryable, Selectable, Insertable, Identifiable, Debug)]
+#[diesel(table_name = sui_error_transactions, primary_key(txn_digest))]
+pub struct SuiErrorTransactions {
+    pub txn_digest: String,
+    pub sender_address: String,
+    pub timestamp_ms: i64,
+    pub failure_status: String,
+    pub package: String,
+    pub cmd_idx: Option<i64>,
+}
+
+#[derive(Queryable, Selectable, Insertable, Identifiable, Debug)]
+#[diesel(table_name = progress_store, primary_key(task_name))]
+pub struct ProgressStore {
+    pub task_name: String,
+    pub checkpoint: i64,
+    pub target_checkpoint: i64,
+    pub timestamp: Option<PgTimestamp>,
+}
+
+impl From<ProgressStore> for Task {
+    fn from(value: ProgressStore) -> Self {
+        Self {
+            task_name: value.task_name,
+            start_checkpoint: value.checkpoint as u64,
+            target_checkpoint: value.target_checkpoint as u64,
+            // Ok to unwrap, timestamp is defaulted to now() in database
+            timestamp: value.timestamp.expect("Timestamp not set").0 as u64,
+            is_live_task: value.target_checkpoint == LIVE_TASK_TARGET_CHECKPOINT,
+        }
+    }
+}
diff --git a/crates/sui-deepbook-indexer/src/postgres_manager.rs b/crates/sui-deepbook-indexer/src/postgres_manager.rs
new file mode 100644
index 0000000000000..ca5fccdd624b1
--- /dev/null
+++ b/crates/sui-deepbook-indexer/src/postgres_manager.rs
@@ -0,0 +1,191 @@
+// Copyright (c) Mysten Labs, Inc.
+// SPDX-License-Identifier: Apache-2.0
+
+use crate::schema::{
+    balances, flashloans, order_fills, order_updates, pool_prices, proposals, rebates, stakes,
+    sui_error_transactions, trade_params_update, votes,
+};
+use crate::types::ProcessedTxnData;
+use diesel_async::pooled_connection::bb8::Pool;
+use diesel_async::pooled_connection::AsyncDieselConnectionManager;
+use diesel_async::scoped_futures::ScopedFutureExt;
+use diesel_async::AsyncConnection;
+use diesel_async::AsyncPgConnection;
+use diesel_async::RunQueryDsl;
+
+pub(crate) type PgPool = diesel_async::pooled_connection::bb8::Pool<AsyncPgConnection>;
+
+pub async fn get_connection_pool(database_url: String) -> PgPool {
+    let manager = AsyncDieselConnectionManager::<AsyncPgConnection>::new(database_url);
+    Pool::builder()
+        .test_on_check_out(true)
+        .build(manager)
+        .await
+        .expect("Could not build Postgres DB connection pool")
+}
+
+// TODO: add retry logic
+pub async fn write(pool: &PgPool, txns: Vec<ProcessedTxnData>) -> Result<(), anyhow::Error> {
+    if txns.is_empty() {
+        return Ok(());
+    }
+    let (
+        order_updates,
+        order_fills,
+        pool_prices,
+        flashloans,
+        balances,
+        proposals,
+        rebates,
+        stakes,
+        trade_params_update,
+        votes,
+        errors,
+    ) = txns.iter().fold(
+        (
+            vec![],
+            vec![],
+            vec![],
+            vec![],
+            vec![],
+            vec![],
+            vec![],
+            vec![],
+            vec![],
+            vec![],
+            vec![],
+        ),
+        |(
+            mut order_updates,
+            mut order_fills,
+            mut pool_prices,
+            mut flashloans,
+            mut balances,
+            mut proposals,
+            mut rebates,
+            mut stakes,
+            mut trade_params_update,
+            mut votes,
+            mut errors,
+        ),
+         d| {
+            match d {
+                ProcessedTxnData::OrderUpdate(t) => {
+                    order_updates.push(t.to_db());
+                }
+                ProcessedTxnData::OrderFill(t) => {
+                    order_fills.push(t.to_db());
+                }
+                ProcessedTxnData::PoolPrice(t) => {
+                    pool_prices.push(t.to_db());
+                }
+                ProcessedTxnData::Flashloan(t) => {
+                    flashloans.push(t.to_db());
+                }
+                ProcessedTxnData::Balances(t) => {
+                    balances.push(t.to_db());
+                }
+                ProcessedTxnData::Proposals(t) => {
+                    proposals.push(t.to_db());
+                }
+                ProcessedTxnData::Rebates(t) => {
+                    rebates.push(t.to_db());
+                }
+                ProcessedTxnData::Stakes(t) => {
+                    stakes.push(t.to_db());
+                }
+                ProcessedTxnData::TradeParamsUpdate(t) => {
+                    trade_params_update.push(t.to_db());
+                }
+                ProcessedTxnData::Votes(t) => {
+                    votes.push(t.to_db());
+                }
+                ProcessedTxnData::Error(e) => errors.push(e.to_db()),
+            }
+            (
+                order_updates,
+                order_fills,
+                pool_prices,
+                flashloans,
+                balances,
+                proposals,
+                rebates,
+                stakes,
+                trade_params_update,
+                votes,
+                errors,
+            )
+        },
+    );
+
+    let connection = &mut pool.get().await?;
+    connection
+        .transaction(|conn| {
+            async move {
+                diesel::insert_into(order_updates::table)
+                    .values(&order_updates)
+                    .on_conflict_do_nothing()
+                    .execute(conn)
+                    .await?;
+                diesel::insert_into(order_fills::table)
+                    .values(&order_fills)
+                    .on_conflict_do_nothing()
+                    .execute(conn)
+                    .await?;
+                diesel::insert_into(flashloans::table)
+                    .values(&flashloans)
+                    .on_conflict_do_nothing()
+                    .execute(conn)
+                    .await?;
+                diesel::insert_into(pool_prices::table)
+                    .values(&pool_prices)
+                    .on_conflict_do_nothing()
+                    .execute(conn)
+                    .await?;
+                diesel::insert_into(balances::table)
+                    .values(&balances)
+                    .on_conflict_do_nothing()
+                    .execute(conn)
+                    .await?;
+                diesel::insert_into(proposals::table)
+                    .values(&proposals)
+                    .on_conflict_do_nothing()
+                    .execute(conn)
+                    .await?;
+                diesel::insert_into(rebates::table)
+                    .values(&rebates)
+                    .on_conflict_do_nothing()
+                    .execute(conn)
+                    .await?;
+                diesel::insert_into(stakes::table)
+                    .values(&stakes)
+                    .on_conflict_do_nothing()
+                    .execute(conn)
+                    .await?;
+                diesel::insert_into(trade_params_update::table)
+                    .values(&trade_params_update)
+                    .on_conflict_do_nothing()
+                    .execute(conn)
+                    .await?;
+                diesel::insert_into(votes::table)
+                    .values(&votes)
+                    .on_conflict_do_nothing()
+                    .execute(conn)
+                    .await?;
+                diesel::insert_into(sui_error_transactions::table)
+                    .values(&errors)
+                    .on_conflict_do_nothing()
+                    .execute(conn)
+                    .await
+            }
+            .scope_boxed()
+        })
+        .await?;
+    Ok(())
+}
diff --git a/crates/sui-deepbook-indexer/src/schema.rs b/crates/sui-deepbook-indexer/src/schema.rs
new file mode 100644
index 0000000000000..ff2eeef876445
--- /dev/null
+++ b/crates/sui-deepbook-indexer/src/schema.rs
@@ -0,0 +1,210 @@
+// Copyright (c) Mysten Labs, Inc.
+// SPDX-License-Identifier: Apache-2.0
+// @generated automatically by Diesel CLI.
+
+diesel::table! {
+    balances (id) {
+        id -> Int4,
+        digest -> Text,
+        sender -> Text,
+        checkpoint -> Int8,
+        timestamp -> Timestamp,
+        package -> Text,
+        balance_manager_id -> Text,
+        asset -> Text,
+        amount -> Int8,
+        deposit -> Bool,
+    }
+}
+
+diesel::table! {
+    flashloans (id) {
+        id -> Int4,
+        digest -> Text,
+        sender -> Text,
+        checkpoint -> Int8,
+        timestamp -> Timestamp,
+        package -> Text,
+        borrow -> Bool,
+        pool_id -> Text,
+        borrow_quantity -> Int8,
+        type_name -> Text,
+    }
+}
+
+diesel::table! {
+    order_fills (id) {
+        id -> Int4,
+        digest -> Text,
+        sender -> Text,
+        checkpoint -> Int8,
+        timestamp -> Timestamp,
+        package -> Text,
+        pool_id -> Text,
+        maker_order_id -> Text,
+        taker_order_id -> Text,
+        maker_client_order_id -> Int8,
+        taker_client_order_id -> Int8,
+        price -> Int8,
+        taker_fee -> Int8,
+        maker_fee -> Int8,
+        taker_is_bid -> Bool,
+        base_quantity -> Int8,
+        quote_quantity -> Int8,
+        maker_balance_manager_id -> Text,
+        taker_balance_manager_id -> Text,
+        onchain_timestamp -> Int8,
+    }
+}
+
+diesel::table! {
+    order_updates (id) {
+        id -> Int4,
+        digest -> Text,
+        sender -> Text,
+        checkpoint -> Int8,
+        timestamp -> Timestamp,
+        package -> Text,
+        status -> Text,
+        pool_id -> Text,
+        order_id -> Text,
+        client_order_id -> Int8,
+        price -> Int8,
+        is_bid -> Bool,
+        original_quantity -> Int8,
+        quantity -> Int8,
+        onchain_timestamp -> Int8,
+        balance_manager_id -> Text,
+        trader -> Text,
+    }
+}
+
+diesel::table! {
+    pool_prices (id) {
+        id -> Int4,
+        digest -> Text,
+        sender -> Text,
+        checkpoint -> Int8,
+        timestamp -> Timestamp,
+        package -> Text,
+        target_pool -> Text,
+        reference_pool -> Text,
+        conversion_rate -> Int8,
+    }
+}
+
+diesel::table! {
+    progress_store (task_name) {
+        task_name -> Text,
+        checkpoint -> Int8,
+        target_checkpoint -> Int8,
+        timestamp -> Nullable<Timestamp>,
+    }
+}
+
+diesel::table! {
+    proposals (id) {
+        id -> Int4,
+        digest -> Text,
+        sender -> Text,
+        checkpoint -> Int8,
+        timestamp -> Timestamp,
+        package -> Text,
+        pool_id -> Text,
+        balance_manager_id -> Text,
+        epoch -> Int8,
+        taker_fee -> Int8,
+        maker_fee -> Int8,
+        stake_required -> Int8,
+    }
+}
+
+diesel::table! {
+    rebates (id) {
+        id -> Int4,
+        digest -> Text,
+        sender -> Text,
+        checkpoint -> Int8,
+        timestamp -> Timestamp,
+        package -> Text,
+        pool_id -> Text,
+        balance_manager_id -> Text,
+        epoch -> Int8,
+        claim_amount -> Int8,
+    }
+}
+
+diesel::table! {
+    stakes (id) {
+        id -> Int4,
+        digest -> Text,
+        sender -> Text,
+        checkpoint -> Int8,
+        timestamp -> Timestamp,
+        package -> Text,
+        pool_id -> Text,
+        balance_manager_id -> Text,
+        epoch -> Int8,
+        amount -> Int8,
+        stake -> Bool,
+    }
+}
+
+diesel::table! {
+    sui_error_transactions (id) {
+        id -> Int4,
+        txn_digest -> Text,
+        sender_address -> Text,
+        timestamp_ms -> Int8,
+        failure_status -> Text,
+        package -> Text,
+        cmd_idx -> Nullable<Int8>,
+    }
+}
+
+diesel::table! {
+    trade_params_update (id) {
+        id -> Int4,
+        digest -> Text,
+        sender -> Text,
+        checkpoint -> Int8,
+        timestamp -> Timestamp,
+        package -> Text,
+        pool_id -> Text,
+        taker_fee -> Int8,
+        maker_fee -> Int8,
+        stake_required -> Int8,
+    }
+}
+
+diesel::table! {
+    votes (id) {
+        id -> Int4,
+        digest -> Text,
+        sender -> Text,
+        checkpoint -> Int8,
+        timestamp -> Timestamp,
+        package -> Text,
+        pool_id -> Text,
+        balance_manager_id -> Text,
+        epoch -> Int8,
+        from_proposal_id -> Nullable<Text>,
+        to_proposal_id -> Text,
+        stake -> Int8,
+    }
+}
+
+diesel::allow_tables_to_appear_in_same_query!(
+    balances,
+    flashloans,
+    order_fills,
+    order_updates,
+    pool_prices,
+    progress_store,
+    proposals,
+    rebates,
+    stakes,
+    sui_error_transactions,
+    trade_params_update,
+    votes,
+);
diff --git a/crates/sui-deepbook-indexer/src/sui_datasource.rs b/crates/sui-deepbook-indexer/src/sui_datasource.rs
new file mode 100644
index 0000000000000..f42eeefa08e7f
--- /dev/null
+++ b/crates/sui-deepbook-indexer/src/sui_datasource.rs
@@ -0,0 +1,180 @@
+// Copyright (c) Mysten Labs, Inc.
+// SPDX-License-Identifier: Apache-2.0
+
+use anyhow::Error;
+use async_trait::async_trait;
+use mysten_metrics::{metered_channel, spawn_monitored_task};
+use prometheus::IntCounterVec;
+use prometheus::IntGaugeVec;
+use std::path::PathBuf;
+use std::sync::Arc;
+use sui_data_ingestion_core::{
+    DataIngestionMetrics, IndexerExecutor, ProgressStore, ReaderOptions, Worker, WorkerPool,
+};
+use sui_indexer_builder::indexer_builder::{DataSender, Datasource};
+use sui_indexer_builder::Task;
+use sui_sdk::SuiClient;
+use sui_types::base_types::TransactionDigest;
+use sui_types::full_checkpoint_content::CheckpointData as SuiCheckpointData;
+use sui_types::full_checkpoint_content::CheckpointTransaction;
+use sui_types::messages_checkpoint::CheckpointSequenceNumber;
+use tokio::sync::oneshot;
+use tokio::sync::oneshot::Sender;
+use tokio::task::JoinHandle;
+
+use crate::metrics::DeepBookIndexerMetrics;
+
+pub struct SuiCheckpointDatasource {
+    remote_store_url: String,
+    sui_client: Arc<SuiClient>,
+    concurrency: usize,
+    checkpoint_path: PathBuf,
+    genesis_checkpoint: u64,
+    metrics: DataIngestionMetrics,
+    indexer_metrics: DeepBookIndexerMetrics,
+}
+impl SuiCheckpointDatasource {
+    pub fn new(
+        remote_store_url: String,
+        sui_client: Arc<SuiClient>,
+        concurrency: usize,
+        checkpoint_path: PathBuf,
+        genesis_checkpoint: u64,
+        metrics: DataIngestionMetrics,
+        indexer_metrics: DeepBookIndexerMetrics,
+    ) -> Self {
+        SuiCheckpointDatasource {
+            remote_store_url,
+            sui_client,
+            concurrency,
+            checkpoint_path,
+            metrics,
+            indexer_metrics,
+            genesis_checkpoint,
+        }
+    }
+}
+
+#[async_trait]
+impl Datasource<CheckpointTxnData> for SuiCheckpointDatasource {
+    async fn start_data_retrieval(
+        &self,
+        task: Task,
+        data_sender: DataSender<CheckpointTxnData>,
+    ) -> Result<JoinHandle<Result<(), Error>>, Error> {
+        let (exit_sender, exit_receiver) = oneshot::channel();
+        let progress_store = PerTaskInMemProgressStore {
+            current_checkpoint: task.start_checkpoint,
+            exit_checkpoint: task.target_checkpoint,
+            exit_sender: Some(exit_sender),
+        };
+        let mut executor = IndexerExecutor::new(progress_store, 1, self.metrics.clone());
+        let worker = IndexerWorker::new(data_sender);
+        let worker_pool = WorkerPool::new(
+            worker,
+            TransactionDigest::random().to_string(),
+            self.concurrency,
+        );
+        executor.register(worker_pool).await?;
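+        // The executor owns checkpoint ingestion for this single task: it pulls
+        // checkpoints from the remote store (or local path), hands them to the
+        // worker pool registered above, and shuts down once
+        // `PerTaskInMemProgressStore::save` fires the exit signal at the task's
+        // target checkpoint (see below).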
+        let checkpoint_path = self.checkpoint_path.clone();
+        let remote_store_url = self.remote_store_url.clone();
+        Ok(spawn_monitored_task!(async {
+            executor
+                .run(
+                    checkpoint_path,
+                    Some(remote_store_url),
+                    vec![], // optional remote store access options
+                    ReaderOptions::default(),
+                    exit_receiver,
+                )
+                .await?;
+            Ok(())
+        }))
+    }
+
+    async fn get_live_task_starting_checkpoint(&self) -> Result<u64, Error> {
+        self.sui_client
+            .read_api()
+            .get_latest_checkpoint_sequence_number()
+            .await
+            .map_err(|e| anyhow::anyhow!("Failed to get last finalized block id: {:?}", e))
+    }
+
+    fn get_genesis_height(&self) -> u64 {
+        self.genesis_checkpoint
+    }
+
+    fn get_tasks_remaining_checkpoints_metric(&self) -> &IntGaugeVec {
+        &self.indexer_metrics.backfill_tasks_remaining_checkpoints
+    }
+
+    fn get_tasks_processed_checkpoints_metric(&self) -> &IntCounterVec {
+        &self.indexer_metrics.tasks_processed_checkpoints
+    }
+}
+
+struct PerTaskInMemProgressStore {
+    pub current_checkpoint: u64,
+    pub exit_checkpoint: u64,
+    pub exit_sender: Option<Sender<()>>,
+}
+
+#[async_trait]
+impl ProgressStore for PerTaskInMemProgressStore {
+    async fn load(
+        &mut self,
+        _task_name: String,
+    ) -> Result<CheckpointSequenceNumber, anyhow::Error> {
+        Ok(self.current_checkpoint)
+    }
+
+    async fn save(
+        &mut self,
+        _task_name: String,
+        checkpoint_number: CheckpointSequenceNumber,
+    ) -> anyhow::Result<()> {
+        if checkpoint_number >= self.exit_checkpoint {
+            if let Some(sender) = self.exit_sender.take() {
+                let _ = sender.send(());
+            }
+        }
+        self.current_checkpoint = checkpoint_number;
+        Ok(())
+    }
+}
+
+pub struct IndexerWorker {
+    data_sender: metered_channel::Sender<(u64, Vec<CheckpointTxnData>)>,
+}
+
+impl IndexerWorker {
+    pub fn new(data_sender: metered_channel::Sender<(u64, Vec<CheckpointTxnData>)>) -> Self {
+        Self { data_sender }
+    }
+}
+
+pub type CheckpointTxnData = (CheckpointTransaction, u64, u64);
+
+#[async_trait]
+impl Worker for IndexerWorker {
+    async fn process_checkpoint(&self, checkpoint: SuiCheckpointData) -> anyhow::Result<()> {
+        tracing::trace!(
+            "Received checkpoint [{}] {}: {}",
+            checkpoint.checkpoint_summary.epoch,
+            checkpoint.checkpoint_summary.sequence_number,
+            checkpoint.transactions.len(),
+        );
+        let checkpoint_num = checkpoint.checkpoint_summary.sequence_number;
+        let timestamp_ms = checkpoint.checkpoint_summary.timestamp_ms;
+
+        let transactions = checkpoint
+            .transactions
+            .into_iter()
+            .map(|tx| (tx, checkpoint_num, timestamp_ms))
+            .collect();
+        Ok(self
+            .data_sender
+            .send((checkpoint_num, transactions))
+            .await?)
+    }
+}
diff --git a/crates/sui-deepbook-indexer/src/sui_deepbook_indexer.rs b/crates/sui-deepbook-indexer/src/sui_deepbook_indexer.rs
new file mode 100644
index 0000000000000..f06bc039e18f4
--- /dev/null
+++ b/crates/sui-deepbook-indexer/src/sui_deepbook_indexer.rs
@@ -0,0 +1,695 @@
+// Copyright (c) Mysten Labs, Inc.
+// SPDX-License-Identifier: Apache-2.0
+
+use anyhow::{anyhow, Error};
+use async_trait::async_trait;
+use diesel::dsl::now;
+use diesel::{ExpressionMethods, TextExpressionMethods};
+use diesel::{OptionalExtension, QueryDsl, SelectableHelper};
+use diesel_async::scoped_futures::ScopedFutureExt;
+use diesel_async::AsyncConnection;
+use diesel_async::RunQueryDsl;
+use sui_indexer_builder::progress::ProgressSavingPolicy;
+use sui_types::base_types::ObjectID;
+use sui_types::transaction::{Command, TransactionDataAPI};
+use tracing::info;
+
+use sui_indexer_builder::indexer_builder::{DataMapper, IndexerProgressStore, Persistent};
+use sui_indexer_builder::{Task, Tasks, LIVE_TASK_TARGET_CHECKPOINT};
+use sui_types::effects::TransactionEffectsAPI;
+use sui_types::event::Event;
+use sui_types::execution_status::ExecutionStatus;
+use sui_types::full_checkpoint_content::CheckpointTransaction;
+
+use crate::events::{
+    MoveBalanceEvent, MoveFlashLoanBorrowedEvent, MoveOrderCanceledEvent, MoveOrderFilledEvent,
+    MoveOrderModifiedEvent, MoveOrderPlacedEvent, MovePriceAddedEvent, MoveProposalEvent,
+    MoveRebateEvent, MoveStakeEvent, MoveTradeParamsUpdateEvent, MoveVoteEvent,
+};
+use crate::metrics::DeepBookIndexerMetrics;
+use crate::postgres_manager::PgPool;
+use crate::schema::progress_store::{columns, dsl};
+use crate::schema::{
+    balances, flashloans, order_fills, order_updates, pool_prices, proposals, rebates, stakes,
+    sui_error_transactions, trade_params_update, votes,
+};
+use crate::sui_datasource::CheckpointTxnData;
+use crate::types::{
+    Balances, Flashloan, OrderFill, OrderUpdate, OrderUpdateStatus, PoolPrice, ProcessedTxnData,
+    Proposals, Rebates, Stakes, SuiTxnError, TradeParamsUpdate, Votes,
+};
+use crate::{models, schema};
+
+/// Persistent layer impl
+#[derive(Clone)]
+pub struct PgDeepbookPersistent {
+    pool: PgPool,
+    save_progress_policy: ProgressSavingPolicy,
+}
+
+impl PgDeepbookPersistent {
+    pub fn new(pool: PgPool, save_progress_policy: ProgressSavingPolicy) -> Self {
+        Self {
+            pool,
+            save_progress_policy,
+        }
+    }
+}
+
+#[async_trait]
+impl Persistent<ProcessedTxnData> for PgDeepbookPersistent {
+    async fn write(&self, data: Vec<ProcessedTxnData>) -> Result<(), Error> {
+        if data.is_empty() {
+            return Ok(());
+        }
+        let connection = &mut self.pool.get().await?;
+        connection
+            .transaction(|conn| {
+                async move {
+                    for d in data {
+                        match d {
+                            ProcessedTxnData::OrderUpdate(t) => {
+                                diesel::insert_into(order_updates::table)
+                                    .values(&t.to_db())
+                                    .on_conflict_do_nothing()
+                                    .execute(conn)
+                                    .await?;
+                            }
+                            ProcessedTxnData::OrderFill(t) => {
+                                diesel::insert_into(order_fills::table)
+                                    .values(&t.to_db())
+                                    .on_conflict_do_nothing()
+                                    .execute(conn)
+                                    .await?;
+                            }
+                            ProcessedTxnData::Flashloan(t) => {
+                                diesel::insert_into(flashloans::table)
+                                    .values(&t.to_db())
+                                    .on_conflict_do_nothing()
+                                    .execute(conn)
+                                    .await?;
+                            }
+                            ProcessedTxnData::PoolPrice(t) => {
+                                diesel::insert_into(pool_prices::table)
+                                    .values(&t.to_db())
+                                    .on_conflict_do_nothing()
+                                    .execute(conn)
+                                    .await?;
+                            }
+                            ProcessedTxnData::Balances(t) => {
+                                diesel::insert_into(balances::table)
+                                    .values(&t.to_db())
+                                    .on_conflict_do_nothing()
+                                    .execute(conn)
+                                    .await?;
+                            }
+                            ProcessedTxnData::Proposals(t) => {
+                                diesel::insert_into(proposals::table)
+                                    .values(&t.to_db())
+                                    .on_conflict_do_nothing()
+                                    .execute(conn)
+                                    .await?;
+                            }
+                            ProcessedTxnData::Rebates(t) => {
+                                diesel::insert_into(rebates::table)
+                                    .values(&t.to_db())
+                                    .on_conflict_do_nothing()
+                                    .execute(conn)
+                                    .await?;
+                            }
+                            ProcessedTxnData::Stakes(t) => {
+                                diesel::insert_into(stakes::table)
+                                    .values(&t.to_db())
+                                    .on_conflict_do_nothing()
+                                    .execute(conn)
+                                    .await?;
+                            }
+                            ProcessedTxnData::TradeParamsUpdate(t) => {
+                                diesel::insert_into(trade_params_update::table)
+                                    .values(&t.to_db())
+                                    .on_conflict_do_nothing()
+                                    .execute(conn)
+                                    .await?;
+                            }
+                            ProcessedTxnData::Votes(t) => {
+                                diesel::insert_into(votes::table)
+                                    .values(&t.to_db())
+                                    .on_conflict_do_nothing()
+                                    .execute(conn)
+                                    .await?;
+                            }
+                            ProcessedTxnData::Error(e) => {
+                                diesel::insert_into(sui_error_transactions::table)
+                                    .values(&e.to_db())
+                                    .on_conflict_do_nothing()
+                                    .execute(conn)
+                                    .await?;
+                            }
+                        }
+                    }
+                    Ok(())
+                }
+                .scope_boxed()
+            })
+            .await
+    }
+}
+
+#[async_trait]
+impl IndexerProgressStore for PgDeepbookPersistent {
+    async fn load_progress(&self, task_name: String) -> anyhow::Result<u64> {
+        let mut conn = self.pool.get().await?;
+        let cp: Option<models::ProgressStore> = dsl::progress_store
+            .find(&task_name)
+            .select(models::ProgressStore::as_select())
+            .first(&mut conn)
+            .await
+            .optional()?;
+        Ok(cp
+            .ok_or(anyhow!("Cannot find progress for task {task_name}"))?
+            .checkpoint as u64)
+    }
+
+    async fn save_progress(
+        &mut self,
+        task: &Task,
+        checkpoint_numbers: &[u64],
+    ) -> anyhow::Result<Option<u64>> {
+        if checkpoint_numbers.is_empty() {
+            return Ok(None);
+        }
+        let task_name = task.task_name.clone();
+        if let Some(checkpoint_to_save) = self
+            .save_progress_policy
+            .cache_progress(task, checkpoint_numbers)
+        {
+            let mut conn = self.pool.get().await?;
+            diesel::insert_into(schema::progress_store::table)
+                .values(&models::ProgressStore {
+                    task_name,
+                    checkpoint: checkpoint_to_save as i64,
+                    // Target checkpoint and timestamp will only be written for new entries
+                    target_checkpoint: i64::MAX,
+                    // Timestamp is defaulted to current time in DB if None
+                    timestamp: None,
+                })
+                .on_conflict(dsl::task_name)
+                .do_update()
+                .set((
+                    columns::checkpoint.eq(checkpoint_to_save as i64),
+                    columns::timestamp.eq(now),
+                ))
+                .execute(&mut conn)
+                .await?;
+            // TODO: add metrics here
+            return Ok(Some(checkpoint_to_save));
+        }
+        Ok(None)
+    }
+
+    async fn get_ongoing_tasks(&self, prefix: &str) -> Result<Tasks, Error> {
+        let mut conn = self.pool.get().await?;
+        // get all unfinished tasks
+        let cp: Vec<models::ProgressStore> = dsl::progress_store
+            // TODO: using LIKE here could be error prone; change the progress store schema to store the task name properly.
+            .filter(columns::task_name.like(format!("{prefix} - %")))
+            .filter(columns::checkpoint.lt(columns::target_checkpoint))
+            .order_by(columns::target_checkpoint.desc())
+            .load(&mut conn)
+            .await?;
+        let tasks = cp.into_iter().map(|d| d.into()).collect();
+        Ok(Tasks::new(tasks)?)
+    }
+
+    async fn get_largest_backfill_task_target_checkpoint(
+        &self,
+        prefix: &str,
+    ) -> Result<Option<u64>, Error> {
+        let mut conn = self.pool.get().await?;
+        let cp: Option<i64> = dsl::progress_store
+            .select(columns::target_checkpoint)
+            // TODO: using LIKE here could be error prone; change the progress store schema to store the task name properly.
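+            // NOTE: this assumes tasks are registered with names of the form
+            // "{prefix} - ..."; the LIKE pattern below depends on that convention.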
+            .filter(columns::task_name.like(format!("{prefix} - %")))
+            .filter(columns::target_checkpoint.ne(i64::MAX))
+            .order_by(columns::target_checkpoint.desc())
+            .first::<i64>(&mut conn)
+            .await
+            .optional()?;
+        Ok(cp.map(|c| c as u64))
+    }
+
+    async fn register_task(
+        &mut self,
+        task_name: String,
+        checkpoint: u64,
+        target_checkpoint: u64,
+    ) -> Result<(), anyhow::Error> {
+        let mut conn = self.pool.get().await?;
+        diesel::insert_into(schema::progress_store::table)
+            .values(models::ProgressStore {
+                task_name,
+                checkpoint: checkpoint as i64,
+                target_checkpoint: target_checkpoint as i64,
+                // Timestamp is defaulted to current time in DB if None
+                timestamp: None,
+            })
+            .execute(&mut conn)
+            .await?;
+        Ok(())
+    }
+
+    /// Register a live task to progress store with a start checkpoint.
+    async fn register_live_task(
+        &mut self,
+        task_name: String,
+        start_checkpoint: u64,
+    ) -> Result<(), anyhow::Error> {
+        let mut conn = self.pool.get().await?;
+        diesel::insert_into(schema::progress_store::table)
+            .values(models::ProgressStore {
+                task_name,
+                checkpoint: start_checkpoint as i64,
+                target_checkpoint: LIVE_TASK_TARGET_CHECKPOINT,
+                // Timestamp is defaulted to current time in DB if None
+                timestamp: None,
+            })
+            .execute(&mut conn)
+            .await?;
+        Ok(())
+    }
+
+    async fn update_task(&mut self, task: Task) -> Result<(), anyhow::Error> {
+        let mut conn = self.pool.get().await?;
+        diesel::update(dsl::progress_store.filter(columns::task_name.eq(task.task_name)))
+            .set((
+                columns::checkpoint.eq(task.start_checkpoint as i64),
+                columns::target_checkpoint.eq(task.target_checkpoint as i64),
+                columns::timestamp.eq(now),
+            ))
+            .execute(&mut conn)
+            .await?;
+        Ok(())
+    }
+}
+
+/// Data mapper impl
+#[derive(Clone)]
+pub struct SuiDeepBookDataMapper {
+    pub metrics: DeepBookIndexerMetrics,
+    pub package_id: ObjectID,
+}
+
+impl DataMapper<CheckpointTxnData, ProcessedTxnData> for SuiDeepBookDataMapper {
+    fn map(
+        &self,
+        (data, checkpoint_num, timestamp_ms): CheckpointTxnData,
+    ) -> Result<Vec<ProcessedTxnData>, Error> {
+        if !data.input_objects.iter().any(|obj| {
+            obj.data
+                .type_()
+                .map(|t| t.address() == self.package_id.into())
+                .unwrap_or_default()
+        }) {
+            return Ok(vec![]);
+        }
+
+        self.metrics.total_deepbook_transactions.inc();
+
+        match &data.events {
+            Some(events) => {
+                let processed_sui_events =
+                    events.data.iter().try_fold(vec![], |mut result, ev| {
+                        if let Some(data) = process_sui_event(
+                            ev,
+                            &data,
+                            checkpoint_num,
+                            // timestamp_ms,
+                            self.package_id,
+                        )? {
+                            result.push(data);
+                        }
+                        Ok::<_, anyhow::Error>(result)
+                    })?;
+
+                if !processed_sui_events.is_empty() {
+                    info!(
+                        "SUI: Extracted {} deepbook data entries for tx {}.",
+                        processed_sui_events.len(),
+                        data.transaction.digest()
+                    );
+                }
+                Ok(processed_sui_events)
+            }
+            None => {
+                if let ExecutionStatus::Failure { error, command } = data.effects.status() {
+                    let txn_kind = data.transaction.transaction_data().clone().into_kind();
+                    let first_command = txn_kind.iter_commands().next();
+                    let package = if let Some(Command::MoveCall(move_call)) = first_command {
+                        move_call.package.to_string()
+                    } else {
+                        "".to_string()
+                    };
+                    Ok(vec![ProcessedTxnData::Error(SuiTxnError {
+                        tx_digest: *data.transaction.digest(),
+                        sender: data.transaction.sender_address(),
+                        timestamp_ms,
+                        failure_status: error.to_string(),
+                        package,
+                        cmd_idx: command.map(|idx| idx as u64),
+                    })])
+                } else {
+                    Ok(vec![])
+                }
+            }
+        }
+    }
+}
+
+fn process_sui_event(
+    ev: &Event,
+    tx: &CheckpointTransaction,
+    checkpoint: u64,
+    // timestamp_ms: u64,
+    package_id: ObjectID,
+) -> Result<Option<ProcessedTxnData>, anyhow::Error> {
+    Ok(if ev.type_.address == *package_id {
+        match ev.type_.name.as_str() {
+            "OrderPlaced" => {
+                info!("Observed Deepbook Order Placed {:?}", ev);
+                // metrics.total_sui_token_deposited.inc();
+                let move_event: MoveOrderPlacedEvent = bcs::from_bytes(&ev.contents)?;
+                let txn_kind = tx.transaction.transaction_data().clone().into_kind();
+                let first_command = txn_kind.iter_commands().next();
+                let package = if let Some(Command::MoveCall(move_call)) = first_command {
+                    move_call.package.to_string()
+                } else {
+                    "".to_string()
+                };
+                Some(ProcessedTxnData::OrderUpdate(OrderUpdate {
+                    digest: tx.transaction.digest().to_string(),
+                    sender: tx.transaction.sender_address().to_string(),
+                    checkpoint,
+                    package,
+                    status: OrderUpdateStatus::Placed,
+                    pool_id: move_event.pool_id.to_string(),
+                    order_id: move_event.order_id,
+                    client_order_id: move_event.client_order_id,
+                    price: move_event.price,
+                    is_bid: move_event.is_bid,
+                    onchain_timestamp: move_event.expire_timestamp,
+                    original_quantity: move_event.placed_quantity,
+                    quantity: move_event.placed_quantity,
+                    trader: move_event.trader.to_string(),
+                    balance_manager_id: move_event.balance_manager_id.to_string(),
+                }))
+            }
+            "OrderModified" => {
+                info!("Observed Deepbook Order Modified {:?}", ev);
+                // metrics.total_sui_token_deposited.inc();
+                let move_event: MoveOrderModifiedEvent = bcs::from_bytes(&ev.contents)?;
+                let txn_kind = tx.transaction.transaction_data().clone().into_kind();
+                let first_command = txn_kind.iter_commands().next();
+                let package = if let Some(Command::MoveCall(move_call)) = first_command {
+                    move_call.package.to_string()
+                } else {
+                    "".to_string()
+                };
+                Some(ProcessedTxnData::OrderUpdate(OrderUpdate {
+                    digest: tx.transaction.digest().to_string(),
+                    sender: tx.transaction.sender_address().to_string(),
+                    checkpoint,
+                    package,
+                    status: OrderUpdateStatus::Modified,
+                    pool_id: move_event.pool_id.to_string(),
+                    order_id: move_event.order_id,
+                    client_order_id: move_event.client_order_id,
+                    price: move_event.price,
+                    is_bid: move_event.is_bid,
+                    onchain_timestamp: move_event.timestamp,
+                    original_quantity: 0,
+                    quantity: move_event.new_quantity,
+                    trader: move_event.trader.to_string(),
+                    balance_manager_id: move_event.balance_manager_id.to_string(),
+                }))
+            }
+            "OrderCanceled" => {
+                info!("Observed Deepbook Order Canceled {:?}", ev);
+                // metrics.total_sui_token_deposited.inc();
+                let move_event: MoveOrderCanceledEvent = bcs::from_bytes(&ev.contents)?;
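+                // Reminder: BCS decoding is field-order sensitive, so the Move*Event
+                // structs in events.rs must declare fields in exactly the order the
+                // on-chain Move structs do.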
+ let txn_kind = tx.transaction.transaction_data().clone().into_kind(); + let first_command = txn_kind.iter_commands().next(); + let package = if let Some(Command::MoveCall(move_call)) = first_command { + move_call.package.to_string() + } else { + "".to_string() + }; + Some(ProcessedTxnData::OrderUpdate(OrderUpdate { + digest: tx.transaction.digest().to_string(), + sender: tx.transaction.sender_address().to_string(), + checkpoint, + package, + status: OrderUpdateStatus::Canceled, + pool_id: move_event.pool_id.to_string(), + order_id: move_event.order_id, + client_order_id: move_event.client_order_id, + price: move_event.price, + is_bid: move_event.is_bid, + onchain_timestamp: move_event.timestamp, + original_quantity: move_event.original_quantity, + quantity: move_event.base_asset_quantity_canceled, + trader: move_event.trader.to_string(), + balance_manager_id: move_event.balance_manager_id.to_string(), + })) + } + "OrderFilled" => { + info!("Observed Deepbook Order Filled {:?}", ev); + // metrics.total_sui_token_deposited.inc(); + let move_event: MoveOrderFilledEvent = bcs::from_bytes(&ev.contents)?; + let txn_kind = tx.transaction.transaction_data().clone().into_kind(); + let first_command = txn_kind.iter_commands().next(); + let package = if let Some(Command::MoveCall(move_call)) = first_command { + move_call.package.to_string() + } else { + "".to_string() + }; + Some(ProcessedTxnData::OrderFill(OrderFill { + digest: tx.transaction.digest().to_string(), + sender: tx.transaction.sender_address().to_string(), + checkpoint, + package, + pool_id: move_event.pool_id.to_string(), + maker_order_id: move_event.maker_order_id, + taker_order_id: move_event.taker_order_id, + maker_client_order_id: move_event.maker_client_order_id, + taker_client_order_id: move_event.taker_client_order_id, + price: move_event.price, + taker_is_bid: move_event.taker_is_bid, + taker_fee: move_event.taker_fee, + maker_fee: move_event.maker_fee, + base_quantity: move_event.base_quantity, + quote_quantity: move_event.quote_quantity, + maker_balance_manager_id: move_event.maker_balance_manager_id.to_string(), + taker_balance_manager_id: move_event.taker_balance_manager_id.to_string(), + onchain_timestamp: move_event.timestamp, + })) + } + "FlashLoanBorrowed" => { + info!("Observed Deepbook Flash Loan Borrowed {:?}", ev); + // metrics.total_sui_token_deposited.inc(); + let move_event: MoveFlashLoanBorrowedEvent = bcs::from_bytes(&ev.contents)?; + let txn_kind = tx.transaction.transaction_data().clone().into_kind(); + let first_command = txn_kind.iter_commands().next(); + let package = if let Some(Command::MoveCall(move_call)) = first_command { + move_call.package.to_string() + } else { + "".to_string() + }; + Some(ProcessedTxnData::Flashloan(Flashloan { + digest: tx.transaction.digest().to_string(), + sender: tx.transaction.sender_address().to_string(), + checkpoint, + package, + pool_id: move_event.pool_id.to_string(), + borrow_quantity: move_event.borrow_quantity, + borrow: true, + type_name: move_event.type_name.to_string(), + })) + } + "PriceAdded" => { + info!("Observed Deepbook Price Addition {:?}", ev); + // metrics.total_sui_token_deposited.inc(); + let move_event: MovePriceAddedEvent = bcs::from_bytes(&ev.contents)?; + let txn_kind = tx.transaction.transaction_data().clone().into_kind(); + let first_command = txn_kind.iter_commands().next(); + let package = if let Some(Command::MoveCall(move_call)) = first_command { + move_call.package.to_string() + } else { + "".to_string() + }; + 
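+                // TODO: the "package of the first MoveCall command" extraction above is
+                // repeated in every event arm and could be factored into a helper.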
Some(ProcessedTxnData::PoolPrice(PoolPrice { + digest: tx.transaction.digest().to_string(), + sender: tx.transaction.sender_address().to_string(), + checkpoint, + package, + target_pool: move_event.target_pool.to_string(), + conversion_rate: move_event.conversion_rate, + reference_pool: move_event.reference_pool.to_string(), + })) + } + "BalanceEvent" => { + info!("Observed Deepbook Balance Event {:?}", ev); + // metrics.total_sui_token_deposited.inc(); + let move_event: MoveBalanceEvent = bcs::from_bytes(&ev.contents)?; + let txn_kind = tx.transaction.transaction_data().clone().into_kind(); + let first_command = txn_kind.iter_commands().next(); + let package = if let Some(Command::MoveCall(move_call)) = first_command { + move_call.package.to_string() + } else { + "".to_string() + }; + Some(ProcessedTxnData::Balances(Balances { + digest: tx.transaction.digest().to_string(), + sender: tx.transaction.sender_address().to_string(), + checkpoint, + package, + balance_manager_id: move_event.balance_manager_id.to_string(), + asset: move_event.asset.to_string(), + amount: move_event.amount, + deposit: move_event.deposit, + })) + } + "ProposalEvent" => { + info!("Observed Deepbook Proposal Event {:?}", ev); + // metrics.total_sui_token_deposited.inc(); + let move_event: MoveProposalEvent = bcs::from_bytes(&ev.contents)?; + let txn_kind = tx.transaction.transaction_data().clone().into_kind(); + let first_command = txn_kind.iter_commands().next(); + let package = if let Some(Command::MoveCall(move_call)) = first_command { + move_call.package.to_string() + } else { + "".to_string() + }; + Some(ProcessedTxnData::Proposals(Proposals { + digest: tx.transaction.digest().to_string(), + sender: tx.transaction.sender_address().to_string(), + checkpoint, + package, + balance_manager_id: move_event.balance_manager_id.to_string(), + epoch: move_event.epoch, + taker_fee: move_event.taker_fee, + maker_fee: move_event.maker_fee, + stake_required: move_event.stake_required, + })) + } + "RebateEvent" => { + info!("Observed Deepbook Rebate Event {:?}", ev); + // metrics.total_sui_token_deposited.inc(); + let move_event: MoveRebateEvent = bcs::from_bytes(&ev.contents)?; + let txn_kind = tx.transaction.transaction_data().clone().into_kind(); + let first_command = txn_kind.iter_commands().next(); + let package = if let Some(Command::MoveCall(move_call)) = first_command { + move_call.package.to_string() + } else { + "".to_string() + }; + Some(ProcessedTxnData::Rebates(Rebates { + digest: tx.transaction.digest().to_string(), + sender: tx.transaction.sender_address().to_string(), + checkpoint, + package, + pool_id: move_event.pool_id.to_string(), + balance_manager_id: move_event.balance_manager_id.to_string(), + epoch: move_event.epoch, + claim_amount: move_event.claim_amount, + })) + } + "StakeEvent" => { + info!("Observed Deepbook Stake Event {:?}", ev); + // metrics.total_sui_token_deposited.inc(); + let move_event: MoveStakeEvent = bcs::from_bytes(&ev.contents)?; + let txn_kind = tx.transaction.transaction_data().clone().into_kind(); + let first_command = txn_kind.iter_commands().next(); + let package = if let Some(Command::MoveCall(move_call)) = first_command { + move_call.package.to_string() + } else { + "".to_string() + }; + Some(ProcessedTxnData::Stakes(Stakes { + digest: tx.transaction.digest().to_string(), + sender: tx.transaction.sender_address().to_string(), + checkpoint, + package, + pool_id: move_event.pool_id.to_string(), + balance_manager_id: move_event.balance_manager_id.to_string(), + epoch: 
move_event.epoch,
+                    amount: move_event.amount,
+                    stake: move_event.stake,
+                }))
+            }
+            "TradeParamsUpdateEvent" => {
+                info!("Observed Deepbook Trade Params Update Event {:?}", ev);
+                // metrics.total_sui_token_deposited.inc();
+                let move_event: MoveTradeParamsUpdateEvent = bcs::from_bytes(&ev.contents)?;
+                let txn_kind = tx.transaction.transaction_data().clone().into_kind();
+                let first_command = txn_kind.iter_commands().next();
+                let package = if let Some(Command::MoveCall(move_call)) = first_command {
+                    move_call.package.to_string()
+                } else {
+                    "".to_string()
+                };
+                let shared_objects = &tx.input_objects;
+                let mut pool_id = "0x0".to_string();
+                for obj in shared_objects.iter() {
+                    if let Some(obj_type) = obj.data.type_() {
+                        if obj_type.module().to_string().eq("pool")
+                            && obj_type.address() == *package_id
+                        {
+                            pool_id = obj.id().to_string();
+                            break;
+                        }
+                    }
+                }
+                Some(ProcessedTxnData::TradeParamsUpdate(TradeParamsUpdate {
+                    digest: tx.transaction.digest().to_string(),
+                    sender: tx.transaction.sender_address().to_string(),
+                    checkpoint,
+                    package,
+                    pool_id,
+                    taker_fee: move_event.taker_fee,
+                    maker_fee: move_event.maker_fee,
+                    stake_required: move_event.stake_required,
+                }))
+            }
+            "VoteEvent" => {
+                info!("Observed Deepbook Vote Event {:?}", ev);
+                // metrics.total_sui_token_deposited.inc();
+                let move_event: MoveVoteEvent = bcs::from_bytes(&ev.contents)?;
+                let txn_kind = tx.transaction.transaction_data().clone().into_kind();
+                let first_command = txn_kind.iter_commands().next();
+                let package = if let Some(Command::MoveCall(move_call)) = first_command {
+                    move_call.package.to_string()
+                } else {
+                    "".to_string()
+                };
+                Some(ProcessedTxnData::Votes(Votes {
+                    digest: tx.transaction.digest().to_string(),
+                    sender: tx.transaction.sender_address().to_string(),
+                    checkpoint,
+                    package,
+                    pool_id: move_event.pool_id.to_string(),
+                    balance_manager_id: move_event.balance_manager_id.to_string(),
+                    epoch: move_event.epoch,
+                    from_proposal_id: move_event.from_proposal_id.map(|id| id.to_string()),
+                    to_proposal_id: move_event.to_proposal_id.to_string(),
+                    stake: move_event.stake,
+                }))
+            }
+            _ => {
+                // todo: add a metric for unhandled deepbook events
+                None
+            }
+        }
+    } else {
+        None
+    })
+}
diff --git a/crates/sui-deepbook-indexer/src/types.rs b/crates/sui-deepbook-indexer/src/types.rs
new file mode 100644
index 0000000000000..e04fec322866e
--- /dev/null
+++ b/crates/sui-deepbook-indexer/src/types.rs
@@ -0,0 +1,385 @@
+// Copyright (c) Mysten Labs, Inc.
+// SPDX-License-Identifier: Apache-2.0 + +use bigdecimal::BigDecimal; +use sui_types::base_types::{SuiAddress, TransactionDigest}; + +use std::fmt::{Display, Formatter}; + +use crate::models::Balances as DBBalances; +use crate::models::Flashloan as DBFlashloan; +use crate::models::OrderFill as DBOrderFill; +use crate::models::OrderUpdate as DBOrderUpdate; +use crate::models::PoolPrice as DBPoolPrice; +use crate::models::Proposals as DBProposals; +use crate::models::Rebates as DBRebates; +use crate::models::Stakes as DBStakes; +use crate::models::SuiErrorTransactions; +use crate::models::TradeParamsUpdate as DBTradeParamsUpdate; +use crate::models::Votes as DBVotes; + +#[derive(Clone)] +pub enum ProcessedTxnData { + Flashloan(Flashloan), + OrderUpdate(OrderUpdate), + OrderFill(OrderFill), + PoolPrice(PoolPrice), + Balances(Balances), + Proposals(Proposals), + Rebates(Rebates), + Stakes(Stakes), + TradeParamsUpdate(TradeParamsUpdate), + Votes(Votes), + Error(SuiTxnError), +} + +#[derive(Clone)] +pub(crate) enum OrderUpdateStatus { + Placed, + Modified, + Canceled, +} + +impl Display for OrderUpdateStatus { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + let str = match self { + OrderUpdateStatus::Placed => "Placed", + OrderUpdateStatus::Modified => "Modified", + OrderUpdateStatus::Canceled => "Canceled", + }; + write!(f, "{str}") + } +} + +#[derive(Clone)] +pub struct OrderUpdate { + pub(crate) digest: String, + pub(crate) sender: String, + pub(crate) checkpoint: u64, + pub(crate) package: String, + pub(crate) status: OrderUpdateStatus, + pub(crate) pool_id: String, + pub(crate) order_id: u128, + pub(crate) client_order_id: u64, + pub(crate) price: u64, + pub(crate) is_bid: bool, + pub(crate) original_quantity: u64, + pub(crate) quantity: u64, + pub(crate) onchain_timestamp: u64, + pub(crate) trader: String, + pub(crate) balance_manager_id: String, +} + +impl OrderUpdate { + pub(crate) fn to_db(&self) -> DBOrderUpdate { + DBOrderUpdate { + digest: self.digest.clone(), + sender: self.sender.clone(), + checkpoint: self.checkpoint as i64, + package: self.package.clone(), + status: self.status.clone().to_string(), + pool_id: self.pool_id.clone(), + order_id: BigDecimal::from(self.order_id).to_string(), + client_order_id: self.client_order_id as i64, + trader: self.trader.clone(), + price: self.price as i64, + is_bid: self.is_bid, + original_quantity: self.original_quantity as i64, + quantity: self.quantity as i64, + onchain_timestamp: self.onchain_timestamp as i64, + balance_manager_id: self.balance_manager_id.clone(), + } + } +} + +#[derive(Clone)] +pub struct OrderFill { + pub(crate) digest: String, + pub(crate) sender: String, + pub(crate) checkpoint: u64, + pub(crate) package: String, + pub(crate) pool_id: String, + pub(crate) maker_order_id: u128, + pub(crate) taker_order_id: u128, + pub(crate) maker_client_order_id: u64, + pub(crate) taker_client_order_id: u64, + pub(crate) price: u64, + pub(crate) taker_is_bid: bool, + pub(crate) taker_fee: u64, + pub(crate) maker_fee: u64, + pub(crate) base_quantity: u64, + pub(crate) quote_quantity: u64, + pub(crate) maker_balance_manager_id: String, + pub(crate) taker_balance_manager_id: String, + pub(crate) onchain_timestamp: u64, +} + +impl OrderFill { + pub(crate) fn to_db(&self) -> DBOrderFill { + DBOrderFill { + digest: self.digest.clone(), + sender: self.sender.clone(), + checkpoint: self.checkpoint as i64, + package: self.package.clone(), + pool_id: self.pool_id.clone(), + maker_order_id: 
BigDecimal::from(self.maker_order_id).to_string(), + taker_order_id: BigDecimal::from(self.taker_order_id).to_string(), + maker_client_order_id: self.maker_client_order_id as i64, + taker_client_order_id: self.taker_client_order_id as i64, + price: self.price as i64, + taker_fee: self.taker_fee as i64, + maker_fee: self.maker_fee as i64, + taker_is_bid: self.taker_is_bid, + base_quantity: self.base_quantity as i64, + quote_quantity: self.quote_quantity as i64, + maker_balance_manager_id: self.maker_balance_manager_id.clone(), + taker_balance_manager_id: self.taker_balance_manager_id.clone(), + onchain_timestamp: self.onchain_timestamp as i64, + } + } +} + +#[derive(Clone)] +pub struct Flashloan { + pub(crate) digest: String, + pub(crate) sender: String, + pub(crate) checkpoint: u64, + pub(crate) package: String, + pub(crate) borrow: bool, + pub(crate) pool_id: String, + pub(crate) borrow_quantity: u64, + pub(crate) type_name: String, +} + +impl Flashloan { + pub(crate) fn to_db(&self) -> DBFlashloan { + DBFlashloan { + digest: self.digest.clone(), + sender: self.sender.clone(), + checkpoint: self.checkpoint as i64, + package: self.package.clone(), + borrow: self.borrow, + pool_id: self.pool_id.clone(), + borrow_quantity: self.borrow_quantity as i64, + type_name: self.type_name.clone(), + } + } +} + +#[derive(Clone)] +pub struct PoolPrice { + pub(crate) digest: String, + pub(crate) sender: String, + pub(crate) checkpoint: u64, + pub(crate) package: String, + pub(crate) target_pool: String, + pub(crate) reference_pool: String, + pub(crate) conversion_rate: u64, +} + +impl PoolPrice { + pub(crate) fn to_db(&self) -> DBPoolPrice { + DBPoolPrice { + digest: self.digest.clone(), + sender: self.sender.clone(), + checkpoint: self.checkpoint as i64, + package: self.package.clone(), + target_pool: self.target_pool.clone(), + reference_pool: self.reference_pool.clone(), + conversion_rate: self.conversion_rate as i64, + } + } +} + +#[derive(Clone)] +pub struct Balances { + pub digest: String, + pub sender: String, + pub checkpoint: u64, + pub package: String, + pub balance_manager_id: String, + pub asset: String, + pub amount: u64, + pub deposit: bool, +} + +impl Balances { + pub(crate) fn to_db(&self) -> DBBalances { + DBBalances { + digest: self.digest.clone(), + sender: self.sender.clone(), + checkpoint: self.checkpoint as i64, + package: self.package.clone(), + balance_manager_id: self.balance_manager_id.clone(), + asset: self.asset.clone(), + amount: self.amount as i64, + deposit: self.deposit, + } + } +} + +#[derive(Clone)] +pub struct Proposals { + pub(crate) digest: String, + pub(crate) sender: String, + pub(crate) checkpoint: u64, + pub(crate) package: String, + pub(crate) balance_manager_id: String, + pub(crate) epoch: u64, + pub(crate) taker_fee: u64, + pub(crate) maker_fee: u64, + pub(crate) stake_required: u64, +} + +impl Proposals { + pub(crate) fn to_db(&self) -> DBProposals { + DBProposals { + digest: self.digest.clone(), + sender: self.sender.clone(), + checkpoint: self.checkpoint as i64, + package: self.package.clone(), + balance_manager_id: self.balance_manager_id.clone(), + epoch: self.epoch as i64, + taker_fee: self.taker_fee as i64, + maker_fee: self.maker_fee as i64, + stake_required: self.stake_required as i64, + } + } +} + +#[derive(Clone)] +pub struct Rebates { + pub(crate) digest: String, + pub(crate) sender: String, + pub(crate) checkpoint: u64, + pub(crate) package: String, + pub(crate) pool_id: String, + pub(crate) balance_manager_id: String, + pub(crate) epoch: u64, + 
pub(crate) claim_amount: u64,
+}
+
+impl Rebates {
+    pub(crate) fn to_db(&self) -> DBRebates {
+        DBRebates {
+            digest: self.digest.clone(),
+            sender: self.sender.clone(),
+            checkpoint: self.checkpoint as i64,
+            package: self.package.clone(),
+            pool_id: self.pool_id.clone(),
+            balance_manager_id: self.balance_manager_id.clone(),
+            epoch: self.epoch as i64,
+            claim_amount: self.claim_amount as i64,
+        }
+    }
+}
+
+#[derive(Clone)]
+pub struct Stakes {
+    pub(crate) digest: String,
+    pub(crate) sender: String,
+    pub(crate) checkpoint: u64,
+    pub(crate) package: String,
+    pub(crate) pool_id: String,
+    pub(crate) balance_manager_id: String,
+    pub(crate) epoch: u64,
+    pub(crate) amount: u64,
+    pub(crate) stake: bool,
+}
+
+impl Stakes {
+    pub(crate) fn to_db(&self) -> DBStakes {
+        DBStakes {
+            digest: self.digest.clone(),
+            sender: self.sender.clone(),
+            checkpoint: self.checkpoint as i64,
+            package: self.package.clone(),
+            pool_id: self.pool_id.clone(),
+            balance_manager_id: self.balance_manager_id.clone(),
+            epoch: self.epoch as i64,
+            amount: self.amount as i64,
+            stake: self.stake,
+        }
+    }
+}
+
+#[derive(Clone)]
+pub struct TradeParamsUpdate {
+    pub(crate) digest: String,
+    pub(crate) sender: String,
+    pub(crate) checkpoint: u64,
+    pub(crate) package: String,
+    pub(crate) pool_id: String,
+    pub(crate) taker_fee: u64,
+    pub(crate) maker_fee: u64,
+    pub(crate) stake_required: u64,
+}
+
+impl TradeParamsUpdate {
+    pub(crate) fn to_db(&self) -> DBTradeParamsUpdate {
+        DBTradeParamsUpdate {
+            digest: self.digest.clone(),
+            sender: self.sender.clone(),
+            checkpoint: self.checkpoint as i64,
+            package: self.package.clone(),
+            pool_id: self.pool_id.clone(),
+            taker_fee: self.taker_fee as i64,
+            maker_fee: self.maker_fee as i64,
+            stake_required: self.stake_required as i64,
+        }
+    }
+}
+
+#[derive(Clone)]
+pub struct Votes {
+    pub(crate) digest: String,
+    pub(crate) sender: String,
+    pub(crate) checkpoint: u64,
+    pub(crate) package: String,
+    pub(crate) pool_id: String,
+    pub(crate) balance_manager_id: String,
+    pub(crate) epoch: u64,
+    pub(crate) from_proposal_id: Option<String>,
+    pub(crate) to_proposal_id: String,
+    pub(crate) stake: u64,
+}
+
+impl Votes {
+    pub(crate) fn to_db(&self) -> DBVotes {
+        DBVotes {
+            digest: self.digest.clone(),
+            sender: self.sender.clone(),
+            checkpoint: self.checkpoint as i64,
+            package: self.package.clone(),
+            pool_id: self.pool_id.clone(),
+            balance_manager_id: self.balance_manager_id.clone(),
+            epoch: self.epoch as i64,
+            from_proposal_id: self.from_proposal_id.clone(),
+            to_proposal_id: self.to_proposal_id.clone(),
+            stake: self.stake as i64,
+        }
+    }
+}
+
+#[derive(Clone)]
+pub struct SuiTxnError {
+    pub(crate) tx_digest: TransactionDigest,
+    pub(crate) sender: SuiAddress,
+    pub(crate) timestamp_ms: u64,
+    pub(crate) failure_status: String,
+    pub(crate) package: String,
+    pub(crate) cmd_idx: Option<u64>,
+}
+
+impl SuiTxnError {
+    pub(crate) fn to_db(&self) -> SuiErrorTransactions {
+        SuiErrorTransactions {
+            txn_digest: self.tx_digest.to_string(),
+            sender_address: self.sender.to_string(),
+            timestamp_ms: self.timestamp_ms as i64,
+            failure_status: self.failure_status.clone(),
+            package: self.package.clone(),
+            cmd_idx: self.cmd_idx.map(|idx| idx as i64),
+        }
+    }
+}
diff --git a/crates/sui-framework/docs/move-stdlib/vector.md b/crates/sui-framework/docs/move-stdlib/vector.md
index d3f55f7b737fa..7f9d3d1f48877 100644
--- a/crates/sui-framework/docs/move-stdlib/vector.md
+++ b/crates/sui-framework/docs/move-stdlib/vector.md
@@ -24,6 +24,7 @@ vectors are growable.
This module has many native functions. - [Function `remove`](#0x1_vector_remove) - [Function `insert`](#0x1_vector_insert) - [Function `swap_remove`](#0x1_vector_swap_remove) +- [Function `flatten`](#0x1_vector_flatten)

@@ -509,4 +510,31 @@ Aborts if i is out of bounds. + + + + +## Function `flatten` + +Concatenate the vectors of `v` into a single vector, keeping the order of the elements. + + +
public fun flatten<T>(v: vector<vector<T>>): vector<T>
+
+ + + +
+Implementation + + +
public fun flatten<T>(v: vector<vector<T>>): vector<T> {
+    let mut r = vector[];
+    v.do!(|u| r.append(u));
+    r
+}
+
+ + +
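A minimal usage sketch of `flatten` (illustrative, with made-up values; the relative order of elements across the inner vectors is preserved):

let v = vector[vector[1u64, 2], vector[3], vector[]];
assert!(v.flatten() == vector[1, 2, 3]);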
diff --git a/crates/sui-framework/docs/sui-framework/groth16.md b/crates/sui-framework/docs/sui-framework/groth16.md index 84dd18baf397b..4d3db01febc22 100644 --- a/crates/sui-framework/docs/sui-framework/groth16.md +++ b/crates/sui-framework/docs/sui-framework/groth16.md @@ -170,6 +170,15 @@ A ProofPoints wrap + + + + +
const EInvalidScalar: u64 = 3;
+
+ + + @@ -188,6 +197,15 @@ A ProofPoints wrap + + + + +
const MaxPublicInputs: u64 = 8;
+
+ + + ## Function `bls12381` @@ -298,7 +316,8 @@ Returns bytes of the four components of the PublicProofInputs wrapper from bytes. +Creates a PublicProofInputs wrapper from bytes. The bytes parameter should be a concatenation of a number of +32-byte scalar field elements, in little-endian format, to be used as public inputs to a circuit.
public fun public_proof_inputs_from_bytes(bytes: vector<u8>): groth16::PublicProofInputs
@@ -311,6 +330,8 @@ Creates a PublicProofIn
 
 
 
public fun public_proof_inputs_from_bytes(bytes: vector<u8>): PublicProofInputs {
+    assert!(bytes.length() % 32 == 0, EInvalidScalar);
+    assert!(bytes.length() / 32 <= MaxPublicInputs, ETooManyPublicInputs);
     PublicProofInputs { bytes }
 }
 
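A minimal sketch of the new checks (illustrative, with made-up scalar values rather than inputs from a real circuit): each public input is one 32-byte scalar in little-endian format, so the length of `bytes` must be a multiple of 32, and at most `MaxPublicInputs` (8) scalars are accepted.

// Two made-up 32-byte scalars, concatenated into 64 bytes: accepted.
let scalars = vector[
    x"0100000000000000000000000000000000000000000000000000000000000000",
    x"0200000000000000000000000000000000000000000000000000000000000000",
].flatten();
let _inputs = groth16::public_proof_inputs_from_bytes(scalars);
// A 33-byte input would abort with EInvalidScalar; nine scalars would abort with ETooManyPublicInputs.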
diff --git a/crates/sui-framework/packages/move-stdlib/sources/vector.move b/crates/sui-framework/packages/move-stdlib/sources/vector.move index 55c1abac34b74..6d7d0e37fe458 100644 --- a/crates/sui-framework/packages/move-stdlib/sources/vector.move +++ b/crates/sui-framework/packages/move-stdlib/sources/vector.move @@ -263,6 +263,13 @@ module std::vector { acc } + /// Concatenate the vectors of `v` into a single vector, keeping the order of the elements. + public fun flatten<T>(v: vector<vector<T>>): vector<T> { + let mut r = vector[]; + v.do!(|u| r.append(u)); + r + } + /// Whether any element in the vector `v` satisfies the predicate `f`. /// If the vector is empty, returns `false`. public macro fun any<$T>($v: &vector<$T>, $f: |&$T| -> bool): bool { diff --git a/crates/sui-framework/packages/move-stdlib/tests/vector_tests.move b/crates/sui-framework/packages/move-stdlib/tests/vector_tests.move index 3aac6ef5b4e5f..7064ee307c357 100644 --- a/crates/sui-framework/packages/move-stdlib/tests/vector_tests.move +++ b/crates/sui-framework/packages/move-stdlib/tests/vector_tests.move @@ -693,6 +693,16 @@ module std::vector_tests { assert!(r.fold!(10, |acc, e| acc + e) == 16); } + #[test] + fun test_flatten() { + assert!(vector<vector<u8>>[].flatten().is_empty()); + assert!(vector<vector<u8>>[vector[], vector[]].flatten().is_empty()); + assert!(vector[vector[1]].flatten() == vector[1]); + assert!(vector[vector[1], vector[]].flatten() == vector[1]); + assert!(vector[vector[1], vector[2]].flatten() == vector[1, 2]); + assert!(vector[vector[1], vector[2, 3]].flatten() == vector[1, 2, 3]); + } + #[test] fun any_all_macro() { assert!(vector[].any!(|e| *e == 2) == false); diff --git a/crates/sui-framework/packages/sui-framework/sources/crypto/groth16.move b/crates/sui-framework/packages/sui-framework/sources/crypto/groth16.move index e1b5804028fa4..ede657b589d58 100644 --- a/crates/sui-framework/packages/sui-framework/sources/crypto/groth16.move +++ b/crates/sui-framework/packages/sui-framework/sources/crypto/groth16.move @@ -11,10 +11,15 @@ module sui::groth16 { // Error if the given curve is not supported const EInvalidCurve: u64 = 1; - #[allow(unused_const)] // Error if the number of public inputs given exceeds the max. const ETooManyPublicInputs: u64 = 2; + // Error if a public input does not have the correct length. + const EInvalidScalar: u64 = 3; + + // We need to set an upper bound on the number of public inputs to avoid a DoS attack. + const MaxPublicInputs: u64 = 8; // This must match the corresponding constant in the native verify function. + /// Represents an elliptic curve construction to be used in the verifier. Currently we support BLS12-381 and BN254. /// This should be given as the first parameter to `prepare_verifying_key` or `verify_groth16_proof`. public struct Curve has store, copy, drop { @@ -60,8 +65,11 @@ module sui::groth16 { bytes: vector<u8>, } - /// Creates a `PublicProofInputs` wrapper from bytes. + /// Creates a `PublicProofInputs` wrapper from bytes. The `bytes` parameter should be a concatenation of a number of + /// 32-byte scalar field elements, in little-endian format, to be used as public inputs to a circuit.
public fun public_proof_inputs_from_bytes(bytes: vector<u8>): PublicProofInputs { + assert!(bytes.length() % 32 == 0, EInvalidScalar); + assert!(bytes.length() / 32 <= MaxPublicInputs, ETooManyPublicInputs); PublicProofInputs { bytes } } diff --git a/crates/sui-framework/packages/sui-framework/sources/kiosk/kiosk.move b/crates/sui-framework/packages/sui-framework/sources/kiosk/kiosk.move index 21ea2dad8b2fd..0da0136ce0537 100644 --- a/crates/sui-framework/packages/sui-framework/sources/kiosk/kiosk.move +++ b/crates/sui-framework/packages/sui-framework/sources/kiosk/kiosk.move @@ -231,7 +231,7 @@ module sui::kiosk { // === Kiosk packing and unpacking === - #[allow(lint(self_transfer, share_owned))] + #[allow(lint(self_transfer))] /// Creates a new Kiosk in a default configuration: sender receives the /// `KioskOwnerCap` and becomes the Owner, the `Kiosk` is shared. entry fun default(ctx: &mut TxContext) { diff --git a/crates/sui-framework/packages/sui-framework/sources/token.move b/crates/sui-framework/packages/sui-framework/sources/token.move index bdc1b63fac1e1..634efa027b99f 100644 --- a/crates/sui-framework/packages/sui-framework/sources/token.move +++ b/crates/sui-framework/packages/sui-framework/sources/token.move @@ -155,7 +155,6 @@ module sui::token { (policy, cap) } - #[allow(lint(share_owned))] /// Share the `TokenPolicy`. Due to `key`-only restriction, it must be /// shared after initialization. public fun share_policy<T>(policy: TokenPolicy<T>) { diff --git a/crates/sui-framework/packages/sui-framework/tests/crypto/groth16_tests.move b/crates/sui-framework/packages/sui-framework/tests/crypto/groth16_tests.move index 0ac0224f154ed..31e861c34e32a 100644 --- a/crates/sui-framework/packages/sui-framework/tests/crypto/groth16_tests.move +++ b/crates/sui-framework/packages/sui-framework/tests/crypto/groth16_tests.move @@ -36,6 +36,13 @@ module sui::groth16_tests { groth16::prepare_verifying_key(&bls12381(), &invalid_vk); } + #[test] + #[expected_failure(abort_code = groth16::EInvalidScalar)] + fun test_invalid_public_inputs() { + let public_inputs_too_short = vector[x"440758042e68b76a376f2fecf3a5a8105edb194c3e774e5a760140305aec8849", x"1234"].flatten(); + groth16::public_proof_inputs_from_bytes(public_inputs_too_short); + } + #[test] fun test_verify_groth_16_proof_bls12381() { let curve = bls12381(); @@ -60,10 +67,6 @@ module sui::groth16_tests { let invalid_pvk = groth16::pvk_from_bytes(vk_bytes, alpha_bytes, gamma_bytes, delta_bytes); assert!(groth16::verify_groth16_proof(&curve, &invalid_pvk, &inputs, &proof) == false); - // Invalid public inputs bytes. - let invalid_inputs = groth16::public_proof_inputs_from_bytes(x"cf"); - assert!(groth16::verify_groth16_proof(&curve, &pvk, &invalid_inputs, &proof) == false); - // Invalid proof bytes.
let invalid_proof = groth16::proof_points_from_bytes(x"4a"); assert!(groth16::verify_groth16_proof(&curve, &pvk, &inputs, &invalid_proof) == false); @@ -71,22 +74,20 @@ module sui::groth16_tests { #[test] #[expected_failure(abort_code = groth16::ETooManyPublicInputs)] - fun test_too_many_public_inputs_bls12381() { - let curve = bls12381(); - - let vk_bytes = x"ada3c24e8c2e63579cc03fd1f112a093a17fc8ab0ff6eee7e04cab7bf8e03e7645381f309ec113309e05ac404c77ac7c8585d5e4328594f5a70a81f6bd4f29073883ee18fd90e2aa45d0fc7376e81e2fdf5351200386f5732e58eb6ff4d318dc"; - let alpha_bytes = x"8b0f85a9e7d929244b0af9a35af10717bd667b6227aae37a6d336e815fb0d850873e0d87968345a493b2d31aa8aa400d9820af1d35fa862d1b339ea1f98ac70db7faa304bff120a151a1741d782d08b8f1c1080d4d2f3ebee63ac6cadc666605be306de0973be38fbbf0f54b476bbb002a74ff9506a2b9b9a34b99bfa7481a84a2c9face7065c19d7069cc5738c5350b886a5eeebe656499d2ffb360afc7aff20fa9ee689fb8b46863e90c85224e8f597bf323ad4efb02ee96eb40221fc89918a2c740eabd2886476c7f247a3eb34f0106b3b51cf040e2cdcafea68b0d8eecabf58b5aa2ece3d86259cf2dfa3efab1170c6eb11948826def533849b68335d76d60f3e16bb5c629b1c24df2bdd1a7f13c754d7fe38617ecd7783504e4615e5c13168185cc08de8d63a0f7032ab7e82ff78cf0bc46a84c98f2d95bb5af355cbbe525c44d5c1549c169dfe119a219dbf9038ec73729d187bd0e3ed369e4a2ec2be837f3dcfd958aea7110627d2c0192d262f17e722509c17196005b646a556cf010ef9bd2a2a9b937516a5ecdee516e77d14278e96bc891b630fc833dda714343554ae127c49460416430b7d4f048d08618058335dec0728ad37d10dd9d859c385a38673e71cc98e8439da0accc29de5c92d3c3dc98e199361e9f7558e8b0a2a315ccc5a72f54551f07fad6f6f4615af498aba98aea01a13a4eb84667fd87ee9782b1d812a03f8814f042823a7701238d0fec1e7dec2a26ffea00330b5c7930e95138381435d2a59f51313a48624e30b0a685e357874d41a0a19d83f7420c1d9c04"; - let gamma_bytes = x"b675d1ff988116d1f2965d3c0c373569b74d0a1762ea7c4f4635faa5b5a8fa198a2a2ce6153f390a658dc9ad01a415491747e9de7d5f493f59cf05a52eb46eaac397ffc47aef1396cf0d8b75d0664077ea328ad6b63284b42972a8f11c523a60"; - let delta_bytes = x"8229cb9443ef1fb72887f917f500e2aef998717d91857bcb92061ecd74d1d24c2b2b282736e8074e4316939b4c9853c117aa08ed49206860d648818b2cccb526585f5790161b1730d39c73603b482424a27bba891aaa6d99f3025d3df2a6bd42"; - let pvk = groth16::pvk_from_bytes(vk_bytes, alpha_bytes, gamma_bytes, delta_bytes); - - let inputs_bytes = x"440758042e68b76a376f2fecf3a5a8105edb194c3e774e5a760140305aec8849440758042e68b76a376f2fecf3a5a8105edb194c3e774e5a760140305aec8849440758042e68b76a376f2fecf3a5a8105edb194c3e774e5a760140305aec8849440758042e68b76a376f2fecf3a5a8105edb194c3e774e5a760140305aec8849440758042e68b76a376f2fecf3a5a8105edb194c3e774e5a760140305aec8849440758042e68b76a376f2fecf3a5a8105edb194c3e774e5a760140305aec8849440758042e68b76a376f2fecf3a5a8105edb194c3e774e5a760140305aec8849440758042e68b76a376f2fecf3a5a8105edb194c3e774e5a760140305aec8849440758042e68b76a376f2fecf3a5a8105edb194c3e774e5a760140305aec8849"; - let inputs = groth16::public_proof_inputs_from_bytes(inputs_bytes); - - let proof_bytes = x"a29981304df8e0f50750b558d4de59dbc8329634b81c986e28e9fff2b0faa52333b14a1f7b275b029e13499d1f5dd8ab955cf5fa3000a097920180381a238ce12df52207597eade4a365a6872c0a19a39c08a9bfb98b69a15615f90cc32660180ca32e565c01a49b505dd277713b1eae834df49643291a3601b11f56957bde02d5446406d0e4745d1bd32c8ccb8d8e80b877712f5f373016d2ecdeebb58caebc7a425b8137ebb1bd0c5b81c1d48151b25f0f24fe9602ba4e403811fb17db6f14"; - let proof = groth16::proof_points_from_bytes(proof_bytes); - - groth16::verify_groth16_proof(&curve, &pvk, &inputs, &proof); + fun test_too_many_public_inputs() { + // We give 9 inputs which exceeds the limit of 
8 + let inputs_bytes = vector[ + x"440758042e68b76a376f2fecf3a5a8105edb194c3e774e5a760140305aec8849", + x"440758042e68b76a376f2fecf3a5a8105edb194c3e774e5a760140305aec8849", + x"440758042e68b76a376f2fecf3a5a8105edb194c3e774e5a760140305aec8849", + x"440758042e68b76a376f2fecf3a5a8105edb194c3e774e5a760140305aec8849", + x"440758042e68b76a376f2fecf3a5a8105edb194c3e774e5a760140305aec8849", + x"440758042e68b76a376f2fecf3a5a8105edb194c3e774e5a760140305aec8849", + x"440758042e68b76a376f2fecf3a5a8105edb194c3e774e5a760140305aec8849", + x"440758042e68b76a376f2fecf3a5a8105edb194c3e774e5a760140305aec8849", + x"440758042e68b76a376f2fecf3a5a8105edb194c3e774e5a760140305aec8849" + ].flatten(); + groth16::public_proof_inputs_from_bytes(inputs_bytes); } #[test] @@ -119,6 +120,13 @@ module sui::groth16_tests { groth16::prepare_verifying_key(&bn254(), &invalid_vk); } + #[test] + #[expected_failure(abort_code = groth16::EInvalidScalar)] + fun test_invalid_public_inputs_bn254() { + let public_inputs_too_short = vector[x"3fd7c445c6845a9399d1a7b8394c16373399a037786c169f16219359d3be840a", x"1234"].flatten(); + groth16::public_proof_inputs_from_bytes(public_inputs_too_short); + } + #[test] fun test_verify_groth_16_proof_bn254() { let curve = bn254(); @@ -143,33 +151,8 @@ module sui::groth16_tests { let invalid_pvk = groth16::pvk_from_bytes(vk_bytes, alpha_bytes, gamma_bytes, delta_bytes); assert!(groth16::verify_groth16_proof(&curve, &invalid_pvk, &inputs, &proof) == false); - // Invalid public inputs bytes. - let invalid_inputs = groth16::public_proof_inputs_from_bytes(x"cf"); - assert!(groth16::verify_groth16_proof(&curve, &pvk, &invalid_inputs, &proof) == false); - // Invalid proof bytes. let invalid_proof = groth16::proof_points_from_bytes(x"4a"); assert!(groth16::verify_groth16_proof(&curve, &pvk, &inputs, &invalid_proof) == false); } - - #[test] - #[expected_failure(abort_code = groth16::ETooManyPublicInputs)] - fun test_too_many_public_inputs_bn254() { - let curve = bn254(); - - let vk_bytes = x"e8324a3242be5193eb38cca8761691ce061e89ce86f1fce8fd7ef40808f12da3c67d9ed5667c841f956e11adbbe240ddf37a1e3a4a890600dc88f608b897898e"; - let alpha_bytes = x"51e6d72cd3b0914dd232653f84e7971d3e5bbcde6b47ff8d6c05277e579f1c1eb2fe30aa252c63950de6ea00dd21a1027f6d130357e47c31fafeca0d31e19406231df42bc11ce376f8cf75135d9074f081c242c31f198d151ec69ec37d67cc2b12542cb306a7823c8b194f13672176c6ee8266b2a0c9f57a5dbdb2278046b511d44e715a3ebe02ec2e1cf493c1b1ada84676e234134a6da5a552f61d4e905e15c0dc58a3414d74304775de5ba8571128f3548d269b51fdc08d5b646fd9157e0a2bc0c4bec5a9a6048d17d1d6cd941b4d459f1de0c7c1d417f33995d2a8dd670b91f0baaccaaf2802100901711885026a5ec97fbbb801000d0d01185651947c1900e336921d07eb16d0e25a2192829540ad5eeb1c498ba9c6316e16807a55dc2b9a7f3dea2e4a2f485ed1295a96d6ca86851842b3a22f83507f93ac66a1dc341d5d22f592527d8ea5c12db16bbabe24b76b3e1baf825c8dcf147be369fd8c5300fd77d0aa8dce730e4e7442c93c4890023f3a266c9fbc90ebbf72825e798c4c00"; - let gamma_bytes = x"240a80664919b9f7490209cff12bfd81c32c272607dc004661c792082cbe282ef826f56a3822ebd72345f86c7ee9872e23f10d1f2dbf43f8aca5dc2ceb5388a5"; - let delta_bytes = x"f755df8c90edab48ac5adafef6a5a461902217f392e3aa4c34c0462b700c18164f79018778755980d491647de11ecc51fda2cc17171c4b44485ec37ccd23a69b"; - let pvk = groth16::pvk_from_bytes(vk_bytes, alpha_bytes, gamma_bytes, delta_bytes); - - // We give 9 equal inputs which exceeds the limit of 8 - let inputs_bytes = 
x"3fd7c445c6845a9399d1a7b8394c16373399a037786c169f16219359d3be840a3fd7c445c6845a9399d1a7b8394c16373399a037786c169f16219359d3be840a3fd7c445c6845a9399d1a7b8394c16373399a037786c169f16219359d3be840a3fd7c445c6845a9399d1a7b8394c16373399a037786c169f16219359d3be840a3fd7c445c6845a9399d1a7b8394c16373399a037786c169f16219359d3be840a3fd7c445c6845a9399d1a7b8394c16373399a037786c169f16219359d3be840a3fd7c445c6845a9399d1a7b8394c16373399a037786c169f16219359d3be840a3fd7c445c6845a9399d1a7b8394c16373399a037786c169f16219359d3be840a3fd7c445c6845a9399d1a7b8394c16373399a037786c169f16219359d3be840a"; - let inputs = groth16::public_proof_inputs_from_bytes(inputs_bytes); - - let proof_bytes = x"dd2ef02e57d6a282df6b7f36c134ab7e55c2e04c5b8cbd7831be18e0e7224623ae8bd6c41637c10cbd02f5e68de6394461f417895ddd264d6f0ddacf68c6cd02feb8881f0efa599139a6faf4223dd8743777c4346cba52322eb466af96f2be9f813af1450f84d6f8029804f60cac1add70ad1a3d4226404f84f4022dc18caa0f"; - let proof = groth16::proof_points_from_bytes(proof_bytes); - - groth16::verify_groth16_proof(&curve, &pvk, &inputs, &proof); - } } diff --git a/crates/sui-framework/packages/sui-framework/tests/crypto/vdf_tests.move b/crates/sui-framework/packages/sui-framework/tests/crypto/vdf_tests.move index 89ccef6607fae..df97e4cb3b600 100644 --- a/crates/sui-framework/packages/sui-framework/tests/crypto/vdf_tests.move +++ b/crates/sui-framework/packages/sui-framework/tests/crypto/vdf_tests.move @@ -9,7 +9,7 @@ module sui::vdf_tests { #[test] fun test_hash_to_input() { let seed = b"seed"; - let expected = x"bb0102ed9d7af1f184a7542c36ae40e7bf3cd3953dfdc1aab84c7137f33614a87a443be6b57eb28e33f661bbd7088c6fae16a4550997d05a1631ebf5ec886440ac82cdfbff978efe1d53c672e25374627a2e9cc15baaf19042a83044099ce13612ff2b0926fe2185a3920b34e607d580a8474c3ec6f3961303c76babfcbde4a2df5243784048649edd19cdd52fd0cfe5b304c6da8240e04d054c97fa6fe0799e711197d7b31c4a2f2e3984ae9cd4c6ea20e2190cec9ef8d32f3d03034fe90209d296ce18b5e8203ccbcfc05897dbede2cd84ebfe493a8d4f1f5f07e75a0dd61920dd8bb5fbca1db92f0d99572010f66034155c667dd9d2cb8ffece37cf3bfcfb712654cf99b07042903be494570a66cb9357ea33b4332e2117256cbbb9cab3a3c44b402c13b672f3e226073b96f604863b247f801acc9efb8d613fee7a648e0e3b5c4da791d8bdf5d097c254ed7ed885af0ab78535d0aedde4ad5817a5230372e64080caa56ada2debbf6ed8131c939b66d02fc49e3f188156ac49a88298fae6ad2f4efa068f613d522a4efacd5aa3dc84046684fa337f38238d51ec92a46aab01d00b95444f20f544d6fc38af9b6bd63a0654ba98aa7480c52a833a47c3466ff518c78717d7f72b04889e0ce64bff5d558291a0aaad673fe6958d5a4aadfefaa16eea8a4407a0930dd86c44d1ad97b464064bc8d17499d9248e729a75d8469223700f080ddafb981af5a12f08f5f2f09c96d69d4a17fb7bcce368fa4b88351db270186806cb75c79704083ccd2eae77a02efcd283a47f715ff24264c89c1a9f01fbcdb5c551f7607a20300cc85b41d131687632fbebf93226eb91d8136be471824f90d17e4ed4c0702c85345814b4095a0a7227d4836bfd5b284ffde26e7d03caf588c2f371255fbe439b1094d1dd4c7e0dadb53a546c6d406e2d106277cd22139aa71c91411537312fa216ea90d790d6c230ee7a9b743f559a2287acd1fd2b8dec38549bd625c33616ac96ef76eb12b327b4c49a87b4a46134c4e3829eaf0f1821847fdf9edf4c6d23b8a9ee7b6d95a98ba05e6dcaa921326a79b3d10fb2e407d095191c9f8afc2e64810109ff2bc1e34f999f6faac05c2b6295ee4799d0ce7b268c6bac3d3285116ed0b821d9463d05cb349ac309223123e176ec8f6898ef420e87e74aaabd3c247b2312ff7f49e90054af6f871c7e37c61986e3572ff201bc8b205f0924897b206ca9613216daa12fbbda5c668ae8eb26e717a5c813c965cc906bea52ab281c240b6d4bb65e097e7cc924bbdcc6efbd8e8c4366c65989c6f5ab2d5f1d1fe5ee7865e9f6d468d63d7f548a4e02507dbd18fefa777c5163c6d15bc36979de551d8cdc19601ee861c674bafa5f62bc203eea1c50e7c25c3db98063a0774c7c2dbfdcae651d6484f62c3
970085fe8cd841d44c857edae7e5e413008060fe384df6773d4edbc211cccf33e0cd63dac177dc435395452b187d634d6581a5db81998ef64d43c3b64d7909a79abfc70981bc7bda06a3caf2273123568"; + let expected = x"22267333dc4015f6f9bac320d7415760b2bb130a096d9c16b85d1c6e930a40df14e04f33ff121d2d7f0adad584d6f21790d800415a9b6a5e815270bfa2ab4aa0f40e5988eaaca821b007f863bbae63ffcdc9896fd05c65df02012b47d1d661485d078eb6020a3e6c85d24c9a11dc53b37582344898550a5bae1364c3f79a63f06c877e35190e020e23c668bd1a1e42c293b96d7f139d039c5cbf9792c379758c705b5cd282f9b3bac964f564ae7520dfa7465195428be86aec06e9d350a56cb15c85aad39bcf309fd7fc31a79992ecb842ddba1ad67cd01305d1b2d913877d3557844a21394bac443a329ba18b05f0816aa99f67c6ec88e9027d4978ba0d0e1d94a1d7aa3739443a8d04ca073c368416fdfa9d7e01e6d26a0f9442d5389a923ea9e740baf94c98d8d3fffa15c5cd440be43c9b61a684767798bcf87dcce303734df7491f4b77f4218c20a2b2e3b9dac7971ffdb83ea5141a57885e3be2f3425a21fd6eb2eded45f555fa03797662824e4eeb447c8f90ba9e8a02af9a1911c2e113632f36d42b7530b2694ff0160fada57cfde663c0dc34ab20af0e709656a09bfe8c8cb075b3d7afb6eb131a6fa1b1828fa7ae03c21f247e"; let actual = hash_to_input(&seed); assert!(actual == expected, 0); @@ -22,15 +22,15 @@ module sui::vdf_tests { fun test_vdf_verify() { let input_seed = x"2ef29e01809053dcfc89e7acad77e13c2bf03b5a9a0bbfea555a1423f1f1ae23"; let input_bytes = hash_to_input(&input_seed); - let input_bytes_expected = x"bc01009ed67199c633da0faa75a8ed0ff5c3ae7c278c79aad85953cdf86d57101b1ee941e5239b7d614e5b16eac269c460f16d57a12c75b52c48fac643a1c4918fab86805fe08fcd6b38093a17143cca7550fd018b8bc6871fb441b406bec7a7f3a61c3b2a5aa9daca5f9a6fa474b270688de643323de1acc8073a3418bc1681a614c1abb5fa56b61a7d6df4260547c9f613c5f0dbd7cb91a478ac94b1cce6b1f4784dc161ec3c85bf02cf99fd460b0b25a44d1990dacd1fe7a43b797611ea0210645fef3905f7e1accf97bd3b868a8a99d4a1a546e5a55e20f343fc2724829f1770591b50a73c93ec9b8c01ce1fa6b84eddd5a7ddd077110e21b8e058bf2fed2592a5449db177ec0e32595b20bda5779c2f346b01df8c0d277d9d3a7fe0a04e67b210be60334efdadb7abc5ac001b71509c2d487d9d26443527c1b8b02dfcffc50ef98020f569cdf6fffca5870b0e502493fceee35b79eed99e2c758a0aff4c86b2af0dd223e270ecf84eb7405fe35a9e37d6b080efa3c59806c2ceffa82f38502f9d37b6c298cf07534347cd9ee436406784bd7e0a57d380dd3923ddca13d86f3b2c83a135f125f9429a6802247a0e926b54144d74e4e8f66f0303cdc91843ce7e1fb9c6276c000512c0709c7fbfde2b80e66db77222447ef6b4da4a698e011c6de95ad88738aea465c158288a54223c7f7152577cc48691af57e2631e3224b7c94e2a4c5034db35bbf9e807753fa51da8798bf63b7e6ebd857ca4cf01fcab7a33e63fa89eb386e2ef98046c44491bdf8d62ede2af4ab79ccac88e404abb649b92f49c9f9abcf2216bb628e96400a75a66c12b6ff1c6dae498dd4183ad989921ebc6a1be73127741333671eb72cd25eabc69fecc3c50da06b4a3af155264d4e39e8c681b8c5555d4cab748ed15d119527820e01854fa203c2deba3a67620d47733919e8c71d659e60e86db69905ebdc4dbeda67f77291c2202b2116a05f227f963a97eb8c87104b2df349f01f251aa22bbd41541998ce755309b98d9597d7ee26b6acaef1869885c775e6ceb710c36c07e401e17a8ccb838e33f64e43e4db3491b5cef6e800c4e494610ab81a8b489263b86976160d7d0106cab79bf2a2fce5b01e8f9d1fb069a98e814c94f10d9917b7ea27209bc822b35741f56a9aeadb75a7eae6a8cbd7df08e079db64fd48655f42c24c14bb6c72e744206a3e15deee45cab74d589deb1055e0e69fe508a2ef356dc4e2caaaf89f44a520722490374eade8573429d0d6d16e3c681853f96759cc6e3ea3aaad55284282abd40686281ff944c6a507086143cf76d0f7f93b486d552fa4698656cff8a325fea84943333645b29ee11c99555b2076a09466f6e602db663e1bd45c523a12a7fcd2328d5139d14b25561b94f62f69d436c5d4c92b01ae3a91baa1b5781bd0bf2156e1d0042ab2cbc6e10f4389868fc41d05b19bfe3dfcaacb0478b3dce887da8435c9d49f457fd54e129133e5ce87c39acb9206213daec867fca35e6b612c523fb9fba959542a777ea74"; + 
let input_bytes_expected = x"2300b094d4facc4b552df23714f5d09a6ddfde4f0d06163b7fda1fc022ebafa5be196f4d33fc3bce80649d3e64cb6d1497bf65f6b40ffcf5ca3211889b994019cf4dfa8697b0f69a70c5ee14ef438cf0e50b2f93786f506fde02412ad75a441ab785dbe34ce00cb829962e4216f964fb8c53852cc86e0977428322ef7d505e10c37260a9ebeb0cc66ddb59901556bc4a6fa9bc0f9d8159c3319f3a2e1781912000917a12054126a728b46dc6849cda81648e0f34200824a050fd1e01382bf4cfa17019ae2f6f48cfbe6e4ef84e1b00361ed6a4892e4b5db94e8847e0c7050637d668c5cbef08fc60fde9b5d68a02af4c5ed7f7ba99f3b68eb59fa2ea8a282b705124ec5577f166130a0c043823ea1a3fd495643dfbe8d294a9f034d91f8c8e76689a676ebe1ded776bdf7fd1e4050a84b8cead2e6adcd0ae7d12a6e221cb6579eb54911d3ce9739048924f3451c07b203ab811a9506d4d134b335eab6e84c49983f405f3d5b2040c522922e501086c19db5a82a4c7134a7cad5738bc884b382e4b3cfca0521a0e7eacd0d053855dcbb6fa897f122cc2b49df60d4d3424d37a01764b77b65b5f5472365ae3b82a3cb7f9f7a13d4a6ca100c4"; assert!(input_bytes == input_bytes_expected, 0); - let output_bytes = x"c001503be6eff558a40ba145da5aa9d1270367f32cde44c1601846cdd3d0911abce8ab6adb6b82682b45107545e9ae1efca44dd6f3ba8d95a687652b94f479f6b1a37156c44194d6bc5f266098b75251b12de6fa67de6aea14250c0481694db5f24db5e3c89da3354aafc10c4bd0371f9a175d1b5b193190c4c1089ed7a95dc07f1ce29021b55f3aaa7eb65725d61277f0996b783c005a919ba121d81f211f63d188ac525056235504fe4858765dc6498362d98e8540287a0ff78424c18de53abe46c0014f847bd49f599960fe3c3b7cfc571cd854c7d21b0e9984070f7e168c872a6e6480d8fd37d30602f57a237b83ae961e6a4acb94b78c32d04f06058bda037d6ad313c81f823db25c53c265b02a29008f727f95010c82b0cf8745e77a7f4000dac929ba83a4594482b4e6ff59c93a78df5c816f244914329c145e288fd3fd4800a1cc2df23f386112e569608e6de40ee65fe870960b4e3fee4bb188d8db0dd5df3c2384eb24a797eb20cf8524d563663ccde866a405e2713cfafdb760e50c77a797c10100a31fc5ca0a91aa788d5f5df17a1433f1a0e6e4da440ce935b1b48dc6868c8fc00d7ee725ce21797a6c4440af02570466081479e99eee1a5b509a3e1ac2e000ed386c35d9fadd130df2a292fa5f9aa2c195c48c9d11e58ac98c8dbd2169721ed2d2c9f5544de17deeaa9655360ed7baa46820f5e008af1e3f028d819dee3fee50ab55b266385dfc8f65f7f0c1b6149e5295bfefb83b14db3a30b2cefd1495ba4e5ae39d2b729f9644fc28764d03243fad3e61145ed83cbf2708b60c0b7cac7148"; + let output_bytes = x"c001532225cbc82be245b2571c83babcc61ab86234465a7e91e000b14b1a3426cef72a90e5db733523ba5593200c35281b77156eaa978b7ad1c5eac1336ca8e74b1341ae072b9fd620d24ff33590903668ac6d5f53fb6ac32d7c0cd380c790418f68e64505aeb82f40d6e9bcf3642850c440bc458c0465965ce384c7d6d11570d3499e265a38e4e52580e54580fccd28b1dbb9d32a7bca5e59dc9d083c6c6642df20a160da447c78f2695d424984531a1de5a35cd13ba496cf9c0adec749d31d2bbfc001f2d872ba990524a2756e74224f1162bcbceafb2222a2ca19255511f775ceb74a4bc6a36848377201b62b3bc12332781e20097612005d642033866bca926d9242bb2d0a6d305b0e46e9715340b64012271d3fe2c614ab9dca9d2f81a18ba8f49b5b9945fd4b3af743d4c7f2ac0906266ae4c26915bfcbe9ec6a1a17420c442e3cb00422efb70467188e124a56e171004521360f2da80323a2b9d327c43931327a59a711986116333010e743d9ff1c3de811281e5eba84e49c33a78d60e27b065fc101008af0c8fa3fe81a2bbdcdbc3040727f3797121254d48b182ca4a4cd698f0523057b4b223a94253a75842fe823085e8a55b2cb639e4799256f03252ded38eb93a544ca93b89f72c358098b0badedda13de0d3c99c8a33e3b6fa4d894b4031e5400872453086b36cb28d02497c3e2f1e4c6a0b4344b4dc94f4b39481cef8ea05d170f0159f06d7ffcbc99e214d62a1297153a58c41d11dacae5b409b5e6023159897bb773e0a146b7785dc772bc2c61a75aa9b30765027fc1a36bf31113799d2644"; let proof_bytes = 
x"0101010180032cf35709e1301d02b40a0dbe3dadfe6ec1eeba8fb8060a1decd0c7a126ea3f27fadcad81435601b0e0abca5c89173ef639e5a88043aa29801e6799e430b509e479b57af981f9ddd48d3a8d5919f99258081557a08270bb441233c78030a01e03ec199b5e3eef5ccc9b1a3d4841cbe4ff529c22a8cd1b1b0075338d864e3890942df6b007d2c3e3a8ef1ce7490c6bbec5372adfcbf8704a1ffc9a69db8d9cdc54762f019036e450e457325eef74b794f3f16ff327d68079a5b9de49163d7323937374f8a785a8f9afe84d6a71b336e4de00f239ee3af1d7604a3985e610e1603bd0e1a4998e19fa0c8920ffd8d61b0a87eeee50ac7c03ff7c4708a34f3bc92fd0103758c954ee34032cee2c78ad8cdc79a35dbc810196b7bf6833e1c45c83b09c0d1b78bc6f8753e10770e7045b08d50b4aa16a75b27a096d5ec1331f1fd0a44e95a8737c20240c90307b5497d3470393c2a00da0649e86d13e820591296c644fc1eef9e7c6ca4967c5e19df3153cd7fbd598c271e11c10397349ddc8cc8452ec"; assert!(vdf_verify(&input_bytes, &output_bytes, &proof_bytes, 100), 1); // Try with output from 101 iterations of the VDF instead - let other_output_bytes = x"c0015011855f4d75bbd3c52c439c8c94ac523090a0cfd3ce04132ea8091c7893fb173a9f5e305d515f5e94fd7a23697bd3a6fba9ebb80c7b9fcece3b42b83605c3b94207bcfd135d90e2ad1b0a2c98c2a0e57d7499cfa3ecddea4fd733c2e437a6d1e707ae2a38cca2c65daa08c1c26cb7078d5b05f9d721e457c94d0fd1c8793eac8370eb8167b47492444403ab9c1d1ff37d75c2c4ae2271a86e6af59f31d07243bd3134bcafcfb1716085eca27d248611732563df5832c35672880d69301869adc00106c9d5439af4d4dfa21e75e3966af4905b7cd97711bd217605043778626ed51be43a5d188356df1f260ed60a1f90b7ffbb7c3c416de92fdd8358480e5b938afd6143053b1736b316503d18b9e5b024299cbdbc17d9b1f825d97b74b3fb09d1ddab4fa60a420db4cc11b93c9166152c38f7ea175dee2293ce3fafc8fd271bae814bcef963bdcb0a1e0cf6404e6a0b63e44321c54041d2a704aaa4fa5836774499dcdc2e8255cb9f907a6c6edef5d7290e4fe2bc3307bb136da05c705ea16419fdc101008fdcd8a6d77bf0f7efed3258549419d564c91cd4682c6465105c966f1fbf73f11ee123f9f292d2d04c63adfc21e8146f108779e7e781424989393d8677e142f2b9c8115d98fc2a81d826bb252c0b3152f4cd290062ff583066f263ffa399da76159c4adb007cd8a1ba6f41f119909e9336a0d0205272101ab79753712ba031094f2be571929d4d0c478a5893f9673d593279ba6b92db14c23dff795d084e1bd913f32279c4c252afe11da7371945be9e9907ca6864cf3137364677c1b5757566"; + let other_output_bytes = x"c0013f8fa1bc67da02c43b1dafb2dd576be9c301a6281f938f1ff96718ec46ed102344c5e207464a0a90da7a1a0d5a69b1942048596c1dee5a95199a8a7729616135687c0bcacd2bdcb039af9f362649575ca249f793110280d3f6700c199b1d37f8f58a8ad186d60e45f1ffa675f1a067cf706b110c1134c6b6af97a6920db607fa4eb2bdda161a9527d5503494dd3e3a492bce2289934f16cdda528ad3eee75528de240d60f0174d54c987e6849cc11285dca21eeee1d83645fca10a469f116f81c00105f1f6f5f7fcae2abae809e1f7570b3f7dade7724ef61bfcc96c025e205de5a6b3660b3dfc2a92ba83fa799fd9b509c8962d01400560ff9e2ad8ab8d55c1a072ed70297516e69d08245fbd9eb554796eeb249c7252303089ec9e136c6050ef24d425c14836071a517302721827da07273578215b903093b61ce3307c44dd2b66725129f382e28fe7931082aa06f8896ebb79c2828e63bd978f7a328bd2d324711e41f9e2e1c016fe625fb412526662c3f08ed1333061b12bdf4f71975b4b4a95c10100b52ed14a6b298e95644a48edcad1b15df4ef5fbbf3b8b4b3f61ed5e58bbadd660f23940313610b8c95614beb38e1df0888d7b3198fb07ee8d5c0a6bd8309109871aedd999f72fc73b5d1c0972331c9a77d6d28396ec00ea99c928a17ff418bf535678b1a9a87ce52f90b7d23bad0cf80f51357c92457e6d55b74e224f16bdec90f521b5db6c0d02d9d6b9eee0baceeda3037bc39ceee4112e29beb1815807c30539d85ed54cba13bacf381ff761fc95bcc6537ea26e25b249d3473b1ced85e9a"; let other_proof_bytes = 
x"0101010180032cf35709e1301d02b40a0dbe3dadfe6ec1eeba8fb8060a1decd0c7a126ea3f27fadcad81435601b0e0abca5c89173ef639e5a88043aa29801e6799e430b509e479b57af981f9ddd48d3a8d5919f99258081557a08270bb441233c78030a01e03ec199b5e3eef5ccc9b1a3d4841cbe4ff529c22a8cd1b1b0075338d864e3890942df6b007d2c3e3a8ef1ce7490c6bbec5372adfcbf8704a1ffc9a69db8d9cdc54762f019036e450e457325eef74b794f3f16ff327d68079a5b9de49163d7323937374f8a785a8f9afe84d6a71b336e4de00f239ee3af1d7604a3985e610e1603bd0e1a4998e19fa0c8920ffd8d61b0a87eeee50ac7c03ff7c4708a34f3bc92fd0103758c954ee34032cee2c78ad8cdc79a35dbc810196b7bf6833e1c45c83b09c0d1b78bc6f8753e10770e7045b08d50b4aa16a75b27a096d5ec1331f1fd0a44e95a8737c20240c90307b5497d3470393c2a00da0649e86d13e820591296c644fc1eef9e7c6ca4967c5e19df3153cd7fbd598c271e11c10397349ddc8cc8452ec"; assert!(!vdf_verify(&input_bytes, &other_output_bytes, &other_proof_bytes, 100), 2); assert!(vdf_verify(&input_bytes, &other_output_bytes, &other_proof_bytes, 101), 3); diff --git a/crates/sui-framework/packages/sui-framework/tests/test_scenario_tests.move b/crates/sui-framework/packages/sui-framework/tests/test_scenario_tests.move index f69ad844b8892..a6980110303eb 100644 --- a/crates/sui-framework/packages/sui-framework/tests/test_scenario_tests.move +++ b/crates/sui-framework/packages/sui-framework/tests/test_scenario_tests.move @@ -5,9 +5,6 @@ module sui::test_scenario_tests { use sui::test_scenario; - const EIdBytesMismatch: u64 = 0; - const EValueMismatch: u64 = 1; - public struct Object has key, store { id: UID, value: u64, @@ -196,8 +193,8 @@ module sui::test_scenario_tests { assert!(scenario.has_most_recent_for_sender()); let received_obj = scenario.take_from_sender(); let Object { id: received_id, value } = received_obj; - assert!(received_id.to_inner() == id_bytes, EIdBytesMismatch); - assert!(value == 100, EValueMismatch); + assert!(received_id.to_inner() == id_bytes); + assert!(value == 100); received_id.delete(); }; // check that the object is no longer accessible after deletion @@ -228,7 +225,7 @@ module sui::test_scenario_tests { }; scenario.next_tx(sender); let ids = scenario.ids_for_sender(); - assert!(ids == vector[id1, id2, id3], EValueMismatch); + assert!(ids == vector[id1, id2, id3]); scenario.end(); } @@ -255,9 +252,9 @@ module sui::test_scenario_tests { let obj1 = scenario.take_from_sender_by_id(id1); let obj3 = scenario.take_from_sender_by_id(id3); let obj2 = scenario.take_from_sender_by_id(id2); - assert!(obj1.value == 10, EValueMismatch); - assert!(obj2.value == 20, EValueMismatch); - assert!(obj3.value == 30, EValueMismatch); + assert!(obj1.value == 10); + assert!(obj2.value == 20); + assert!(obj3.value == 30); scenario.return_to_sender(obj1); scenario.return_to_sender(obj2); scenario.return_to_sender(obj3); @@ -303,9 +300,9 @@ module sui::test_scenario_tests { let obj1 = scenario.take_shared_by_id(id1); let obj3 = scenario.take_shared_by_id(id3); let obj2 = scenario.take_shared_by_id(id2); - assert!(obj1.value == 10, EValueMismatch); - assert!(obj2.value == 20, EValueMismatch); - assert!(obj3.value == 30, EValueMismatch); + assert!(obj1.value == 10); + assert!(obj2.value == 20); + assert!(obj3.value == 30); test_scenario::return_shared(obj1); test_scenario::return_shared(obj2); test_scenario::return_shared(obj3); @@ -326,7 +323,7 @@ module sui::test_scenario_tests { { assert!(test_scenario::has_most_recent_shared()); let obj1 = scenario.take_shared(); - assert!(obj1.value == 10, EValueMismatch); + assert!(obj1.value == 10); test_scenario::return_shared(obj1); }; scenario.end(); @@ -345,7 
+342,7 @@ module sui::test_scenario_tests { { assert!(test_scenario::has_most_recent_shared()); let obj1 = scenario.take_shared(); - assert!(obj1.value == 10, EValueMismatch); + assert!(obj1.value == 10); let Object { id, value: _ } = obj1; id.delete(); }; @@ -375,9 +372,9 @@ module sui::test_scenario_tests { let obj1 = scenario.take_immutable_by_id(id1); let obj3 = scenario.take_immutable_by_id(id3); let obj2 = scenario.take_immutable_by_id(id2); - assert!(obj1.value == 10, EValueMismatch); - assert!(obj2.value == 20, EValueMismatch); - assert!(obj3.value == 30, EValueMismatch); + assert!(obj1.value == 10); + assert!(obj2.value == 20); + assert!(obj3.value == 30); test_scenario::return_immutable(obj1); test_scenario::return_immutable(obj2); test_scenario::return_immutable(obj3); @@ -398,7 +395,7 @@ module sui::test_scenario_tests { { assert!(test_scenario::has_most_recent_immutable()); let obj1 = scenario.take_immutable(); - assert!(obj1.value == 10, EValueMismatch); + assert!(obj1.value == 10); test_scenario::return_immutable(obj1); }; scenario.end(); @@ -433,9 +430,9 @@ module sui::test_scenario_tests { let t3 = test_scenario::receiving_ticket_by_id(id3); let obj2 = transfer::receive(&mut parent.id, t2); let obj3 = transfer::receive(&mut parent.id, t3); - assert!(parent.value == 10, EValueMismatch); - assert!(obj2.value == 20, EValueMismatch); - assert!(obj3.value == 30, EValueMismatch); + assert!(parent.value == 10); + assert!(obj2.value == 20); + assert!(obj3.value == 30); scenario.return_to_sender(parent); transfer::public_transfer(obj2, id1_addr); transfer::public_transfer(obj3, id1_addr) @@ -464,8 +461,86 @@ module sui::test_scenario_tests { let mut parent = scenario.take_from_sender_by_id(id1); let t2 = test_scenario::most_recent_receiving_ticket(&id1); let obj2 = transfer::receive(&mut parent.id, t2); - assert!(parent.value == 10, EValueMismatch); - assert!(obj2.value == 20, EValueMismatch); + assert!(parent.value == 10); + assert!(obj2.value == 20); + scenario.return_to_sender(parent); + transfer::public_transfer(obj2, id1_addr); + }; + scenario.end(); + } + + // Make sure that we properly handle the case where we receive an object + // and don't need to deallocate the receiving ticket and underlying object + // at the end of the transaction.
+ #[test] + fun test_receive_object_multiple_in_row() { + let sender = @0x0; + let mut scenario = test_scenario::begin(sender); + let uid1 = scenario.new_object(); + let uid2 = scenario.new_object(); + let id1 = uid1.uid_to_inner(); + let id1_addr = uid1.to_address(); + { + let obj1 = Object { id: uid1, value: 10 }; + let obj2 = Object { id: uid2, value: 20 }; + transfer::public_transfer(obj1, sender); + transfer::public_transfer(obj2, id1_addr); + }; + scenario.next_tx(sender); + { + let mut parent: Object = scenario.take_from_sender_by_id(id1); + let t2: transfer::Receiving = test_scenario::most_recent_receiving_ticket(&id1); + let obj2 = transfer::receive(&mut parent.id, t2); + assert!(parent.value == 10); + assert!(obj2.value == 20); + scenario.return_to_sender(parent); + transfer::public_transfer(obj2, id1_addr); + }; + scenario.next_tx(sender); + { + let mut parent: Object = scenario.take_from_sender_by_id(id1); + let t2: transfer::Receiving = test_scenario::most_recent_receiving_ticket(&id1); + let obj2 = transfer::receive(&mut parent.id, t2); + assert!(parent.value == 10); + assert!(obj2.value == 20); + scenario.return_to_sender(parent); + transfer::public_transfer(obj2, id1_addr); + }; + scenario.end(); + } + + // Make sure that we properly handle the case where we don't receive an + // object after allocating a ticket, and then receiving it in the next + // transaction. + #[test] + fun test_no_receive_object_then_use_next_tx() { + let sender = @0x0; + let mut scenario = test_scenario::begin(sender); + let uid1 = scenario.new_object(); + let uid2 = scenario.new_object(); + let id1 = uid1.uid_to_inner(); + let id1_addr = uid1.to_address(); + { + let obj1 = Object { id: uid1, value: 10 }; + let obj2 = Object { id: uid2, value: 20 }; + transfer::public_transfer(obj1, sender); + transfer::public_transfer(obj2, id1_addr); + }; + scenario.next_tx(sender); + { + // allocate a receiving ticket in this transaction, but don't use it or return it. + test_scenario::most_recent_receiving_ticket(&id1); + }; + scenario.next_tx(sender); + { + let mut parent: Object = scenario.take_from_sender_by_id(id1); + // Get the receiving ticket that was allocated in the previous + // transaction, again. If we failed to return unused receiving + // tickets at the end of the transaction above this will fail. 
+ let t2: transfer::Receiving = test_scenario::most_recent_receiving_ticket(&id1); + let obj2 = transfer::receive(&mut parent.id, t2); + assert!(parent.value == 10); + assert!(obj2.value == 20); scenario.return_to_sender(parent); transfer::public_transfer(obj2, id1_addr); }; @@ -566,9 +641,9 @@ module sui::test_scenario_tests { let t3 = test_scenario::receiving_ticket_by_id(id3); let obj2 = transfer::receive(&mut parent.id, t2); let obj3 = transfer::receive(&mut parent.id, t3); - assert!(parent.value == 10, EValueMismatch); - assert!(obj2.value == 20, EValueMismatch); - assert!(obj3.value == 30, EValueMismatch); + assert!(parent.value == 10); + assert!(obj2.value == 20); + assert!(obj3.value == 30); scenario.return_to_sender(parent); transfer::public_transfer(obj2, id1_addr); transfer::public_transfer(obj3, id1_addr) @@ -609,9 +684,9 @@ module sui::test_scenario_tests { let t3 = test_scenario::receiving_ticket_by_id(id3); let mut obj2 = transfer::receive(&mut parent.id, t2); let obj3 = transfer::receive(&mut parent.id, t3); - assert!(parent.value == 10, EValueMismatch); - assert!(obj2.value == 20, EValueMismatch); - assert!(obj3.value == 30, EValueMismatch); + assert!(parent.value == 10); + assert!(obj2.value == 20); + assert!(obj3.value == 30); obj2.value = 42; scenario.return_to_sender(parent); transfer::public_transfer(obj2, sender); @@ -620,7 +695,7 @@ module sui::test_scenario_tests { scenario.next_tx(sender); { let obj = scenario.take_from_sender_by_id(id2); - assert!(obj.value == 42, EValueMismatch); + assert!(obj.value == 42); scenario.return_to_sender(obj); }; scenario.end(); diff --git a/crates/sui-framework/packages/sui-framework/tests/token/token_test_utils.move b/crates/sui-framework/packages/sui-framework/tests/token/token_test_utils.move index 014fc677f990b..5f5b3888326c6 100644 --- a/crates/sui-framework/packages/sui-framework/tests/token/token_test_utils.move +++ b/crates/sui-framework/packages/sui-framework/tests/token/token_test_utils.move @@ -21,7 +21,6 @@ module sui::token_test_utils { coin::create_treasury_cap_for_testing(ctx) } - #[allow(lint(share_owned))] /// Return `TreasuryCap` (shares it for now). 
public fun return_treasury_cap(treasury_cap: TreasuryCap) { sui::transfer::public_share_object(treasury_cap) diff --git a/crates/sui-framework/packages_compiled/move-stdlib b/crates/sui-framework/packages_compiled/move-stdlib index e36721142a616..8526133aab6a4 100644 Binary files a/crates/sui-framework/packages_compiled/move-stdlib and b/crates/sui-framework/packages_compiled/move-stdlib differ diff --git a/crates/sui-framework/packages_compiled/sui-framework b/crates/sui-framework/packages_compiled/sui-framework index 68b97d274181b..343cb0eaea7ce 100644 Binary files a/crates/sui-framework/packages_compiled/sui-framework and b/crates/sui-framework/packages_compiled/sui-framework differ diff --git a/crates/sui-framework/published_api.txt b/crates/sui-framework/published_api.txt index 141916fdb4a24..7075e0355ff26 100644 --- a/crates/sui-framework/published_api.txt +++ b/crates/sui-framework/published_api.txt @@ -985,48 +985,6 @@ ecvrf_verify ed25519_verify public fun 0x2::ed25519 -Curve - public struct - 0x2::groth16 -PreparedVerifyingKey - public struct - 0x2::groth16 -PublicProofInputs - public struct - 0x2::groth16 -ProofPoints - public struct - 0x2::groth16 -bls12381 - public fun - 0x2::groth16 -bn254 - public fun - 0x2::groth16 -pvk_from_bytes - public fun - 0x2::groth16 -pvk_to_bytes - public fun - 0x2::groth16 -public_proof_inputs_from_bytes - public fun - 0x2::groth16 -proof_points_from_bytes - public fun - 0x2::groth16 -prepare_verifying_key - public fun - 0x2::groth16 -prepare_verifying_key_internal - fun - 0x2::groth16 -verify_groth16_proof - public fun - 0x2::groth16 -verify_groth16_proof_internal - fun - 0x2::groth16 blake2b256 public fun 0x2::hash @@ -2332,6 +2290,48 @@ create_internal add_internal fun 0x2::display +Curve + public struct + 0x2::groth16 +PreparedVerifyingKey + public struct + 0x2::groth16 +PublicProofInputs + public struct + 0x2::groth16 +ProofPoints + public struct + 0x2::groth16 +bls12381 + public fun + 0x2::groth16 +bn254 + public fun + 0x2::groth16 +pvk_from_bytes + public fun + 0x2::groth16 +pvk_to_bytes + public fun + 0x2::groth16 +public_proof_inputs_from_bytes + public fun + 0x2::groth16 +proof_points_from_bytes + public fun + 0x2::groth16 +prepare_verifying_key + public fun + 0x2::groth16 +prepare_verifying_key_internal + fun + 0x2::groth16 +verify_groth16_proof + public fun + 0x2::groth16 +verify_groth16_proof_internal + fun + 0x2::groth16 SUI public struct 0x2::sui @@ -4264,6 +4264,9 @@ insert swap_remove public fun 0x1::vector +flatten + public fun + 0x1::vector Option public struct 0x1::option diff --git a/crates/sui-graphql-e2e-tests/Cargo.toml b/crates/sui-graphql-e2e-tests/Cargo.toml index 554f81d570e21..b4cee0ecebe8e 100644 --- a/crates/sui-graphql-e2e-tests/Cargo.toml +++ b/crates/sui-graphql-e2e-tests/Cargo.toml @@ -23,10 +23,5 @@ harness = false [dependencies] -[features] -default = ["pg_backend"] -pg_integration = [] -pg_backend = [] - [target.'cfg(msim)'.dependencies] msim.workspace = true diff --git a/crates/sui-graphql-e2e-tests/README.md b/crates/sui-graphql-e2e-tests/README.md index 5da2d1cb98496..53b1d69885286 100644 --- a/crates/sui-graphql-e2e-tests/README.md +++ b/crates/sui-graphql-e2e-tests/README.md @@ -44,11 +44,8 @@ Linux # Running Locally -When running the tests locally, they must be run with the `pg_integration` -feature enabled: - ```sh -$ cargo nextest run --features pg_integration +$ cargo nextest run ``` # Snapshot Stability diff --git a/crates/sui-graphql-e2e-tests/tests/tests.rs 
b/crates/sui-graphql-e2e-tests/tests/tests.rs index 88a8ab65e12b2..fa7e0af2d67f6 100644 --- a/crates/sui-graphql-e2e-tests/tests/tests.rs +++ b/crates/sui-graphql-e2e-tests/tests/tests.rs @@ -16,7 +16,7 @@ datatest_stable::harness!(run_test, TEST_DIR, r".*\.(mvir|move)$"); #[cfg_attr(msim, msim::main)] async fn run_test(path: &Path) -> Result<(), Box<dyn std::error::Error>> { telemetry_subscribers::init_for_testing(); - if cfg!(feature = "pg_integration") { + if !cfg!(msim) { run_test_impl::<SuiTestAdapter>(path, Some(Arc::new(PRE_COMPILED.clone()))).await?; } Ok(()) diff --git a/crates/sui-graphql-rpc/Cargo.toml b/crates/sui-graphql-rpc/Cargo.toml index 35d832c36da77..83af3bcfc2c8f 100644 --- a/crates/sui-graphql-rpc/Cargo.toml +++ b/crates/sui-graphql-rpc/Cargo.toml @@ -91,7 +91,3 @@ sui-framework.workspace = true tower.workspace = true sui-test-transaction-builder.workspace = true sui-move-build.workspace = true - -[features] -default = [] -pg_integration = [] diff --git a/crates/sui-graphql-rpc/schema.graphql b/crates/sui-graphql-rpc/schema.graphql index b848cd8529973..f6d8946266fdc 100644 --- a/crates/sui-graphql-rpc/schema.graphql +++ b/crates/sui-graphql-rpc/schema.graphql @@ -1264,7 +1264,13 @@ type EventEdge { } input EventFilter { + """ + Filter down to events from transactions sent by this address. + """ sender: SuiAddress + """ + Filter down to the events from this transaction (given by its transaction digest). + """ transactionDigest: String """ Events emitted by a particular module. An event is emitted by a @@ -2836,11 +2842,8 @@ objects are ones whose """ input ObjectFilter { """ - This field is used to specify the type of objects that should be included in the query - results. - - Objects can be filtered by their type's package, package::module, or their fully qualified - type name. + Filter objects by their type's `package`, `package::module`, or their fully qualified type + name. Generic types can be queried by either the generic type name, e.g. `0x2::coin::Coin`, or by the full type name, such as `0x2::coin::Coin<0x2::sui::SUI>`. @@ -4288,18 +4291,46 @@ type TransactionBlockEffects { } input TransactionBlockFilter { + """ + Filter transactions by the Move function called. Calls can be filtered by the `package`, + `package::module`, or the `package::module::name` of their function. + """ function: String """ An input filter selecting for either system or programmable transactions. """ kind: TransactionBlockKindInput + """ + Limit to transactions that occurred strictly after the given checkpoint. + """ afterCheckpoint: UInt53 + """ + Limit to transactions in the given checkpoint. + """ atCheckpoint: UInt53 + """ + Limit to transactions that occurred strictly before the given checkpoint. + """ beforeCheckpoint: UInt53 + """ + Limit to transactions that were signed by the given address. + """ signAddress: SuiAddress + """ + Limit to transactions that sent an object to the given address. + """ recvAddress: SuiAddress + """ + Limit to transactions that accepted the given object as an input. + """ inputObject: SuiAddress + """ + Limit to transactions that output a version of this object. + """ changedObject: SuiAddress + """ + Select transactions by their digest. + """ transactionIds: [String!]
} diff --git a/crates/sui-graphql-rpc/src/data/pg.rs b/crates/sui-graphql-rpc/src/data/pg.rs index a37d62f9cdbdd..f27462ec279fe 100644 --- a/crates/sui-graphql-rpc/src/data/pg.rs +++ b/crates/sui-graphql-rpc/src/data/pg.rs @@ -240,7 +240,7 @@ mod query_cost { } } -#[cfg(all(test, feature = "pg_integration"))] +#[cfg(test)] mod tests { use super::*; use diesel::QueryDsl; diff --git a/crates/sui-graphql-rpc/src/server/builder.rs b/crates/sui-graphql-rpc/src/server/builder.rs index c7533ec936cc0..5123125763873 100644 --- a/crates/sui-graphql-rpc/src/server/builder.rs +++ b/crates/sui-graphql-rpc/src/server/builder.rs @@ -228,7 +228,7 @@ impl ServerBuilder { self } - #[cfg(all(test, feature = "pg_integration"))] + #[cfg(test)] fn build_schema(self) -> Schema { self.schema.finish() } @@ -678,7 +678,7 @@ async fn get_or_init_server_start_time() -> &'static Instant { ONCE.get_or_init(|| async move { Instant::now() }).await } -#[cfg(all(test, feature = "pg_integration"))] +#[cfg(test)] pub mod tests { use super::*; use crate::test_infra::cluster::{prep_executor_cluster, start_cluster}; diff --git a/crates/sui-graphql-rpc/src/test_infra/cluster.rs b/crates/sui-graphql-rpc/src/test_infra/cluster.rs index 4f514ff85a8be..554981f19773d 100644 --- a/crates/sui-graphql-rpc/src/test_infra/cluster.rs +++ b/crates/sui-graphql-rpc/src/test_infra/cluster.rs @@ -33,8 +33,8 @@ use tokio::task::JoinHandle; use tokio_util::sync::CancellationToken; use tracing::info; -const VALIDATOR_COUNT: usize = 7; -const EPOCH_DURATION_MS: u64 = 15000; +const VALIDATOR_COUNT: usize = 4; +const EPOCH_DURATION_MS: u64 = 10000; const ACCOUNT_NUM: usize = 20; const GAS_OBJECT_COUNT: usize = 3; diff --git a/crates/sui-graphql-rpc/src/types/dynamic_field.rs b/crates/sui-graphql-rpc/src/types/dynamic_field.rs index 71999cb710693..ce8aa8cdef536 100644 --- a/crates/sui-graphql-rpc/src/types/dynamic_field.rs +++ b/crates/sui-graphql-rpc/src/types/dynamic_field.rs @@ -5,9 +5,12 @@ use async_graphql::connection::{Connection, CursorType, Edge}; use async_graphql::*; use diesel_async::scoped_futures::ScopedFutureExt; use move_core_types::annotated_value::{self as A, MoveStruct}; +use move_core_types::language_storage::TypeTag; use sui_indexer::models::objects::StoredHistoryObject; use sui_indexer::types::OwnerType; -use sui_types::dynamic_field::{derive_dynamic_field_id, DynamicFieldInfo, DynamicFieldType}; +use sui_types::dynamic_field::{ + derive_dynamic_field_id, extract_id_value, DynamicFieldInfo, DynamicFieldType, +}; use super::available_range::AvailableRange; use super::cursor::{Page, Target}; @@ -25,7 +28,6 @@ use crate::raw_query::RawQuery; pub(crate) struct DynamicField { pub super_: MoveObject, - pub df_object_id: SuiAddress, pub df_kind: DynamicFieldType, } @@ -100,37 +102,44 @@ impl DynamicField { /// will be from the latest version that is at most equal to its parent object's /// version async fn value(&self, ctx: &Context<'_>) -> Result<Option<DynamicFieldValue>> { + let resolver: &PackageResolver = ctx.data_unchecked(); + + // TODO(annotated-visitor): Use custom visitors to extract just the value. + let (struct_tag, move_struct) = deserialize_move_struct(&self.super_.native, resolver) + .await + .extend()?; + + let value_move_value = extract_field_from_move_struct(move_struct, "value").extend()?; + if self.df_kind == DynamicFieldType::DynamicObject { // If `df_kind` is a DynamicObject, the object we are currently on is the field object, - // and we must resolve one more level down to the value object.
Becuase we only have - // checkpoint-level granularity, we may end up reading a later version of the value - // object. Thus, we use the version of the field object to bound the value object at the - // correct version. + // and we must resolve one more level down to the value object. + let df_object_id = extract_id_value(&value_move_value) + .ok_or_else(|| { + Error::Internal(format!( + "Couldn't find ID of dynamic object field value: {value_move_value:#?}" + )) + }) + .extend()?; + + // Because we only have checkpoint-level granularity, we may end up reading a later + // version of the value object. Thus, we use the version of the field object to bound + // the value object at the + // correct version. let obj = MoveObject::query( ctx, - self.df_object_id, + df_object_id.into(), Object::under_parent(self.root_version(), self.super_.super_.checkpoint_viewed_at), ) .await .extend()?; + Ok(obj.map(DynamicFieldValue::MoveObject)) } else { - let resolver: &PackageResolver = ctx - .data() - .map_err(|_| Error::Internal("Unable to fetch Package Cache.".to_string())) - .extend()?; - - let (struct_tag, move_struct) = deserialize_move_struct(&self.super_.native, resolver) - .await - .extend()?; - // Get TypeTag of the DynamicField value from StructTag of the MoveStruct let type_tag = DynamicFieldInfo::try_extract_field_value(&struct_tag) .map_err(|e| Error::Internal(e.to_string())) .extend()?; - let value_move_value = extract_field_from_move_struct(move_struct, "value").extend()?; - let undecorated = value_move_value.undecorate(); let bcs = bcs::to_bytes(&undecorated) .map_err(|e| Error::Internal(format!("Failed to serialize object: {e}"))) @@ -264,38 +273,43 @@ impl TryFrom<MoveObject> for DynamicField { fn try_from(stored: MoveObject) -> Result<Self, Error> { let super_ = &stored.super_; - let (df_object_id, df_kind) = match &super_.kind { - ObjectKind::Indexed(_, stored) => stored - .df_object_id - .as_ref() - .map(|id| (id, stored.df_kind)) - .ok_or_else(|| Error::Internal("Object is not a dynamic field.".to_string()))?, - _ => { + let native = match &super_.kind { + ObjectKind::NotIndexed(native) | ObjectKind::Indexed(native, _) => native, + + ObjectKind::WrappedOrDeleted(_) => { return Err(Error::Internal( - "A WrappedOrDeleted object cannot be converted into a DynamicField." - .to_string(), - )) + "DynamicField is wrapped or deleted.".to_string(), + )); } }; - let df_object_id = SuiAddress::from_bytes(df_object_id).map_err(|e| { - Error::Internal(format!("Failed to deserialize dynamic field ID: {e}.")) - })?; - - let df_kind = match df_kind { - Some(0) => DynamicFieldType::DynamicField, - Some(1) => DynamicFieldType::DynamicObject, - Some(k) => { - return Err(Error::Internal(format!( - "Unrecognized dynamic field kind: {k}."
- ))) - } - None => return Err(Error::Internal("No dynamic field kind.".to_string())), + let Some(object) = native.data.try_as_move() else { + return Err(Error::Internal("DynamicField is not an object".to_string())); + }; + + let Some(tag) = object.type_().other() else { + return Err(Error::Internal("DynamicField is not a struct".to_string())); + }; + + if !DynamicFieldInfo::is_dynamic_field(tag) { + return Err(Error::Internal("Wrong type for DynamicField".to_string())); + } + + let Some(name) = tag.type_params.first() else { + return Err(Error::Internal("No type for DynamicField name".to_string())); + }; + + let df_kind = if matches!( + name, + TypeTag::Struct(s) if DynamicFieldInfo::is_dynamic_object_field_wrapper(s) + ) { + DynamicFieldType::DynamicObject + } else { + DynamicFieldType::DynamicField }; Ok(DynamicField { super_: stored, - df_object_id, df_kind, }) } diff --git a/crates/sui-graphql-rpc/src/types/event/filter.rs b/crates/sui-graphql-rpc/src/types/event/filter.rs index 0a02a31be8969..10144d5fa8a2f 100644 --- a/crates/sui-graphql-rpc/src/types/event/filter.rs +++ b/crates/sui-graphql-rpc/src/types/event/filter.rs @@ -10,10 +10,15 @@ use async_graphql::*; #[derive(InputObject, Clone, Default)] pub(crate) struct EventFilter { + /// Filter down to events from transactions sent by this address. pub sender: Option<SuiAddress>, + + /// Filter down to the events from this transaction (given by its transaction digest). pub transaction_digest: Option<Digest>, + // Enhancement (post-MVP) // after_checkpoint + // at_checkpoint // before_checkpoint /// Events emitted by a particular module. An event is emitted by a /// particular module if some function in the module is called by a @@ -36,9 +41,4 @@ pub(crate) struct EventFilter { // Enhancement (post-MVP) // pub start_time // pub end_time - - // Enhancement (post-MVP) - // pub any - // pub all - // pub not } diff --git a/crates/sui-graphql-rpc/src/types/move_type.rs b/crates/sui-graphql-rpc/src/types/move_type.rs index 67a7ee6a73c41..4697739ce9fd6 100644 --- a/crates/sui-graphql-rpc/src/types/move_type.rs +++ b/crates/sui-graphql-rpc/src/types/move_type.rs @@ -259,8 +259,8 @@ impl TryFrom for MoveTypeLayout { TL::Address => Self::Address, TL::Vector(v) => Self::Vector(Box::new(Self::try_from(*v)?)), - TL::Struct(s) => Self::Struct(s.try_into()?), - TL::Enum(e) => Self::Enum(e.try_into()?), + TL::Struct(s) => Self::Struct((*s).try_into()?), + TL::Enum(e) => Self::Enum((*e).try_into()?), }) } } diff --git a/crates/sui-graphql-rpc/src/types/move_value.rs b/crates/sui-graphql-rpc/src/types/move_value.rs index cc498144ebbe1..f786c74f59c57 100644 --- a/crates/sui-graphql-rpc/src/types/move_value.rs +++ b/crates/sui-graphql-rpc/src/types/move_value.rs @@ -474,13 +474,13 @@ mod tests { macro_rules!
struct_layout { ($type:literal { $($name:literal : $layout:expr),* $(,)?}) => { - A::MoveTypeLayout::Struct(S { + A::MoveTypeLayout::Struct(Box::new(S { type_: StructTag::from_str($type).expect("Failed to parse struct"), - fields: vec![$(MoveFieldLayout { + fields: Box::new(vec![$(MoveFieldLayout { name: ident_str!($name).to_owned(), layout: $layout, - }),*] - }) + }),*]) + })) } } diff --git a/crates/sui-graphql-rpc/src/types/object.rs b/crates/sui-graphql-rpc/src/types/object.rs index 96c7c298b7530..2991bc9e83e34 100644 --- a/crates/sui-graphql-rpc/src/types/object.rs +++ b/crates/sui-graphql-rpc/src/types/object.rs @@ -41,7 +41,7 @@ use diesel_async::scoped_futures::ScopedFutureExt; use move_core_types::annotated_value::{MoveStruct, MoveTypeLayout}; use move_core_types::language_storage::StructTag; use serde::{Deserialize, Serialize}; -use sui_indexer::models::objects::{StoredDeletedHistoryObject, StoredHistoryObject}; +use sui_indexer::models::objects::StoredHistoryObject; use sui_indexer::schema::{objects_history, objects_version}; use sui_indexer::types::ObjectStatus as NativeObjectStatus; use sui_indexer::types::OwnerType; @@ -83,8 +83,8 @@ pub(crate) enum ObjectKind { /// An object fetched from the index. Indexed(NativeObject, StoredHistoryObject), /// The object is wrapped or deleted and only partial information can be loaded from the - /// indexer. - WrappedOrDeleted(StoredDeletedHistoryObject), + /// indexer. The `u64` is the version of the object. + WrappedOrDeleted(u64), } #[derive(Enum, Copy, Clone, Eq, PartialEq, Debug)] @@ -118,11 +118,8 @@ pub(crate) struct ObjectRef { /// - AND, whose ID is in `objectIds` OR whose ID and version is in `objectKeys`. #[derive(InputObject, Default, Debug, Clone, Eq, PartialEq)] pub(crate) struct ObjectFilter { - /// This field is used to specify the type of objects that should be included in the query - /// results. - /// - /// Objects can be filtered by their type's package, package::module, or their fully qualified - /// type name. + /// Filter objects by their type's `package`, `package::module`, or their fully qualified type + /// name. /// /// Generic types can be queried by either the generic type name, e.g. `0x2::coin::Coin`, or by /// the full type name, such as `0x2::coin::Coin<0x2::sui::SUI>`. 
@@ -754,7 +751,7 @@ impl Object {
match &self.kind {
K::NotIndexed(native) | K::Indexed(native, _) => native.version().value(),

- K::WrappedOrDeleted(stored) => stored.object_version as u64,
+ K::WrappedOrDeleted(object_version) => *object_version,
}
}

@@ -981,12 +978,7 @@ impl Object {
}
NativeObjectStatus::WrappedOrDeleted => Ok(Self {
address,
- kind: ObjectKind::WrappedOrDeleted(StoredDeletedHistoryObject {
- object_id: history_object.object_id,
- object_version: history_object.object_version,
- object_status: history_object.object_status,
- checkpoint_sequence_number: history_object.checkpoint_sequence_number,
- }),
+ kind: ObjectKind::WrappedOrDeleted(history_object.object_version as u64),
checkpoint_viewed_at,
root_version: history_object.object_version as u64,
}),
diff --git a/crates/sui-graphql-rpc/src/types/transaction_block/filter.rs b/crates/sui-graphql-rpc/src/types/transaction_block/filter.rs
index 29c104ac9484c..2e40d2e63ae48 100644
--- a/crates/sui-graphql-rpc/src/types/transaction_block/filter.rs
+++ b/crates/sui-graphql-rpc/src/types/transaction_block/filter.rs
@@ -10,20 +10,35 @@ use sui_types::base_types::SuiAddress as NativeSuiAddress;

#[derive(InputObject, Debug, Default, Clone)]
pub(crate) struct TransactionBlockFilter {
+ /// Filter transactions by the Move function they called. Calls can be filtered by the
+ /// `package`, `package::module`, or the `package::module::name` of their function.
pub function: Option<FqNameFilter>,

/// An input filter selecting for either system or programmable transactions.
pub kind: Option<TransactionBlockKindInput>,
+
+ /// Limit to transactions that occurred strictly after the given checkpoint.
pub after_checkpoint: Option<UInt53>,
+
+ /// Limit to transactions in the given checkpoint.
pub at_checkpoint: Option<UInt53>,
+
+ /// Limit to transactions that occurred strictly before the given checkpoint.
pub before_checkpoint: Option<UInt53>,

+ /// Limit to transactions that were signed by the given address.
pub sign_address: Option<SuiAddress>,
+
+ /// Limit to transactions that sent an object to the given address.
pub recv_address: Option<SuiAddress>,

+ /// Limit to transactions that accepted the given object as an input.
pub input_object: Option<SuiAddress>,
+
+ /// Limit to transactions that output a version of this object.
pub changed_object: Option<SuiAddress>,

+ /// Select transactions by their digest.
pub transaction_ids: Option<Vec<Digest>>,
}
diff --git a/crates/sui-graphql-rpc/src/types/transaction_block/tx_lookups.rs b/crates/sui-graphql-rpc/src/types/transaction_block/tx_lookups.rs
index 940df4309a2ec..327c23c7407f9 100644
--- a/crates/sui-graphql-rpc/src/types/transaction_block/tx_lookups.rs
+++ b/crates/sui-graphql-rpc/src/types/transaction_block/tx_lookups.rs
@@ -1,6 +1,55 @@
// Copyright (c) Mysten Labs, Inc.
// SPDX-License-Identifier: Apache-2.0

+//! # Transaction Filter Lookup Tables
+//!
+//! ## Schemas
+//!
+//! Tables backing Transaction filters in GraphQL all follow the same rough shape:
+//!
+//! 1. They each get their own table, mapping the filter value to the transaction sequence number.
+//!
+//! 2. They also include a `sender` column, and a secondary index over the sender, filter values
+//! and the transaction sequence number.
+//!
+//! 3. They also include a secondary index over the transaction sequence number.
+//!
+//! This pattern allows us to offer a simple rule for users: If you are filtering on a single
+//! value, you can do so without worrying. If you want to additionally filter by the sender, that
+//! is also possible, but if you want to combine any other set of filters, you need to use a "scan
+//! limit".
+//!
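For concreteness, here is a sketch of the table shape this section describes, in Diesel's `table!` DSL. The table and column names are hypothetical stand-ins, not the actual sui-indexer schema, and the secondary indexes in (2) and (3) are described in comments because `table!` does not declare indexes:

```rust
// Hypothetical sketch (not the actual sui-indexer schema): the rough shape of
// a lookup table backing the `function` filter.
diesel::table! {
    // (1) Maps a filter value (a Move function) to transaction sequence
    // numbers. The primary key leads with the filter value and ends with
    // `tx_sequence_number`, so an index scan yields an already-sorted run.
    tx_calls (package, module, func, tx_sequence_number) {
        package -> Bytea,
        module -> Text,
        func -> Text,
        // (2) Sender column; assume a secondary index over
        // (sender, package, module, func, tx_sequence_number).
        sender -> Bytea,
        // (3) Assume a secondary index over this column alone.
        tx_sequence_number -> BigInt,
    }
}
```

The first phase of query construction, described in the next section, can likewise be pictured with a small, purely illustrative sketch: each constituent query returns a sorted run of transaction sequence numbers, the runs are intersected (in the database this is the inner join), and a scan limit bounds how much of the candidate set is examined:

```rust
// Illustrative only: merge-intersect two sorted runs of transaction sequence
// numbers, bounding the work with a scan limit. The real work happens in SQL;
// this just shows why sorted index output makes the merge cheap.
fn intersect_with_scan_limit(a: &[u64], b: &[u64], scan_limit: usize) -> Vec<u64> {
    let (mut i, mut j, mut scanned) = (0, 0, 0);
    let mut out = Vec::new();
    while i < a.len() && j < b.len() && scanned < scan_limit {
        match a[i].cmp(&b[j]) {
            std::cmp::Ordering::Less => i += 1,
            std::cmp::Ordering::Greater => j += 1,
            std::cmp::Ordering::Equal => {
                out.push(a[i]);
                i += 1;
                j += 1;
            }
        }
        scanned += 1;
    }
    out
}

fn main() {
    // Transactions calling some function, and transactions touching some object.
    let calls = [3, 5, 8, 13, 21];
    let touches = [2, 3, 5, 7, 13];
    assert_eq!(intersect_with_scan_limit(&calls, &touches, 100), vec![3, 5, 13]);
    // A tight scan limit bounds the work, and may return fewer results.
    assert_eq!(intersect_with_scan_limit(&calls, &touches, 3), vec![3, 5]);
}
```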
+//! ## Query construction
+//!
+//! Queries that filter transactions work in two phases: Identify the transaction sequence numbers
+//! to fetch, and then fetch their contents. Filtering happens entirely in the first phase:
+//!
+//! - First, filters are broken down into individual queries targeting the appropriate lookup
+//! table. Each constituent query is expected to return a sorted run of transaction sequence
+//! numbers.
+//!
+//! - If a `sender` filter is included, then it is incorporated into each constituent query,
+//! leveraging their secondary indices (2), otherwise each constituent query filters only based on
+//! its filter value using the primary index (1).
+//!
+//! - The fact that both the primary and secondary indices contain the transaction sequence number
+//! helps to ensure that the output from an index scan is already sorted, which avoids a
+//! potentially expensive materialize and sort operation.
+//!
+//! - If there are multiple constituent queries, they are intersected using inner joins. Postgres
+//! can occasionally pick a poor query plan for this merge, so we require that filters resulting in
+//! such merges also use a "scan limit" (see below).
+//!
+//! ## Scan limits
+//!
+//! The scan limit restricts the number of transactions considered as candidates for the results.
+//! It is analogous to the page size limit, which restricts the number of results returned to the
+//! user, but it operates at the top of the funnel rather than the bottom.
+//!
+//! When postgres picks a poor query plan, it can end up performing a sequential scan over all
+//! candidate transactions. By limiting the size of the candidate set, we bound the work done in
+//! the worst case (whereas otherwise, the worst case would grow with the history of the chain).
+
use super::{Cursor, TransactionBlockFilter};
use crate::{
data::{pg::bytea_literal, Conn, DbConnection},
diff --git a/crates/sui-graphql-rpc/tests/dot_move_e2e.rs b/crates/sui-graphql-rpc/tests/dot_move_e2e.rs
index 514223ea2abbc..d61f2bb981c78 100644
--- a/crates/sui-graphql-rpc/tests/dot_move_e2e.rs
+++ b/crates/sui-graphql-rpc/tests/dot_move_e2e.rs
@@ -1,511 +1,508 @@
// Copyright (c) Mysten Labs, Inc.
// SPDX-License-Identifier: Apache-2.0 -#[cfg(feature = "pg_integration")] -mod tests { - use std::{path::PathBuf, time::Duration}; - - use sui_graphql_rpc::{ - config::{ConnectionConfig, ServiceConfig}, - test_infra::cluster::{ - start_graphql_server_with_fn_rpc, start_network_cluster, - wait_for_graphql_checkpoint_catchup, wait_for_graphql_server, NetworkCluster, +use std::{path::PathBuf, time::Duration}; + +use sui_graphql_rpc::{ + config::{ConnectionConfig, ServiceConfig}, + test_infra::cluster::{ + start_graphql_server_with_fn_rpc, start_network_cluster, + wait_for_graphql_checkpoint_catchup, wait_for_graphql_server, NetworkCluster, + }, +}; +use sui_graphql_rpc_client::simple_client::SimpleClient; +use sui_indexer::tempdb::get_available_port; +use sui_json_rpc_types::ObjectChange; +use sui_move_build::BuildConfig; +use sui_types::{ + base_types::{ObjectID, SequenceNumber}, + digests::ObjectDigest, + move_package::UpgradePolicy, + object::Owner, + programmable_transaction_builder::ProgrammableTransactionBuilder, + transaction::{CallArg, ObjectArg}, + Identifier, SUI_FRAMEWORK_PACKAGE_ID, +}; +const DOT_MOVE_PKG: &str = "tests/dot_move/dot_move/"; +const DEMO_PKG: &str = "tests/dot_move/demo/"; +const DEMO_PKG_V2: &str = "tests/dot_move/demo_v2/"; +const DEMO_PKG_V3: &str = "tests/dot_move/demo_v3/"; + +const DEMO_TYPE: &str = "::demo::V1Type"; +const DEMO_TYPE_V2: &str = "::demo::V2Type"; +const DEMO_TYPE_V3: &str = "::demo::V3Type"; + +#[derive(Clone, Debug)] +struct UpgradeCap(ObjectID, SequenceNumber, ObjectDigest); + +#[tokio::test] +async fn test_dot_move_e2e() { + let network_cluster = start_network_cluster().await; + + let external_network_chain_id = network_cluster + .validator_fullnode_handle + .fullnode_handle + .sui_client + .read_api() + .get_chain_identifier() + .await + .unwrap(); + + eprintln!("External chain id: {:?}", external_network_chain_id); + + // publish the dot move package in the internal resolution cluster. + let (pkg_id, registry_id) = publish_dot_move_package(&network_cluster).await; + + let (v1, v2, v3) = publish_demo_pkg(&network_cluster).await; + + let name = "app@org".to_string(); + + // Register the package: First, for the "base" chain state. + register_pkg( + &network_cluster, + pkg_id, + registry_id, + v1, + name.clone(), + None, + ) + .await; + + // Register the package for the external resolver. + register_pkg( + &network_cluster, + pkg_id, + registry_id, + v1, + name.clone(), + Some(external_network_chain_id.clone()), + ) + .await; + + // Initialize the internal and external clients of GraphQL. + + // The first cluster uses internal resolution (mimics our base network, does not rely on external chain). 
+ let internal_client = init_dot_move_gql(
+ network_cluster.graphql_connection_config.clone(),
+ ServiceConfig::dot_move_test_defaults(
+ false,
+ None,
+ Some(pkg_id.into()),
+ Some(registry_id.0),
+ None,
+ ),
+ )
+ .await;
+
+ let external_client = init_dot_move_gql(
+ ConnectionConfig {
+ port: get_available_port(),
+ prom_port: get_available_port(),
+ ..network_cluster.graphql_connection_config.clone()
},
- };
- use sui_graphql_rpc_client::simple_client::SimpleClient;
- use sui_indexer::tempdb::get_available_port;
- use sui_json_rpc_types::ObjectChange;
- use sui_move_build::BuildConfig;
- use sui_types::{
- base_types::{ObjectID, SequenceNumber},
- digests::ObjectDigest,
- move_package::UpgradePolicy,
- object::Owner,
- programmable_transaction_builder::ProgrammableTransactionBuilder,
- transaction::{CallArg, ObjectArg},
- Identifier, SUI_FRAMEWORK_PACKAGE_ID,
- };
- const DOT_MOVE_PKG: &str = "tests/dot_move/dot_move/";
- const DEMO_PKG: &str = "tests/dot_move/demo/";
- const DEMO_PKG_V2: &str = "tests/dot_move/demo_v2/";
- const DEMO_PKG_V3: &str = "tests/dot_move/demo_v3/";
-
- const DEMO_TYPE: &str = "::demo::V1Type";
- const DEMO_TYPE_V2: &str = "::demo::V2Type";
- const DEMO_TYPE_V3: &str = "::demo::V3Type";
-
- #[derive(Clone, Debug)]
- struct UpgradeCap(ObjectID, SequenceNumber, ObjectDigest);
-
- #[tokio::test]
- async fn test_dot_move_e2e() {
- let network_cluster = start_network_cluster().await;
-
- let external_network_chain_id = network_cluster
- .validator_fullnode_handle
- .fullnode_handle
- .sui_client
- .read_api()
- .get_chain_identifier()
- .await
- .unwrap();
-
- eprintln!("External chain id: {:?}", external_network_chain_id);
-
- // publish the dot move package in the internal resolution cluster.
- let (pkg_id, registry_id) = publish_dot_move_package(&network_cluster).await;
-
- let (v1, v2, v3) = publish_demo_pkg(&network_cluster).await;
-
- let name = "app@org".to_string();
-
- // Register the package: First, for the "base" chain state.
- register_pkg(
- &network_cluster,
- pkg_id,
- registry_id,
- v1,
- name.clone(),
+ ServiceConfig::dot_move_test_defaults(
+ true, // external resolution
+ Some(internal_client.url()),
+ Some(pkg_id.into()),
+ Some(registry_id.0),
None,
- )
- .await;
-
- // Register the package for the external resolver.
- register_pkg(
- &network_cluster,
- pkg_id,
- registry_id,
- v1,
- name.clone(),
- Some(external_network_chain_id.clone()),
- )
- .await;
+ ),
+ )
+ .await;
+
+ // Wait for the internal cluster to catch up with the latest checkpoint.
+ // That way we're certain that the data is available for querying (committed & indexed).
+ let latest_checkpoint = network_cluster
+ .validator_fullnode_handle
+ .fullnode_handle
+ .sui_node
+ .inner()
+ .state()
+ .get_latest_checkpoint_sequence_number()
+ .expect("To have a checkpoint");
+
+ eprintln!("Latest checkpoint: {:?}", latest_checkpoint);
+
+ wait_for_graphql_checkpoint_catchup(
+ &internal_client,
+ latest_checkpoint,
+ Duration::from_millis(500),
+ )
+ .await;
+
+ // We craft one big query to test both internal and external resolution. The same query is
+ // used against both nodes, since we're testing on top of the same data, just with a different
+ // lookup approach.
+ let query = format!( + r#"{{ valid_latest: {}, v1: {}, v2: {}, v3: {}, v4: {}, v1_type: {}, v2_type: {}, v3_type: {} }}"#, + name_query(&name), + name_query(&format!("{}{}", &name, "/v1")), + name_query(&format!("{}{}", &name, "/v2")), + name_query(&format!("{}{}", &name, "/v3")), + name_query(&format!("{}{}", &name, "/v4")), + type_query(&format!("{}{}", &name, DEMO_TYPE)), + type_query(&format!("{}{}", &name, DEMO_TYPE_V2)), + type_query(&format!("{}{}", &name, DEMO_TYPE_V3)), + ); + + let internal_resolution = internal_client + .execute(query.clone(), vec![]) + .await + .unwrap(); + + let external_resolution = external_client + .execute(query.clone(), vec![]) + .await + .unwrap(); + + test_results(internal_resolution, &v1, &v2, &v3, "internal resolution"); + test_results(external_resolution, &v1, &v2, &v3, "external resolution"); + + eprintln!("Tests have finished successfully now!"); +} - // Initialize the internal and external clients of GraphQL. - - // The first cluster uses internal resolution (mimics our base network, does not rely on external chain). - let internal_client = init_dot_move_gql( - network_cluster.graphql_connection_config.clone(), - ServiceConfig::dot_move_test_defaults( - false, - None, - Some(pkg_id.into()), - Some(registry_id.0), - None, - ), - ) - .await; +fn test_results( + query_result: serde_json::Value, + v1: &ObjectID, + v2: &ObjectID, + v3: &ObjectID, + // an indicator to help identify the test case that failed using this. + indicator: &str, +) { + eprintln!("Testing results for: {}", indicator); + assert_eq!( + query_result["data"]["valid_latest"]["address"] + .as_str() + .unwrap(), + v3.to_string(), + "The latest version should have been v3", + ); + + assert_eq!( + query_result["data"]["v1"]["address"].as_str().unwrap(), + v1.to_string(), + "V1 response did not correspond to the expected value", + ); + + assert_eq!( + query_result["data"]["v2"]["address"].as_str().unwrap(), + v2.to_string(), + "V2 response did not correspond to the expected value", + ); + + assert_eq!( + query_result["data"]["v3"]["address"].as_str().unwrap(), + v3.to_string(), + "V3 response did not correspond to the expected value", + ); + + assert!( + query_result["data"]["v4"].is_null(), + "V4 should not have been found" + ); + + assert_eq!( + query_result["data"]["v1_type"]["layout"]["struct"]["type"] + .as_str() + .unwrap(), + format!("{}{}", v1, DEMO_TYPE) + ); + + assert_eq!( + query_result["data"]["v2_type"]["layout"]["struct"]["type"] + .as_str() + .unwrap(), + format!("{}{}", v2, DEMO_TYPE_V2) + ); + + assert_eq!( + query_result["data"]["v3_type"]["layout"]["struct"]["type"] + .as_str() + .unwrap(), + format!("{}{}", v3, DEMO_TYPE_V3) + ); +} - let external_client = init_dot_move_gql( - ConnectionConfig { - port: get_available_port(), - prom_port: get_available_port(), - ..network_cluster.graphql_connection_config.clone() - }, - ServiceConfig::dot_move_test_defaults( - true, // external resolution - Some(internal_client.url()), - Some(pkg_id.into()), - Some(registry_id.0), - None, - ), - ) - .await; +async fn init_dot_move_gql( + connection_config: ConnectionConfig, + config: ServiceConfig, +) -> SimpleClient { + let _gql_handle = + start_graphql_server_with_fn_rpc(connection_config.clone(), None, None, config).await; - // Await for the internal cluster to catch up with the latest checkpoint. - // That way we're certain that the data is available for querying (committed & indexed). 
- let latest_checkpoint = network_cluster - .validator_fullnode_handle - .fullnode_handle - .sui_node - .inner() - .state() - .get_latest_checkpoint_sequence_number() - .expect("To have a checkpoint"); - - eprintln!("Latest checkpoint: {:?}", latest_checkpoint); - - wait_for_graphql_checkpoint_catchup( - &internal_client, - latest_checkpoint, - Duration::from_millis(500), - ) - .await; + let server_url = format!( + "http://{}:{}/", + connection_config.host(), + connection_config.port() + ); - // We craft a big query, which we'll use to test both the internal and the external resolution. - // Same query is used across both nodes, since we're testing on top of the same data, just with a different - // lookup approach. - let query = format!( - r#"{{ valid_latest: {}, v1: {}, v2: {}, v3: {}, v4: {}, v1_type: {}, v2_type: {}, v3_type: {} }}"#, - name_query(&name), - name_query(&format!("{}{}", &name, "/v1")), - name_query(&format!("{}{}", &name, "/v2")), - name_query(&format!("{}{}", &name, "/v3")), - name_query(&format!("{}{}", &name, "/v4")), - type_query(&format!("{}{}", &name, DEMO_TYPE)), - type_query(&format!("{}{}", &name, DEMO_TYPE_V2)), - type_query(&format!("{}{}", &name, DEMO_TYPE_V3)), - ); - - let internal_resolution = internal_client - .execute(query.clone(), vec![]) - .await - .unwrap(); - - let external_resolution = external_client - .execute(query.clone(), vec![]) - .await - .unwrap(); - - test_results(internal_resolution, &v1, &v2, &v3, "internal resolution"); - test_results(external_resolution, &v1, &v2, &v3, "external resolution"); - - eprintln!("Tests have finished successfully now!"); - } + // Starts graphql client + let client = SimpleClient::new(server_url); + wait_for_graphql_server(&client).await; - fn test_results( - query_result: serde_json::Value, - v1: &ObjectID, - v2: &ObjectID, - v3: &ObjectID, - // an indicator to help identify the test case that failed using this. 
- indicator: &str, - ) { - eprintln!("Testing results for: {}", indicator); - assert_eq!( - query_result["data"]["valid_latest"]["address"] - .as_str() - .unwrap(), - v3.to_string(), - "The latest version should have been v3", - ); - - assert_eq!( - query_result["data"]["v1"]["address"].as_str().unwrap(), - v1.to_string(), - "V1 response did not correspond to the expected value", - ); - - assert_eq!( - query_result["data"]["v2"]["address"].as_str().unwrap(), - v2.to_string(), - "V2 response did not correspond to the expected value", - ); - - assert_eq!( - query_result["data"]["v3"]["address"].as_str().unwrap(), - v3.to_string(), - "V3 response did not correspond to the expected value", - ); - - assert!( - query_result["data"]["v4"].is_null(), - "V4 should not have been found" - ); - - assert_eq!( - query_result["data"]["v1_type"]["layout"]["struct"]["type"] - .as_str() - .unwrap(), - format!("{}{}", v1, DEMO_TYPE) - ); - - assert_eq!( - query_result["data"]["v2_type"]["layout"]["struct"]["type"] - .as_str() - .unwrap(), - format!("{}{}", v2, DEMO_TYPE_V2) - ); - - assert_eq!( - query_result["data"]["v3_type"]["layout"]["struct"]["type"] - .as_str() - .unwrap(), - format!("{}{}", v3, DEMO_TYPE_V3) - ); - } + client +} - async fn init_dot_move_gql( - connection_config: ConnectionConfig, - config: ServiceConfig, - ) -> SimpleClient { - let _gql_handle = - start_graphql_server_with_fn_rpc(connection_config.clone(), None, None, config).await; +async fn register_pkg( + cluster: &NetworkCluster, + dot_move_package_id: ObjectID, + registry_id: (ObjectID, SequenceNumber), + package_id: ObjectID, + name: String, + chain_id: Option, +) { + let is_network_call = chain_id.is_some(); + let function = if is_network_call { + "set_network" + } else { + "add_record" + }; - let server_url = format!( - "http://{}:{}/", - connection_config.host(), - connection_config.port() - ); + let mut args = vec![ + CallArg::Object(ObjectArg::SharedObject { + id: registry_id.0, + initial_shared_version: registry_id.1, + mutable: true, + }), + CallArg::from(&name.as_bytes().to_vec()), + CallArg::Pure(bcs::to_bytes(&package_id).unwrap()), + ]; + + if let Some(ref chain_id) = chain_id { + args.push(CallArg::from(&chain_id.as_bytes().to_vec())); + }; - // Starts graphql client - let client = SimpleClient::new(server_url); - wait_for_graphql_server(&client).await; + let tx = cluster + .validator_fullnode_handle + .test_transaction_builder() + .await + .move_call(dot_move_package_id, "dotmove", function, args) + .build(); - client - } + cluster + .validator_fullnode_handle + .sign_and_execute_transaction(&tx) + .await; - async fn register_pkg( - cluster: &NetworkCluster, - dot_move_package_id: ObjectID, - registry_id: (ObjectID, SequenceNumber), - package_id: ObjectID, - name: String, - chain_id: Option, - ) { - let is_network_call = chain_id.is_some(); - let function = if is_network_call { - "set_network" - } else { - "add_record" - }; - - let mut args = vec![ - CallArg::Object(ObjectArg::SharedObject { - id: registry_id.0, - initial_shared_version: registry_id.1, - mutable: true, - }), - CallArg::from(&name.as_bytes().to_vec()), - CallArg::Pure(bcs::to_bytes(&package_id).unwrap()), - ]; - - if let Some(ref chain_id) = chain_id { - args.push(CallArg::from(&chain_id.as_bytes().to_vec())); - }; - - let tx = cluster - .validator_fullnode_handle - .test_transaction_builder() - .await - .move_call(dot_move_package_id, "dotmove", function, args) - .build(); - - cluster - .validator_fullnode_handle - .sign_and_execute_transaction(&tx) 
- .await; - - eprintln!("Added record successfully: {:?}", (name, chain_id)); - } + eprintln!("Added record successfully: {:?}", (name, chain_id)); +} - // Publishes the Demo PKG, upgrades it twice and returns v1, v2 and v3 package ids. - async fn publish_demo_pkg(cluster: &NetworkCluster) -> (ObjectID, ObjectID, ObjectID) { - let tx = cluster - .validator_fullnode_handle - .test_transaction_builder() - .await - .publish(PathBuf::from(DEMO_PKG)) - .build(); - - let executed = cluster - .validator_fullnode_handle - .sign_and_execute_transaction(&tx) - .await; - let object_changes = executed.object_changes.unwrap(); - - let v1 = object_changes - .iter() - .find_map(|object| { - if let ObjectChange::Published { package_id, .. } = object { - Some(*package_id) - } else { - None - } - }) - .unwrap(); - - let upgrade_cap = object_changes - .iter() - .find_map(|object| { - if let ObjectChange::Created { - object_id, - object_type, - digest, - version, - .. - } = object +// Publishes the Demo PKG, upgrades it twice and returns v1, v2 and v3 package ids. +async fn publish_demo_pkg(cluster: &NetworkCluster) -> (ObjectID, ObjectID, ObjectID) { + let tx = cluster + .validator_fullnode_handle + .test_transaction_builder() + .await + .publish(PathBuf::from(DEMO_PKG)) + .build(); + + let executed = cluster + .validator_fullnode_handle + .sign_and_execute_transaction(&tx) + .await; + let object_changes = executed.object_changes.unwrap(); + + let v1 = object_changes + .iter() + .find_map(|object| { + if let ObjectChange::Published { package_id, .. } = object { + Some(*package_id) + } else { + None + } + }) + .unwrap(); + + let upgrade_cap = object_changes + .iter() + .find_map(|object| { + if let ObjectChange::Created { + object_id, + object_type, + digest, + version, + .. + } = object + { + if object_type.module.as_str() == "package" + && object_type.name.as_str() == "UpgradeCap" { - if object_type.module.as_str() == "package" - && object_type.name.as_str() == "UpgradeCap" - { - Some(UpgradeCap(*object_id, *version, *digest)) - } else { - None - } + Some(UpgradeCap(*object_id, *version, *digest)) } else { None } - }) - .unwrap(); + } else { + None + } + }) + .unwrap(); - let (v2, upgrade_cap) = upgrade_pkg(cluster, DEMO_PKG_V2, upgrade_cap, v1).await; - let (v3, _) = upgrade_pkg(cluster, DEMO_PKG_V3, upgrade_cap, v2).await; + let (v2, upgrade_cap) = upgrade_pkg(cluster, DEMO_PKG_V2, upgrade_cap, v1).await; + let (v3, _) = upgrade_pkg(cluster, DEMO_PKG_V3, upgrade_cap, v2).await; - (v1, v2, v3) - } + (v1, v2, v3) +} - async fn upgrade_pkg( - cluster: &NetworkCluster, - package_path: &str, - upgrade_cap: UpgradeCap, - current_package_object_id: ObjectID, - ) -> (ObjectID, UpgradeCap) { - // build the package upgrade to V2. 
- let mut builder = ProgrammableTransactionBuilder::new(); - - let compiled_package = BuildConfig::new_for_testing() - .build(&PathBuf::from(package_path)) - .unwrap(); - let digest = compiled_package.get_package_digest(false); - let modules = compiled_package.get_package_bytes(false); - let dependencies = compiled_package.get_dependency_storage_package_ids(); - - let cap = builder - .obj(ObjectArg::ImmOrOwnedObject(( - upgrade_cap.0, - upgrade_cap.1, - upgrade_cap.2, - ))) - .unwrap(); - - let policy = builder.pure(UpgradePolicy::Compatible as u8).unwrap(); - - let digest = builder.pure(digest.to_vec()).unwrap(); - - let ticket = builder.programmable_move_call( - SUI_FRAMEWORK_PACKAGE_ID, - Identifier::new("package").unwrap(), - Identifier::new("authorize_upgrade").unwrap(), - vec![], - vec![cap, policy, digest], - ); - - let receipt = builder.upgrade(current_package_object_id, ticket, dependencies, modules); - - builder.programmable_move_call( - SUI_FRAMEWORK_PACKAGE_ID, - Identifier::new("package").unwrap(), - Identifier::new("commit_upgrade").unwrap(), - vec![], - vec![cap, receipt], - ); - - let tx = cluster - .validator_fullnode_handle - .test_transaction_builder() - .await - .programmable(builder.finish()) - .build(); - - let upgraded = cluster - .validator_fullnode_handle - .sign_and_execute_transaction(&tx) - .await; - - let object_changes = upgraded.object_changes.unwrap(); - - let pkg_id = object_changes - .iter() - .find_map(|object| { - if let ObjectChange::Published { package_id, .. } = object { - Some(*package_id) - } else { - None - } - }) - .unwrap(); - - let upgrade_cap = object_changes - .iter() - .find_map(|object| { - if let ObjectChange::Mutated { - object_id, - object_type, - digest, - version, - .. - } = object +async fn upgrade_pkg( + cluster: &NetworkCluster, + package_path: &str, + upgrade_cap: UpgradeCap, + current_package_object_id: ObjectID, +) -> (ObjectID, UpgradeCap) { + // build the package upgrade to V2. + let mut builder = ProgrammableTransactionBuilder::new(); + + let compiled_package = BuildConfig::new_for_testing() + .build(&PathBuf::from(package_path)) + .unwrap(); + let digest = compiled_package.get_package_digest(false); + let modules = compiled_package.get_package_bytes(false); + let dependencies = compiled_package.get_dependency_storage_package_ids(); + + let cap = builder + .obj(ObjectArg::ImmOrOwnedObject(( + upgrade_cap.0, + upgrade_cap.1, + upgrade_cap.2, + ))) + .unwrap(); + + let policy = builder.pure(UpgradePolicy::Compatible as u8).unwrap(); + + let digest = builder.pure(digest.to_vec()).unwrap(); + + let ticket = builder.programmable_move_call( + SUI_FRAMEWORK_PACKAGE_ID, + Identifier::new("package").unwrap(), + Identifier::new("authorize_upgrade").unwrap(), + vec![], + vec![cap, policy, digest], + ); + + let receipt = builder.upgrade(current_package_object_id, ticket, dependencies, modules); + + builder.programmable_move_call( + SUI_FRAMEWORK_PACKAGE_ID, + Identifier::new("package").unwrap(), + Identifier::new("commit_upgrade").unwrap(), + vec![], + vec![cap, receipt], + ); + + let tx = cluster + .validator_fullnode_handle + .test_transaction_builder() + .await + .programmable(builder.finish()) + .build(); + + let upgraded = cluster + .validator_fullnode_handle + .sign_and_execute_transaction(&tx) + .await; + + let object_changes = upgraded.object_changes.unwrap(); + + let pkg_id = object_changes + .iter() + .find_map(|object| { + if let ObjectChange::Published { package_id, .. 
} = object { + Some(*package_id) + } else { + None + } + }) + .unwrap(); + + let upgrade_cap = object_changes + .iter() + .find_map(|object| { + if let ObjectChange::Mutated { + object_id, + object_type, + digest, + version, + .. + } = object + { + if object_type.module.as_str() == "package" + && object_type.name.as_str() == "UpgradeCap" { - if object_type.module.as_str() == "package" - && object_type.name.as_str() == "UpgradeCap" - { - Some(UpgradeCap(*object_id, *version, *digest)) - } else { - None - } + Some(UpgradeCap(*object_id, *version, *digest)) } else { None } - }) - .unwrap(); + } else { + None + } + }) + .unwrap(); - (pkg_id, upgrade_cap) - } + (pkg_id, upgrade_cap) +} - async fn publish_dot_move_package( - cluster: &NetworkCluster, - ) -> (ObjectID, (ObjectID, SequenceNumber)) { - let package_path = PathBuf::from(DOT_MOVE_PKG); - let tx = cluster - .validator_fullnode_handle - .test_transaction_builder() - .await - .publish(package_path) - .build(); - - let sig = cluster - .validator_fullnode_handle - .wallet - .sign_transaction(&tx); - - let executed = cluster - .validator_fullnode_handle - .execute_transaction(sig) - .await; - - let (mut pkg_id, mut obj_id) = (None, None); - - for object in executed.object_changes.unwrap() { - match object { - ObjectChange::Published { package_id, .. } => { - pkg_id = Some(package_id); - } - ObjectChange::Created { - object_id, - object_type, - owner, - .. - } => { - if object_type.module.as_str() == "dotmove" - && object_type.name.as_str() == "AppRegistry" - { - let initial_shared_version = match owner { - Owner::Shared { - initial_shared_version, - } => initial_shared_version, - _ => panic!("AppRegistry should be shared"), - }; - - if !owner.is_shared() { - panic!("AppRegistry should be shared"); - }; - - obj_id = Some((object_id, initial_shared_version)); - } +async fn publish_dot_move_package( + cluster: &NetworkCluster, +) -> (ObjectID, (ObjectID, SequenceNumber)) { + let package_path = PathBuf::from(DOT_MOVE_PKG); + let tx = cluster + .validator_fullnode_handle + .test_transaction_builder() + .await + .publish(package_path) + .build(); + + let sig = cluster + .validator_fullnode_handle + .wallet + .sign_transaction(&tx); + + let executed = cluster + .validator_fullnode_handle + .execute_transaction(sig) + .await; + + let (mut pkg_id, mut obj_id) = (None, None); + + for object in executed.object_changes.unwrap() { + match object { + ObjectChange::Published { package_id, .. } => { + pkg_id = Some(package_id); + } + ObjectChange::Created { + object_id, + object_type, + owner, + .. 
+ } => { + if object_type.module.as_str() == "dotmove" + && object_type.name.as_str() == "AppRegistry" + { + let initial_shared_version = match owner { + Owner::Shared { + initial_shared_version, + } => initial_shared_version, + _ => panic!("AppRegistry should be shared"), + }; + + if !owner.is_shared() { + panic!("AppRegistry should be shared"); + }; + + obj_id = Some((object_id, initial_shared_version)); } - _ => {} } + _ => {} } - - (pkg_id.unwrap(), obj_id.unwrap()) } - fn name_query(name: &str) -> String { - format!(r#"packageByName(name: "{}") {{ address, version }}"#, name) - } + (pkg_id.unwrap(), obj_id.unwrap()) +} - fn type_query(named_type: &str) -> String { - format!(r#"typeByName(name: "{}") {{ layout }}"#, named_type) - } +fn name_query(name: &str) -> String { + format!(r#"packageByName(name: "{}") {{ address, version }}"#, name) +} + +fn type_query(named_type: &str) -> String { + format!(r#"typeByName(name: "{}") {{ layout }}"#, named_type) } diff --git a/crates/sui-graphql-rpc/tests/e2e_tests.rs b/crates/sui-graphql-rpc/tests/e2e_tests.rs index b818d7fb20c19..24d3388aefe03 100644 --- a/crates/sui-graphql-rpc/tests/e2e_tests.rs +++ b/crates/sui-graphql-rpc/tests/e2e_tests.rs @@ -1,351 +1,349 @@ // Copyright (c) Mysten Labs, Inc. // SPDX-License-Identifier: Apache-2.0 -#[cfg(feature = "pg_integration")] -mod tests { - use fastcrypto::encoding::{Base64, Encoding}; - use rand::rngs::StdRng; - use rand::SeedableRng; - use serde_json::json; - use simulacrum::Simulacrum; - use std::sync::Arc; - use std::time::Duration; - use sui_graphql_rpc::client::simple_client::GraphqlQueryVariable; - use sui_graphql_rpc::client::ClientError; - use sui_graphql_rpc::config::Limits; - use sui_graphql_rpc::config::ServiceConfig; - use sui_graphql_rpc::test_infra::cluster::prep_executor_cluster; - use sui_graphql_rpc::test_infra::cluster::start_cluster; - use sui_types::digests::ChainIdentifier; - use sui_types::gas_coin::GAS; - use sui_types::transaction::CallArg; - use sui_types::transaction::ObjectArg; - use sui_types::transaction::TransactionDataAPI; - use sui_types::DEEPBOOK_ADDRESS; - use sui_types::SUI_FRAMEWORK_ADDRESS; - use sui_types::SUI_FRAMEWORK_PACKAGE_ID; - use tempfile::tempdir; - use tokio::time::sleep; - - #[tokio::test] - async fn test_simple_client_validator_cluster() { - telemetry_subscribers::init_for_testing(); - - let cluster = start_cluster(ServiceConfig::test_defaults()).await; - - cluster - .wait_for_checkpoint_catchup(1, Duration::from_secs(10)) - .await; +use fastcrypto::encoding::{Base64, Encoding}; +use rand::rngs::StdRng; +use rand::SeedableRng; +use serde_json::json; +use simulacrum::Simulacrum; +use std::sync::Arc; +use std::time::Duration; +use sui_graphql_rpc::client::simple_client::GraphqlQueryVariable; +use sui_graphql_rpc::client::ClientError; +use sui_graphql_rpc::config::Limits; +use sui_graphql_rpc::config::ServiceConfig; +use sui_graphql_rpc::test_infra::cluster::prep_executor_cluster; +use sui_graphql_rpc::test_infra::cluster::start_cluster; +use sui_types::digests::ChainIdentifier; +use sui_types::gas_coin::GAS; +use sui_types::transaction::CallArg; +use sui_types::transaction::ObjectArg; +use sui_types::transaction::TransactionDataAPI; +use sui_types::DEEPBOOK_ADDRESS; +use sui_types::SUI_FRAMEWORK_ADDRESS; +use sui_types::SUI_FRAMEWORK_PACKAGE_ID; +use tempfile::tempdir; +use tokio::time::sleep; + +#[tokio::test] +async fn test_simple_client_validator_cluster() { + telemetry_subscribers::init_for_testing(); + + let cluster = 
start_cluster(ServiceConfig::test_defaults()).await; + + cluster + .wait_for_checkpoint_catchup(1, Duration::from_secs(10)) + .await; - let query = r#" + let query = r#" { chainIdentifier } "#; - let res = cluster - .graphql_client - .execute(query.to_string(), vec![]) - .await - .unwrap(); - let chain_id_actual = cluster - .network - .validator_fullnode_handle - .fullnode_handle - .sui_client - .read_api() - .get_chain_identifier() - .await - .unwrap(); - - let exp = format!( - "{{\"data\":{{\"chainIdentifier\":\"{}\"}}}}", - chain_id_actual - ); - assert_eq!(&format!("{}", res), &exp); - } - - #[tokio::test] - async fn test_simple_client_simulator_cluster() { - let rng = StdRng::from_seed([12; 32]); - let mut sim = Simulacrum::new_with_rng(rng); - let data_ingestion_path = tempdir().unwrap(); - sim.set_data_ingestion_path(data_ingestion_path.path().to_path_buf()); - - sim.create_checkpoint(); - sim.create_checkpoint(); - - let genesis_checkpoint_digest1 = *sim - .store() - .get_checkpoint_by_sequence_number(0) - .unwrap() - .digest(); + let res = cluster + .graphql_client + .execute(query.to_string(), vec![]) + .await + .unwrap(); + let chain_id_actual = cluster + .network + .validator_fullnode_handle + .fullnode_handle + .sui_client + .read_api() + .get_chain_identifier() + .await + .unwrap(); + + let exp = format!( + "{{\"data\":{{\"chainIdentifier\":\"{}\"}}}}", + chain_id_actual + ); + assert_eq!(&format!("{}", res), &exp); +} - let chain_id_actual = format!("{}", ChainIdentifier::from(genesis_checkpoint_digest1)); - let exp = format!( - "{{\"data\":{{\"chainIdentifier\":\"{}\"}}}}", - chain_id_actual - ); - let cluster = sui_graphql_rpc::test_infra::cluster::serve_executor( - Arc::new(sim), - None, - None, - data_ingestion_path.path().to_path_buf(), - ) +#[tokio::test] +async fn test_simple_client_simulator_cluster() { + let rng = StdRng::from_seed([12; 32]); + let mut sim = Simulacrum::new_with_rng(rng); + let data_ingestion_path = tempdir().unwrap(); + sim.set_data_ingestion_path(data_ingestion_path.path().to_path_buf()); + + sim.create_checkpoint(); + sim.create_checkpoint(); + + let genesis_checkpoint_digest1 = *sim + .store() + .get_checkpoint_by_sequence_number(0) + .unwrap() + .digest(); + + let chain_id_actual = format!("{}", ChainIdentifier::from(genesis_checkpoint_digest1)); + let exp = format!( + "{{\"data\":{{\"chainIdentifier\":\"{}\"}}}}", + chain_id_actual + ); + let cluster = sui_graphql_rpc::test_infra::cluster::serve_executor( + Arc::new(sim), + None, + None, + data_ingestion_path.path().to_path_buf(), + ) + .await; + cluster + .wait_for_checkpoint_catchup(1, Duration::from_secs(10)) .await; - cluster - .wait_for_checkpoint_catchup(1, Duration::from_secs(10)) - .await; - let query = r#" + let query = r#" { chainIdentifier } "#; - let res = cluster - .graphql_client - .execute(query.to_string(), vec![]) - .await - .unwrap(); + let res = cluster + .graphql_client + .execute(query.to_string(), vec![]) + .await + .unwrap(); - assert_eq!(&format!("{}", res), &exp); - } + assert_eq!(&format!("{}", res), &exp); +} - #[tokio::test] - async fn test_graphql_client_response() { - let cluster = prep_executor_cluster().await; +#[tokio::test] +async fn test_graphql_client_response() { + let cluster = prep_executor_cluster().await; - let query = r#" + let query = r#" { chainIdentifier } "#; - let res = cluster - .graphql_client - .execute_to_graphql(query.to_string(), true, vec![], vec![]) - .await - .unwrap(); - - assert_eq!(res.http_status().as_u16(), 200); - 
assert_eq!(res.http_version(), reqwest::Version::HTTP_11); - assert!(res.graphql_version().unwrap().len() >= 5); - assert!(res.errors().is_empty()); - - let usage = res.usage().unwrap().unwrap(); - assert_eq!(*usage.get("inputNodes").unwrap(), 1); - assert_eq!(*usage.get("outputNodes").unwrap(), 1); - assert_eq!(*usage.get("depth").unwrap(), 1); - assert_eq!(*usage.get("variables").unwrap(), 0); - assert_eq!(*usage.get("fragments").unwrap(), 0); - } + let res = cluster + .graphql_client + .execute_to_graphql(query.to_string(), true, vec![], vec![]) + .await + .unwrap(); + + assert_eq!(res.http_status().as_u16(), 200); + assert_eq!(res.http_version(), reqwest::Version::HTTP_11); + assert!(res.graphql_version().unwrap().len() >= 5); + assert!(res.errors().is_empty()); + + let usage = res.usage().unwrap().unwrap(); + assert_eq!(*usage.get("inputNodes").unwrap(), 1); + assert_eq!(*usage.get("outputNodes").unwrap(), 1); + assert_eq!(*usage.get("depth").unwrap(), 1); + assert_eq!(*usage.get("variables").unwrap(), 0); + assert_eq!(*usage.get("fragments").unwrap(), 0); +} - #[tokio::test] - async fn test_graphql_client_variables() { - let cluster = prep_executor_cluster().await; +#[tokio::test] +async fn test_graphql_client_variables() { + let cluster = prep_executor_cluster().await; - let query = r#"{obj1: object(address: $framework_addr) {address} + let query = r#"{obj1: object(address: $framework_addr) {address} obj2: object(address: $deepbook_addr) {address}}"#; - let variables = vec![ - GraphqlQueryVariable { - name: "framework_addr".to_string(), - ty: "SuiAddress!".to_string(), - value: json!("0x2"), - }, - GraphqlQueryVariable { - name: "deepbook_addr".to_string(), - ty: "SuiAddress!".to_string(), - value: json!("0xdee9"), - }, - ]; - let res = cluster - .graphql_client - .execute_to_graphql(query.to_string(), true, variables, vec![]) - .await - .unwrap(); - - assert!(res.errors().is_empty()); - let data = res.response_body().data.clone().into_json().unwrap(); - data.get("obj1").unwrap().get("address").unwrap(); - assert_eq!( - data.get("obj1") - .unwrap() - .get("address") - .unwrap() - .as_str() - .unwrap(), - SUI_FRAMEWORK_ADDRESS.to_canonical_string(true) - ); - assert_eq!( - data.get("obj2") - .unwrap() - .get("address") - .unwrap() - .as_str() - .unwrap(), - DEEPBOOK_ADDRESS.to_canonical_string(true) - ); - - let bad_variables = vec![ - GraphqlQueryVariable { - name: "framework_addr".to_string(), - ty: "SuiAddress!".to_string(), - value: json!("0x2"), - }, - GraphqlQueryVariable { - name: "deepbook_addr".to_string(), - ty: "SuiAddress!".to_string(), - value: json!("0xdee9"), - }, - GraphqlQueryVariable { - name: "deepbook_addr".to_string(), - ty: "SuiAddress!".to_string(), - value: json!("0xdee96666666"), - }, - ]; - let res = cluster - .graphql_client - .execute_to_graphql(query.to_string(), true, bad_variables, vec![]) - .await; + let variables = vec![ + GraphqlQueryVariable { + name: "framework_addr".to_string(), + ty: "SuiAddress!".to_string(), + value: json!("0x2"), + }, + GraphqlQueryVariable { + name: "deepbook_addr".to_string(), + ty: "SuiAddress!".to_string(), + value: json!("0xdee9"), + }, + ]; + let res = cluster + .graphql_client + .execute_to_graphql(query.to_string(), true, variables, vec![]) + .await + .unwrap(); + + assert!(res.errors().is_empty()); + let data = res.response_body().data.clone().into_json().unwrap(); + data.get("obj1").unwrap().get("address").unwrap(); + assert_eq!( + data.get("obj1") + .unwrap() + .get("address") + .unwrap() + .as_str() + .unwrap(), + 
SUI_FRAMEWORK_ADDRESS.to_canonical_string(true) + ); + assert_eq!( + data.get("obj2") + .unwrap() + .get("address") + .unwrap() + .as_str() + .unwrap(), + DEEPBOOK_ADDRESS.to_canonical_string(true) + ); + + let bad_variables = vec![ + GraphqlQueryVariable { + name: "framework_addr".to_string(), + ty: "SuiAddress!".to_string(), + value: json!("0x2"), + }, + GraphqlQueryVariable { + name: "deepbook_addr".to_string(), + ty: "SuiAddress!".to_string(), + value: json!("0xdee9"), + }, + GraphqlQueryVariable { + name: "deepbook_addr".to_string(), + ty: "SuiAddress!".to_string(), + value: json!("0xdee96666666"), + }, + ]; + let res = cluster + .graphql_client + .execute_to_graphql(query.to_string(), true, bad_variables, vec![]) + .await; - assert!(res.is_err()); + assert!(res.is_err()); + + let bad_variables = vec![ + GraphqlQueryVariable { + name: "framework_addr".to_string(), + ty: "SuiAddress!".to_string(), + value: json!("0x2"), + }, + GraphqlQueryVariable { + name: "deepbook_addr".to_string(), + ty: "SuiAddress!".to_string(), + value: json!("0xdee9"), + }, + GraphqlQueryVariable { + name: "deepbook_addr".to_string(), + ty: "SuiAddressP!".to_string(), + value: json!("0xdee9"), + }, + ]; + let res = cluster + .graphql_client + .execute_to_graphql(query.to_string(), true, bad_variables, vec![]) + .await; - let bad_variables = vec![ - GraphqlQueryVariable { - name: "framework_addr".to_string(), - ty: "SuiAddress!".to_string(), - value: json!("0x2"), - }, - GraphqlQueryVariable { - name: "deepbook_addr".to_string(), - ty: "SuiAddress!".to_string(), - value: json!("0xdee9"), - }, - GraphqlQueryVariable { - name: "deepbook_addr".to_string(), - ty: "SuiAddressP!".to_string(), - value: json!("0xdee9"), - }, - ]; + assert!(res.is_err()); + + let bad_variables = vec![ + GraphqlQueryVariable { + name: "framework addr".to_string(), + ty: "SuiAddress!".to_string(), + value: json!("0x2"), + }, + GraphqlQueryVariable { + name: " deepbook_addr".to_string(), + ty: "SuiAddress!".to_string(), + value: json!("0xdee9"), + }, + GraphqlQueryVariable { + name: "4deepbook_addr".to_string(), + ty: "SuiAddressP!".to_string(), + value: json!("0xdee9"), + }, + GraphqlQueryVariable { + name: "".to_string(), + ty: "SuiAddress!".to_string(), + value: json!("0xdee9"), + }, + GraphqlQueryVariable { + name: " ".to_string(), + ty: "SuiAddress!".to_string(), + value: json!("0xdee9"), + }, + ]; + + for var in bad_variables { let res = cluster .graphql_client - .execute_to_graphql(query.to_string(), true, bad_variables, vec![]) + .execute_to_graphql(query.to_string(), true, vec![var.clone()], vec![]) .await; assert!(res.is_err()); - - let bad_variables = vec![ - GraphqlQueryVariable { - name: "framework addr".to_string(), - ty: "SuiAddress!".to_string(), - value: json!("0x2"), - }, - GraphqlQueryVariable { - name: " deepbook_addr".to_string(), - ty: "SuiAddress!".to_string(), - value: json!("0xdee9"), - }, - GraphqlQueryVariable { - name: "4deepbook_addr".to_string(), - ty: "SuiAddressP!".to_string(), - value: json!("0xdee9"), - }, - GraphqlQueryVariable { - name: "".to_string(), - ty: "SuiAddress!".to_string(), - value: json!("0xdee9"), - }, - GraphqlQueryVariable { - name: " ".to_string(), - ty: "SuiAddress!".to_string(), - value: json!("0xdee9"), - }, - ]; - - for var in bad_variables { - let res = cluster - .graphql_client - .execute_to_graphql(query.to_string(), true, vec![var.clone()], vec![]) - .await; - - assert!(res.is_err()); - assert!( - res.unwrap_err().to_string() - == ClientError::InvalidVariableName { - var_name: 
var.name.clone()
- }
- .to_string()
- );
- }
+ assert!(
+ res.unwrap_err().to_string()
+ == ClientError::InvalidVariableName {
+ var_name: var.name.clone()
+ }
+ .to_string()
+ );
}
}

- #[tokio::test]
- async fn test_transaction_execution() {
- telemetry_subscribers::init_for_testing();
-
- let cluster = start_cluster(ServiceConfig::test_defaults()).await;
-
- let addresses = cluster
- .network
- .validator_fullnode_handle
- .wallet
- .get_addresses();
-
- let sender = addresses[0];
- let recipient = addresses[1];
- let tx = cluster
- .network
- .validator_fullnode_handle
- .test_transaction_builder()
- .await
- .transfer_sui(Some(1_000), recipient)
- .build();
- let signed_tx = cluster
- .network
- .validator_fullnode_handle
- .wallet
- .sign_transaction(&tx);
- let original_digest = signed_tx.digest();
- let (tx_bytes, sigs) = signed_tx.to_tx_bytes_and_signatures();
- let tx_bytes = tx_bytes.encoded();
- let sigs = sigs.iter().map(|sig| sig.encoded()).collect::<Vec<_>>();
-
- let mutation = r#"{ executeTransactionBlock(txBytes: $tx, signatures: $sigs) { effects { transactionBlock { digest } } errors}}"#;
-
- let variables = vec![
- GraphqlQueryVariable {
- name: "tx".to_string(),
- ty: "String!".to_string(),
- value: json!(tx_bytes),
- },
- GraphqlQueryVariable {
- name: "sigs".to_string(),
- ty: "[String!]!".to_string(),
- value: json!(sigs),
- },
- ];
- let res = cluster
- .graphql_client
- .execute_mutation_to_graphql(mutation.to_string(), variables)
- .await
- .unwrap();
- let binding = res.response_body().data.clone().into_json().unwrap();
- let res = binding.get("executeTransactionBlock").unwrap();
-
- let digest = res
- .get("effects")
- .unwrap()
- .get("transactionBlock")
- .unwrap()
- .get("digest")
- .unwrap()
- .as_str()
- .unwrap();
- assert!(res.get("errors").unwrap().is_null());
- assert_eq!(digest, original_digest.to_string());
-
- // Wait for the transaction to be committed and indexed
- sleep(Duration::from_secs(10)).await;
- // Query the transaction
- let query = r#"
+#[tokio::test]
+async fn test_transaction_execution() {
+ telemetry_subscribers::init_for_testing();
+
+ let cluster = start_cluster(ServiceConfig::test_defaults()).await;
+
+ let addresses = cluster
+ .network
+ .validator_fullnode_handle
+ .wallet
+ .get_addresses();
+
+ let sender = addresses[0];
+ let recipient = addresses[1];
+ let tx = cluster
+ .network
+ .validator_fullnode_handle
+ .test_transaction_builder()
+ .await
+ .transfer_sui(Some(1_000), recipient)
+ .build();
+ let signed_tx = cluster
+ .network
+ .validator_fullnode_handle
+ .wallet
+ .sign_transaction(&tx);
+ let original_digest = signed_tx.digest();
+ let (tx_bytes, sigs) = signed_tx.to_tx_bytes_and_signatures();
+ let tx_bytes = tx_bytes.encoded();
+ let sigs = sigs.iter().map(|sig| sig.encoded()).collect::<Vec<_>>();
+
+ let mutation = r#"{ executeTransactionBlock(txBytes: $tx, signatures: $sigs) { effects { transactionBlock { digest } } errors}}"#;
+
+ let variables = vec![
+ GraphqlQueryVariable {
+ name: "tx".to_string(),
+ ty: "String!".to_string(),
+ value: json!(tx_bytes),
+ },
+ GraphqlQueryVariable {
+ name: "sigs".to_string(),
+ ty: "[String!]!".to_string(),
+ value: json!(sigs),
+ },
+ ];
+ let res = cluster
+ .graphql_client
+ .execute_mutation_to_graphql(mutation.to_string(), variables)
+ .await
+ .unwrap();
+ let binding = res.response_body().data.clone().into_json().unwrap();
+ let res = binding.get("executeTransactionBlock").unwrap();
+
+ let digest = res
+ .get("effects")
+ .unwrap()
+ .get("transactionBlock")
+ .unwrap()
+ 
.get("digest") + .unwrap() + .as_str() + .unwrap(); + assert!(res.get("errors").unwrap().is_null()); + assert_eq!(digest, original_digest.to_string()); + + // Wait for the transaction to be committed and indexed + sleep(Duration::from_secs(10)).await; + // Query the transaction + let query = r#" { transactionBlock(digest: $dig){ sender { @@ -355,171 +353,171 @@ mod tests { } "#; - let variables = vec![GraphqlQueryVariable { - name: "dig".to_string(), - ty: "String!".to_string(), - value: json!(digest), - }]; - let res = cluster - .graphql_client - .execute_to_graphql(query.to_string(), true, variables, vec![]) - .await - .unwrap(); - - let binding = res.response_body().data.clone().into_json().unwrap(); - let sender_read = binding - .get("transactionBlock") - .unwrap() - .get("sender") - .unwrap() - .get("address") - .unwrap() - .as_str() - .unwrap(); - assert_eq!(sender_read, sender.to_string()); - } + let variables = vec![GraphqlQueryVariable { + name: "dig".to_string(), + ty: "String!".to_string(), + value: json!(digest), + }]; + let res = cluster + .graphql_client + .execute_to_graphql(query.to_string(), true, variables, vec![]) + .await + .unwrap(); + + let binding = res.response_body().data.clone().into_json().unwrap(); + let sender_read = binding + .get("transactionBlock") + .unwrap() + .get("sender") + .unwrap() + .get("address") + .unwrap() + .as_str() + .unwrap(); + assert_eq!(sender_read, sender.to_string()); +} - #[tokio::test] - async fn test_zklogin_sig_verify() { - use shared_crypto::intent::Intent; - use shared_crypto::intent::IntentMessage; - use sui_test_transaction_builder::TestTransactionBuilder; - use sui_types::base_types::SuiAddress; - use sui_types::crypto::Signature; - use sui_types::signature::GenericSignature; - use sui_types::utils::load_test_vectors; - use sui_types::zk_login_authenticator::ZkLoginAuthenticator; - - telemetry_subscribers::init_for_testing(); - - let cluster = start_cluster(ServiceConfig::test_defaults()).await; - - let test_cluster = &cluster.network.validator_fullnode_handle; - test_cluster.wait_for_epoch_all_nodes(1).await; - test_cluster.wait_for_authenticator_state_update().await; - - // Construct a valid zkLogin transaction data, signature. - let (kp, pk_zklogin, inputs) = - &load_test_vectors("../sui-types/src/unit_tests/zklogin_test_vectors.json")[1]; - - let zklogin_addr = (pk_zklogin).into(); - let rgp = test_cluster.get_reference_gas_price().await; - let gas = test_cluster - .fund_address_and_return_gas(rgp, Some(20000000000), zklogin_addr) - .await; - let tx_data = TestTransactionBuilder::new(zklogin_addr, gas, rgp) - .transfer_sui(None, SuiAddress::ZERO) - .build(); - let msg = IntentMessage::new(Intent::sui_transaction(), tx_data.clone()); - let eph_sig = Signature::new_secure(&msg, kp); - let generic_sig = GenericSignature::ZkLoginAuthenticator(ZkLoginAuthenticator::new( - inputs.clone(), - 2, - eph_sig.clone(), - )); - - // construct all parameters for the query - let bytes = Base64::encode(bcs::to_bytes(&tx_data).unwrap()); - let signature = Base64::encode(generic_sig.as_ref()); - let intent_scope = "TRANSACTION_DATA"; - let author = zklogin_addr.to_string(); - - // now query the endpoint with a valid tx data bytes and a valid signature with the correct proof for dev env. 
- let query = r#"{ verifyZkloginSignature(bytes: $bytes, signature: $signature, intentScope: $intent_scope, author: $author ) { success, errors}}"#; - let variables = vec![ - GraphqlQueryVariable { - name: "bytes".to_string(), - ty: "String!".to_string(), - value: json!(bytes), - }, - GraphqlQueryVariable { - name: "signature".to_string(), - ty: "String!".to_string(), - value: json!(signature), - }, - GraphqlQueryVariable { - name: "intent_scope".to_string(), - ty: "ZkLoginIntentScope!".to_string(), - value: json!(intent_scope), - }, - GraphqlQueryVariable { - name: "author".to_string(), - ty: "SuiAddress!".to_string(), - value: json!(author), - }, - ]; - let res = cluster - .graphql_client - .execute_to_graphql(query.to_string(), true, variables, vec![]) - .await - .unwrap(); - - // a valid signature with tx bytes returns success as true. - let binding = res.response_body().data.clone().into_json().unwrap(); - tracing::info!("tktkbinding: {:?}", binding); - let res = binding.get("verifyZkloginSignature").unwrap(); - assert_eq!(res.get("success").unwrap(), true); - - // set up an invalid intent scope. - let incorrect_intent_scope = "PERSONAL_MESSAGE"; - let incorrect_variables = vec![ - GraphqlQueryVariable { - name: "bytes".to_string(), - ty: "String!".to_string(), - value: json!(bytes), - }, - GraphqlQueryVariable { - name: "signature".to_string(), - ty: "String!".to_string(), - value: json!(signature), - }, - GraphqlQueryVariable { - name: "intent_scope".to_string(), - ty: "ZkLoginIntentScope!".to_string(), - value: json!(incorrect_intent_scope), - }, - GraphqlQueryVariable { - name: "author".to_string(), - ty: "SuiAddress!".to_string(), - value: json!(author), - }, - ]; - // returns a non-empty errors list in response - let res = cluster - .graphql_client - .execute_to_graphql(query.to_string(), true, incorrect_variables, vec![]) - .await - .unwrap(); - let binding = res.response_body().data.clone().into_json().unwrap(); - let res = binding.get("verifyZkloginSignature").unwrap(); - assert_eq!(res.get("success").unwrap(), false); - } +#[tokio::test] +async fn test_zklogin_sig_verify() { + use shared_crypto::intent::Intent; + use shared_crypto::intent::IntentMessage; + use sui_test_transaction_builder::TestTransactionBuilder; + use sui_types::base_types::SuiAddress; + use sui_types::crypto::Signature; + use sui_types::signature::GenericSignature; + use sui_types::utils::load_test_vectors; + use sui_types::zk_login_authenticator::ZkLoginAuthenticator; + + telemetry_subscribers::init_for_testing(); + + let cluster = start_cluster(ServiceConfig::test_defaults()).await; + + let test_cluster = &cluster.network.validator_fullnode_handle; + test_cluster.wait_for_epoch_all_nodes(1).await; + test_cluster.wait_for_authenticator_state_update().await; + + // Construct a valid zkLogin transaction data, signature. 
+ let (kp, pk_zklogin, inputs) =
+ &load_test_vectors("../sui-types/src/unit_tests/zklogin_test_vectors.json")[1];
+
+ let zklogin_addr = (pk_zklogin).into();
+ let rgp = test_cluster.get_reference_gas_price().await;
+ let gas = test_cluster
+ .fund_address_and_return_gas(rgp, Some(20000000000), zklogin_addr)
+ .await;
+ let tx_data = TestTransactionBuilder::new(zklogin_addr, gas, rgp)
+ .transfer_sui(None, SuiAddress::ZERO)
+ .build();
+ let msg = IntentMessage::new(Intent::sui_transaction(), tx_data.clone());
+ let eph_sig = Signature::new_secure(&msg, kp);
+ let generic_sig = GenericSignature::ZkLoginAuthenticator(ZkLoginAuthenticator::new(
+ inputs.clone(),
+ 2,
+ eph_sig.clone(),
+ ));
+
+ // construct all parameters for the query
+ let bytes = Base64::encode(bcs::to_bytes(&tx_data).unwrap());
+ let signature = Base64::encode(generic_sig.as_ref());
+ let intent_scope = "TRANSACTION_DATA";
+ let author = zklogin_addr.to_string();
+
+ // now query the endpoint with valid tx data bytes and a valid signature with the correct proof for the dev env.
+ let query = r#"{ verifyZkloginSignature(bytes: $bytes, signature: $signature, intentScope: $intent_scope, author: $author ) { success, errors}}"#;
+ let variables = vec![
+ GraphqlQueryVariable {
+ name: "bytes".to_string(),
+ ty: "String!".to_string(),
+ value: json!(bytes),
+ },
+ GraphqlQueryVariable {
+ name: "signature".to_string(),
+ ty: "String!".to_string(),
+ value: json!(signature),
+ },
+ GraphqlQueryVariable {
+ name: "intent_scope".to_string(),
+ ty: "ZkLoginIntentScope!".to_string(),
+ value: json!(intent_scope),
+ },
+ GraphqlQueryVariable {
+ name: "author".to_string(),
+ ty: "SuiAddress!".to_string(),
+ value: json!(author),
+ },
+ ];
+ let res = cluster
+ .graphql_client
+ .execute_to_graphql(query.to_string(), true, variables, vec![])
+ .await
+ .unwrap();
+
+ // a valid signature with tx bytes returns success as true.
+ let binding = res.response_body().data.clone().into_json().unwrap();
+ tracing::info!("binding: {:?}", binding);
+ let res = binding.get("verifyZkloginSignature").unwrap();
+ assert_eq!(res.get("success").unwrap(), true);
+
+ // set up an invalid intent scope.
+ let incorrect_intent_scope = "PERSONAL_MESSAGE";
+ let incorrect_variables = vec![
+ GraphqlQueryVariable {
+ name: "bytes".to_string(),
+ ty: "String!".to_string(),
+ value: json!(bytes),
+ },
+ GraphqlQueryVariable {
+ name: "signature".to_string(),
+ ty: "String!".to_string(),
+ value: json!(signature),
+ },
+ GraphqlQueryVariable {
+ name: "intent_scope".to_string(),
+ ty: "ZkLoginIntentScope!".to_string(),
+ value: json!(incorrect_intent_scope),
+ },
+ GraphqlQueryVariable {
+ name: "author".to_string(),
+ ty: "SuiAddress!".to_string(),
+ value: json!(author),
+ },
+ ];
+ // returns a non-empty errors list in response
+ let res = cluster
+ .graphql_client
+ .execute_to_graphql(query.to_string(), true, incorrect_variables, vec![])
+ .await
+ .unwrap();
+ let binding = res.response_body().data.clone().into_json().unwrap();
+ let res = binding.get("verifyZkloginSignature").unwrap();
+ assert_eq!(res.get("success").unwrap(), false);
+}

- // TODO: add more test cases for transaction execution/dry run in transactional test runner. 
- #[tokio::test] - async fn test_transaction_dry_run() { - telemetry_subscribers::init_for_testing(); - - let cluster = start_cluster(ServiceConfig::test_defaults()).await; - - let addresses = cluster - .network - .validator_fullnode_handle - .wallet - .get_addresses(); - - let sender = addresses[0]; - let recipient = addresses[1]; - let tx = cluster - .network - .validator_fullnode_handle - .test_transaction_builder() - .await - .transfer_sui(Some(1_000), recipient) - .build(); - let tx_bytes = Base64::encode(bcs::to_bytes(&tx).unwrap()); - - let query = r#"{ dryRunTransactionBlock(txBytes: $tx) { +// TODO: add more test cases for transaction execution/dry run in transactional test runner. +#[tokio::test] +async fn test_transaction_dry_run() { + telemetry_subscribers::init_for_testing(); + + let cluster = start_cluster(ServiceConfig::test_defaults()).await; + + let addresses = cluster + .network + .validator_fullnode_handle + .wallet + .get_addresses(); + + let sender = addresses[0]; + let recipient = addresses[1]; + let tx = cluster + .network + .validator_fullnode_handle + .test_transaction_builder() + .await + .transfer_sui(Some(1_000), recipient) + .build(); + let tx_bytes = Base64::encode(bcs::to_bytes(&tx).unwrap()); + + let query = r#"{ dryRunTransactionBlock(txBytes: $tx) { transaction { digest sender { @@ -558,60 +556,60 @@ mod tests { } } }"#; - let variables = vec![GraphqlQueryVariable { - name: "tx".to_string(), - ty: "String!".to_string(), - value: json!(tx_bytes), - }]; - let res = cluster - .graphql_client - .execute_to_graphql(query.to_string(), true, variables, vec![]) - .await - .unwrap(); - let binding = res.response_body().data.clone().into_json().unwrap(); - let res = binding.get("dryRunTransactionBlock").unwrap(); - - let digest = res.get("transaction").unwrap().get("digest").unwrap(); - // Dry run txn does not have digest - assert!(digest.is_null()); - assert!(res.get("error").unwrap().is_null()); - let sender_read = res - .get("transaction") - .unwrap() - .get("sender") - .unwrap() - .get("address") - .unwrap() - .as_str() - .unwrap(); - assert_eq!(sender_read, sender.to_string()); - assert!(res.get("results").unwrap().is_array()); - } + let variables = vec![GraphqlQueryVariable { + name: "tx".to_string(), + ty: "String!".to_string(), + value: json!(tx_bytes), + }]; + let res = cluster + .graphql_client + .execute_to_graphql(query.to_string(), true, variables, vec![]) + .await + .unwrap(); + let binding = res.response_body().data.clone().into_json().unwrap(); + let res = binding.get("dryRunTransactionBlock").unwrap(); + + let digest = res.get("transaction").unwrap().get("digest").unwrap(); + // Dry run txn does not have digest + assert!(digest.is_null()); + assert!(res.get("error").unwrap().is_null()); + let sender_read = res + .get("transaction") + .unwrap() + .get("sender") + .unwrap() + .get("address") + .unwrap() + .as_str() + .unwrap(); + assert_eq!(sender_read, sender.to_string()); + assert!(res.get("results").unwrap().is_array()); +} - // Test dry run where the transaction kind is provided instead of the full transaction. 
-    #[tokio::test]
-    async fn test_transaction_dry_run_with_kind() {
-        telemetry_subscribers::init_for_testing();
-
-        let cluster = start_cluster(ServiceConfig::test_defaults()).await;
-
-        let addresses = cluster
-            .network
-            .validator_fullnode_handle
-            .wallet
-            .get_addresses();
-
-        let recipient = addresses[1];
-        let tx = cluster
-            .network
-            .validator_fullnode_handle
-            .test_transaction_builder()
-            .await
-            .transfer_sui(Some(1_000), recipient)
-            .build();
-        let tx_kind_bytes = Base64::encode(bcs::to_bytes(&tx.into_kind()).unwrap());
-
-        let query = r#"{ dryRunTransactionBlock(txBytes: $tx, txMeta: {}) {
+// Test dry run where the transaction kind is provided instead of the full transaction.
+#[tokio::test]
+async fn test_transaction_dry_run_with_kind() {
+    telemetry_subscribers::init_for_testing();
+
+    let cluster = start_cluster(ServiceConfig::test_defaults()).await;
+
+    let addresses = cluster
+        .network
+        .validator_fullnode_handle
+        .wallet
+        .get_addresses();
+
+    let recipient = addresses[1];
+    let tx = cluster
+        .network
+        .validator_fullnode_handle
+        .test_transaction_builder()
+        .await
+        .transfer_sui(Some(1_000), recipient)
+        .build();
+    let tx_kind_bytes = Base64::encode(bcs::to_bytes(&tx.into_kind()).unwrap());
+
+    let query = r#"{ dryRunTransactionBlock(txBytes: $tx, txMeta: {}) {
             results {
                 mutatedReferences {
                     input {
@@ -634,73 +632,73 @@ mod tests {
             error
         }
     }"#;
-        let variables = vec![GraphqlQueryVariable {
-            name: "tx".to_string(),
-            ty: "String!".to_string(),
-            value: json!(tx_kind_bytes),
-        }];
-        let res = cluster
-            .graphql_client
-            .execute_to_graphql(query.to_string(), true, variables, vec![])
-            .await
-            .unwrap();
-        let binding = res.response_body().data.clone().into_json().unwrap();
-        let res = binding.get("dryRunTransactionBlock").unwrap();
-
-        let digest = res.get("transaction").unwrap().get("digest").unwrap();
-        // Dry run txn does not have digest
-        assert!(digest.is_null());
-        assert!(res.get("error").unwrap().is_null());
-        let sender_read = res.get("transaction").unwrap().get("sender").unwrap();
-        // Since no transaction metadata is provided, we use 0x0 as the sender while dry running the trasanction
-        // in which case the sender is null.
-        assert!(sender_read.is_null());
-        assert!(res.get("results").unwrap().is_array());
-    }
+    let variables = vec![GraphqlQueryVariable {
+        name: "tx".to_string(),
+        ty: "String!".to_string(),
+        value: json!(tx_kind_bytes),
+    }];
+    let res = cluster
+        .graphql_client
+        .execute_to_graphql(query.to_string(), true, variables, vec![])
+        .await
+        .unwrap();
+    let binding = res.response_body().data.clone().into_json().unwrap();
+    let res = binding.get("dryRunTransactionBlock").unwrap();
+
+    let digest = res.get("transaction").unwrap().get("digest").unwrap();
+    // Dry run txn does not have digest
+    assert!(digest.is_null());
+    assert!(res.get("error").unwrap().is_null());
+    let sender_read = res.get("transaction").unwrap().get("sender").unwrap();
+    // Since no transaction metadata is provided, we use 0x0 as the sender while dry running the transaction,
+    // in which case the sender is null.
+    assert!(sender_read.is_null());
+    assert!(res.get("results").unwrap().is_array());
+}

-    // Test that we can handle dry run with failures at execution stage too.
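(Editor's note: the test above passes an empty `txMeta: {}`, which is why the node falls back to `0x0` and `sender` reads back null. If a caller wanted the dry run attributed to a concrete sender, the metadata input can carry one. A hedged sketch of the query text only; whether the schema version in use exposes a `sender` field on the transaction metadata input should be checked against the SDL:)

```rust
// Same shape as the dry-run query above, but with an explicit sender in
// txMeta. `$sender` would be supplied as a `SuiAddress!` variable next to
// `$tx`; the field name is an assumption to verify against the SDL.
fn dry_run_with_sender_query() -> String {
    r#"{ dryRunTransactionBlock(txBytes: $tx, txMeta: { sender: $sender }) {
        transaction { sender { address } }
        error
    }}"#
    .to_string()
}
```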
- #[tokio::test] - async fn test_dry_run_failed_execution() { - telemetry_subscribers::init_for_testing(); - - let cluster = start_cluster(ServiceConfig::test_defaults()).await; - - let addresses = cluster - .network - .validator_fullnode_handle - .wallet - .get_addresses(); - - let sender = addresses[0]; - let coin = *cluster - .network - .validator_fullnode_handle - .wallet - .get_gas_objects_owned_by_address(sender, None) - .await - .unwrap() - .get(1) - .unwrap(); - let tx = cluster - .network - .validator_fullnode_handle - .test_transaction_builder() - .await - // A split coin that goes nowhere -> execution failure - .move_call( - SUI_FRAMEWORK_PACKAGE_ID, - "coin", - "split", - vec![ - CallArg::Object(ObjectArg::ImmOrOwnedObject(coin)), - CallArg::Pure(bcs::to_bytes(&1000u64).unwrap()), - ], - ) - .with_type_args(vec![GAS::type_tag()]) - .build(); - let tx_bytes = Base64::encode(bcs::to_bytes(&tx).unwrap()); - - let query = r#"{ dryRunTransactionBlock(txBytes: $tx) { +// Test that we can handle dry run with failures at execution stage too. +#[tokio::test] +async fn test_dry_run_failed_execution() { + telemetry_subscribers::init_for_testing(); + + let cluster = start_cluster(ServiceConfig::test_defaults()).await; + + let addresses = cluster + .network + .validator_fullnode_handle + .wallet + .get_addresses(); + + let sender = addresses[0]; + let coin = *cluster + .network + .validator_fullnode_handle + .wallet + .get_gas_objects_owned_by_address(sender, None) + .await + .unwrap() + .get(1) + .unwrap(); + let tx = cluster + .network + .validator_fullnode_handle + .test_transaction_builder() + .await + // A split coin that goes nowhere -> execution failure + .move_call( + SUI_FRAMEWORK_PACKAGE_ID, + "coin", + "split", + vec![ + CallArg::Object(ObjectArg::ImmOrOwnedObject(coin)), + CallArg::Pure(bcs::to_bytes(&1000u64).unwrap()), + ], + ) + .with_type_args(vec![GAS::type_tag()]) + .build(); + let tx_bytes = Base64::encode(bcs::to_bytes(&tx).unwrap()); + + let query = r#"{ dryRunTransactionBlock(txBytes: $tx) { results { mutatedReferences { input { @@ -723,47 +721,47 @@ mod tests { error } }"#; - let variables = vec![GraphqlQueryVariable { - name: "tx".to_string(), - ty: "String!".to_string(), - value: json!(tx_bytes), - }]; - let res = cluster - .graphql_client - .execute_to_graphql(query.to_string(), true, variables, vec![]) - .await - .unwrap(); - let binding = res.response_body().data.clone().into_json().unwrap(); - let res = binding.get("dryRunTransactionBlock").unwrap(); - - // Execution failed so the results are null. - assert!(res.get("results").unwrap().is_null()); - // Check that the error is not null and contains the error message. - assert!(res - .get("error") - .unwrap() - .as_str() - .unwrap() - .contains("UnusedValueWithoutDrop")); - } + let variables = vec![GraphqlQueryVariable { + name: "tx".to_string(), + ty: "String!".to_string(), + value: json!(tx_bytes), + }]; + let res = cluster + .graphql_client + .execute_to_graphql(query.to_string(), true, variables, vec![]) + .await + .unwrap(); + let binding = res.response_body().data.clone().into_json().unwrap(); + let res = binding.get("dryRunTransactionBlock").unwrap(); + + // Execution failed so the results are null. + assert!(res.get("results").unwrap().is_null()); + // Check that the error is not null and contains the error message. 
+    assert!(res
+        .get("error")
+        .unwrap()
+        .as_str()
+        .unwrap()
+        .contains("UnusedValueWithoutDrop"));
+}

-    #[tokio::test]
-    async fn test_epoch_data() {
-        telemetry_subscribers::init_for_testing();
+#[tokio::test]
+async fn test_epoch_data() {
+    telemetry_subscribers::init_for_testing();

-        let cluster = start_cluster(ServiceConfig::test_defaults()).await;
+    let cluster = start_cluster(ServiceConfig::test_defaults()).await;

-        cluster
-            .network
-            .validator_fullnode_handle
-            .trigger_reconfiguration()
-            .await;
+    cluster
+        .network
+        .validator_fullnode_handle
+        .trigger_reconfiguration()
+        .await;

-        // Wait for the epoch to be indexed
-        sleep(Duration::from_secs(10)).await;
+    // Wait for the epoch to be indexed
+    sleep(Duration::from_secs(10)).await;

-        // Query the epoch
-        let query = "
+    // Query the epoch
+    let query = "
         {
             epoch(id: 0){
                 liveObjectSetDigest
@@ -771,59 +769,59 @@ mod tests {
             }
         ";

-        let res = cluster
-            .graphql_client
-            .execute_to_graphql(query.to_string(), true, vec![], vec![])
-            .await
-            .unwrap();
-
-        let binding = res.response_body().data.clone().into_json().unwrap();
-
-        // Check that liveObjectSetDigest is not null
-        assert!(!binding
-            .get("epoch")
-            .unwrap()
-            .get("liveObjectSetDigest")
-            .unwrap()
-            .is_null());
-    }
+    let res = cluster
+        .graphql_client
+        .execute_to_graphql(query.to_string(), true, vec![], vec![])
+        .await
+        .unwrap();
+
+    let binding = res.response_body().data.clone().into_json().unwrap();
+
+    // Check that liveObjectSetDigest is not null
+    assert!(!binding
+        .get("epoch")
+        .unwrap()
+        .get("liveObjectSetDigest")
+        .unwrap()
+        .is_null());
+}

-    #[tokio::test]
-    async fn test_payload_using_vars_mutation_passes() {
-        telemetry_subscribers::init_for_testing();
-        let cluster = sui_graphql_rpc::test_infra::cluster::start_cluster(ServiceConfig {
-            limits: Limits {
-                max_query_payload_size: 5000,
-                max_tx_payload_size: 6000,
-                ..Default::default()
-            },
-            ..ServiceConfig::test_defaults()
-        })
-        .await;
-        let addresses = cluster
-            .network
-            .validator_fullnode_handle
-            .wallet
-            .get_addresses();
-
-        let recipient = addresses[1];
-        let tx = cluster
-            .network
-            .validator_fullnode_handle
-            .test_transaction_builder()
-            .await
-            .transfer_sui(Some(1_000), recipient)
-            .build();
-        let signed_tx = cluster
-            .network
-            .validator_fullnode_handle
-            .wallet
-            .sign_transaction(&tx);
-        let (tx_bytes, sigs) = signed_tx.to_tx_bytes_and_signatures();
-        let tx_bytes = tx_bytes.encoded();
-        let sigs = sigs.iter().map(|sig| sig.encoded()).collect::<Vec<_>>();
-
-        let mutation = r#"{
+#[tokio::test]
+async fn test_payload_using_vars_mutation_passes() {
+    telemetry_subscribers::init_for_testing();
+    let cluster = sui_graphql_rpc::test_infra::cluster::start_cluster(ServiceConfig {
+        limits: Limits {
+            max_query_payload_size: 5000,
+            max_tx_payload_size: 6000,
+            ..Default::default()
+        },
+        ..ServiceConfig::test_defaults()
+    })
+    .await;
+    let addresses = cluster
+        .network
+        .validator_fullnode_handle
+        .wallet
+        .get_addresses();
+
+    let recipient = addresses[1];
+    let tx = cluster
+        .network
+        .validator_fullnode_handle
+        .test_transaction_builder()
+        .await
+        .transfer_sui(Some(1_000), recipient)
+        .build();
+    let signed_tx = cluster
+        .network
+        .validator_fullnode_handle
+        .wallet
+        .sign_transaction(&tx);
+    let (tx_bytes, sigs) = signed_tx.to_tx_bytes_and_signatures();
+    let tx_bytes = tx_bytes.encoded();
+    let sigs = sigs.iter().map(|sig| sig.encoded()).collect::<Vec<_>>();
+
+    let mutation = r#"{
        executeTransactionBlock(txBytes: $tx, signatures: $sigs) {
            effects {
                transactionBlock {
                    digest
                }
@@ -833,25 +831,24 @@ mod tests {
            }
        }"#;

-        let variables = vec![
-            GraphqlQueryVariable {
-                name: "tx".to_string(),
-                ty: "String!".to_string(),
-                value: json!(tx_bytes),
-            },
-            GraphqlQueryVariable {
-                name: "sigs".to_string(),
-                ty: "[String!]!".to_string(),
-                value: json!(sigs),
-            },
-        ];
-
-        let res = cluster
-            .graphql_client
-            .execute_mutation_to_graphql(mutation.to_string(), variables)
-            .await
-            .unwrap();
-
-        assert!(res.errors().is_empty(), "{:#?}", res.errors());
-    }
+    let variables = vec![
+        GraphqlQueryVariable {
+            name: "tx".to_string(),
+            ty: "String!".to_string(),
+            value: json!(tx_bytes),
+        },
+        GraphqlQueryVariable {
+            name: "sigs".to_string(),
+            ty: "[String!]!".to_string(),
+            value: json!(sigs),
+        },
+    ];
+
+    let res = cluster
+        .graphql_client
+        .execute_mutation_to_graphql(mutation.to_string(), variables)
+        .await
+        .unwrap();
+
+    assert!(res.errors().is_empty(), "{:#?}", res.errors());
 }
diff --git a/crates/sui-graphql-rpc/tests/examples_validation_tests.rs b/crates/sui-graphql-rpc/tests/examples_validation_tests.rs
index e0599c69405b2..00935cafda0dc 100644
--- a/crates/sui-graphql-rpc/tests/examples_validation_tests.rs
+++ b/crates/sui-graphql-rpc/tests/examples_validation_tests.rs
@@ -1,197 +1,146 @@
 // Copyright (c) Mysten Labs, Inc.
 // SPDX-License-Identifier: Apache-2.0

-#[cfg(feature = "pg_integration")]
-mod tests {
-    use anyhow::{anyhow, Context, Result};
-    use std::cmp::max;
-    use std::collections::BTreeMap;
-    use std::fs;
-    use std::path::PathBuf;
-    use sui_graphql_rpc::config::Limits;
-    use sui_graphql_rpc::test_infra::cluster::{prep_executor_cluster, ExecutorCluster};
-
-    struct Example {
-        contents: String,
-        path: Option<PathBuf>,
-    }
+use anyhow::{anyhow, Context, Result};
+use std::cmp::max;
+use std::collections::BTreeMap;
+use std::fs;
+use std::path::PathBuf;
+use sui_graphql_rpc::config::Limits;
+use sui_graphql_rpc::test_infra::cluster::{prep_executor_cluster, ExecutorCluster};
+
+struct Example {
+    contents: String,
+    path: Option<PathBuf>,
+}

-    fn good_examples() -> Result<BTreeMap<String, Example>> {
-        let examples = PathBuf::from(&env!("CARGO_MANIFEST_DIR")).join("examples");
-
-        let mut dirs = vec![examples.clone()];
-        let mut queries = BTreeMap::new();
-        while let Some(dir) = dirs.pop() {
-            let entries =
-                fs::read_dir(&dir).with_context(|| format!("Looking in {}", dir.display()))?;
-
-            for entry in entries {
-                let entry = entry.with_context(|| format!("Entry in {}", dir.display()))?;
-                let path = entry.path();
-                let typ_ = entry
-                    .file_type()
-                    .with_context(|| format!("Metadata for {}", path.display()))?;
-
-                if typ_.is_dir() {
-                    dirs.push(entry.path());
-                    continue;
-                }
-
-                if path.ends_with(".graphql") {
-                    let contents = fs::read_to_string(&path)
-                        .with_context(|| format!("Reading {}", path.display()))?;
-
-                    let rel_path = path
-                        .strip_prefix(&examples)
-                        .with_context(|| format!("Generating name from {}", path.display()))?
- .with_extension(""); - - let name = rel_path - .to_str() - .ok_or_else(|| anyhow!("Generating name from {}", path.display()))?; - - queries.insert( - name.to_string(), - Example { - contents, - path: Some(path), - }, - ); - } +fn good_examples() -> Result> { + let examples = PathBuf::from(&env!("CARGO_MANIFEST_DIR")).join("examples"); + + let mut dirs = vec![examples.clone()]; + let mut queries = BTreeMap::new(); + while let Some(dir) = dirs.pop() { + let entries = + fs::read_dir(&dir).with_context(|| format!("Looking in {}", dir.display()))?; + + for entry in entries { + let entry = entry.with_context(|| format!("Entry in {}", dir.display()))?; + let path = entry.path(); + let typ_ = entry + .file_type() + .with_context(|| format!("Metadata for {}", path.display()))?; + + if typ_.is_dir() { + dirs.push(entry.path()); + continue; } - } - - Ok(queries) - } - - fn bad_examples() -> BTreeMap { - BTreeMap::from_iter([ - ( - "multiple_queries".to_string(), - Example { - contents: "{ chainIdentifier } { chainIdentifier }".to_string(), - path: None, - }, - ), - ( - "malformed".to_string(), - Example { - contents: "query { }}".to_string(), - path: None, - }, - ), - ( - "invalid".to_string(), - Example { - contents: "djewfbfo".to_string(), - path: None, - }, - ), - ( - "empty".to_string(), - Example { - contents: " ".to_string(), - path: None, - }, - ), - ]) - } - async fn test_query( - cluster: &ExecutorCluster, - name: &str, - query: &Example, - max_nodes: &mut u64, - max_output_nodes: &mut u64, - max_depth: &mut u64, - max_payload: &mut u64, - ) -> Vec { - let resp = cluster - .graphql_client - .execute_to_graphql(query.contents.clone(), true, vec![], vec![]) - .await - .unwrap(); - - let errors = resp.errors(); - if errors.is_empty() { - let usage = resp - .usage() - .expect("Usage not found") - .expect("Usage not found"); - *max_nodes = max(*max_nodes, usage["inputNodes"]); - *max_output_nodes = max(*max_output_nodes, usage["outputNodes"]); - *max_depth = max(*max_depth, usage["depth"]); - *max_payload = max(*max_payload, usage["queryPayload"]); - return vec![]; + if path.ends_with(".graphql") { + let contents = fs::read_to_string(&path) + .with_context(|| format!("Reading {}", path.display()))?; + + let rel_path = path + .strip_prefix(&examples) + .with_context(|| format!("Generating name from {}", path.display()))? 
+ .with_extension(""); + + let name = rel_path + .to_str() + .ok_or_else(|| anyhow!("Generating name from {}", path.display()))?; + + queries.insert( + name.to_string(), + Example { + contents, + path: Some(path), + }, + ); + } } - - errors - .into_iter() - .map(|e| match &query.path { - Some(p) => format!("Query {name:?} at {} failed: {e}", p.display()), - None => format!("Query {name:?} failed: {e}"), - }) - .collect() } - #[tokio::test] - async fn good_examples_within_limits() { - let cluster = prep_executor_cluster().await; - let (mut max_nodes, mut max_output_nodes, mut max_depth, mut max_payload) = (0, 0, 0, 0); - - let mut errors = vec![]; - for (name, example) in good_examples().expect("Could not load examples") { - errors.extend( - test_query( - &cluster, - &name, - &example, - &mut max_nodes, - &mut max_output_nodes, - &mut max_depth, - &mut max_payload, - ) - .await, - ); - } + Ok(queries) +} - // Check that our examples can run with our usage limits - let default_config = Limits::default(); - assert!( - max_nodes <= default_config.max_query_nodes as u64, - "Max nodes {} exceeds default limit {}", - max_nodes, - default_config.max_query_nodes - ); - assert!( - max_output_nodes <= default_config.max_output_nodes as u64, - "Max output nodes {} exceeds default limit {}", - max_output_nodes, - default_config.max_output_nodes - ); - assert!( - max_depth <= default_config.max_query_depth as u64, - "Max depth {} exceeds default limit {}", - max_depth, - default_config.max_query_depth - ); - assert!( - max_payload <= default_config.max_query_payload_size as u64, - "Max payload {} exceeds default limit {}", - max_payload, - default_config.max_query_payload_size - ); +fn bad_examples() -> BTreeMap { + BTreeMap::from_iter([ + ( + "multiple_queries".to_string(), + Example { + contents: "{ chainIdentifier } { chainIdentifier }".to_string(), + path: None, + }, + ), + ( + "malformed".to_string(), + Example { + contents: "query { }}".to_string(), + path: None, + }, + ), + ( + "invalid".to_string(), + Example { + contents: "djewfbfo".to_string(), + path: None, + }, + ), + ( + "empty".to_string(), + Example { + contents: " ".to_string(), + path: None, + }, + ), + ]) +} - assert!(errors.is_empty(), "\n{}", errors.join("\n\n")); +async fn test_query( + cluster: &ExecutorCluster, + name: &str, + query: &Example, + max_nodes: &mut u64, + max_output_nodes: &mut u64, + max_depth: &mut u64, + max_payload: &mut u64, +) -> Vec { + let resp = cluster + .graphql_client + .execute_to_graphql(query.contents.clone(), true, vec![], vec![]) + .await + .unwrap(); + + let errors = resp.errors(); + if errors.is_empty() { + let usage = resp + .usage() + .expect("Usage not found") + .expect("Usage not found"); + *max_nodes = max(*max_nodes, usage["inputNodes"]); + *max_output_nodes = max(*max_output_nodes, usage["outputNodes"]); + *max_depth = max(*max_depth, usage["depth"]); + *max_payload = max(*max_payload, usage["queryPayload"]); + return vec![]; } - #[tokio::test] - async fn bad_examples_fail() { - let cluster = prep_executor_cluster().await; - let (mut max_nodes, mut max_output_nodes, mut max_depth, mut max_payload) = (0, 0, 0, 0); + errors + .into_iter() + .map(|e| match &query.path { + Some(p) => format!("Query {name:?} at {} failed: {e}", p.display()), + None => format!("Query {name:?} failed: {e}"), + }) + .collect() +} + +#[tokio::test] +async fn good_examples_within_limits() { + let cluster = prep_executor_cluster().await; + let (mut max_nodes, mut max_output_nodes, mut max_depth, mut max_payload) = (0, 
0, 0, 0);

-        for (name, example) in bad_examples() {
-            let errors = test_query(
+    let mut errors = vec![];
+    for (name, example) in good_examples().expect("Could not load examples") {
+        errors.extend(
+            test_query(
                &cluster,
                &name,
                &example,
@@ -200,9 +149,57 @@
                &mut max_depth,
                &mut max_payload,
            )
-            .await;
+            .await,
+        );
+    }

-            assert!(!errors.is_empty(), "Query {name:?} should have failed");
-        }
+    // Check that our examples can run with our usage limits
+    let default_config = Limits::default();
+    assert!(
+        max_nodes <= default_config.max_query_nodes as u64,
+        "Max nodes {} exceeds default limit {}",
+        max_nodes,
+        default_config.max_query_nodes
+    );
+    assert!(
+        max_output_nodes <= default_config.max_output_nodes as u64,
+        "Max output nodes {} exceeds default limit {}",
+        max_output_nodes,
+        default_config.max_output_nodes
+    );
+    assert!(
+        max_depth <= default_config.max_query_depth as u64,
+        "Max depth {} exceeds default limit {}",
+        max_depth,
+        default_config.max_query_depth
+    );
+    assert!(
+        max_payload <= default_config.max_query_payload_size as u64,
+        "Max payload {} exceeds default limit {}",
+        max_payload,
+        default_config.max_query_payload_size
+    );
+
+    assert!(errors.is_empty(), "\n{}", errors.join("\n\n"));
+}
+
+#[tokio::test]
+async fn bad_examples_fail() {
+    let cluster = prep_executor_cluster().await;
+    let (mut max_nodes, mut max_output_nodes, mut max_depth, mut max_payload) = (0, 0, 0, 0);
+
+    for (name, example) in bad_examples() {
+        let errors = test_query(
+            &cluster,
+            &name,
+            &example,
+            &mut max_nodes,
+            &mut max_output_nodes,
+            &mut max_depth,
+            &mut max_payload,
+        )
+        .await;
+
+        assert!(!errors.is_empty(), "Query {name:?} should have failed");
    }
}
diff --git a/crates/sui-graphql-rpc/tests/snapshots/snapshot_tests__schema_sdl_export.snap b/crates/sui-graphql-rpc/tests/snapshots/snapshot_tests__schema_sdl_export.snap
index 7b781d6e1de02..9117563abc0a5 100644
--- a/crates/sui-graphql-rpc/tests/snapshots/snapshot_tests__schema_sdl_export.snap
+++ b/crates/sui-graphql-rpc/tests/snapshots/snapshot_tests__schema_sdl_export.snap
@@ -1268,7 +1268,13 @@ type EventEdge {
 }

 input EventFilter {
+    """
+    Filter down to events from transactions sent by this address.
+    """
     sender: SuiAddress
+    """
+    Filter down to the events from this transaction (given by its transaction digest).
+    """
     transactionDigest: String
     """
     Events emitted by a particular module. An event is emitted by a
@@ -2840,11 +2846,8 @@ objects are ones whose
 """
 input ObjectFilter {
     """
-    This field is used to specify the type of objects that should be included in the query
-    results.
-
-    Objects can be filtered by their type's package, package::module, or their fully qualified
-    type name.
+    Filter objects by their type's `package`, `package::module`, or their fully qualified type
+    name.

     Generic types can be queried by either the generic type name, e.g. `0x2::coin::Coin`, or by
     the full type name, such as `0x2::coin::Coin<0x2::sui::SUI>`.
@@ -4292,18 +4295,46 @@ type TransactionBlockEffects {
 }

 input TransactionBlockFilter {
+    """
+    Filter transactions by move function called. Calls can be filtered by the `package`,
+    `package::module`, or the `package::module::name` of their function.
+    """
     function: String

     """
     An input filter selecting for either system or programmable transactions.
     """
     kind: TransactionBlockKindInput
+    """
+    Limit to transactions that occurred strictly after the given checkpoint.
+    """
     afterCheckpoint: UInt53
+    """
+    Limit to transactions in the given checkpoint.
+ """ atCheckpoint: UInt53 + """ + Limit to transaction that occured strictly before the given checkpoint. + """ beforeCheckpoint: UInt53 + """ + Limit to transactions that were signed by the given address. + """ signAddress: SuiAddress + """ + Limit to transactions that sent an object to the given address. + """ recvAddress: SuiAddress + """ + Limit to transactions that accepted the given object as an input. + """ inputObject: SuiAddress + """ + Limit to transactions that output a versioon of this object. + """ changedObject: SuiAddress + """ + Select transactions by their digest. + """ transactionIds: [String!] } diff --git a/crates/sui-indexer-builder/src/indexer_builder.rs b/crates/sui-indexer-builder/src/indexer_builder.rs index f37cab7d58c3c..0c05900f1bc54 100644 --- a/crates/sui-indexer-builder/src/indexer_builder.rs +++ b/crates/sui-indexer-builder/src/indexer_builder.rs @@ -18,6 +18,7 @@ type CheckpointData = (u64, Vec); pub type DataSender = metered_channel::Sender>; const INGESTION_BATCH_SIZE: usize = 100; +const RETRIEVED_CHECKPOINT_CHANNEL_SIZE: usize = 10000; pub struct IndexerBuilder { name: String, @@ -115,9 +116,7 @@ impl Indexer { let live_task_future = match ongoing_tasks.live_task() { Some(live_task) if !self.disable_live_task => { let live_task_future = self.datasource.start_ingestion_task( - live_task.task_name.clone(), - live_task.checkpoint, - live_task.target_checkpoint, + live_task, self.storage.clone(), self.data_mapper.clone(), ); @@ -126,7 +125,7 @@ impl Indexer { _ => None, }; - let backfill_tasks = ongoing_tasks.backfill_tasks(); + let backfill_tasks = ongoing_tasks.backfill_tasks_ordered_desc(); let storage_clone = self.storage.clone(); let data_mapper_clone = self.data_mapper.clone(); let datasource_clone = self.datasource.clone(); @@ -134,12 +133,10 @@ impl Indexer { let handle = spawn_monitored_task!(async { // Execute tasks one by one for backfill_task in backfill_tasks { - if backfill_task.checkpoint < backfill_task.target_checkpoint { + if backfill_task.start_checkpoint < backfill_task.target_checkpoint { datasource_clone .start_ingestion_task( - backfill_task.task_name.clone(), - backfill_task.checkpoint, - backfill_task.target_checkpoint, + backfill_task, storage_clone.clone(), data_mapper_clone.clone(), ) @@ -180,12 +177,18 @@ impl Indexer { match ongoing_tasks.live_task() { None => { self.storage - .register_task( + .register_live_task( format!("{} - Live", self.name), live_task_from_checkpoint, - i64::MAX as u64, ) .await + .tap_ok(|_| { + tracing::info!( + task_name = self.name.as_str(), + "Created live task from {}", + live_task_from_checkpoint, + ); + }) .tap_err(|e| { tracing::error!( "Failed to register live task ({}-MAX): {:?}", @@ -198,15 +201,27 @@ impl Indexer { // We still check this because in the case of slow // block generation (e.g. Ethereum), it's possible we will // stay on the same block for a bit. 
-                if live_task_from_checkpoint != live_task.checkpoint {
-                    live_task.checkpoint = live_task_from_checkpoint;
-                    self.storage.update_task(live_task).await.tap_err(|e| {
-                        tracing::error!(
-                            "Failed to update live task to ({}-MAX): {:?}",
-                            live_task_from_checkpoint,
-                            e
-                        );
-                    })?;
+                if live_task_from_checkpoint != live_task.start_checkpoint {
+                    let old_checkpoint = live_task.start_checkpoint;
+                    live_task.start_checkpoint = live_task_from_checkpoint;
+                    self.storage
+                        .update_task(live_task)
+                        .await
+                        .tap_ok(|_| {
+                            tracing::info!(
+                                task_name = self.name.as_str(),
+                                "Updated live task starting point from {} to {}",
+                                old_checkpoint,
+                                live_task_from_checkpoint,
+                            );
+                        })
+                        .tap_err(|e| {
+                            tracing::error!(
+                                "Failed to update live task to ({}-MAX): {:?}",
+                                live_task_from_checkpoint,
+                                e
+                            );
+                        })?;
                }
            }
        }
@@ -305,13 +320,17 @@ pub trait Persistent: IndexerProgressStore + Sync + Send + Clone {

 #[async_trait]
 pub trait IndexerProgressStore: Send {
     async fn load_progress(&self, task_name: String) -> anyhow::Result<u64>;
+    /// Attempt to save progress. Depending on the `ProgressSavingPolicy`,
+    /// the progress may be cached somewhere instead of flushing to persistent storage.
+    /// Returns the saved checkpoint number, if any. The caller can use this value as a signal
+    /// to see if we have reached the target checkpoint.
     async fn save_progress(
         &mut self,
-        task_name: String,
-        checkpoint_number: u64,
-    ) -> anyhow::Result<()>;
+        task: &Task,
+        checkpoint_numbers: &[u64],
+    ) -> anyhow::Result<Option<u64>>;

-    async fn get_ongoing_tasks(&self, task_prefix: &str) -> Result<Vec<Task>, Error>;
+    async fn get_ongoing_tasks(&self, task_prefix: &str) -> Result<Tasks, Error>;

     async fn get_largest_backfill_task_target_checkpoint(
         &self,
@@ -321,10 +340,16 @@ pub trait IndexerProgressStore: Send {
     async fn register_task(
         &mut self,
         task_name: String,
-        checkpoint: u64,
+        start_checkpoint: u64,
         target_checkpoint: u64,
     ) -> Result<(), anyhow::Error>;

+    async fn register_live_task(
+        &mut self,
+        task_name: String,
+        start_checkpoint: u64,
+    ) -> Result<(), anyhow::Error>;
+
     async fn update_task(&mut self, task: Task) -> Result<(), Error>;
 }

@@ -332,9 +357,7 @@ pub trait IndexerProgressStore: Send {
 pub trait Datasource: Sync + Send {
     async fn start_ingestion_task(
         &self,
-        task_name: String,
-        starting_checkpoint: u64,
-        target_checkpoint: u64,
+        task: Task,
         mut storage: P,
         data_mapper: M,
     ) -> Result<(), Error>
@@ -342,62 +365,61 @@
        M: DataMapper,
        P: Persistent,
    {
+        let task_name = task.task_name.clone();
+        let task_name_prefix = task.name_prefix();
+        let task_type_label = task.type_str();
+        let starting_checkpoint = task.start_checkpoint;
+        let target_checkpoint = task.target_checkpoint;
        let ingestion_batch_size = std::env::var("INGESTION_BATCH_SIZE")
            .unwrap_or(INGESTION_BATCH_SIZE.to_string())
            .parse::<usize>()
            .unwrap();
+        let checkpoint_channel_size = std::env::var("RETRIEVED_CHECKPOINT_CHANNEL_SIZE")
+            .unwrap_or(RETRIEVED_CHECKPOINT_CHANNEL_SIZE.to_string())
+            .parse::<usize>()
+            .unwrap();
        tracing::info!(
            task_name,
            ingestion_batch_size,
+            checkpoint_channel_size,
            "Starting ingestion task ({}-{})",
            starting_checkpoint,
            target_checkpoint,
        );
-        let is_live_task = target_checkpoint == i64::MAX as u64;
        let (data_sender, data_rx) = metered_channel::channel(
-            1000,
+            checkpoint_channel_size,
            &mysten_metrics::get_metrics()
                .unwrap()
                .channel_inflight
-                .with_label_values(&[&task_name]),
+                // This metric only works correctly while there is at most one backfill task running per task name.
+                // It will be unusable when there are parallel backfill tasks per task name.
+                .with_label_values(&[&format!("{}-{}", task_name_prefix, task_type_label)]),
        );
-        let join_handle = self
-            .start_data_retrieval(starting_checkpoint, target_checkpoint, data_sender)
-            .await?;
-
+        let is_live_task = task.is_live_task;
+        let join_handle = self.start_data_retrieval(task.clone(), data_sender).await?;
        let processed_checkpoints_metrics = self
            .get_tasks_processed_checkpoints_metric()
-            .with_label_values(&[&task_name]);
+            .with_label_values(&[task_name_prefix, task_type_label]);
        // track remaining checkpoints per task, except for live task
        let remaining_checkpoints_metric = if !is_live_task {
            let remaining = self
                .get_tasks_remaining_checkpoints_metric()
-                .with_label_values(&[&task_name]);
+                .with_label_values(&[task_name_prefix]);
            remaining.set((target_checkpoint - starting_checkpoint + 1) as i64);
            Some(remaining)
        } else {
            None
        };
-        // track current checkpoint for live task
-        let live_task_current_checkpoint_metrics = if is_live_task {
-            let m = self
-                .get_live_task_checkpoint_metric()
-                .with_label_values(&[&task_name]);
-            m.set((starting_checkpoint) as i64);
-            Some(m)
-        } else {
-            None
-        };
        let mut stream = mysten_metrics::metered_channel::ReceiverStream::new(data_rx)
            .ready_chunks(ingestion_batch_size);
-
+        let mut last_saved_checkpoint = None;
        while let Some(batch) = stream.next().await {
-            // unwrap safe: at least 1 element in the batch
            let mut max_height = 0;
+            let mut heights = vec![];
            let mut data = vec![];
-            let mut batch_size = 0;
            for (height, d) in batch {
+                // Filter out data with height > target_checkpoint, in case data source returns any
                if height > target_checkpoint {
                    tracing::warn!(
                        task_name,
@@ -407,36 +429,52 @@
                    continue;
                }
                max_height = std::cmp::max(max_height, height);
-                batch_size += 1;
+                heights.push(height);
                data.extend(d);
            }
            tracing::debug!(
                task_name,
                max_height,
                "Ingestion task received {} blocks.",
-                batch_size,
+                heights.len(),
            );
            let timer = tokio::time::Instant::now();
            if !data.is_empty() {
+                let timer = tokio::time::Instant::now();
                let processed_data = data.into_iter().try_fold(vec![], |mut result, d| {
                    result.append(&mut data_mapper.map(d)?);
                    Ok::<Vec<_>, Error>(result)
                })?;
+                tracing::debug!(
+                    task_name,
+                    max_height,
+                    "Data mapper processed {} blocks in {}ms.",
+                    heights.len(),
+                    timer.elapsed().as_millis(),
+                );
+                let timer = tokio::time::Instant::now();
                // TODO: batch write data
                // TODO: we might be able to write data and progress in a single transaction.
                storage.write(processed_data).await?;
+                tracing::debug!(
+                    task_name,
+                    max_height,
+                    "Processed data ({} blocks) was written to storage in {}ms.",
+                    heights.len(),
+                    timer.elapsed().as_millis(),
+                );
            }
-            // TODO: batch progress
-            storage.save_progress(task_name.clone(), max_height).await?;
+            last_saved_checkpoint = storage.save_progress(&task, &heights).await?;
            tracing::debug!(
                task_name,
                max_height,
+                last_saved_checkpoint,
                "Ingestion task processed {} blocks in {}ms",
-                batch_size,
+                heights.len(),
                timer.elapsed().as_millis(),
            );
-            processed_checkpoints_metrics.inc_by(batch_size as u64);
+            processed_checkpoints_metrics.inc_by(heights.len() as u64);
            if let Some(m) = &remaining_checkpoints_metric {
                // Note this is only approximate as the data may come in out of order
                m.set(std::cmp::max(
@@ -444,16 +482,32 @@
                    0,
                ));
            }
-            if let Some(m) = &live_task_current_checkpoint_metrics {
-                m.set((max_height) as i64)
-            }
-            if max_height > target_checkpoint {
-                break;
+            // If we have reached the target checkpoint, exit proactively
+            if let Some(cp) = last_saved_checkpoint {
+                if cp >= target_checkpoint {
+                    // Task is done
+                    break;
+                }
            }
        }
        if is_live_task {
            // Live task should never exit, except in unit tests
-            tracing::error!(task_name, "Live task exiting");
+            tracing::error!(task_name, "Live task exiting unexpectedly");
+        } else if let Some(last_saved_checkpoint) = last_saved_checkpoint {
+            if last_saved_checkpoint < target_checkpoint {
+                tracing::error!(
+                    task_name,
+                    last_saved_checkpoint,
+                    "Task exiting before reaching target checkpoint",
+                );
+            } else {
+                tracing::info!(task_name, "Backfill task is done, exiting");
+            }
+        } else {
+            tracing::error!(
+                task_name,
+                "Task exiting unexpectedly with no progress saved"
+            );
        }
        join_handle.abort();
        if let Some(m) = &remaining_checkpoints_metric {
@@ -464,8 +518,7 @@

     async fn start_data_retrieval(
         &self,
-        starting_checkpoint: u64,
-        target_checkpoint: u64,
+        task: Task,
         data_sender: DataSender,
     ) -> Result<JoinHandle<()>, Error>;

@@ -476,8 +529,6 @@
     fn get_tasks_remaining_checkpoints_metric(&self) -> &IntGaugeVec;

     fn get_tasks_processed_checkpoints_metric(&self) -> &IntCounterVec;
-
-    fn get_live_task_checkpoint_metric(&self) -> &IntGaugeVec;
 }

 pub enum BackfillStrategy {
diff --git a/crates/sui-indexer-builder/src/lib.rs b/crates/sui-indexer-builder/src/lib.rs
index af00dde516ac0..679f335a280cf 100644
--- a/crates/sui-indexer-builder/src/lib.rs
+++ b/crates/sui-indexer-builder/src/lib.rs
@@ -2,33 +2,67 @@
 // SPDX-License-Identifier: Apache-2.0

 pub mod indexer_builder;
+pub mod progress;
+
+pub const LIVE_TASK_TARGET_CHECKPOINT: i64 = i64::MAX;

 #[derive(Clone, Debug)]
 pub struct Task {
     pub task_name: String,
-    pub checkpoint: u64,
+    pub start_checkpoint: u64,
     pub target_checkpoint: u64,
     pub timestamp: u64,
+    pub is_live_task: bool,
 }

-pub trait Tasks {
-    fn live_task(&self) -> Option<Task>;
+impl Task {
+    // TODO: this is really fragile and we should fix the task naming thing and storage schema asap
+    pub fn name_prefix(&self) -> &str {
+        self.task_name.split(' ').next().unwrap_or("Unknown")
+    }
+
+    pub fn type_str(&self) -> &str {
+        if self.is_live_task {
+            "live"
+        } else {
+            "backfill"
+        }
+    }
+}

-    fn backfill_tasks(&self) -> Vec<Task>;
+#[derive(Clone, Debug)]
+pub struct Tasks {
+    live_task: Option<Task>,
+    backfill_tasks: Vec<Task>,
 }

-impl Tasks for Vec<Task> {
-    fn live_task(&self) -> Option<Task> {
-        // TODO: Change the schema to record live task properly.
-        self.iter()
-            .find(|t| t.target_checkpoint == i64::MAX as u64)
-            .cloned()
+impl Tasks {
+    pub fn new(tasks: Vec<Task>) -> anyhow::Result<Self> {
+        let mut live_tasks = vec![];
+        let mut backfill_tasks = vec![];
+        for task in tasks {
+            if task.is_live_task {
+                live_tasks.push(task);
+            } else {
+                backfill_tasks.push(task);
+            }
+        }
+        if live_tasks.len() > 1 {
+            anyhow::bail!("More than one live task found: {:?}", live_tasks);
+        }
+        Ok(Self {
+            live_task: live_tasks.pop(),
+            backfill_tasks,
+        })
+    }
+
+    pub fn live_task(&self) -> Option<Task> {
+        self.live_task.clone()
     }

-    fn backfill_tasks(&self) -> Vec<Task> {
-        self.iter()
-            .filter(|t| t.target_checkpoint != i64::MAX as u64)
-            .cloned()
-            .collect()
+    pub fn backfill_tasks_ordered_desc(&self) -> Vec<Task> {
+        let mut tasks = self.backfill_tasks.clone();
+        tasks.sort_by(|t1, t2| t2.start_checkpoint.cmp(&t1.start_checkpoint));
+        tasks
     }
 }
diff --git a/crates/sui-indexer-builder/src/progress.rs b/crates/sui-indexer-builder/src/progress.rs
new file mode 100644
index 0000000000000..4b53a41fcba7c
--- /dev/null
+++ b/crates/sui-indexer-builder/src/progress.rs
@@ -0,0 +1,249 @@
+// Copyright (c) Mysten Labs, Inc.
+// SPDX-License-Identifier: Apache-2.0
+
+use std::collections::{HashMap, HashSet};
+use std::sync::{Arc, Mutex};
+
+use crate::Task;
+
+#[derive(Debug, Clone)]
+pub enum ProgressSavingPolicy {
+    SaveAfterDuration(SaveAfterDurationPolicy),
+    OutOfOrderSaveAfterDuration(OutOfOrderSaveAfterDurationPolicy),
+}
+
+#[derive(Debug, Clone)]
+pub struct SaveAfterDurationPolicy {
+    duration: tokio::time::Duration,
+    last_save_time: Arc<Mutex<HashMap<String, Option<tokio::time::Instant>>>>,
+}
+
+impl SaveAfterDurationPolicy {
+    pub fn new(duration: tokio::time::Duration) -> Self {
+        Self {
+            duration,
+            last_save_time: Arc::new(Mutex::new(HashMap::new())),
+        }
+    }
+}
+
+#[derive(Debug, Clone)]
+pub struct OutOfOrderSaveAfterDurationPolicy {
+    duration: tokio::time::Duration,
+    last_save_time: Arc<Mutex<HashMap<String, Option<tokio::time::Instant>>>>,
+    seen: Arc<Mutex<HashMap<String, HashSet<u64>>>>,
+    next_to_fill: Arc<Mutex<HashMap<String, Option<u64>>>>,
+}
+
+impl OutOfOrderSaveAfterDurationPolicy {
+    pub fn new(duration: tokio::time::Duration) -> Self {
+        Self {
+            duration,
+            last_save_time: Arc::new(Mutex::new(HashMap::new())),
+            seen: Arc::new(Mutex::new(HashMap::new())),
+            next_to_fill: Arc::new(Mutex::new(HashMap::new())),
+        }
+    }
+}
+
+impl ProgressSavingPolicy {
+    /// If this returns Some(progress), the caller should save the progress to the DB.
+    pub fn cache_progress(&mut self, task: &Task, heights: &[u64]) -> Option<u64> {
+        let task_name = task.task_name.clone();
+        let start_height = task.start_checkpoint;
+        let target_height = task.target_checkpoint;
+        match self {
+            ProgressSavingPolicy::SaveAfterDuration(policy) => {
+                let height = *heights.iter().max().unwrap();
+                let mut last_save_time_guard = policy.last_save_time.lock().unwrap();
+                let last_save_time = last_save_time_guard.entry(task_name).or_insert(None);
+                if height >= target_height {
+                    *last_save_time = Some(tokio::time::Instant::now());
+                    return Some(height);
+                }
+                if let Some(v) = last_save_time {
+                    if v.elapsed() >= policy.duration {
+                        *last_save_time = Some(tokio::time::Instant::now());
+                        Some(height)
+                    } else {
+                        None
+                    }
+                } else {
+                    // update `last_save_time` to now but don't actually save progress
+                    *last_save_time = Some(tokio::time::Instant::now());
+                    None
+                }
+            }
+            ProgressSavingPolicy::OutOfOrderSaveAfterDuration(policy) => {
+                let mut next_to_fill = {
+                    let mut next_to_fill_guard = policy.next_to_fill.lock().unwrap();
+                    (*next_to_fill_guard
+                        .entry(task_name.clone())
+                        .or_insert(Some(start_height)))
+                    .unwrap()
+                };
+                let old_next_to_fill = next_to_fill;
+                {
+                    let mut seen_guard = policy.seen.lock().unwrap();
+                    let seen = seen_guard
+                        .entry(task_name.clone())
+                        .or_insert(HashSet::new());
+                    seen.extend(heights.iter().cloned());
+                    while seen.remove(&next_to_fill) {
+                        next_to_fill += 1;
+                    }
+                }
+                // We made some progress in filling gaps
+                if old_next_to_fill != next_to_fill {
+                    policy
+                        .next_to_fill
+                        .lock()
+                        .unwrap()
+                        .insert(task_name.clone(), Some(next_to_fill));
+                }
+
+                let mut last_save_time_guard = policy.last_save_time.lock().unwrap();
+                let last_save_time = last_save_time_guard
+                    .entry(task_name.clone())
+                    .or_insert(None);
+
+                // If we have reached the target height, we always save
+                if next_to_fill > target_height {
+                    *last_save_time = Some(tokio::time::Instant::now());
+                    return Some(next_to_fill - 1);
+                }
+                // Regardless of whether we made progress, we should save if we have waited long enough
+                if let Some(v) = last_save_time {
+                    if v.elapsed() >= policy.duration && next_to_fill > start_height {
+                        *last_save_time = Some(tokio::time::Instant::now());
+                        Some(next_to_fill - 1)
+                    } else {
+                        None
+                    }
+                } else {
+                    // update `last_save_time` to now but don't actually save progress
+                    *last_save_time = Some(tokio::time::Instant::now());
+                    None
+                }
+            }
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+
+    use super::*;
+
+    #[tokio::test]
+    async fn test_save_after_duration_policy() {
+        let duration = tokio::time::Duration::from_millis(100);
+        let mut policy =
+            ProgressSavingPolicy::SaveAfterDuration(SaveAfterDurationPolicy::new(duration));
+        assert_eq!(
+            policy.cache_progress(&new_test_task("task1", 0, 100), &[1]),
+            None
+        );
+        tokio::time::sleep(duration).await;
+        assert_eq!(
+            policy.cache_progress(&new_test_task("task1", 0, 100), &[2]),
+            Some(2)
+        );
+        tokio::time::sleep(duration).await;
+        assert_eq!(
+            policy.cache_progress(&new_test_task("task1", 0, 100), &[3]),
+            Some(3)
+        );
+
+        assert_eq!(
+            policy.cache_progress(&new_test_task("task2", 0, 100), &[4]),
+            None
+        );
+        tokio::time::sleep(duration).await;
+        assert_eq!(
+            policy.cache_progress(&new_test_task("task2", 0, 100), &[5, 6]),
+            Some(6)
+        );
+        tokio::time::sleep(duration).await;
+        assert_eq!(
+            policy.cache_progress(&new_test_task("task2", 0, 100), &[8, 7]),
+            Some(8)
+        );
+    }
+
+    #[tokio::test]
+    async fn test_out_of_order_save_after_duration_policy() {
+        let duration =
tokio::time::Duration::from_millis(100);
+        let mut policy = ProgressSavingPolicy::OutOfOrderSaveAfterDuration(
+            OutOfOrderSaveAfterDurationPolicy::new(duration),
+        );
+
+        assert_eq!(
+            policy.cache_progress(&new_test_task("task1", 0, 100), &[0]),
+            None
+        );
+        tokio::time::sleep(duration).await;
+        assert_eq!(
+            policy.cache_progress(&new_test_task("task1", 0, 100), &[1]),
+            Some(1)
+        );
+        assert_eq!(
+            policy.cache_progress(&new_test_task("task1", 0, 100), &[3]),
+            None
+        );
+        tokio::time::sleep(duration).await;
+        assert_eq!(
+            policy.cache_progress(&new_test_task("task1", 0, 100), &[4]),
+            Some(1)
+        );
+        tokio::time::sleep(duration).await;
+        assert_eq!(
+            policy.cache_progress(&new_test_task("task1", 0, 100), &[2]),
+            Some(4)
+        );
+
+        assert_eq!(
+            policy.cache_progress(&new_test_task("task2", 0, 100), &[0]),
+            None
+        );
+        tokio::time::sleep(duration).await;
+        assert_eq!(
+            policy.cache_progress(&new_test_task("task2", 0, 100), &[1]),
+            Some(1)
+        );
+        tokio::time::sleep(duration).await;
+        assert_eq!(
+            policy.cache_progress(&new_test_task("task2", 0, 100), &[2]),
+            Some(2)
+        );
+        assert_eq!(
+            policy.cache_progress(&new_test_task("task2", 0, 100), &[3]),
+            None
+        );
+        tokio::time::sleep(duration).await;
+        assert_eq!(
+            policy.cache_progress(&new_test_task("task2", 0, 100), &[4]),
+            Some(4)
+        );
+
+        assert_eq!(
+            policy.cache_progress(&new_test_task("task2", 0, 100), &[6, 7, 8]),
+            None
+        );
+        tokio::time::sleep(duration).await;
+        assert_eq!(
+            policy.cache_progress(&new_test_task("task2", 0, 100), &[5, 9]),
+            Some(9)
+        );
+    }
+
+    fn new_test_task(name: &str, start: u64, target: u64) -> Task {
+        Task {
+            task_name: name.to_string(),
+            start_checkpoint: start,
+            target_checkpoint: target,
+            timestamp: 0,
+            is_live_task: false,
+        }
+    }
+}
diff --git a/crates/sui-indexer-builder/tests/indexer_test_utils.rs b/crates/sui-indexer-builder/tests/indexer_test_utils.rs
index 0f9040ca93b4c..8d8d47be92274 100644
--- a/crates/sui-indexer-builder/tests/indexer_test_utils.rs
+++ b/crates/sui-indexer-builder/tests/indexer_test_utils.rs
@@ -16,7 +16,7 @@ use mysten_metrics::spawn_monitored_task;
 use sui_indexer_builder::indexer_builder::{
     DataMapper, DataSender, Datasource, IndexerProgressStore, Persistent,
 };
-use sui_indexer_builder::Task;
+use sui_indexer_builder::{Task, Tasks, LIVE_TASK_TARGET_CHECKPOINT};

 pub struct TestDatasource<T> {
     pub data: Vec<T>,
@@ -33,14 +33,13 @@
 where
 {
     async fn start_data_retrieval(
        &self,
-        starting_checkpoint: u64,
-        _target_checkpoint: u64,
+        task: Task,
        data_sender: DataSender,
    ) -> Result<JoinHandle<()>, Error> {
        let data_clone = self.data.clone();

        Ok(spawn_monitored_task!(async {
-            let mut cp = starting_checkpoint;
+            let mut cp = task.start_checkpoint;
            while cp < data_clone.len() as u64 {
                data_sender
                    .send((cp, vec![data_clone[cp as usize].clone()]))
@@ -68,11 +67,6 @@
        // This is dummy
        &self.counter_metric
    }
-
-    fn get_live_task_checkpoint_metric(&self) -> &IntGaugeVec {
-        // This is dummy
-        &self.gauge_metric
-    }
 }

 #[derive(Clone, Debug, Default)]
@@ -99,7 +93,7 @@ impl InMemoryPersistent {
            .filter(|task| task.task_name.starts_with(task_prefix))
            .cloned()
            .collect::<Vec<_>>();
-        tasks.sort_by(|t1, t2| t2.checkpoint.cmp(&t1.checkpoint));
+        tasks.sort_by(|t1, t2| t2.start_checkpoint.cmp(&t1.start_checkpoint));
        Ok(tasks)
    }
 }

@@ -113,35 +107,35 @@ impl IndexerProgressStore for InMemoryPersistent {
            .await
            .get(&task_name)
            .unwrap()
-            .checkpoint)
+            .start_checkpoint)
    }

    async fn save_progress(
        &mut self,
-        task_name: String,
-        checkpoint_number: u64,
-    ) -> anyhow::Result<()> {
+        task:
&Task,
+        checkpoint_numbers: &[u64],
+    ) -> anyhow::Result<Option<u64>> {
+        let checkpoint_number = *checkpoint_numbers.last().unwrap();
        self.progress_store
            .lock()
            .await
-            .get_mut(&task_name)
+            .get_mut(&task.task_name)
            .unwrap()
-            .checkpoint = checkpoint_number;
-        Ok(())
+            .start_checkpoint = checkpoint_number;
+        Ok(Some(checkpoint_number))
    }

-    async fn get_ongoing_tasks(&self, task_prefix: &str) -> Result<Vec<Task>, Error> {
-        let mut tasks = self
+    async fn get_ongoing_tasks(&self, task_prefix: &str) -> Result<Tasks, Error> {
+        let tasks = self
            .progress_store
            .lock()
            .await
            .values()
            .filter(|task| task.task_name.starts_with(task_prefix))
-            .filter(|task| task.checkpoint.lt(&task.target_checkpoint))
+            .filter(|task| task.start_checkpoint.lt(&task.target_checkpoint))
            .cloned()
            .collect::<Vec<_>>();
-        tasks.sort_by(|t1, t2| t2.checkpoint.cmp(&t1.checkpoint));
-        Ok(tasks)
+        Tasks::new(tasks)
    }

    async fn get_largest_backfill_task_target_checkpoint(
        &self,
@@ -169,9 +163,31 @@
            task_name.clone(),
            Task {
                task_name: task_name.clone(),
-                checkpoint,
+                start_checkpoint: checkpoint,
                target_checkpoint,
                timestamp: SystemTime::now().duration_since(UNIX_EPOCH)?.as_millis() as u64,
+                is_live_task: false,
+            },
+        );
+        if existing.is_some() {
+            return Err(anyhow!("Task {task_name} already exists"));
+        }
+        Ok(())
+    }
+
+    async fn register_live_task(
+        &mut self,
+        task_name: String,
+        checkpoint: u64,
+    ) -> Result<(), Error> {
+        let existing = self.progress_store.lock().await.insert(
+            task_name.clone(),
+            Task {
+                task_name: task_name.clone(),
+                start_checkpoint: checkpoint,
+                target_checkpoint: LIVE_TASK_TARGET_CHECKPOINT as u64,
+                timestamp: SystemTime::now().duration_since(UNIX_EPOCH)?.as_millis() as u64,
+                is_live_task: true,
            },
        );
        if existing.is_some() {
diff --git a/crates/sui-indexer-builder/tests/indexer_tests.rs b/crates/sui-indexer-builder/tests/indexer_tests.rs
index 8beeddcb5b74b..5cbfdb4da4828 100644
--- a/crates/sui-indexer-builder/tests/indexer_tests.rs
+++ b/crates/sui-indexer-builder/tests/indexer_tests.rs
@@ -7,7 +7,7 @@ use prometheus::{
     IntGaugeVec, Registry,
 };
 use sui_indexer_builder::indexer_builder::{BackfillStrategy, IndexerBuilder};
-use sui_indexer_builder::Task;
+use sui_indexer_builder::{Task, LIVE_TASK_TARGET_CHECKPOINT};

 mod indexer_test_utils;

@@ -119,9 +119,10 @@ async fn indexer_partitioned_task_with_data_already_in_db_test1() {
        "test_indexer - backfill - 1".to_string(),
        Task {
            task_name: "test_indexer - backfill - 1".to_string(),
-            checkpoint: 30,
+            start_checkpoint: 30,
            target_checkpoint: 30,
            timestamp: 0,
+            is_live_task: false,
        },
    );
    let mut indexer = IndexerBuilder::new(
@@ -170,9 +171,10 @@ async fn indexer_partitioned_task_with_data_already_in_db_test2() {
        "test_indexer - backfill - 1".to_string(),
        Task {
            task_name: "test_indexer - backfill - 1".to_string(),
-            checkpoint: 30,
+            start_checkpoint: 30,
            target_checkpoint: 30,
            timestamp: 0,
+            is_live_task: false,
        },
    );
    let mut indexer = IndexerBuilder::new(
@@ -222,18 +224,20 @@ async fn indexer_partitioned_task_with_data_already_in_db_test3() {
        "test_indexer - backfill - 20:30".to_string(),
        Task {
            task_name: "test_indexer - backfill - 20:30".to_string(),
-            checkpoint: 30,
+            start_checkpoint: 30,
            target_checkpoint: 30,
            timestamp: 0,
+            is_live_task: false,
        },
    );
    persistent.progress_store.lock().await.insert(
        "test_indexer - backfill - 10:19".to_string(),
        Task {
            task_name: "test_indexer - backfill - 10:19".to_string(),
-            checkpoint: 10,
+            start_checkpoint: 10,
            target_checkpoint: 19,
            timestamp: 0,
+            is_live_task:
false, }, ); let mut indexer = IndexerBuilder::new( @@ -278,18 +282,20 @@ async fn indexer_partitioned_task_with_data_already_in_db_test4() { "test_indexer - backfill - 20:30".to_string(), Task { task_name: "test_indexer - backfill - 20:30".to_string(), - checkpoint: 30, + start_checkpoint: 30, target_checkpoint: 30, timestamp: 0, + is_live_task: false, }, ); persistent.progress_store.lock().await.insert( "test_indexer - backfill - 10:19".to_string(), Task { task_name: "test_indexer - backfill - 10:19".to_string(), - checkpoint: 10, + start_checkpoint: 10, target_checkpoint: 19, timestamp: 0, + is_live_task: false, }, ); let mut indexer = IndexerBuilder::new( @@ -338,9 +344,10 @@ async fn indexer_with_existing_live_task1() { "test_indexer - Live".to_string(), Task { task_name: "test_indexer - Live".to_string(), - checkpoint: 30, - target_checkpoint: i64::MAX as u64, + start_checkpoint: 30, + target_checkpoint: LIVE_TASK_TARGET_CHECKPOINT as u64, timestamp: 0, + is_live_task: true, }, ); let mut indexer = IndexerBuilder::new( @@ -383,9 +390,10 @@ async fn indexer_with_existing_live_task2() { "test_indexer - Live".to_string(), Task { task_name: "test_indexer - Live".to_string(), - checkpoint: 30, - target_checkpoint: i64::MAX as u64, + start_checkpoint: 30, + target_checkpoint: LIVE_TASK_TARGET_CHECKPOINT as u64, timestamp: 10, + is_live_task: true, }, ); let mut indexer = IndexerBuilder::new( @@ -414,7 +422,7 @@ fn assert_ranges(desc_ordered_tasks: &[Task], ranges: Vec<(u64, u64)>) { let mut iter = desc_ordered_tasks.iter(); for (start, end) in ranges { let task = iter.next().unwrap(); - assert_eq!(start, task.checkpoint); + assert_eq!(start, task.start_checkpoint); assert_eq!(end, task.target_checkpoint); } } @@ -438,9 +446,10 @@ async fn resume_test() { "test_indexer - backfill - 30".to_string(), Task { task_name: "test_indexer - backfill - 30".to_string(), - checkpoint: 10, + start_checkpoint: 10, target_checkpoint: 30, timestamp: 0, + is_live_task: false, }, ); let mut indexer = IndexerBuilder::new( @@ -475,6 +484,11 @@ fn new_gauge_vec(registry: &Registry) -> IntGaugeVec { } fn new_counter_vec(registry: &Registry) -> IntCounterVec { - register_int_counter_vec_with_registry!("whatever_counter", "whatever", &["whatever"], registry,) - .unwrap() + register_int_counter_vec_with_registry!( + "whatever_counter", + "whatever", + &["whatever1", "whatever2"], + registry, + ) + .unwrap() } diff --git a/crates/sui-indexer/Cargo.toml b/crates/sui-indexer/Cargo.toml index 639a5a976318f..245d47d6fb536 100644 --- a/crates/sui-indexer/Cargo.toml +++ b/crates/sui-indexer/Cargo.toml @@ -16,7 +16,7 @@ bytes.workspace = true chrono.workspace = true clap = { workspace = true, features = ["env"] } csv.workspace = true -diesel = { workspace = true, features = ["postgres", "chrono", "r2d2", "serde_json"] } +diesel = { workspace = true, features = ["chrono", "serde_json"] } diesel-async = { workspace = true, features = ["bb8", "postgres", "async-connection-wrapper"] } bb8 = "0.8.5" futures.workspace = true @@ -63,10 +63,6 @@ move-binary-format.workspace = true diesel_migrations.workspace = true cached.workspace = true -[features] -default = [] -pg_integration = [] - [dev-dependencies] sui-keys.workspace = true sui-move-build.workspace = true diff --git a/crates/sui-indexer/migrations/pg/2023-08-19-044023_objects/up.sql b/crates/sui-indexer/migrations/pg/2023-08-19-044023_objects/up.sql index 5909f26eed777..e2fb5cabbf372 100644 --- a/crates/sui-indexer/migrations/pg/2023-08-19-044023_objects/up.sql +++ 
b/crates/sui-indexer/migrations/pg/2023-08-19-044023_objects/up.sql @@ -102,5 +102,6 @@ CREATE INDEX objects_snapshot_checkpoint_sequence_number ON objects_snapshot (ch CREATE INDEX objects_snapshot_owner ON objects_snapshot (owner_type, owner_id, object_id) WHERE owner_type BETWEEN 1 AND 2 AND owner_id IS NOT NULL; CREATE INDEX objects_snapshot_coin_owner ON objects_snapshot (owner_id, coin_type, object_id) WHERE coin_type IS NOT NULL AND owner_type = 1; CREATE INDEX objects_snapshot_coin_only ON objects_snapshot (coin_type, object_id) WHERE coin_type IS NOT NULL; -CREATE INDEX objects_snapshot_package_module_name_full_type ON objects_snapshot (object_type_package, object_type_module, object_type_name, object_type); +CREATE INDEX objects_snapshot_type_id ON objects_snapshot (object_type_package, object_type_module, object_type_name, object_type, object_id); +CREATE INDEX objects_snapshot_id_type ON objects_snapshot (object_id, object_type_package, object_type_module, object_type_name, object_type); CREATE INDEX objects_snapshot_owner_package_module_name_full_type ON objects_snapshot (owner_id, object_type_package, object_type_module, object_type_name, object_type); diff --git a/crates/sui-indexer/migrations/pg/2024-09-12-150939_tx_affected/down.sql b/crates/sui-indexer/migrations/pg/2024-09-12-150939_tx_affected/down.sql new file mode 100644 index 0000000000000..98cc9c0a36ce9 --- /dev/null +++ b/crates/sui-indexer/migrations/pg/2024-09-12-150939_tx_affected/down.sql @@ -0,0 +1 @@ +DROP TABLE IF EXISTS tx_affected_addresses; diff --git a/crates/sui-indexer/migrations/pg/2024-09-12-150939_tx_affected/up.sql b/crates/sui-indexer/migrations/pg/2024-09-12-150939_tx_affected/up.sql new file mode 100644 index 0000000000000..4f71554f1394a --- /dev/null +++ b/crates/sui-indexer/migrations/pg/2024-09-12-150939_tx_affected/up.sql @@ -0,0 +1,9 @@ +CREATE TABLE tx_affected_addresses ( + tx_sequence_number BIGINT NOT NULL, + affected BYTEA NOT NULL, + sender BYTEA NOT NULL, + PRIMARY KEY(affected, tx_sequence_number) +); + +CREATE INDEX tx_affected_addresses_tx_sequence_number_index ON tx_affected_addresses (tx_sequence_number); +CREATE INDEX tx_affected_addresses_sender ON tx_affected_addresses (sender, affected, tx_sequence_number); diff --git a/crates/sui-indexer/src/db.rs b/crates/sui-indexer/src/db.rs index dadd32c035f14..6ad831d331f45 100644 --- a/crates/sui-indexer/src/db.rs +++ b/crates/sui-indexer/src/db.rs @@ -6,11 +6,9 @@ use crate::errors::IndexerError; use clap::Args; use diesel::migration::{Migration, MigrationSource, MigrationVersion}; use diesel::pg::Pg; -use diesel::r2d2::ConnectionManager; -use diesel::r2d2::{Pool, PooledConnection}; +use diesel::table; use diesel::ExpressionMethods; use diesel::QueryDsl; -use diesel::{table, PgConnection}; use diesel_migrations::{embed_migrations, EmbeddedMigrations}; use std::time::Duration; use tracing::info; @@ -24,9 +22,6 @@ table! 
{
 const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations/pg");

-pub type ConnectionPool = Pool<ConnectionManager<PgConnection>>;
-pub type PoolConnection = PooledConnection<ConnectionManager<PgConnection>>;
-
 #[derive(Args, Debug, Clone)]
 pub struct ConnectionPoolConfig {
     #[arg(long, default_value_t = 100)]
@@ -86,53 +81,6 @@ pub struct ConnectionConfig {
     pub read_only: bool,
 }

-impl diesel::r2d2::CustomizeConnection<PgConnection, diesel::r2d2::Error> for ConnectionConfig {
-    fn on_acquire(&self, conn: &mut PgConnection) -> std::result::Result<(), diesel::r2d2::Error> {
-        use diesel::RunQueryDsl;
-
-        diesel::sql_query(format!(
-            "SET statement_timeout = {}",
-            self.statement_timeout.as_millis(),
-        ))
-        .execute(conn)
-        .map_err(diesel::r2d2::Error::QueryError)?;
-
-        if self.read_only {
-            diesel::sql_query("SET default_transaction_read_only = 't'")
-                .execute(conn)
-                .map_err(diesel::r2d2::Error::QueryError)?;
-        }
-        Ok(())
-    }
-}
-
-pub fn new_connection_pool(
-    db_url: &str,
-    config: &ConnectionPoolConfig,
-) -> Result<ConnectionPool, IndexerError> {
-    let manager = ConnectionManager::<PgConnection>::new(db_url);
-
-    Pool::builder()
-        .max_size(config.pool_size)
-        .connection_timeout(config.connection_timeout)
-        .connection_customizer(Box::new(config.connection_config()))
-        .build(manager)
-        .map_err(|e| {
-            IndexerError::PgConnectionPoolInitError(format!(
-                "Failed to initialize connection pool for {db_url} with error: {e:?}"
-            ))
-        })
-}
-
-pub fn get_pool_connection(pool: &ConnectionPool) -> Result<PoolConnection, IndexerError> {
-    pool.get().map_err(|e| {
-        IndexerError::PgPoolConnectionError(format!(
-            "Failed to get connection from PG connection pool with error: {:?}",
-            e
-        ))
-    })
-}
-
 /// Checks that the local migration scripts is a prefix of the records in the database.
 /// This allows us run migration scripts against a DB at anytime, without worrying about
 /// existing readers fail over.
@@ -257,7 +205,6 @@ pub mod setup_postgres {
     }
 }

-#[cfg(feature = "pg_integration")]
 #[cfg(test)]
 mod tests {
     use crate::database::{Connection, ConnectionPool};
diff --git a/crates/sui-indexer/src/handlers/checkpoint_handler.rs b/crates/sui-indexer/src/handlers/checkpoint_handler.rs
index 62985b92cb927..f4eee7d23965b 100644
--- a/crates/sui-indexer/src/handlers/checkpoint_handler.rs
+++ b/crates/sui-indexer/src/handlers/checkpoint_handler.rs
@@ -38,6 +38,7 @@ use crate::handlers::committer::start_tx_checkpoint_commit_task;
 use crate::handlers::tx_processor::IndexingPackageBuffer;
 use crate::metrics::IndexerMetrics;
 use crate::models::display::StoredDisplay;
+use crate::models::obj_indices::StoredObjectVersion;
 use crate::store::package_resolver::{IndexerStorePackageResolver, InterimPackageResolver};
 use crate::store::{IndexerStore, PgIndexerStore};
 use crate::types::{
@@ -254,6 +255,27 @@ impl CheckpointHandler {
         }))
     }

+    fn derive_object_versions(
+        object_history_changes: &TransactionObjectChangesToCommit,
+    ) -> Vec<StoredObjectVersion> {
+        let mut object_versions = vec![];
+        for changed_obj in object_history_changes.changed_objects.iter() {
+            object_versions.push(StoredObjectVersion {
+                object_id: changed_obj.object.id().to_vec(),
+                object_version: changed_obj.object.version().value() as i64,
+                cp_sequence_number: changed_obj.checkpoint_sequence_number as i64,
+            });
+        }
+        for deleted_obj in object_history_changes.deleted_objects.iter() {
+            object_versions.push(StoredObjectVersion {
+                object_id: deleted_obj.object_id.to_vec(),
+                object_version: deleted_obj.object_version as i64,
+                cp_sequence_number: deleted_obj.checkpoint_sequence_number as i64,
+            });
+        }
+        object_versions
+    }
+
     async fn index_checkpoint(
         state: &PgIndexerStore,
         data: CheckpointData,
@@ -272,6 +294,7
@@ impl CheckpointHandler { Self::index_objects(data.clone(), &metrics, package_resolver.clone()).await?; let object_history_changes: TransactionObjectChangesToCommit = Self::index_objects_history(data.clone(), package_resolver.clone()).await?; + let object_versions = Self::derive_object_versions(&object_history_changes); let (checkpoint, db_transactions, db_events, db_tx_indices, db_event_indices, db_displays) = { let CheckpointData { @@ -327,6 +350,7 @@ impl CheckpointHandler { display_updates: db_displays, object_changes, object_history_changes, + object_versions, packages, epoch, }) @@ -709,7 +733,7 @@ async fn get_move_struct_layout_map( move_core_types::annotated_value::MoveStructLayout, ), IndexerError, - >((struct_tag, move_struct_layout)) + >((struct_tag, *move_struct_layout)) } }) .collect::<Vec<_>>(); @@ -745,18 +769,18 @@ fn try_create_dynamic_field_info( )) })?; let move_struct = move_object.to_move_struct(&move_struct_layout)?; - let (name_value, type_, object_id) = + let (move_value, type_, object_id) = DynamicFieldInfo::parse_move_object(&move_struct).tap_err(|e| warn!("{e}"))?; let name_type = move_object.type_().try_extract_field_name(&type_)?; - let bcs_name = bcs::to_bytes(&name_value.clone().undecorate()).map_err(|e| { + let bcs_name = bcs::to_bytes(&move_value.clone().undecorate()).map_err(|e| { IndexerError::SerdeError(format!( "Failed to serialize dynamic field name {:?}: {e}", - name_value + move_value )) })?; let name = DynamicFieldName { type_: name_type, - value: SuiMoveValue::from(name_value).to_json_value(), + value: SuiMoveValue::from(move_value).to_json_value(), }; Ok(Some(match type_ { DynamicFieldType::DynamicObject => { diff --git a/crates/sui-indexer/src/handlers/committer.rs b/crates/sui-indexer/src/handlers/committer.rs index 71bc8c274bb78..a6a9c2c1df42e 100644 --- a/crates/sui-indexer/src/handlers/committer.rs +++ b/crates/sui-indexer/src/handlers/committer.rs @@ -103,6 +103,7 @@ async fn commit_checkpoints( let mut display_updates_batch = BTreeMap::new(); let mut object_changes_batch = vec![]; let mut object_history_changes_batch = vec![]; + let mut object_versions_batch = vec![]; let mut packages_batch = vec![]; for indexed_checkpoint in indexed_checkpoint_batch { @@ -115,6 +116,7 @@ async fn commit_checkpoints( display_updates, object_changes, object_history_changes, + object_versions, packages, epoch: _, } = indexed_checkpoint; @@ -126,6 +128,7 @@ async fn commit_checkpoints( display_updates_batch.extend(display_updates.into_iter()); object_changes_batch.push(object_changes); object_history_changes_batch.push(object_history_changes); + object_versions_batch.push(object_versions); packages_batch.push(packages); } @@ -140,6 +143,10 @@ async fn commit_checkpoints( .into_iter() .flatten() .collect::<Vec<_>>(); + let object_versions_batch = object_versions_batch + .into_iter() + .flatten() + .collect::<Vec<_>>(); let packages_batch = packages_batch.into_iter().flatten().collect::<Vec<_>>(); let checkpoint_num = checkpoint_batch.len(); let tx_count = tx_batch.len(); @@ -160,6 +167,7 @@ async fn commit_checkpoints( state.persist_objects(object_changes_batch.clone()), state.persist_object_history(object_history_changes_batch.clone()), state.persist_full_objects_history(object_history_changes_batch.clone()), + state.persist_object_versions(object_versions_batch.clone()), ]; if let Some(epoch_data) = epoch.clone() { persist_tasks.push(state.persist_epoch(epoch_data)); @@ -209,7 +217,9 @@ async fn commit_checkpoints( .await .expect("Failed to get chain identifier")
.expect("Chain identifier should have been indexed at this point"); - let _ = state.persist_protocol_configs_and_feature_flags(chain_id); + let _ = state + .persist_protocol_configs_and_feature_flags(chain_id) + .await; } let elapsed = guard.stop_and_record(); diff --git a/crates/sui-indexer/src/handlers/mod.rs b/crates/sui-indexer/src/handlers/mod.rs index 2a6578fc18295..f96b59b12ecb3 100644 --- a/crates/sui-indexer/src/handlers/mod.rs +++ b/crates/sui-indexer/src/handlers/mod.rs @@ -4,7 +4,7 @@ use std::collections::BTreeMap; use crate::{ - models::display::StoredDisplay, + models::{display::StoredDisplay, obj_indices::StoredObjectVersion}, types::{ EventIndex, IndexedCheckpoint, IndexedDeletedObject, IndexedEpochInfo, IndexedEvent, IndexedObject, IndexedPackage, IndexedTransaction, TxIndex, @@ -27,6 +27,7 @@ pub struct CheckpointDataToCommit { pub display_updates: BTreeMap<String, StoredDisplay>, pub object_changes: TransactionObjectChangesToCommit, pub object_history_changes: TransactionObjectChangesToCommit, + pub object_versions: Vec<StoredObjectVersion>, pub packages: Vec<IndexedPackage>, pub epoch: Option<EpochToCommit>, } diff --git a/crates/sui-indexer/src/handlers/pruner.rs b/crates/sui-indexer/src/handlers/pruner.rs index ce62a03ef2ac2..21dca59607f9d 100644 --- a/crates/sui-indexer/src/handlers/pruner.rs +++ b/crates/sui-indexer/src/handlers/pruner.rs @@ -25,8 +25,7 @@ impl Pruner { epochs_to_keep: u64, metrics: IndexerMetrics, ) -> Result<Self, IndexerError> { - let blocking_cp = store.blocking_cp(); - let partition_manager = PgPartitionManager::new(blocking_cp.clone())?; + let partition_manager = PgPartitionManager::new(store.pool())?; Ok(Self { store, partition_manager, @@ -55,7 +54,8 @@ impl Pruner { // Not all partitioned tables are epoch-partitioned, so we need to filter them out. let table_partitions: HashMap<_, _> = self .partition_manager - .get_table_partitions()? + .get_table_partitions() + .await? .into_iter() .filter(|(table_name, _)| { self.partition_manager @@ -75,7 +75,8 @@ impl Pruner { // would have been pruned already if the pruner was running. for epoch in *min_partition..min_epoch { self.partition_manager - .drop_table_partition(table_name.clone(), epoch)?; + .drop_table_partition(table_name.clone(), epoch) + .await?; info!( "Batch dropped table partition {} epoch {}", table_name, epoch @@ -91,7 +92,8 @@ impl Pruner { info!("Pruning epoch {}", epoch); for table_name in table_partitions.keys() { self.partition_manager - .drop_table_partition(table_name.clone(), epoch)?; + .drop_table_partition(table_name.clone(), epoch) + .await?; info!("Dropped table partition {} epoch {}", table_name, epoch); } self.store.prune_epoch(epoch).await.unwrap_or_else(|e| { diff --git a/crates/sui-indexer/src/indexer.rs b/crates/sui-indexer/src/indexer.rs index 45747b16aaaee..ab269d31c5eb8 100644 --- a/crates/sui-indexer/src/indexer.rs +++ b/crates/sui-indexer/src/indexer.rs @@ -11,6 +11,7 @@ use tokio_util::sync::CancellationToken; use tracing::info; use async_trait::async_trait; +use futures::future::try_join_all; use mysten_metrics::spawn_monitored_task; use sui_data_ingestion_core::{ DataIngestionMetrics, IndexerExecutor, ProgressStore, ReaderOptions, WorkerPool, @@ -101,23 +102,20 @@ impl Indexer { // Otherwise, we would do the persisting in `commit_checkpoint` while the first cp is // being indexed. if let Some(chain_id) = IndexerStore::get_chain_identifier(&store).await?
{ - store.persist_protocol_configs_and_feature_flags(chain_id)?; + store + .persist_protocol_configs_and_feature_flags(chain_id) + .await?; } - let cancel_clone = cancel.clone(); - let (exit_sender, exit_receiver) = oneshot::channel(); - // Spawn a task that links the cancellation token to the exit sender - spawn_monitored_task!(async move { - cancel_clone.cancelled().await; - let _ = exit_sender.send(()); - }); - + let mut exit_senders = vec![]; + let mut executors = vec![]; + let progress_store = ShimIndexerProgressStore::new(vec![ + ("primary".to_string(), primary_watermark), + ("object_snapshot".to_string(), object_snapshot_watermark), + ]); let mut executor = IndexerExecutor::new( - ShimIndexerProgressStore::new(vec![ - ("primary".to_string(), primary_watermark), - ("object_snapshot".to_string(), object_snapshot_watermark), - ]), - 1, + progress_store.clone(), + 2, DataIngestionMetrics::new(&Registry::new()), ); let worker = new_handlers(store, metrics, primary_watermark, cancel.clone()).await?; @@ -126,18 +124,42 @@ impl Indexer { "primary".to_string(), config.checkpoint_download_queue_size, ); - executor.register(worker_pool).await?; + let (exit_sender, exit_receiver) = oneshot::channel(); + executors.push((executor, exit_receiver)); + exit_senders.push(exit_sender); + + // in a non-colocated setup, start a separate indexer for processing object snapshots + if config.sources.data_ingestion_path.is_none() { + let executor = IndexerExecutor::new( + progress_store, + 1, + DataIngestionMetrics::new(&Registry::new()), + ); + let (exit_sender, exit_receiver) = oneshot::channel(); + exit_senders.push(exit_sender); + executors.push((executor, exit_receiver)); + } let worker_pool = WorkerPool::new( object_snapshot_worker, "object_snapshot".to_string(), config.checkpoint_download_queue_size, ); - executor.register(worker_pool).await?; + let executor = executors.last_mut().expect("executors is not empty"); + executor.0.register(worker_pool).await?; + + // Spawn a task that links the cancellation token to the exit sender + spawn_monitored_task!(async move { + cancel.cancelled().await; + for exit_sender in exit_senders { + let _ = exit_sender.send(()); + } + }); + info!("Starting data ingestion executor..."); - executor - .run( + let futures = executors.into_iter().map(|(executor, exit_receiver)| { + executor.run( config .sources .data_ingestion_path @@ -149,10 +171,11 @@ impl Indexer { .as_ref() .map(|url| url.as_str().to_owned()), vec![], - extra_reader_options, + extra_reader_options.clone(), exit_receiver, ) - .await?; + }); + try_join_all(futures).await?; Ok(()) } @@ -177,6 +200,7 @@ impl Indexer { } } +#[derive(Clone)] struct ShimIndexerProgressStore { watermarks: HashMap, } diff --git a/crates/sui-indexer/src/indexer_reader.rs b/crates/sui-indexer/src/indexer_reader.rs index 5ef988e48ca60..883f9430c84cd 100644 --- a/crates/sui-indexer/src/indexer_reader.rs +++ b/crates/sui-indexer/src/indexer_reader.rs @@ -1,23 +1,22 @@ // Copyright (c) Mysten Labs, Inc. 
// SPDX-License-Identifier: Apache-2.0 - -use std::{collections::HashMap, sync::Arc}; +use std::sync::Arc; use anyhow::Result; use diesel::{ dsl::sql, sql_types::Bool, ExpressionMethods, OptionalExtension, QueryDsl, TextExpressionMethods, }; -use itertools::{any, Itertools}; -use tap::Pipe; -use tap::TapFallible; +use itertools::Itertools; +use tap::{Pipe, TapFallible}; +use tracing::{debug, error, warn}; use fastcrypto::encoding::Encoding; use fastcrypto::encoding::Hex; -use move_core_types::annotated_value::MoveStructLayout; -use move_core_types::language_storage::StructTag; +use move_core_types::annotated_value::{MoveStructLayout, MoveTypeLayout}; +use move_core_types::language_storage::{StructTag, TypeTag}; use sui_json_rpc_types::DisplayFieldsResponse; -use sui_json_rpc_types::{Balance, Coin as SuiCoin, SuiCoinMetadata}; +use sui_json_rpc_types::{Balance, Coin as SuiCoin, SuiCoinMetadata, SuiMoveValue}; use sui_json_rpc_types::{ CheckpointId, EpochInfo, EventFilter, SuiEvent, SuiObjectDataFilter, SuiTransactionBlockResponse, TransactionFilter, @@ -28,10 +27,10 @@ use sui_package_resolver::{PackageStoreWithLruCache, Resolver}; use sui_types::effects::TransactionEvents; use sui_types::{balance::Supply, coin::TreasuryCap, dynamic_field::DynamicFieldName}; use sui_types::{ - base_types::{ObjectID, ObjectRef, SequenceNumber, SuiAddress, VersionNumber}, + base_types::{ObjectID, SuiAddress, VersionNumber}, committee::EpochId, - digests::{ObjectDigest, TransactionDigest}, - dynamic_field::DynamicFieldInfo, + digests::TransactionDigest, + dynamic_field::{DynamicFieldInfo, DynamicFieldType}, object::{Object, ObjectRead}, sui_system_state::{sui_system_state_summary::SuiSystemStateSummary, SuiSystemStateTrait}, }; @@ -47,7 +46,7 @@ use crate::{ display::StoredDisplay, epoch::StoredEpochInfo, events::StoredEvent, - objects::{CoinBalance, ObjectRefColumn, StoredObject}, + objects::{CoinBalance, StoredObject}, transactions::{tx_events_to_sui_tx_events, StoredTransaction}, tx_indices::TxSequenceNumber, }, @@ -475,10 +474,10 @@ impl IndexerReader { .await .into_iter() .collect::, _>>() - .tap_err(|e| tracing::error!("Failed to join all tx block futures: {}", e))? + .tap_err(|e| error!("Failed to join all tx block futures: {}", e))? .into_iter() .collect::, _>>() - .tap_err(|e| tracing::error!("Failed to collect tx block futures: {}", e))?; + .tap_err(|e| error!("Failed to collect tx block futures: {}", e))?; Ok(tx_blocks) } @@ -837,7 +836,7 @@ impl IndexerReader { limit, ); - tracing::debug!("query transaction blocks: {}", query); + debug!("query transaction blocks: {}", query); let tx_sequence_numbers = diesel::sql_query(query.clone()) .load::(&mut connection) .await? @@ -1074,7 +1073,7 @@ impl IndexerReader { main_where_clause, cursor_clause, order_clause, limit, ) }; - tracing::debug!("query events: {}", query); + debug!("query events: {}", query); let stored_events = diesel::sql_query(query) .load::(&mut connection) .await?; @@ -1090,10 +1089,10 @@ impl IndexerReader { .await .into_iter() .collect::, _>>() - .tap_err(|e| tracing::error!("Failed to join sui event futures: {}", e))? + .tap_err(|e| error!("Failed to join sui event futures: {}", e))? 
.into_iter() .collect::, _>>() - .tap_err(|e| tracing::error!("Failed to collect sui event futures: {}", e))?; + .tap_err(|e| error!("Failed to collect sui event futures: {}", e))?; Ok(sui_events) } @@ -1103,57 +1102,31 @@ impl IndexerReader { cursor: Option, limit: usize, ) -> Result, IndexerError> { - let objects = self + let stored_objects = self .get_dynamic_fields_raw(parent_object_id, cursor, limit) .await?; - - if any(objects.iter(), |o| o.df_object_id.is_none()) { - return Err(IndexerError::PersistentStorageDataCorruptionError(format!( - "Dynamic field has empty df_object_id column for parent object {}", - parent_object_id - ))); - } - - // for Dynamic field objects, df_object_id != object_id, we need another look up - // to get the version and digests. - // TODO: simply store df_object_version and df_object_digest as well? - let dfo_ids = objects - .iter() - .filter_map(|o| { - // Unwrap safe: checked nullity above - if o.df_object_id.as_ref().unwrap() == &o.object_id { - None - } else { - Some(o.df_object_id.clone().unwrap()) - } - }) - .collect::>(); - - let object_refs = self.get_object_refs(dfo_ids).await?; let mut df_futures = vec![]; - for object in objects { - let package_resolver_clone = self.package_resolver.clone(); - df_futures.push(tokio::task::spawn( - object.try_into_expectant_dynamic_field_info(package_resolver_clone), - )); + let indexer_reader_arc = Arc::new(self.clone()); + for stored_object in stored_objects { + let indexer_reader_arc_clone = Arc::clone(&indexer_reader_arc); + df_futures.push(tokio::task::spawn(async move { + indexer_reader_arc_clone + .try_create_dynamic_field_info(stored_object) + .await + })); } - let mut dynamic_fields = futures::future::join_all(df_futures) + let df_infos = futures::future::join_all(df_futures) .await .into_iter() .collect::, _>>() - .tap_err(|e| tracing::error!("Error joining DF futures: {:?}", e))? + .tap_err(|e| error!("Error joining DF futures: {:?}", e))? .into_iter() .collect::, _>>() - .tap_err(|e| tracing::error!("Error calling DF try_into function: {:?}", e))?; - - for df in dynamic_fields.iter_mut() { - if let Some(obj_ref) = object_refs.get(&df.object_id) { - df.version = obj_ref.1; - df.digest = obj_ref.2; - } - } - - Ok(dynamic_fields) + .tap_err(|e| error!("Error calling try_create_dynamic_field_info: {:?}", e))? 
+ .into_iter() + .flatten() + .collect::>(); + Ok(df_infos) } pub async fn get_dynamic_fields_raw( @@ -1183,6 +1156,90 @@ impl IndexerReader { .map_err(Into::into) } + async fn try_create_dynamic_field_info( + &self, + stored_object: StoredObject, + ) -> Result, IndexerError> { + if stored_object.df_kind.is_none() { + return Ok(None); + } + + let object: Object = stored_object.try_into()?; + let move_object = match object.data.try_as_move().cloned() { + Some(move_object) => move_object, + None => { + return Err(IndexerError::ResolveMoveStructError( + "Object is not a MoveObject".to_string(), + )); + } + }; + let struct_tag: StructTag = move_object.type_().clone().into(); + let move_type_layout = self + .package_resolver + .type_layout(TypeTag::Struct(Box::new(struct_tag.clone()))) + .await + .map_err(|e| { + IndexerError::ResolveMoveStructError(format!( + "Failed to get type layout for type {}: {}", + struct_tag, e + )) + })?; + let MoveTypeLayout::Struct(move_struct_layout) = move_type_layout else { + return Err(IndexerError::ResolveMoveStructError( + "MoveTypeLayout is not Struct".to_string(), + )); + }; + + let move_struct = move_object.to_move_struct(&move_struct_layout)?; + let (move_value, type_, object_id) = + DynamicFieldInfo::parse_move_object(&move_struct).tap_err(|e| warn!("{e}"))?; + let name_type = move_object.type_().try_extract_field_name(&type_)?; + let bcs_name = bcs::to_bytes(&move_value.clone().undecorate()).map_err(|e| { + IndexerError::SerdeError(format!( + "Failed to serialize dynamic field name {:?}: {e}", + move_value + )) + })?; + let name = DynamicFieldName { + type_: name_type, + value: SuiMoveValue::from(move_value).to_json_value(), + }; + + Ok(Some(match type_ { + DynamicFieldType::DynamicObject => { + let object = self.get_object(&object_id, None).await?.ok_or( + IndexerError::UncategorizedError(anyhow::anyhow!( + "Failed to find object_id {:?} when trying to create dynamic field info", + object_id + )), + )?; + + let version = object.version(); + let digest = object.digest(); + let object_type = object.data.type_().unwrap().clone(); + DynamicFieldInfo { + name, + bcs_name, + type_, + object_type: object_type.to_canonical_string(/* with_prefix */ true), + object_id, + version, + digest, + } + } + DynamicFieldType::DynamicField => DynamicFieldInfo { + name, + bcs_name, + type_, + object_type: move_object.into_type().into_type_params()[1] + .to_canonical_string(/* with_prefix */ true), + object_id: object.id(), + version: object.version(), + digest: object.digest(), + }, + })) + } + pub async fn bcs_name_from_dynamic_field_name( &self, name: &DynamicFieldName, @@ -1202,45 +1259,6 @@ impl IndexerReader { Ok(name_bcs_value) } - async fn get_object_refs( - &self, - object_ids: Vec>, - ) -> IndexerResult> { - use diesel_async::RunQueryDsl; - - let mut connection = self.pool.get().await?; - - objects::table - .filter(objects::object_id.eq_any(object_ids)) - .select(( - objects::object_id, - objects::object_version, - objects::object_digest, - )) - .load::(&mut connection) - .await? 
- .into_iter() - .map(|object_ref: ObjectRefColumn| { - let object_id = - ObjectID::from_bytes(object_ref.object_id.clone()).map_err(|_e| { - IndexerError::PersistentStorageDataCorruptionError(format!( - "Can't convert {:?} to ObjectID", - object_ref.object_id - )) - })?; - let seq = SequenceNumber::from_u64(object_ref.object_version as u64); - let object_digest = ObjectDigest::try_from(object_ref.object_digest.as_slice()) - .map_err(|e| { - IndexerError::PersistentStorageDataCorruptionError(format!( - "object {:?} has incompatible object digest. Error: {e}", - object_ref.object_digest - )) - })?; - Ok((object_id, (object_id, seq, object_digest))) - }) - .collect::>>() - } - async fn get_display_object_by_type( &self, object_type: &move_core_types::language_storage::StructTag, @@ -1331,8 +1349,7 @@ impl IndexerReader { coin_type_filter, ); - tracing::debug!("get coin balances query: {query}"); - + debug!("get coin balances query: {query}"); diesel::sql_query(query) .load::(&mut connection) .await? diff --git a/crates/sui-indexer/src/main.rs b/crates/sui-indexer/src/main.rs index 66144476f4d1d..523e91c28e8a1 100644 --- a/crates/sui-indexer/src/main.rs +++ b/crates/sui-indexer/src/main.rs @@ -4,9 +4,7 @@ use clap::Parser; use sui_indexer::config::Command; use sui_indexer::database::ConnectionPool; -use sui_indexer::db::{ - check_db_migration_consistency, new_connection_pool, reset_database, run_migrations, -}; +use sui_indexer::db::{check_db_migration_consistency, reset_database, run_migrations}; use sui_indexer::indexer::Indexer; use sui_indexer::store::PgIndexerStore; use tokio_util::sync::CancellationToken; @@ -30,14 +28,12 @@ async fn main() -> anyhow::Result<()> { mysten_metrics::init_metrics(®istry); let indexer_metrics = IndexerMetrics::new(®istry); - let connection_pool = - new_connection_pool(opts.database_url.as_str(), &opts.connection_pool_config)?; let pool = ConnectionPool::new( opts.database_url.clone(), opts.connection_pool_config.clone(), ) .await?; - spawn_connection_pool_metric_collector(indexer_metrics.clone(), connection_pool.clone()); + spawn_connection_pool_metric_collector(indexer_metrics.clone(), pool.clone()); match opts.command { Command::Indexer { @@ -49,12 +45,7 @@ async fn main() -> anyhow::Result<()> { // Make sure to run all migrations on startup, and also serve as a compatibility check. 
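// --- Illustrative sketch (not part of the diff): the main.rs hunk here swaps the
// blocking r2d2 pool for the async `ConnectionPool`. Assuming only the signatures
// visible in this diff (`ConnectionPool::new`, `dedicated_connection`,
// `run_migrations`, `PgIndexerStore::new`; the parameter types such as `url::Url`
// are assumptions), the new startup path looks roughly like:
//
// use sui_indexer::config::RestoreConfig;
// use sui_indexer::database::ConnectionPool;
// use sui_indexer::db::{run_migrations, ConnectionPoolConfig};
// use sui_indexer::metrics::IndexerMetrics;
// use sui_indexer::store::PgIndexerStore;
//
// async fn init_store(
//     database_url: url::Url,
//     pool_config: ConnectionPoolConfig,
//     restore_config: RestoreConfig,
//     metrics: IndexerMetrics,
// ) -> anyhow::Result<PgIndexerStore> {
//     // One async pool now serves both reads and writes.
//     let pool = ConnectionPool::new(database_url, pool_config).await?;
//     // Migrations run on a dedicated connection taken outside the pool.
//     run_migrations(pool.dedicated_connection().await?).await?;
//     // The store takes only the async pool; the blocking pool argument is gone.
//     Ok(PgIndexerStore::new(pool, restore_config, metrics))
// }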
run_migrations(pool.dedicated_connection().await?).await?; - let store = PgIndexerStore::new( - connection_pool, - pool, - restore_config, - indexer_metrics.clone(), - ); + let store = PgIndexerStore::new(pool, restore_config, indexer_metrics.clone()); Indexer::start_writer_with_config( &ingestion_config, diff --git a/crates/sui-indexer/src/metrics.rs b/crates/sui-indexer/src/metrics.rs index 35c07fbfc2abd..739a686d3cbfc 100644 --- a/crates/sui-indexer/src/metrics.rs +++ b/crates/sui-indexer/src/metrics.rs @@ -774,7 +774,7 @@ impl IndexerMetrics { pub fn spawn_connection_pool_metric_collector( metrics: IndexerMetrics, - connection_pool: crate::db::ConnectionPool, + connection_pool: crate::database::ConnectionPool, ) { tokio::spawn(async move { loop { diff --git a/crates/sui-indexer/src/models/obj_indices.rs b/crates/sui-indexer/src/models/obj_indices.rs index 7e5298008834c..62e19b3d43c90 100644 --- a/crates/sui-indexer/src/models/obj_indices.rs +++ b/crates/sui-indexer/src/models/obj_indices.rs @@ -4,10 +4,6 @@ use diesel::prelude::*; use crate::schema::objects_version; - -use super::objects::StoredDeletedObject; -use super::objects::StoredObject; - /// Model types related to tables that support efficient execution of queries on the `objects`, /// `objects_history` and `objects_snapshot` tables. @@ -18,23 +14,3 @@ pub struct StoredObjectVersion { pub object_version: i64, pub cp_sequence_number: i64, } - -impl From<&StoredObject> for StoredObjectVersion { - fn from(o: &StoredObject) -> Self { - Self { - object_id: o.object_id.clone(), - object_version: o.object_version, - cp_sequence_number: o.checkpoint_sequence_number, - } - } -} - -impl From<&StoredDeletedObject> for StoredObjectVersion { - fn from(o: &StoredDeletedObject) -> Self { - Self { - object_id: o.object_id.clone(), - object_version: o.object_version, - cp_sequence_number: o.checkpoint_sequence_number, - } - } -} diff --git a/crates/sui-indexer/src/models/objects.rs b/crates/sui-indexer/src/models/objects.rs index 2e71546ceec7d..aebab39725145 100644 --- a/crates/sui-indexer/src/models/objects.rs +++ b/crates/sui-indexer/src/models/objects.rs @@ -13,9 +13,8 @@ use sui_json_rpc_types::{Balance, Coin as SuiCoin}; use sui_package_resolver::{PackageStore, Resolver}; use sui_types::base_types::{ObjectID, ObjectRef}; use sui_types::digests::ObjectDigest; -use sui_types::dynamic_field::{DynamicFieldInfo, DynamicFieldName, DynamicFieldType, Field}; -use sui_types::object::Object; -use sui_types::object::ObjectRead; +use sui_types::dynamic_field::{DynamicFieldType, Field}; +use sui_types::object::{Object, ObjectRead}; use crate::errors::IndexerError; use crate::schema::{full_objects_history, objects, objects_history, objects_snapshot}; @@ -209,7 +208,7 @@ impl From for StoredDeletedObject { #[derive(Queryable, Insertable, Debug, Identifiable, Clone, QueryableByName)] #[diesel(table_name = objects_history, primary_key(object_id, object_version, checkpoint_sequence_number))] -pub struct StoredDeletedHistoryObject { +pub(crate) struct StoredDeletedHistoryObject { pub object_id: Vec, pub object_version: i64, pub object_status: i16, @@ -315,129 +314,7 @@ impl StoredObject { )), }?; - Ok(ObjectRead::Exists(oref, object, Some(move_struct_layout))) - } - - pub async fn try_into_expectant_dynamic_field_info( - self, - package_resolver: Arc>, - ) -> Result { - match self - .try_into_dynamic_field_info(package_resolver) - .await - .transpose() - { - Some(Ok(info)) => Ok(info), - Some(Err(e)) => Err(e), - None => 
Err(IndexerError::PersistentStorageDataCorruptionError( - "Dynamic field object has incompatible dynamic field type: empty df_kind".into(), - )), - } - } - - pub async fn try_into_dynamic_field_info( - self, - package_resolver: Arc>, - ) -> Result, IndexerError> { - if self.df_kind.is_none() { - return Ok(None); - } - - // Past this point, if there is any unexpected field, it's a data corruption error - let object_id = ObjectID::from_bytes(&self.object_id).map_err(|_| { - IndexerError::PersistentStorageDataCorruptionError(format!( - "Can't convert {:?} to object_id", - self.object_id - )) - })?; - let object_digest = ObjectDigest::try_from(self.object_digest.as_slice()).map_err(|e| { - IndexerError::PersistentStorageDataCorruptionError(format!( - "object {} has incompatible object digest. Error: {e}", - object_id - )) - })?; - let df_object_id = if let Some(df_object_id) = self.df_object_id.clone() { - ObjectID::from_bytes(df_object_id).map_err(|e| { - IndexerError::PersistentStorageDataCorruptionError(format!( - "object {} has incompatible dynamic field type: df_object_id. Error: {e}", - object_id - )) - }) - } else { - return Err(IndexerError::PersistentStorageDataCorruptionError(format!( - "object {} has incompatible dynamic field type: empty df_object_id", - object_id - ))); - }?; - let type_ = match self.df_kind { - Some(0) => DynamicFieldType::DynamicField, - Some(1) => DynamicFieldType::DynamicObject, - _ => { - return Err(IndexerError::PersistentStorageDataCorruptionError(format!( - "object {} has incompatible dynamic field type: empty df_kind", - object_id - ))) - } - }; - let name = if let Some(field_name) = self.df_name.clone() { - let name: DynamicFieldName = bcs::from_bytes(&field_name).map_err(|e| { - IndexerError::PersistentStorageDataCorruptionError(format!( - "object {} has incompatible dynamic field type: df_name. Error: {e}", - object_id - )) - })?; - name - } else { - return Err(IndexerError::PersistentStorageDataCorruptionError(format!( - "object {} has incompatible dynamic field type: empty df_name", - object_id - ))); - }; - - let oref = self.get_object_ref()?; - let object: sui_types::object::Object = self.clone().try_into()?; - let Some(move_object) = object.data.try_as_move().cloned() else { - return Err(IndexerError::PostgresReadError(format!( - "Object {:?} is not a Move object", - oref, - ))); - }; - if !move_object.type_().is_dynamic_field() { - return Err(IndexerError::PostgresReadError(format!( - "Object {:?} is not a dynamic field", - oref, - ))); - } - - let layout = package_resolver - .type_layout(name.type_.clone()) - .await - .map_err(|e| { - IndexerError::ResolveMoveStructError(format!( - "Failed to create dynamic field info for obj {}:{}, type: {}. 
Error: {e}", - object.id(), - object.version(), - move_object.type_(), - )) - })?; - let sui_json_value = sui_json::SuiJsonValue::new(name.value.clone())?; - let bcs_name = sui_json_value.to_bcs_bytes(&layout)?; - let object_type = self.df_object_type.clone().ok_or( - IndexerError::PersistentStorageDataCorruptionError(format!( - "object {} has incompatible dynamic field type: empty df_object_type", - object_id - )), - )?; - - Ok(Some(DynamicFieldInfo { - version: oref.1, - digest: object_digest, - type_, - name, - bcs_name, - object_type, - object_id: df_object_id, - })) + Ok(ObjectRead::Exists(oref, object, Some(*move_struct_layout))) } pub fn get_object_ref(&self) -> Result { diff --git a/crates/sui-indexer/src/models/tx_indices.rs b/crates/sui-indexer/src/models/tx_indices.rs index 4f942c2e7af0b..de3ccc817826f 100644 --- a/crates/sui-indexer/src/models/tx_indices.rs +++ b/crates/sui-indexer/src/models/tx_indices.rs @@ -3,12 +3,13 @@ use crate::{ schema::{ - tx_calls_fun, tx_calls_mod, tx_calls_pkg, tx_changed_objects, tx_digests, tx_input_objects, - tx_kinds, tx_recipients, tx_senders, + tx_affected_addresses, tx_calls_fun, tx_calls_mod, tx_calls_pkg, tx_changed_objects, + tx_digests, tx_input_objects, tx_kinds, tx_recipients, tx_senders, }, types::TxIndex, }; use diesel::prelude::*; +use itertools::Itertools; #[derive(QueryableByName)] pub struct TxSequenceNumber { @@ -22,6 +23,14 @@ pub struct TxDigest { pub transaction_digest: Vec, } +#[derive(Queryable, Insertable, Selectable, Debug, Clone, Default)] +#[diesel(table_name = tx_affected_addresses)] +pub struct StoredTxAffected { + pub tx_sequence_number: i64, + pub affected: Vec, + pub sender: Vec, +} + #[derive(Queryable, Insertable, Selectable, Debug, Clone, Default)] #[diesel(table_name = tx_senders)] pub struct StoredTxSenders { @@ -99,6 +108,7 @@ impl TxIndex { pub fn split( self: TxIndex, ) -> ( + Vec, Vec, Vec, Vec, @@ -110,10 +120,24 @@ impl TxIndex { Vec, ) { let tx_sequence_number = self.tx_sequence_number as i64; + let tx_affected_addresses = self + .recipients + .iter() + .chain(self.payers.iter()) + .chain(std::iter::once(&self.sender)) + .unique() + .map(|a| StoredTxAffected { + tx_sequence_number, + affected: a.to_vec(), + sender: self.sender.to_vec(), + }) + .collect(); + let tx_sender = StoredTxSenders { tx_sequence_number, sender: self.sender.to_vec(), }; + let tx_recipients = self .recipients .iter() @@ -123,6 +147,7 @@ impl TxIndex { sender: self.sender.to_vec(), }) .collect(); + let tx_input_objects = self .input_objects .iter() @@ -132,6 +157,7 @@ impl TxIndex { sender: self.sender.to_vec(), }) .collect(); + let tx_changed_objects = self .changed_objects .iter() @@ -197,6 +223,7 @@ impl TxIndex { }; ( + tx_affected_addresses, vec![tx_sender], tx_recipients, tx_input_objects, diff --git a/crates/sui-indexer/src/schema/pg.rs b/crates/sui-indexer/src/schema.rs similarity index 85% rename from crates/sui-indexer/src/schema/pg.rs rename to crates/sui-indexer/src/schema.rs index 540fb9c14f876..ebbbd4bbd7b1c 100644 --- a/crates/sui-indexer/src/schema/pg.rs +++ b/crates/sui-indexer/src/schema.rs @@ -27,7 +27,7 @@ diesel::table! { validator_signature -> Bytea, end_of_epoch_data -> Nullable, min_tx_sequence_number -> Nullable, - max_tx_sequence_number -> Nullable + max_tx_sequence_number -> Nullable, } } @@ -146,20 +146,6 @@ diesel::table! { } } -diesel::table! 
{ - events_partition_0 (tx_sequence_number, event_sequence_number) { - tx_sequence_number -> Int8, - event_sequence_number -> Int8, - transaction_digest -> Bytea, - senders -> Array>, - package -> Bytea, - module -> Text, - event_type -> Text, - timestamp_ms -> Int8, - bcs -> Bytea, - } -} - diesel::table! { feature_flags (protocol_version, flag_name) { protocol_version -> Int8, @@ -221,29 +207,6 @@ diesel::table! { } } -diesel::table! { - objects_history_partition_0 (checkpoint_sequence_number, object_id, object_version) { - object_id -> Bytea, - object_version -> Int8, - object_status -> Int2, - object_digest -> Nullable, - checkpoint_sequence_number -> Int8, - owner_type -> Nullable, - owner_id -> Nullable, - object_type -> Nullable, - object_type_package -> Nullable, - object_type_module -> Nullable, - object_type_name -> Nullable, - serialized_object -> Nullable, - coin_type -> Nullable, - coin_balance -> Nullable, - df_kind -> Nullable, - df_name -> Nullable, - df_object_type -> Nullable, - df_object_id -> Nullable, - } -} - diesel::table! { objects_snapshot (object_id) { object_id -> Bytea, @@ -275,14 +238,6 @@ diesel::table! { } } -diesel::table! { - protocol_configs (protocol_version, config_name) { - protocol_version -> Int8, - config_name -> Text, - config_value -> Nullable, - } -} - diesel::table! { packages (package_id, original_id, package_version) { package_id -> Bytea, @@ -293,6 +248,14 @@ diesel::table! { } } +diesel::table! { + protocol_configs (protocol_version, config_name) { + protocol_version -> Int8, + config_name -> Text, + config_value -> Nullable, + } +} + diesel::table! { pruner_cp_watermark (checkpoint_sequence_number) { checkpoint_sequence_number -> Int8, @@ -318,18 +281,10 @@ diesel::table! { } diesel::table! { - transactions_partition_0 (tx_sequence_number) { + tx_affected_addresses (affected, tx_sequence_number) { tx_sequence_number -> Int8, - transaction_digest -> Bytea, - raw_transaction -> Bytea, - raw_effects -> Bytea, - checkpoint_sequence_number -> Int8, - timestamp_ms -> Int8, - object_changes -> Array>, - balance_changes -> Array>, - events -> Array>, - transaction_kind -> Int2, - success_command_count -> Int2, + affected -> Bytea, + sender -> Bytea, } } @@ -418,18 +373,17 @@ diesel::allow_tables_to_appear_in_same_query!( event_struct_name, event_struct_package, events, - events_partition_0, feature_flags, + full_objects_history, objects, objects_history, - objects_history_partition_0, objects_snapshot, objects_version, packages, protocol_configs, pruner_cp_watermark, transactions, - transactions_partition_0, + tx_affected_addresses, tx_calls_fun, tx_calls_mod, tx_calls_pkg, diff --git a/crates/sui-indexer/src/schema/mod.rs b/crates/sui-indexer/src/schema/mod.rs deleted file mode 100644 index b65995cbcc233..0000000000000 --- a/crates/sui-indexer/src/schema/mod.rs +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright (c) Mysten Labs, Inc. 
-// SPDX-License-Identifier: Apache-2.0 - -#![allow(clippy::all)] - -mod pg; - -pub use pg::chain_identifier; -pub use pg::checkpoints; -pub use pg::display; -pub use pg::epochs; -pub use pg::event_emit_module; -pub use pg::event_emit_package; -pub use pg::event_senders; -pub use pg::event_struct_instantiation; -pub use pg::event_struct_module; -pub use pg::event_struct_name; -pub use pg::event_struct_package; -pub use pg::events; -pub use pg::feature_flags; -pub use pg::full_objects_history; -pub use pg::objects; -pub use pg::objects_history; -pub use pg::objects_snapshot; -pub use pg::objects_version; -pub use pg::packages; -pub use pg::protocol_configs; -pub use pg::pruner_cp_watermark; -pub use pg::transactions; -pub use pg::tx_calls_fun; -pub use pg::tx_calls_mod; -pub use pg::tx_calls_pkg; -pub use pg::tx_changed_objects; -pub use pg::tx_digests; -pub use pg::tx_input_objects; -pub use pg::tx_kinds; -pub use pg::tx_recipients; -pub use pg::tx_senders; - -pub use pg::events_partition_0; -pub use pg::objects_history_partition_0; -pub use pg::transactions_partition_0; diff --git a/crates/sui-indexer/src/store/indexer_store.rs b/crates/sui-indexer/src/store/indexer_store.rs index a93f955ce4800..a73a243ef8072 100644 --- a/crates/sui-indexer/src/store/indexer_store.rs +++ b/crates/sui-indexer/src/store/indexer_store.rs @@ -8,6 +8,7 @@ use async_trait::async_trait; use crate::errors::IndexerError; use crate::handlers::{EpochToCommit, TransactionObjectChangesToCommit}; use crate::models::display::StoredDisplay; +use crate::models::obj_indices::StoredObjectVersion; use crate::models::objects::{StoredDeletedObject, StoredObject}; use crate::types::{ EventIndex, IndexedCheckpoint, IndexedEvent, IndexedPackage, IndexedTransaction, TxIndex, @@ -33,7 +34,7 @@ pub trait IndexerStore: Clone + Sync + Send + 'static { async fn get_chain_identifier(&self) -> Result>, IndexerError>; - fn persist_protocol_configs_and_feature_flags( + async fn persist_protocol_configs_and_feature_flags( &self, chain_id: Vec, ) -> Result<(), IndexerError>; @@ -53,6 +54,11 @@ pub trait IndexerStore: Clone + Sync + Send + 'static { object_changes: Vec, ) -> Result<(), IndexerError>; + async fn persist_object_versions( + &self, + object_versions: Vec, + ) -> Result<(), IndexerError>; + async fn persist_objects_snapshot( &self, object_changes: Vec, diff --git a/crates/sui-indexer/src/store/mod.rs b/crates/sui-indexer/src/store/mod.rs index 60f4b86746d2d..7c1aa9abcc107 100644 --- a/crates/sui-indexer/src/store/mod.rs +++ b/crates/sui-indexer/src/store/mod.rs @@ -1,234 +1,54 @@ // Copyright (c) Mysten Labs, Inc. // SPDX-License-Identifier: Apache-2.0 +use std::time::Duration; + +use diesel_async::{scoped_futures::ScopedBoxFuture, AsyncPgConnection}; pub(crate) use indexer_store::*; pub use pg_indexer_store::PgIndexerStore; +use crate::{database::ConnectionPool, errors::IndexerError}; + pub mod indexer_store; pub mod package_resolver; mod pg_indexer_store; pub mod pg_partition_manager; -pub mod diesel_macro { - thread_local! { - pub static CALLED_FROM_BLOCKING_POOL: std::cell::RefCell = const { std::cell::RefCell::new(false) }; - } - - #[macro_export] - macro_rules! 
read_only_repeatable_blocking { - ($pool:expr, $query:expr) => {{ - use $crate::db::get_pool_connection; - use $crate::db::PoolConnection; - let mut pool_conn = get_pool_connection($pool)?; - pool_conn - .build_transaction() - .read_only() - .repeatable_read() - .run($query) - .map_err(|e| IndexerError::PostgresReadError(e.to_string())) - }}; - } - - #[macro_export] - macro_rules! read_only_blocking { - ($pool:expr, $query:expr) => {{ - use $crate::db::get_pool_connection; - - let mut pool_conn = get_pool_connection($pool)?; - pool_conn - .build_transaction() - .read_only() - .run($query) - .map_err(|e| IndexerError::PostgresReadError(e.to_string())) - }}; - } - - #[macro_export] - macro_rules! transactional_blocking_with_retry { - ($pool:expr, $query:expr, $max_elapsed:expr) => {{ - use $crate::db::get_pool_connection; - - use $crate::errors::IndexerError; - let mut backoff = backoff::ExponentialBackoff::default(); - backoff.max_elapsed_time = Some($max_elapsed); - let result = match backoff::retry(backoff, || { - let mut pool_conn = - get_pool_connection($pool).map_err(|e| backoff::Error::Transient { - err: IndexerError::PostgresWriteError(e.to_string()), - retry_after: None, - })?; - pool_conn - .build_transaction() - .read_write() - .run($query) - .map_err(|e| { - tracing::error!("Error with persisting data into DB: {:?}, retrying...", e); - backoff::Error::Transient { - err: IndexerError::PostgresWriteError(e.to_string()), - retry_after: None, - } - }) - }) { - Ok(v) => Ok(v), - Err(backoff::Error::Transient { err, .. }) => Err(err), - Err(backoff::Error::Permanent(err)) => Err(err), - }; - result - }}; - } - - #[macro_export] - macro_rules! spawn_read_only_blocking { - ($pool:expr, $query:expr, $repeatable_read:expr) => {{ - use $crate::db::get_pool_connection; - - use $crate::errors::IndexerError; - use $crate::store::diesel_macro::CALLED_FROM_BLOCKING_POOL; - let current_span = tracing::Span::current(); - tokio::task::spawn_blocking(move || { - CALLED_FROM_BLOCKING_POOL - .with(|in_blocking_pool| *in_blocking_pool.borrow_mut() = true); - let _guard = current_span.enter(); - let mut pool_conn = get_pool_connection($pool).unwrap(); - if $repeatable_read { - pool_conn - .build_transaction() - .read_only() - .repeatable_read() - .run($query) - .map_err(|e| IndexerError::PostgresReadError(e.to_string())) - } else { - pool_conn - .build_transaction() - .read_only() - .run($query) - .map_err(|e| IndexerError::PostgresReadError(e.to_string())) - } - }) +pub async fn transaction_with_retry<'a, Q, T>( + pool: &ConnectionPool, + timeout: Duration, + query: Q, +) -> Result<T, IndexerError> +where + Q: for<'r> FnOnce( + &'r mut AsyncPgConnection, + ) -> ScopedBoxFuture<'a, 'r, Result<T, IndexerError>> + + Send, + Q: Clone, + T: 'a, +{ + let backoff = backoff::ExponentialBackoff { + max_elapsed_time: Some(timeout), + ..Default::default() + }; + backoff::future::retry(backoff, || async { + let mut connection = pool.get().await.map_err(|e| backoff::Error::Transient { + err: IndexerError::PostgresWriteError(e.to_string()), + retry_after: None, + })?; + + connection + .build_transaction() + .read_write() + .run(query.clone()) + .await - .expect("Blocking call failed") - }}; - } - - #[macro_export] - macro_rules! 
insert_or_ignore_into { - ($table:expr, $values:expr, $conn:expr) => {{ - use diesel::RunQueryDsl; - let error_message = concat!("Failed to write to ", stringify!($table), " DB"); - diesel::insert_into($table) - .values($values) - .on_conflict_do_nothing() - .execute($conn) - .map_err(IndexerError::from) - .context(error_message)?; - }}; - } - - #[macro_export] - macro_rules! on_conflict_do_update { - ($table:expr, $values:expr, $target:expr, $pg_columns:expr, $mysql_columns:expr, $conn:expr) => {{ - use diesel::ExpressionMethods; - use diesel::RunQueryDsl; - diesel::insert_into($table) - .values($values) - .on_conflict($target) - .do_update() - .set($pg_columns) - .execute($conn)?; - }}; - } - - #[macro_export] - macro_rules! run_query { - ($pool:expr, $query:expr) => {{ - blocking_call_is_ok_or_panic!(); - read_only_blocking!($pool, $query) - }}; - } - - #[macro_export] - macro_rules! run_query_repeatable { - ($pool:expr, $query:expr) => {{ - blocking_call_is_ok_or_panic!(); - read_only_repeatable_blocking!($pool, $query) - }}; - } - - #[macro_export] - macro_rules! run_query_async { - ($pool:expr, $query:expr) => {{ - spawn_read_only_blocking!($pool, $query, false) - }}; - } - - #[macro_export] - macro_rules! run_query_repeatable_async { - ($pool:expr, $query:expr) => {{ - spawn_read_only_blocking!($pool, $query, true) - }}; - } - - /// Check that we are in a context conducive to making blocking calls. - /// This is done by either: - /// - Checking that we are not inside a tokio runtime context - /// - /// Or: - /// - If we are inside a tokio runtime context, ensure that the call went through - /// `IndexerReader::spawn_blocking` which properly moves the blocking call to a blocking thread - /// pool. - #[macro_export] - macro_rules! blocking_call_is_ok_or_panic { - () => {{ - use $crate::store::diesel_macro::CALLED_FROM_BLOCKING_POOL; - if tokio::runtime::Handle::try_current().is_ok() - && !CALLED_FROM_BLOCKING_POOL.with(|in_blocking_pool| *in_blocking_pool.borrow()) - { - panic!( - "You are calling a blocking DB operation directly on an async thread. \ - Please use IndexerReader::spawn_blocking instead to move the \ - operation to a blocking thread" - ); - } - }}; - } - - #[macro_export] - macro_rules! 
persist_chunk_into_table { - ($table:expr, $chunk:expr, $pool:expr) => {{ - let now = std::time::Instant::now(); - let chunk_len = $chunk.len(); - transactional_blocking_with_retry!( - $pool, - |conn| { - for chunk in $chunk.chunks(PG_COMMIT_CHUNK_SIZE_INTRA_DB_TX) { - insert_or_ignore_into!($table, chunk, conn); - } - Ok::<(), IndexerError>(()) - }, - PG_DB_COMMIT_SLEEP_DURATION - ) - .tap_ok(|_| { - let elapsed = now.elapsed().as_secs_f64(); - info!( - elapsed, - "Persisted {} rows to {}", - chunk_len, - stringify!($table), - ); - }) - .tap_err(|e| { - tracing::error!("Failed to persist {} with error: {}", stringify!($table), e); + .map_err(|e| { + tracing::error!("Error with persisting data into DB: {:?}, retrying...", e); + backoff::Error::Transient { + err: IndexerError::PostgresWriteError(e.to_string()), + retry_after: None, + } }) - }}; - } - - pub use blocking_call_is_ok_or_panic; - pub use read_only_blocking; - pub use read_only_repeatable_blocking; - pub use run_query; - pub use run_query_async; - pub use run_query_repeatable; - pub use run_query_repeatable_async; - pub use spawn_read_only_blocking; - pub use transactional_blocking_with_retry; + }) + .await } diff --git a/crates/sui-indexer/src/store/pg_indexer_store.rs b/crates/sui-indexer/src/store/pg_indexer_store.rs index d110e7b1959d0..480e3d34d8adf 100644 --- a/crates/sui-indexer/src/store/pg_indexer_store.rs +++ b/crates/sui-indexer/src/store/pg_indexer_store.rs @@ -5,7 +5,6 @@ use std::collections::hash_map::Entry; use std::collections::{BTreeMap, HashMap}; use std::io::Cursor; use std::time::Duration; -use std::time::Instant; use async_trait::async_trait; use core::result::Result::Ok; @@ -13,7 +12,8 @@ use csv::Writer; use diesel::dsl::{max, min}; use diesel::ExpressionMethods; use diesel::OptionalExtension; -use diesel::{QueryDsl, RunQueryDsl}; +use diesel::QueryDsl; +use diesel_async::scoped_futures::ScopedFutureExt; use itertools::Itertools; use object_store::path::Path; use tap::TapFallible; @@ -26,7 +26,6 @@ use sui_types::base_types::ObjectID; use crate::config::RestoreConfig; use crate::database::ConnectionPool; -use crate::db::ConnectionPool as BlockingConnectionPool; use crate::errors::{Context, IndexerError}; use crate::handlers::EpochToCommit; use crate::handlers::TransactionObjectChangesToCommit; @@ -51,15 +50,12 @@ use crate::schema::{ event_senders, event_struct_instantiation, event_struct_module, event_struct_name, event_struct_package, events, feature_flags, full_objects_history, objects, objects_history, objects_snapshot, objects_version, packages, protocol_configs, pruner_cp_watermark, - transactions, tx_calls_fun, tx_calls_mod, tx_calls_pkg, tx_changed_objects, tx_digests, - tx_input_objects, tx_kinds, tx_recipients, tx_senders, + transactions, tx_affected_addresses, tx_calls_fun, tx_calls_mod, tx_calls_pkg, + tx_changed_objects, tx_digests, tx_input_objects, tx_kinds, tx_recipients, tx_senders, }; +use crate::store::transaction_with_retry; use crate::types::EventIndex; use crate::types::{IndexedCheckpoint, IndexedEvent, IndexedPackage, IndexedTransaction, TxIndex}; -use crate::{ - insert_or_ignore_into, on_conflict_do_update, persist_chunk_into_table, read_only_blocking, - transactional_blocking_with_retry, -}; use super::pg_partition_manager::{EpochPartitionData, PgPartitionManager}; use super::IndexerStore; @@ -80,15 +76,6 @@ macro_rules! chunk { }}; } -macro_rules! 
prune_tx_or_event_indice_table { - ($table:ident, $conn:expr, $min_tx:expr, $max_tx:expr, $context_msg:expr) => { - diesel::delete($table::table.filter($table::tx_sequence_number.between($min_tx, $max_tx))) - .execute($conn) - .map_err(IndexerError::from) - .context($context_msg)?; - }; -} - // In one DB transaction, the update could be chunked into // a few statements, this is the amount of rows to update in one statement // TODO: I think with the `per_db_tx` params, `PG_COMMIT_CHUNK_SIZE_INTRA_DB_TX` @@ -112,7 +99,6 @@ pub struct PgIndexerStoreConfig { #[derive(Clone)] pub struct PgIndexerStore { - blocking_cp: BlockingConnectionPool, pool: ConnectionPool, metrics: IndexerMetrics, partition_manager: PgPartitionManager, @@ -121,7 +107,6 @@ pub struct PgIndexerStore { impl PgIndexerStore { pub fn new( - blocking_cp: BlockingConnectionPool, pool: ConnectionPool, restore_config: RestoreConfig, metrics: IndexerMetrics, @@ -134,8 +119,8 @@ impl PgIndexerStore { .unwrap_or_else(|_e| PG_COMMIT_OBJECTS_PARALLEL_CHUNK_SIZE.to_string()) .parse::() .unwrap(); - let partition_manager = PgPartitionManager::new(blocking_cp.clone()) - .expect("Failed to initialize partition manager"); + let partition_manager = + PgPartitionManager::new(pool.clone()).expect("Failed to initialize partition manager"); let config = PgIndexerStoreConfig { parallel_chunk_size, parallel_objects_chunk_size, @@ -144,7 +129,6 @@ impl PgIndexerStore { }; Self { - blocking_cp, pool, metrics, partition_manager, @@ -152,200 +136,222 @@ impl PgIndexerStore { } } - pub fn blocking_cp(&self) -> BlockingConnectionPool { - self.blocking_cp.clone() - } - pub fn pool(&self) -> ConnectionPool { self.pool.clone() } - pub fn get_latest_epoch_id(&self) -> Result, IndexerError> { - read_only_blocking!(&self.blocking_cp, |conn| { - epochs::dsl::epochs - .select(max(epochs::epoch)) - .first::>(conn) - .map(|v| v.map(|v| v as u64)) - }) - .context("Failed reading latest epoch id from PostgresDB") - } - /// Get the range of the protocol versions that need to be indexed. - pub fn get_protocol_version_index_range(&self) -> Result<(i64, i64), IndexerError> { + pub async fn get_protocol_version_index_range(&self) -> Result<(i64, i64), IndexerError> { + use diesel_async::RunQueryDsl; + + let mut connection = self.pool.get().await?; // We start indexing from the next protocol version after the latest one stored in the db. - let start = read_only_blocking!(&self.blocking_cp, |conn| { - protocol_configs::dsl::protocol_configs - .select(max(protocol_configs::protocol_version)) - .first::>(conn) - }) - .context("Failed reading latest protocol version from PostgresDB")? - .map_or(1, |v| v + 1); + let start = protocol_configs::table + .select(max(protocol_configs::protocol_version)) + .first::>(&mut connection) + .await + .map_err(Into::into) + .context("Failed reading latest protocol version from PostgresDB")? + .map_or(1, |v| v + 1); // We end indexing at the protocol version of the latest epoch stored in the db. - let end = read_only_blocking!(&self.blocking_cp, |conn| { - epochs::dsl::epochs - .select(max(epochs::protocol_version)) - .first::>(conn) - }) - .context("Failed reading latest epoch protocol version from PostgresDB")? - .unwrap_or(1); + let end = epochs::table + .select(max(epochs::protocol_version)) + .first::>(&mut connection) + .await + .map_err(Into::into) + .context("Failed reading latest epoch protocol version from PostgresDB")? 
+ .unwrap_or(1); Ok((start, end)) } - pub fn get_chain_identifier(&self) -> Result>, IndexerError> { - read_only_blocking!(&self.blocking_cp, |conn| { - chain_identifier::dsl::chain_identifier - .select(chain_identifier::checkpoint_digest) - .first::>(conn) - .optional() - }) - .context("Failed reading chain id from PostgresDB") + async fn get_chain_identifier(&self) -> Result>, IndexerError> { + use diesel_async::RunQueryDsl; + + let mut connection = self.pool.get().await?; + + chain_identifier::table + .select(chain_identifier::checkpoint_digest) + .first::>(&mut connection) + .await + .optional() + .map_err(Into::into) + .context("Failed reading chain id from PostgresDB") } - fn get_latest_checkpoint_sequence_number(&self) -> Result, IndexerError> { - read_only_blocking!(&self.blocking_cp, |conn| { - checkpoints::dsl::checkpoints - .select(max(checkpoints::sequence_number)) - .first::>(conn) - .map(|v| v.map(|v| v as u64)) - }) - .context("Failed reading latest checkpoint sequence number from PostgresDB") + async fn get_latest_checkpoint_sequence_number(&self) -> Result, IndexerError> { + use diesel_async::RunQueryDsl; + + let mut connection = self.pool.get().await?; + + checkpoints::table + .select(max(checkpoints::sequence_number)) + .first::>(&mut connection) + .await + .map_err(Into::into) + .map(|v| v.map(|v| v as u64)) + .context("Failed reading latest checkpoint sequence number from PostgresDB") } - fn get_available_checkpoint_range(&self) -> Result<(u64, u64), IndexerError> { - read_only_blocking!(&self.blocking_cp, |conn| { - checkpoints::dsl::checkpoints - .select(( - min(checkpoints::sequence_number), - max(checkpoints::sequence_number), - )) - .first::<(Option, Option)>(conn) - .map(|(min, max)| { - ( - min.unwrap_or_default() as u64, - max.unwrap_or_default() as u64, - ) - }) - }) - .context("Failed reading min and max checkpoint sequence numbers from PostgresDB") + async fn get_available_checkpoint_range(&self) -> Result<(u64, u64), IndexerError> { + use diesel_async::RunQueryDsl; + + let mut connection = self.pool.get().await?; + + checkpoints::table + .select(( + min(checkpoints::sequence_number), + max(checkpoints::sequence_number), + )) + .first::<(Option, Option)>(&mut connection) + .await + .map_err(Into::into) + .map(|(min, max)| { + ( + min.unwrap_or_default() as u64, + max.unwrap_or_default() as u64, + ) + }) + .context("Failed reading min and max checkpoint sequence numbers from PostgresDB") } - fn get_prunable_epoch_range(&self) -> Result<(u64, u64), IndexerError> { - read_only_blocking!(&self.blocking_cp, |conn| { - epochs::dsl::epochs - .select((min(epochs::epoch), max(epochs::epoch))) - .first::<(Option, Option)>(conn) - .map(|(min, max)| { - ( - min.unwrap_or_default() as u64, - max.unwrap_or_default() as u64, - ) - }) - }) - .context("Failed reading min and max epoch numbers from PostgresDB") + async fn get_prunable_epoch_range(&self) -> Result<(u64, u64), IndexerError> { + use diesel_async::RunQueryDsl; + + let mut connection = self.pool.get().await?; + + epochs::table + .select((min(epochs::epoch), max(epochs::epoch))) + .first::<(Option, Option)>(&mut connection) + .await + .map_err(Into::into) + .map(|(min, max)| { + ( + min.unwrap_or_default() as u64, + max.unwrap_or_default() as u64, + ) + }) + .context("Failed reading min and max epoch numbers from PostgresDB") } - fn get_min_prunable_checkpoint(&self) -> Result { - read_only_blocking!(&self.blocking_cp, |conn| { - pruner_cp_watermark::dsl::pruner_cp_watermark - 
.select(min(pruner_cp_watermark::checkpoint_sequence_number)) - .first::>(conn) - .map(|v| v.unwrap_or_default() as u64) - }) - .context("Failed reading min prunable checkpoint sequence number from PostgresDB") + async fn get_min_prunable_checkpoint(&self) -> Result { + use diesel_async::RunQueryDsl; + + let mut connection = self.pool.get().await?; + + pruner_cp_watermark::table + .select(min(pruner_cp_watermark::checkpoint_sequence_number)) + .first::>(&mut connection) + .await + .map_err(Into::into) + .map(|v| v.unwrap_or_default() as u64) + .context("Failed reading min prunable checkpoint sequence number from PostgresDB") } - fn get_checkpoint_range_for_epoch( + async fn get_checkpoint_range_for_epoch( &self, epoch: u64, ) -> Result<(u64, Option), IndexerError> { - read_only_blocking!(&self.blocking_cp, |conn| { - epochs::dsl::epochs - .select((epochs::first_checkpoint_id, epochs::last_checkpoint_id)) - .filter(epochs::epoch.eq(epoch as i64)) - .first::<(i64, Option)>(conn) - .map(|(min, max)| (min as u64, max.map(|v| v as u64))) - }) - .context("Failed reading checkpoint range from PostgresDB") + use diesel_async::RunQueryDsl; + + let mut connection = self.pool.get().await?; + + epochs::table + .select((epochs::first_checkpoint_id, epochs::last_checkpoint_id)) + .filter(epochs::epoch.eq(epoch as i64)) + .first::<(i64, Option)>(&mut connection) + .await + .map_err(Into::into) + .map(|(min, max)| (min as u64, max.map(|v| v as u64))) + .context("Failed reading checkpoint range from PostgresDB") } - fn get_transaction_range_for_checkpoint( + async fn get_transaction_range_for_checkpoint( &self, checkpoint: u64, ) -> Result<(u64, u64), IndexerError> { - read_only_blocking!(&self.blocking_cp, |conn| { - pruner_cp_watermark::dsl::pruner_cp_watermark - .select(( - pruner_cp_watermark::min_tx_sequence_number, - pruner_cp_watermark::max_tx_sequence_number, - )) - .filter(pruner_cp_watermark::checkpoint_sequence_number.eq(checkpoint as i64)) - .first::<(i64, i64)>(conn) - .map(|(min, max)| (min as u64, max as u64)) - }) - .context("Failed reading transaction range from PostgresDB") + use diesel_async::RunQueryDsl; + + let mut connection = self.pool.get().await?; + + pruner_cp_watermark::table + .select(( + pruner_cp_watermark::min_tx_sequence_number, + pruner_cp_watermark::max_tx_sequence_number, + )) + .filter(pruner_cp_watermark::checkpoint_sequence_number.eq(checkpoint as i64)) + .first::<(i64, i64)>(&mut connection) + .await + .map_err(Into::into) + .map(|(min, max)| (min as u64, max as u64)) + .context("Failed reading transaction range from PostgresDB") } - fn get_latest_object_snapshot_checkpoint_sequence_number( + async fn get_latest_object_snapshot_checkpoint_sequence_number( &self, ) -> Result, IndexerError> { - read_only_blocking!(&self.blocking_cp, |conn| { - objects_snapshot::dsl::objects_snapshot - .select(max(objects_snapshot::checkpoint_sequence_number)) - .first::>(conn) - .map(|v| v.map(|v| v as u64)) - }) - .context("Failed reading latest object snapshot checkpoint sequence number from PostgresDB") + use diesel_async::RunQueryDsl; + + let mut connection = self.pool.get().await?; + + objects_snapshot::table + .select(max(objects_snapshot::checkpoint_sequence_number)) + .first::>(&mut connection) + .await + .map_err(Into::into) + .map(|v| v.map(|v| v as u64)) + .context( + "Failed reading latest object snapshot checkpoint sequence number from PostgresDB", + ) } - fn persist_display_updates( + async fn persist_display_updates( &self, display_updates: BTreeMap, ) -> Result<(), 
IndexerError> { - transactional_blocking_with_retry!( - &self.blocking_cp, - |conn| { - on_conflict_do_update!( - display::table, - display_updates.values().collect::>(), - display::object_type, - ( + use diesel_async::RunQueryDsl; + + transaction_with_retry(&self.pool, PG_DB_COMMIT_SLEEP_DURATION, |conn| { + async { + diesel::insert_into(display::table) + .values(display_updates.values().collect::>()) + .on_conflict(display::object_type) + .do_update() + .set(( display::id.eq(excluded(display::id)), display::version.eq(excluded(display::version)), display::bcs.eq(excluded(display::bcs)), - ), - |excluded: &StoredDisplay| ( - display::id.eq(excluded.id.clone()), - display::version.eq(excluded.version), - display::bcs.eq(excluded.bcs.clone()), - ), - conn - ); + )) + .execute(conn) + .await?; + Ok::<(), IndexerError>(()) - }, - PG_DB_COMMIT_SLEEP_DURATION - )?; + } + .scope_boxed() + }) + .await?; Ok(()) } - fn persist_object_mutation_chunk( + async fn persist_object_mutation_chunk( &self, mutated_object_mutation_chunk: Vec, ) -> Result<(), IndexerError> { + use diesel_async::RunQueryDsl; + let guard = self .metrics .checkpoint_db_commit_latency_objects_chunks .start_timer(); let len = mutated_object_mutation_chunk.len(); - transactional_blocking_with_retry!( - &self.blocking_cp, - |conn| { - on_conflict_do_update!( - objects::table, - mutated_object_mutation_chunk.clone(), - objects::object_id, - ( + transaction_with_retry(&self.pool, PG_DB_COMMIT_SLEEP_DURATION, |conn| { + async { + diesel::insert_into(objects::table) + .values(mutated_object_mutation_chunk.clone()) + .on_conflict(objects::object_id) + .do_update() + .set(( objects::object_id.eq(excluded(objects::object_id)), objects::object_version.eq(excluded(objects::object_version)), objects::object_digest.eq(excluded(objects::object_digest)), @@ -361,29 +367,14 @@ impl PgIndexerStore { objects::df_name.eq(excluded(objects::df_name)), objects::df_object_type.eq(excluded(objects::df_object_type)), objects::df_object_id.eq(excluded(objects::df_object_id)), - ), - |excluded: StoredObject| ( - objects::object_id.eq(excluded.object_id.clone()), - objects::object_version.eq(excluded.object_version), - objects::object_digest.eq(excluded.object_digest.clone()), - objects::checkpoint_sequence_number.eq(excluded.checkpoint_sequence_number), - objects::owner_type.eq(excluded.owner_type), - objects::owner_id.eq(excluded.owner_id.clone()), - objects::object_type.eq(excluded.object_type.clone()), - objects::serialized_object.eq(excluded.serialized_object.clone()), - objects::coin_type.eq(excluded.coin_type.clone()), - objects::coin_balance.eq(excluded.coin_balance), - objects::df_kind.eq(excluded.df_kind), - objects::df_name.eq(excluded.df_name.clone()), - objects::df_object_type.eq(excluded.df_object_type.clone()), - objects::df_object_id.eq(excluded.df_object_id.clone()), - ), - conn - ); + )) + .execute(conn) + .await?; Ok::<(), IndexerError>(()) - }, - PG_DB_COMMIT_SLEEP_DURATION - ) + } + .scope_boxed() + }) + .await .tap_ok(|_| { let elapsed = guard.stop_and_record(); info!(elapsed, "Persisted {} chunked objects", len); @@ -393,18 +384,18 @@ impl PgIndexerStore { }) } - fn persist_object_deletion_chunk( + async fn persist_object_deletion_chunk( &self, deleted_objects_chunk: Vec, ) -> Result<(), IndexerError> { + use diesel_async::RunQueryDsl; let guard = self .metrics .checkpoint_db_commit_latency_objects_chunks .start_timer(); let len = deleted_objects_chunk.len(); - transactional_blocking_with_retry!( - &self.blocking_cp, - |conn| { + 
transaction_with_retry(&self.pool, PG_DB_COMMIT_SLEEP_DURATION, |conn| { + async { diesel::delete( objects::table.filter( objects::object_id.eq_any( @@ -416,13 +407,15 @@ impl PgIndexerStore { ), ) .execute(conn) + .await .map_err(IndexerError::from) .context("Failed to write object deletion to PostgresDB")?; Ok::<(), IndexerError>(()) - }, - PG_DB_COMMIT_SLEEP_DURATION - ) + } + .scope_boxed() + }) + .await .tap_ok(|_| { let elapsed = guard.stop_and_record(); info!(elapsed, "Deleted {} chunked objects", len); @@ -432,10 +425,11 @@ impl PgIndexerStore { }) } - fn backfill_objects_snapshot_chunk( + async fn backfill_objects_snapshot_chunk( &self, objects: Vec, ) -> Result<(), IndexerError> { + use diesel_async::RunQueryDsl; let guard = self .metrics .checkpoint_db_commit_latency_objects_snapshot_chunks @@ -452,17 +446,16 @@ impl PgIndexerStore { } } - transactional_blocking_with_retry!( - &self.blocking_cp, - |conn| { + transaction_with_retry(&self.pool, PG_DB_COMMIT_SLEEP_DURATION, |conn| { + async { for objects_snapshot_chunk in objects_snapshot.chunks(PG_COMMIT_CHUNK_SIZE_INTRA_DB_TX) { - on_conflict_do_update!( - objects_snapshot::table, - objects_snapshot_chunk, - objects_snapshot::object_id, - ( + diesel::insert_into(objects_snapshot::table) + .values(objects_snapshot_chunk) + .on_conflict(objects_snapshot::object_id) + .do_update() + .set(( objects_snapshot::object_version .eq(excluded(objects_snapshot::object_version)), objects_snapshot::object_status @@ -492,34 +485,15 @@ impl PgIndexerStore { .eq(excluded(objects_snapshot::df_object_type)), objects_snapshot::df_object_id .eq(excluded(objects_snapshot::df_object_id)), - ), - |excluded: StoredObjectSnapshot| ( - objects_snapshot::object_version.eq(excluded.object_version), - objects_snapshot::object_status.eq(excluded.object_status), - objects_snapshot::object_digest.eq(excluded.object_digest), - objects_snapshot::checkpoint_sequence_number - .eq(excluded.checkpoint_sequence_number), - objects_snapshot::owner_type.eq(excluded.owner_type), - objects_snapshot::owner_id.eq(excluded.owner_id), - objects_snapshot::object_type_package.eq(excluded.object_type_package), - objects_snapshot::object_type_module.eq(excluded.object_type_module), - objects_snapshot::object_type_name.eq(excluded.object_type_name), - objects_snapshot::object_type.eq(excluded.object_type), - objects_snapshot::serialized_object.eq(excluded.serialized_object), - objects_snapshot::coin_type.eq(excluded.coin_type), - objects_snapshot::coin_balance.eq(excluded.coin_balance), - objects_snapshot::df_kind.eq(excluded.df_kind), - objects_snapshot::df_name.eq(excluded.df_name), - objects_snapshot::df_object_type.eq(excluded.df_object_type), - objects_snapshot::df_object_id.eq(excluded.df_object_id), - ), - conn - ); + )) + .execute(conn) + .await?; } Ok::<(), IndexerError>(()) - }, - PG_DB_COMMIT_SLEEP_DURATION - ) + } + .scope_boxed() + }) + .await .tap_ok(|_| { let elapsed = guard.stop_and_record(); info!( @@ -533,58 +507,68 @@ impl PgIndexerStore { }) } - fn persist_objects_history_chunk( + async fn persist_objects_history_chunk( &self, objects: Vec, ) -> Result<(), IndexerError> { + use diesel_async::RunQueryDsl; let guard = self .metrics .checkpoint_db_commit_latency_objects_history_chunks .start_timer(); let mut mutated_objects: Vec = vec![]; - let mut object_versions: Vec = vec![]; let mut deleted_object_ids: Vec = vec![]; for object in objects { match object { ObjectChangeToCommit::MutatedObject(stored_object) => { - 
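Both the deletion path and the snapshot backfill keep PG_COMMIT_CHUNK_SIZE_INTRA_DB_TX from the old code, and the reason is a protocol limit rather than style: a single Postgres statement accepts at most 65,535 bind parameters, so a wide multi-row VALUES list must be split well before that ceiling. The shape inside the transaction, with the snapshot column list abbreviated to one column:

for chunk in objects_snapshot.chunks(PG_COMMIT_CHUNK_SIZE_INTRA_DB_TX) {
    // Each chunk becomes one INSERT ... ON CONFLICT statement, so the number
    // of binds per statement stays bounded regardless of input size.
    diesel::insert_into(objects_snapshot::table)
        .values(chunk)
        .on_conflict(objects_snapshot::object_id)
        .do_update()
        .set(objects_snapshot::object_version
            .eq(excluded(objects_snapshot::object_version)))
        .execute(conn)
        .await?;
}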
object_versions.push(StoredObjectVersion::from(&stored_object)); mutated_objects.push(stored_object.into()); } ObjectChangeToCommit::DeletedObject(stored_deleted_object) => { - object_versions.push(StoredObjectVersion::from(&stored_deleted_object)); deleted_object_ids.push(stored_deleted_object.into()); } } } - - transactional_blocking_with_retry!( - &self.blocking_cp, - |conn| { + transaction_with_retry(&self.pool, PG_DB_COMMIT_SLEEP_DURATION, |conn| { + async { for mutated_object_change_chunk in mutated_objects.chunks(PG_COMMIT_CHUNK_SIZE_INTRA_DB_TX) { - insert_or_ignore_into!( - objects_history::table, - mutated_object_change_chunk, - conn + let error_message = concat!( + "Failed to write to ", + stringify!((objects_history::table)), + " DB" ); - } - - for object_version_chunk in object_versions.chunks(PG_COMMIT_CHUNK_SIZE_INTRA_DB_TX) - { - insert_or_ignore_into!(objects_version::table, object_version_chunk, conn); + diesel::insert_into(objects_history::table) + .values(mutated_object_change_chunk) + .on_conflict_do_nothing() + .execute(conn) + .await + .map_err(IndexerError::from) + .context(error_message)?; } for deleted_objects_chunk in deleted_object_ids.chunks(PG_COMMIT_CHUNK_SIZE_INTRA_DB_TX) { - insert_or_ignore_into!(objects_history::table, deleted_objects_chunk, conn); + let error_message = concat!( + "Failed to write to ", + stringify!((objects_history::table)), + " DB" + ); + diesel::insert_into(objects_history::table) + .values(deleted_objects_chunk) + .on_conflict_do_nothing() + .execute(conn) + .await + .map_err(IndexerError::from) + .context(error_message)?; } Ok::<(), IndexerError>(()) - }, - PG_DB_COMMIT_SLEEP_DURATION - ) + } + .scope_boxed() + }) + .await .tap_ok(|_| { let elapsed = guard.stop_and_record(); info!( @@ -598,26 +582,34 @@ impl PgIndexerStore { }) } - fn persist_full_objects_history_chunk( + async fn persist_full_objects_history_chunk( &self, objects: Vec, ) -> Result<(), IndexerError> { + use diesel_async::RunQueryDsl; + let guard = self .metrics .checkpoint_db_commit_latency_full_objects_history_chunks .start_timer(); - transactional_blocking_with_retry!( - &self.blocking_cp, - |conn| { + transaction_with_retry(&self.pool, PG_DB_COMMIT_SLEEP_DURATION, |conn| { + async { for objects_chunk in objects.chunks(PG_COMMIT_CHUNK_SIZE_INTRA_DB_TX) { - insert_or_ignore_into!(full_objects_history::table, objects_chunk, conn); + diesel::insert_into(full_objects_history::table) + .values(objects_chunk) + .on_conflict_do_nothing() + .execute(conn) + .await + .map_err(IndexerError::from) + .context("Failed to write to full_objects_history table")?; } - Ok::<(), IndexerError>(()) - }, - PG_DB_COMMIT_SLEEP_DURATION - ) + Ok(()) + } + .scope_boxed() + }) + .await .tap_ok(|_| { let elapsed = guard.stop_and_record(); info!( @@ -631,8 +623,38 @@ impl PgIndexerStore { }) } - fn persist_checkpoints(&self, checkpoints: Vec) -> Result<(), IndexerError> { - let Some(first_checkpoint) = checkpoints.first() else { + async fn persist_object_version_chunk( + &self, + object_versions: Vec, + ) -> Result<(), IndexerError> { + use diesel_async::RunQueryDsl; + + transaction_with_retry(&self.pool, PG_DB_COMMIT_SLEEP_DURATION, |conn| { + async { + for object_version_chunk in object_versions.chunks(PG_COMMIT_CHUNK_SIZE_INTRA_DB_TX) + { + diesel::insert_into(objects_version::table) + .values(object_version_chunk) + .on_conflict_do_nothing() + .execute(conn) + .await + .map_err(IndexerError::from) + .context("Failed to write to objects_version table")?; + } + Ok::<(), IndexerError>(()) + } 
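Two things to note in this hunk. First, the StoredObjectVersion accumulation disappears from this method; those rows now flow through the new persist_object_version_chunk added further down. Second, the error_message construction: concat! and stringify! both run at compile time, so each insert gets a zero-cost &'static str context instead of a runtime format!. A sketch of what that expands to (token spacing of the expansion can vary by compiler version):

// stringify!((objects_history::table)) yields the source tokens as text, and
// concat! splices the pieces into one static string at compile time:
let error_message: &'static str = concat!(
    "Failed to write to ",
    stringify!((objects_history::table)),
    " DB"
);
// Roughly "Failed to write to (objects_history :: table) DB" - no heap
// allocation, unlike format!("Failed to write to {} DB", ...).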
+ .scope_boxed() + }) + .await + } + + async fn persist_checkpoints( + &self, + checkpoints: Vec, + ) -> Result<(), IndexerError> { + use diesel_async::RunQueryDsl; + + let Some(first_checkpoint) = checkpoints.as_slice().first() else { return Ok(()); }; @@ -640,21 +662,25 @@ impl PgIndexerStore { // chain identifier. if first_checkpoint.sequence_number == 0 { let checkpoint_digest = first_checkpoint.checkpoint_digest.into_inner().to_vec(); - self.persist_protocol_configs_and_feature_flags(checkpoint_digest.clone())?; - transactional_blocking_with_retry!( - &self.blocking_cp, - |conn| { + self.persist_protocol_configs_and_feature_flags(checkpoint_digest.clone()) + .await?; + + transaction_with_retry(&self.pool, PG_DB_COMMIT_SLEEP_DURATION, |conn| { + async { let checkpoint_digest = first_checkpoint.checkpoint_digest.into_inner().to_vec(); - insert_or_ignore_into!( - chain_identifier::table, - StoredChainIdentifier { checkpoint_digest }, - conn - ); + diesel::insert_into(chain_identifier::table) + .values(StoredChainIdentifier { checkpoint_digest }) + .on_conflict_do_nothing() + .execute(conn) + .await + .map_err(IndexerError::from) + .context("failed to write to chain_identifier table")?; Ok::<(), IndexerError>(()) - }, - PG_DB_COMMIT_SLEEP_DURATION - )?; + } + .scope_boxed() + }) + .await?; } let guard = self .metrics @@ -662,16 +688,22 @@ impl PgIndexerStore { .start_timer(); let stored_cp_txs = checkpoints.iter().map(StoredCpTx::from).collect::>(); - transactional_blocking_with_retry!( - &self.blocking_cp, - |conn| { + transaction_with_retry(&self.pool, PG_DB_COMMIT_SLEEP_DURATION, |conn| { + async { for stored_cp_tx_chunk in stored_cp_txs.chunks(PG_COMMIT_CHUNK_SIZE_INTRA_DB_TX) { - insert_or_ignore_into!(pruner_cp_watermark::table, stored_cp_tx_chunk, conn); + diesel::insert_into(pruner_cp_watermark::table) + .values(stored_cp_tx_chunk) + .on_conflict_do_nothing() + .execute(conn) + .await + .map_err(IndexerError::from) + .context("Failed to write to pruner_cp_watermark table")?; } Ok::<(), IndexerError>(()) - }, - PG_DB_COMMIT_SLEEP_DURATION - ) + } + .scope_boxed() + }) + .await .tap_ok(|_| { info!( "Persisted {} pruner_cp_watermark rows.", @@ -686,33 +718,46 @@ impl PgIndexerStore { .iter() .map(StoredCheckpoint::from) .collect::>(); - transactional_blocking_with_retry!( - &self.blocking_cp, - |conn| { + transaction_with_retry(&self.pool, PG_DB_COMMIT_SLEEP_DURATION, |conn| { + async { for stored_checkpoint_chunk in stored_checkpoints.chunks(PG_COMMIT_CHUNK_SIZE_INTRA_DB_TX) { - insert_or_ignore_into!(checkpoints::table, stored_checkpoint_chunk, conn); + diesel::insert_into(checkpoints::table) + .values(stored_checkpoint_chunk) + .on_conflict_do_nothing() + .execute(conn) + .await + .map_err(IndexerError::from) + .context("Failed to write to checkpoints table")?; let time_now_ms = chrono::Utc::now().timestamp_millis(); for stored_checkpoint in stored_checkpoint_chunk { self.metrics .db_commit_lag_ms .set(time_now_ms - stored_checkpoint.timestamp_ms); - self.metrics.max_committed_checkpoint_sequence_number.set( - stored_checkpoint.sequence_number, - ); - self.metrics.committed_checkpoint_timestamp_ms.set( - stored_checkpoint.timestamp_ms, - ); + self.metrics + .max_committed_checkpoint_sequence_number + .set(stored_checkpoint.sequence_number); + self.metrics + .committed_checkpoint_timestamp_ms + .set(stored_checkpoint.timestamp_ms); } + for stored_checkpoint in stored_checkpoint_chunk { - info!("Indexer lag: persisted checkpoint {} with time now {} and checkpoint time {}", 
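The genesis special case above is the only place the chain identifier can be learned, since it is defined as the digest of checkpoint 0. A condensed sketch of the guard, emphasizing why insert-or-ignore matters here:

// Runs once per chain: later batches never satisfy the guard, and a
// re-ingested genesis batch hits on_conflict_do_nothing instead of a
// unique-constraint violation, keeping the write idempotent.
if first_checkpoint.sequence_number == 0 {
    let checkpoint_digest = first_checkpoint.checkpoint_digest.into_inner().to_vec();
    // Protocol configs and feature flags are keyed off the chain id, so they
    // are persisted first, then the chain_identifier row itself (inside the
    // retried transaction shown above).
    self.persist_protocol_configs_and_feature_flags(checkpoint_digest.clone())
        .await?;
}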
stored_checkpoint.sequence_number, time_now_ms, stored_checkpoint.timestamp_ms); + info!( + "Indexer lag: \ + persisted checkpoint {} with time now {} and checkpoint time {}", + stored_checkpoint.sequence_number, + time_now_ms, + stored_checkpoint.timestamp_ms + ); } } Ok::<(), IndexerError>(()) - }, - PG_DB_COMMIT_SLEEP_DURATION - ) + } + .scope_boxed() + }) + .await .tap_ok(|_| { let elapsed = guard.stop_and_record(); info!( @@ -726,10 +771,11 @@ impl PgIndexerStore { }) } - fn persist_transactions_chunk( + async fn persist_transactions_chunk( &self, transactions: Vec, ) -> Result<(), IndexerError> { + use diesel_async::RunQueryDsl; let guard = self .metrics .checkpoint_db_commit_latency_transactions_chunks @@ -744,16 +790,27 @@ impl PgIndexerStore { .collect::>(); drop(transformation_guard); - transactional_blocking_with_retry!( - &self.blocking_cp, - |conn| { + transaction_with_retry(&self.pool, PG_DB_COMMIT_SLEEP_DURATION, |conn| { + async { for transaction_chunk in transactions.chunks(PG_COMMIT_CHUNK_SIZE_INTRA_DB_TX) { - insert_or_ignore_into!(transactions::table, transaction_chunk, conn); + let error_message = concat!( + "Failed to write to ", + stringify!((transactions::table)), + " DB" + ); + diesel::insert_into(transactions::table) + .values(transaction_chunk) + .on_conflict_do_nothing() + .execute(conn) + .await + .map_err(IndexerError::from) + .context(error_message)?; } Ok::<(), IndexerError>(()) - }, - PG_DB_COMMIT_SLEEP_DURATION - ) + } + .scope_boxed() + }) + .await .tap_ok(|_| { let elapsed = guard.stop_and_record(); info!( @@ -767,7 +824,8 @@ impl PgIndexerStore { }) } - fn persist_events_chunk(&self, events: Vec) -> Result<(), IndexerError> { + async fn persist_events_chunk(&self, events: Vec) -> Result<(), IndexerError> { + use diesel_async::RunQueryDsl; let guard = self .metrics .checkpoint_db_commit_latency_events_chunks @@ -778,16 +836,24 @@ impl PgIndexerStore { .map(StoredEvent::from) .collect::>(); - transactional_blocking_with_retry!( - &self.blocking_cp, - |conn| { + transaction_with_retry(&self.pool, PG_DB_COMMIT_SLEEP_DURATION, |conn| { + async { for event_chunk in events.chunks(PG_COMMIT_CHUNK_SIZE_INTRA_DB_TX) { - insert_or_ignore_into!(events::table, event_chunk, conn); + let error_message = + concat!("Failed to write to ", stringify!((events::table)), " DB"); + diesel::insert_into(events::table) + .values(event_chunk) + .on_conflict_do_nothing() + .execute(conn) + .await + .map_err(IndexerError::from) + .context(error_message)?; } Ok::<(), IndexerError>(()) - }, - PG_DB_COMMIT_SLEEP_DURATION - ) + } + .scope_boxed() + }) + .await .tap_ok(|_| { let elapsed = guard.stop_and_record(); info!(elapsed, "Persisted {} chunked events", len); @@ -797,7 +863,8 @@ impl PgIndexerStore { }) } - fn persist_packages(&self, packages: Vec) -> Result<(), IndexerError> { + async fn persist_packages(&self, packages: Vec) -> Result<(), IndexerError> { + use diesel_async::RunQueryDsl; if packages.is_empty() { return Ok(()); } @@ -809,29 +876,25 @@ impl PgIndexerStore { .into_iter() .map(StoredPackage::from) .collect::>(); - transactional_blocking_with_retry!( - &self.blocking_cp, - |conn| { + transaction_with_retry(&self.pool, PG_DB_COMMIT_SLEEP_DURATION, |conn| { + async { for packages_chunk in packages.chunks(PG_COMMIT_CHUNK_SIZE_INTRA_DB_TX) { - on_conflict_do_update!( - packages::table, - packages_chunk, - packages::package_id, - ( + diesel::insert_into(packages::table) + .values(packages_chunk) + .on_conflict(packages::package_id) + .do_update() + .set(( 
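Every persist_*_chunk method shares the same instrumentation frame, which survives the async rewrite unchanged. A sketch of that frame, assuming a prometheus histogram and the tap crate (both already in use in this file); do_commit() stands in for the transaction_with_retry call:

use tap::TapFallible;

let guard = self
    .metrics
    .checkpoint_db_commit_latency_transactions_chunks
    .start_timer();

do_commit()
    .await
    .tap_ok(|_| {
        // stop_and_record stops the timer, feeds the histogram, and returns
        // the elapsed seconds for the log line.
        let elapsed = guard.stop_and_record();
        info!(elapsed, "Persisted {} chunked transactions", len);
    })
    .tap_err(|e| {
        tracing::error!("Failed to persist transactions with error: {}", e);
    })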
packages::package_id.eq(excluded(packages::package_id)), packages::move_package.eq(excluded(packages::move_package)), - ), - |excluded: StoredPackage| ( - packages::package_id.eq(excluded.package_id.clone()), - packages::move_package.eq(excluded.move_package), - ), - conn - ); + )) + .execute(conn) + .await?; } Ok::<(), IndexerError>(()) - }, - PG_DB_COMMIT_SLEEP_DURATION - ) + } + .scope_boxed() + }) + .await .tap_ok(|_| { let elapsed = guard.stop_and_record(); info!(elapsed, "Persisted {} packages", packages.len()); @@ -845,6 +908,8 @@ impl PgIndexerStore { &self, indices: Vec, ) -> Result<(), IndexerError> { + use diesel_async::RunQueryDsl; + let guard = self .metrics .checkpoint_db_commit_latency_event_indices_chunks @@ -897,364 +962,214 @@ impl PgIndexerStore { }, ); - // Now persist all the event indices in parallel into their tables. - let mut futures = vec![]; - futures.push(self.spawn_blocking_task(move |this| { - persist_chunk_into_table!( - event_emit_package::table, - event_emit_packages, - &this.blocking_cp - ) - })); + transaction_with_retry(&self.pool, PG_DB_COMMIT_SLEEP_DURATION, |conn| { + async { + diesel::insert_into(event_emit_package::table) + .values(&event_emit_packages) + .on_conflict_do_nothing() + .execute(conn) + .await?; - futures.push(self.spawn_blocking_task(move |this| { - persist_chunk_into_table!( - event_emit_module::table, - event_emit_modules, - &this.blocking_cp - ) - })); + diesel::insert_into(event_emit_module::table) + .values(&event_emit_modules) + .on_conflict_do_nothing() + .execute(conn) + .await?; - futures.push(self.spawn_blocking_task(move |this| { - persist_chunk_into_table!(event_senders::table, event_senders, &this.blocking_cp) - })); + diesel::insert_into(event_senders::table) + .values(&event_senders) + .on_conflict_do_nothing() + .execute(conn) + .await?; - futures.push(self.spawn_blocking_task(move |this| { - persist_chunk_into_table!( - event_struct_package::table, - event_struct_packages, - &this.blocking_cp - ) - })); + diesel::insert_into(event_struct_package::table) + .values(&event_struct_packages) + .on_conflict_do_nothing() + .execute(conn) + .await?; - futures.push(self.spawn_blocking_task(move |this| { - persist_chunk_into_table!( - event_struct_module::table, - event_struct_modules, - &this.blocking_cp - ) - })); + diesel::insert_into(event_struct_module::table) + .values(&event_struct_modules) + .on_conflict_do_nothing() + .execute(conn) + .await?; - futures.push(self.spawn_blocking_task(move |this| { - persist_chunk_into_table!( - event_struct_name::table, - event_struct_names, - &this.blocking_cp - ) - })); + diesel::insert_into(event_struct_name::table) + .values(&event_struct_names) + .on_conflict_do_nothing() + .execute(conn) + .await?; - futures.push(self.spawn_blocking_task(move |this| { - persist_chunk_into_table!( - event_struct_instantiation::table, - event_struct_instantiations, - &this.blocking_cp - ) - })); + diesel::insert_into(event_struct_instantiation::table) + .values(&event_struct_instantiations) + .on_conflict_do_nothing() + .execute(conn) + .await?; + + Ok(()) + } + .scope_boxed() + }) + .await?; - futures::future::join_all(futures) - .await - .into_iter() - .collect::, _>>() - .map_err(|e| { - tracing::error!("Failed to join event indices futures in a chunk: {}", e); - IndexerError::from(e) - })? 
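The structural change in persist_event_indices_chunk deserves a comment: the seven event-index tables were previously written by seven spawn_blocking tasks joined at the end, and are now sequential statements inside one retried transaction. That trades write parallelism for atomicity (all indices for a chunk land or none do) and removes an entire class of join/JoinError handling. Reduced to two tables, the new shape is:

transaction_with_retry(&self.pool, PG_DB_COMMIT_SLEEP_DURATION, |conn| {
    async {
        diesel::insert_into(event_senders::table)
            .values(&event_senders)
            .on_conflict_do_nothing()
            .execute(conn)
            .await?;
        diesel::insert_into(event_emit_module::table)
            .values(&event_emit_modules)
            .on_conflict_do_nothing()
            .execute(conn)
            .await?;
        Ok(())
    }
    .scope_boxed()
})
.await?;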
- .into_iter() - .collect::, _>>() - .map_err(|e| { - IndexerError::PostgresWriteError(format!( - "Failed to persist all event indices in a chunk: {:?}", - e - )) - })?; let elapsed = guard.stop_and_record(); info!(elapsed, "Persisted {} chunked event indices", len); Ok(()) } async fn persist_tx_indices_chunk(&self, indices: Vec) -> Result<(), IndexerError> { + use diesel_async::RunQueryDsl; + let guard = self .metrics .checkpoint_db_commit_latency_tx_indices_chunks .start_timer(); let len = indices.len(); - let (senders, recipients, input_objects, changed_objects, pkgs, mods, funs, digests, kinds) = - indices.into_iter().map(|i| i.split()).fold( + let ( + affected_addresses, + senders, + recipients, + input_objects, + changed_objects, + pkgs, + mods, + funs, + digests, + kinds, + ) = indices.into_iter().map(|i| i.split()).fold( + ( + Vec::new(), + Vec::new(), + Vec::new(), + Vec::new(), + Vec::new(), + Vec::new(), + Vec::new(), + Vec::new(), + Vec::new(), + Vec::new(), + ), + |( + mut tx_affected_addresses, + mut tx_senders, + mut tx_recipients, + mut tx_input_objects, + mut tx_changed_objects, + mut tx_pkgs, + mut tx_mods, + mut tx_funs, + mut tx_digests, + mut tx_kinds, + ), + index| { + tx_affected_addresses.extend(index.0); + tx_senders.extend(index.1); + tx_recipients.extend(index.2); + tx_input_objects.extend(index.3); + tx_changed_objects.extend(index.4); + tx_pkgs.extend(index.5); + tx_mods.extend(index.6); + tx_funs.extend(index.7); + tx_digests.extend(index.8); + tx_kinds.extend(index.9); ( - Vec::new(), - Vec::new(), - Vec::new(), - Vec::new(), - Vec::new(), - Vec::new(), - Vec::new(), - Vec::new(), - Vec::new(), - ), - |( - mut tx_senders, - mut tx_recipients, - mut tx_input_objects, - mut tx_changed_objects, - mut tx_pkgs, - mut tx_mods, - mut tx_funs, - mut tx_digests, - mut tx_kinds, - ), - index| { - tx_senders.extend(index.0); - tx_recipients.extend(index.1); - tx_input_objects.extend(index.2); - tx_changed_objects.extend(index.3); - tx_pkgs.extend(index.4); - tx_mods.extend(index.5); - tx_funs.extend(index.6); - tx_digests.extend(index.7); - tx_kinds.extend(index.8); - ( - tx_senders, - tx_recipients, - tx_input_objects, - tx_changed_objects, - tx_pkgs, - tx_mods, - tx_funs, - tx_digests, - tx_kinds, - ) - }, - ); + tx_affected_addresses, + tx_senders, + tx_recipients, + tx_input_objects, + tx_changed_objects, + tx_pkgs, + tx_mods, + tx_funs, + tx_digests, + tx_kinds, + ) + }, + ); - let mut futures = vec![]; - futures.push(self.spawn_blocking_task(move |this| { - let now = Instant::now(); - let senders_len = senders.len(); - let recipients_len = recipients.len(); - transactional_blocking_with_retry!( - &this.blocking_cp, - |conn| { - for chunk in senders.chunks(PG_COMMIT_CHUNK_SIZE_INTRA_DB_TX) { - insert_or_ignore_into!(tx_senders::table, chunk, conn); - } - for chunk in recipients.chunks(PG_COMMIT_CHUNK_SIZE_INTRA_DB_TX) { - insert_or_ignore_into!(tx_recipients::table, chunk, conn); - } - Ok::<(), IndexerError>(()) - }, - PG_DB_COMMIT_SLEEP_DURATION - ) - .tap_ok(|_| { - let elapsed = now.elapsed().as_secs_f64(); - info!( - elapsed, - "Persisted {} rows to tx_senders and {} rows to tx_recipients", - senders_len, - recipients_len, - ); - }) - .tap_err(|e| { - tracing::error!( - "Failed to persist tx_senders and tx_recipients with error: {}", - e - ); - }) - })); - - futures.push(self.spawn_blocking_task(move |this| { - let now = Instant::now(); - let input_objects_len = input_objects.len(); - transactional_blocking_with_retry!( - &this.blocking_cp, - |conn| { - for 
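The widened fold above exists because TxIndex::split now also yields tx_affected_addresses as the first tuple element, shifting every positional access (index.0 through index.9) by one. An equivalent, easier-to-audit formulation of the same accumulation, shown hypothetically with two of the ten columns:

// Each split() yields one tuple of per-table row vectors; the loop
// concatenates them column-wise, exactly as the fold does.
let mut affected_addresses = Vec::new();
let mut senders = Vec::new();
for index in indices {
    let (aff, snd) = index.split(); // the real split() returns a 10-tuple
    affected_addresses.extend(aff);
    senders.extend(snd);
}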
chunk in input_objects.chunks(PG_COMMIT_CHUNK_SIZE_INTRA_DB_TX) { - insert_or_ignore_into!(tx_input_objects::table, chunk, conn); - } - Ok::<(), IndexerError>(()) - }, - PG_DB_COMMIT_SLEEP_DURATION - ) - .tap_ok(|_| { - let elapsed = now.elapsed().as_secs_f64(); - info!( - elapsed, - "Persisted {} rows to tx_input_objects", input_objects_len, - ); - }) - .tap_err(|e| { - tracing::error!("Failed to persist tx_input_objects with error: {}", e); - }) - })); - - futures.push(self.spawn_blocking_task(move |this| { - let now = Instant::now(); - let changed_objects_len = changed_objects.len(); - transactional_blocking_with_retry!( - &this.blocking_cp, - |conn| { - for chunk in changed_objects.chunks(PG_COMMIT_CHUNK_SIZE_INTRA_DB_TX) { - insert_or_ignore_into!(tx_changed_objects::table, chunk, conn); - } - Ok::<(), IndexerError>(()) - }, - PG_DB_COMMIT_SLEEP_DURATION - ) - .tap_ok(|_| { - let elapsed = now.elapsed().as_secs_f64(); - info!( - elapsed, - "Persisted {} rows to tx_changed_objects table", changed_objects_len, - ); - }) - .tap_err(|e| { - tracing::error!("Failed to persist tx_changed_objects with error: {}", e); - }) - })); - - futures.push(self.spawn_blocking_task(move |this| { - let now = Instant::now(); - let rows_len = pkgs.len(); - transactional_blocking_with_retry!( - &this.blocking_cp, - |conn| { - for chunk in pkgs.chunks(PG_COMMIT_CHUNK_SIZE_INTRA_DB_TX) { - insert_or_ignore_into!(tx_calls_pkg::table, chunk, conn); - } - Ok::<(), IndexerError>(()) - }, - PG_DB_COMMIT_SLEEP_DURATION - ) - .tap_ok(|_| { - let elapsed = now.elapsed().as_secs_f64(); - info!( - elapsed, - "Persisted {} rows to tx_calls_pkg tables", rows_len - ); - }) - .tap_err(|e| { - tracing::error!("Failed to persist tx_calls_pkg with error: {}", e); - }) - })); - - futures.push(self.spawn_blocking_task(move |this| { - let now = Instant::now(); - let rows_len = mods.len(); - transactional_blocking_with_retry!( - &this.blocking_cp, - |conn| { - for chunk in mods.chunks(PG_COMMIT_CHUNK_SIZE_INTRA_DB_TX) { - insert_or_ignore_into!(tx_calls_mod::table, chunk, conn); - } - Ok::<(), IndexerError>(()) - }, - PG_DB_COMMIT_SLEEP_DURATION - ) - .tap_ok(|_| { - let elapsed = now.elapsed().as_secs_f64(); - info!(elapsed, "Persisted {} rows to tx_calls_mod table", rows_len); - }) - .tap_err(|e| { - tracing::error!("Failed to persist tx_calls_mod with error: {}", e); - }) - })); - - futures.push(self.spawn_blocking_task(move |this| { - let now = Instant::now(); - let rows_len = funs.len(); - transactional_blocking_with_retry!( - &this.blocking_cp, - |conn| { - for chunk in funs.chunks(PG_COMMIT_CHUNK_SIZE_INTRA_DB_TX) { - insert_or_ignore_into!(tx_calls_fun::table, chunk, conn); - } - Ok::<(), IndexerError>(()) - }, - PG_DB_COMMIT_SLEEP_DURATION - ) - .tap_ok(|_| { - let elapsed = now.elapsed().as_secs_f64(); - info!(elapsed, "Persisted {} rows to tx_calls_fun table", rows_len); - }) - .tap_err(|e| { - tracing::error!("Failed to persist tx_calls_fun with error: {}", e); - }) - })); - - futures.push(self.spawn_blocking_task(move |this| { - let now = Instant::now(); - let calls_len = digests.len(); - transactional_blocking_with_retry!( - &this.blocking_cp, - |conn| { - for chunk in digests.chunks(PG_COMMIT_CHUNK_SIZE_INTRA_DB_TX) { - insert_or_ignore_into!(tx_digests::table, chunk, conn); - } - Ok::<(), IndexerError>(()) - }, - Duration::from_secs(60) - ) - .tap_ok(|_| { - let elapsed = now.elapsed().as_secs_f64(); - info!(elapsed, "Persisted {} rows to tx_digests tables", calls_len); - }) - .tap_err(|e| { - 
tracing::error!("Failed to persist tx_digests with error: {}", e); - }) - })); - - futures.push(self.spawn_blocking_task(move |this| { - let now = Instant::now(); - let rows_len = kinds.len(); - transactional_blocking_with_retry!( - &this.blocking_cp, - |conn| { - for chunk in kinds.chunks(PG_COMMIT_CHUNK_SIZE_INTRA_DB_TX) { - insert_or_ignore_into!(tx_kinds::table, chunk, conn); - } - Ok::<(), IndexerError>(()) - }, - Duration::from_secs(60) - ) - .tap_ok(|_| { - let elapsed = now.elapsed().as_secs_f64(); - info!(elapsed, "Persisted {} rows to tx_kinds tables", rows_len); - }) - .tap_err(|e| { - tracing::error!("Failed to persist tx_kinds with error: {}", e); - }) - })); + transaction_with_retry(&self.pool, PG_DB_COMMIT_SLEEP_DURATION, |conn| { + async { + diesel::insert_into(tx_affected_addresses::table) + .values(&affected_addresses) + .on_conflict_do_nothing() + .execute(conn) + .await?; + + diesel::insert_into(tx_senders::table) + .values(&senders) + .on_conflict_do_nothing() + .execute(conn) + .await?; + + diesel::insert_into(tx_recipients::table) + .values(&recipients) + .on_conflict_do_nothing() + .execute(conn) + .await?; + + diesel::insert_into(tx_input_objects::table) + .values(&input_objects) + .on_conflict_do_nothing() + .execute(conn) + .await?; + + diesel::insert_into(tx_changed_objects::table) + .values(&changed_objects) + .on_conflict_do_nothing() + .execute(conn) + .await?; + + diesel::insert_into(tx_calls_pkg::table) + .values(&pkgs) + .on_conflict_do_nothing() + .execute(conn) + .await?; + + diesel::insert_into(tx_calls_mod::table) + .values(&mods) + .on_conflict_do_nothing() + .execute(conn) + .await?; + + diesel::insert_into(tx_calls_fun::table) + .values(&funs) + .on_conflict_do_nothing() + .execute(conn) + .await?; + + diesel::insert_into(tx_digests::table) + .values(&digests) + .on_conflict_do_nothing() + .execute(conn) + .await?; + + diesel::insert_into(tx_kinds::table) + .values(&kinds) + .on_conflict_do_nothing() + .execute(conn) + .await?; + + Ok(()) + } + .scope_boxed() + }) + .await?; - futures::future::join_all(futures) - .await - .into_iter() - .collect::, _>>() - .map_err(|e| { - tracing::error!("Failed to join tx indices futures in a chunk: {}", e); - IndexerError::from(e) - })? 
- .into_iter() - .collect::, _>>() - .map_err(|e| { - IndexerError::PostgresWriteError(format!( - "Failed to persist all tx indices in a chunk: {:?}", - e - )) - })?; let elapsed = guard.stop_and_record(); info!(elapsed, "Persisted {} chunked tx_indices", len); Ok(()) } - fn persist_epoch(&self, epoch: EpochToCommit) -> Result<(), IndexerError> { + async fn persist_epoch(&self, epoch: EpochToCommit) -> Result<(), IndexerError> { + use diesel_async::RunQueryDsl; let guard = self .metrics .checkpoint_db_commit_latency_epoch .start_timer(); let epoch_id = epoch.new_epoch.epoch; - transactional_blocking_with_retry!( - &self.blocking_cp, - |conn| { + transaction_with_retry(&self.pool, PG_DB_COMMIT_SLEEP_DURATION, |conn| { + async { if let Some(last_epoch) = &epoch.last_epoch { let last_epoch_id = last_epoch.epoch; // Overwrites the `epoch_total_transactions` field on `epoch.last_epoch` because @@ -1269,6 +1184,7 @@ impl PgIndexerStore { .filter(checkpoints::epoch.eq(prev_epoch_id as i64)) .select(max(checkpoints::network_total_transactions)) .first::>(conn) + .await .map(|o| o.unwrap_or(0))?; result as u64 @@ -1281,14 +1197,11 @@ impl PgIndexerStore { let mut last_epoch = StoredEpochInfo::from_epoch_end_info(last_epoch); last_epoch.epoch_total_transactions = Some(epoch_total_transactions as i64); info!(last_epoch_id, "Persisting epoch end data."); - on_conflict_do_update!( - epochs::table, - vec![last_epoch], - epochs::epoch, - ( - // Note: Exclude epoch beginning info except system_state below. - // This is to ensure that epoch beginning info columns are not overridden with default values, - // because these columns are default values in `last_epoch`. + diesel::insert_into(epochs::table) + .values(vec![last_epoch]) + .on_conflict(epochs::epoch) + .do_update() + .set(( epochs::system_state.eq(excluded(epochs::system_state)), epochs::epoch_total_transactions .eq(excluded(epochs::epoch_total_transactions)), @@ -1305,36 +1218,28 @@ impl PgIndexerStore { epochs::leftover_storage_fund_inflow .eq(excluded(epochs::leftover_storage_fund_inflow)), epochs::epoch_commitments.eq(excluded(epochs::epoch_commitments)), - ), - |excluded: StoredEpochInfo| ( - epochs::system_state.eq(excluded.system_state.clone()), - epochs::epoch_total_transactions.eq(excluded.epoch_total_transactions), - epochs::last_checkpoint_id.eq(excluded.last_checkpoint_id), - epochs::epoch_end_timestamp.eq(excluded.epoch_end_timestamp), - epochs::storage_fund_reinvestment - .eq(excluded.storage_fund_reinvestment), - epochs::storage_charge.eq(excluded.storage_charge), - epochs::storage_rebate.eq(excluded.storage_rebate), - epochs::stake_subsidy_amount.eq(excluded.stake_subsidy_amount), - epochs::total_gas_fees.eq(excluded.total_gas_fees), - epochs::total_stake_rewards_distributed - .eq(excluded.total_stake_rewards_distributed), - epochs::leftover_storage_fund_inflow - .eq(excluded.leftover_storage_fund_inflow), - epochs::epoch_commitments.eq(excluded.epoch_commitments) - ), - conn - ); + )) + .execute(conn) + .await?; } let epoch_id = epoch.new_epoch.epoch; info!(epoch_id, "Persisting epoch beginning info"); let new_epoch = StoredEpochInfo::from_epoch_beginning_info(&epoch.new_epoch); - insert_or_ignore_into!(epochs::table, new_epoch, conn); + let error_message = + concat!("Failed to write to ", stringify!((epochs::table)), " DB"); + diesel::insert_into(epochs::table) + .values(new_epoch) + .on_conflict_do_nothing() + .execute(conn) + .await + .map_err(IndexerError::from) + .context(error_message)?; Ok::<(), IndexerError>(()) - }, - 
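One subtlety carried over from the deleted macro call is easy to miss now that its explanatory comment is gone: the do_update().set(...) list in persist_epoch deliberately omits the epoch-beginning columns. last_epoch is built from end-of-epoch data, so those columns hold defaults; updating only the listed columns keeps the beginning-of-epoch values already in the row intact. In miniature:

// Only end-of-epoch columns appear in set(); epoch-beginning columns keep
// whatever was written when the epoch started, instead of being clobbered
// by the defaults carried in `last_epoch`.
diesel::insert_into(epochs::table)
    .values(vec![last_epoch])
    .on_conflict(epochs::epoch)
    .do_update()
    .set((
        epochs::epoch_total_transactions.eq(excluded(epochs::epoch_total_transactions)),
        epochs::last_checkpoint_id.eq(excluded(epochs::last_checkpoint_id)),
    ))
    .execute(conn)
    .await?;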
PG_DB_COMMIT_SLEEP_DURATION - ) + } + .scope_boxed() + }) + .await .tap_ok(|_| { let elapsed = guard.stop_and_record(); info!(elapsed, epoch_id, "Persisted epoch beginning info"); @@ -1344,22 +1249,25 @@ impl PgIndexerStore { }) } - fn advance_epoch(&self, epoch_to_commit: EpochToCommit) -> Result<(), IndexerError> { + async fn advance_epoch(&self, epoch_to_commit: EpochToCommit) -> Result<(), IndexerError> { + use diesel_async::RunQueryDsl; + + let mut connection = self.pool.get().await?; + let last_epoch_id = epoch_to_commit.last_epoch.as_ref().map(|e| e.epoch); // partition_0 has been created, so no need to advance it. if let Some(last_epoch_id) = last_epoch_id { - let last_db_epoch: Option = - read_only_blocking!(&self.blocking_cp, |conn| { - epochs::table - .filter(epochs::epoch.eq(last_epoch_id as i64)) - .first::(conn) - .optional() - }) + let last_db_epoch: Option = epochs::table + .filter(epochs::epoch.eq(last_epoch_id as i64)) + .first::(&mut connection) + .await + .optional() + .map_err(Into::into) .context("Failed to read last epoch from PostgresDB")?; if let Some(last_epoch) = last_db_epoch { let epoch_partition_data = EpochPartitionData::compose_data(epoch_to_commit, last_epoch); - let table_partitions = self.partition_manager.get_table_partitions()?; + let table_partitions = self.partition_manager.get_table_partitions().await?; for (table, (_, last_partition)) in table_partitions { // Only advance epoch partition for epoch partitioned tables. if !self @@ -1370,11 +1278,9 @@ impl PgIndexerStore { continue; } let guard = self.metrics.advance_epoch_latency.start_timer(); - self.partition_manager.advance_epoch( - table.clone(), - last_partition, - &epoch_partition_data, - )?; + self.partition_manager + .advance_epoch(table.clone(), last_partition, &epoch_partition_data) + .await?; let elapsed = guard.stop_and_record(); info!( elapsed, @@ -1391,270 +1297,245 @@ impl PgIndexerStore { Ok(()) } - fn prune_checkpoints_table(&self, cp: u64) -> Result<(), IndexerError> { - transactional_blocking_with_retry!( - &self.blocking_cp, - |conn| { + async fn prune_checkpoints_table(&self, cp: u64) -> Result<(), IndexerError> { + use diesel_async::RunQueryDsl; + transaction_with_retry(&self.pool, PG_DB_COMMIT_SLEEP_DURATION, |conn| { + async { diesel::delete( checkpoints::table.filter(checkpoints::sequence_number.eq(cp as i64)), ) .execute(conn) + .await .map_err(IndexerError::from) .context("Failed to prune checkpoints table")?; Ok::<(), IndexerError>(()) - }, - PG_DB_COMMIT_SLEEP_DURATION - ) + } + .scope_boxed() + }) + .await } - fn prune_epochs_table(&self, epoch: u64) -> Result<(), IndexerError> { - transactional_blocking_with_retry!( - &self.blocking_cp, - |conn| { + async fn prune_epochs_table(&self, epoch: u64) -> Result<(), IndexerError> { + use diesel_async::RunQueryDsl; + transaction_with_retry(&self.pool, PG_DB_COMMIT_SLEEP_DURATION, |conn| { + async { diesel::delete(epochs::table.filter(epochs::epoch.eq(epoch as i64))) .execute(conn) + .await .map_err(IndexerError::from) .context("Failed to prune epochs table")?; Ok::<(), IndexerError>(()) - }, - PG_DB_COMMIT_SLEEP_DURATION - ) + } + .scope_boxed() + }) + .await } - fn prune_event_indices_table(&self, min_tx: u64, max_tx: u64) -> Result<(), IndexerError> { + async fn prune_event_indices_table( + &self, + min_tx: u64, + max_tx: u64, + ) -> Result<(), IndexerError> { + use diesel_async::RunQueryDsl; + let (min_tx, max_tx) = (min_tx as i64, max_tx as i64); - transactional_blocking_with_retry!( - &self.blocking_cp, - |conn| { - 
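advance_epoch's read of the previous epoch uses the .optional() idiom, whose call order changes with the async rewrite: the future is awaited first, then the result is folded. Sketch:

use diesel::OptionalExtension;

// first() returns Err(NotFound) when no row matches; optional() converts
// exactly that variant into Ok(None) and passes every other error through.
let last_db_epoch: Option<StoredEpochInfo> = epochs::table
    .filter(epochs::epoch.eq(last_epoch_id as i64))
    .first::<StoredEpochInfo>(&mut connection)
    .await
    .optional()?;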
prune_tx_or_event_indice_table!( - event_emit_module, - conn, - min_tx, - max_tx, - "Failed to prune event_emit_module table" - ); - prune_tx_or_event_indice_table!( - event_emit_package, - conn, - min_tx, - max_tx, - "Failed to prune event_emit_package table" - ); - prune_tx_or_event_indice_table![ - event_senders, - conn, - min_tx, - max_tx, - "Failed to prune event_senders table" - ]; - prune_tx_or_event_indice_table![ - event_struct_instantiation, - conn, - min_tx, - max_tx, - "Failed to prune event_struct_instantiation table" - ]; - prune_tx_or_event_indice_table![ - event_struct_module, - conn, - min_tx, - max_tx, - "Failed to prune event_struct_module table" - ]; - prune_tx_or_event_indice_table![ - event_struct_name, - conn, - min_tx, - max_tx, - "Failed to prune event_struct_name table" - ]; - prune_tx_or_event_indice_table![ - event_struct_package, - conn, - min_tx, - max_tx, - "Failed to prune event_struct_package table" - ]; - Ok::<(), IndexerError>(()) - }, - PG_DB_COMMIT_SLEEP_DURATION - ) + transaction_with_retry(&self.pool, PG_DB_COMMIT_SLEEP_DURATION, |conn| { + async { + diesel::delete( + event_emit_module::table + .filter(event_emit_module::tx_sequence_number.between(min_tx, max_tx)), + ) + .execute(conn) + .await?; + + diesel::delete( + event_emit_package::table + .filter(event_emit_package::tx_sequence_number.between(min_tx, max_tx)), + ) + .execute(conn) + .await?; + + diesel::delete( + event_senders::table + .filter(event_senders::tx_sequence_number.between(min_tx, max_tx)), + ) + .execute(conn) + .await?; + + diesel::delete(event_struct_instantiation::table.filter( + event_struct_instantiation::tx_sequence_number.between(min_tx, max_tx), + )) + .execute(conn) + .await?; + + diesel::delete( + event_struct_module::table + .filter(event_struct_module::tx_sequence_number.between(min_tx, max_tx)), + ) + .execute(conn) + .await?; + + diesel::delete( + event_struct_name::table + .filter(event_struct_name::tx_sequence_number.between(min_tx, max_tx)), + ) + .execute(conn) + .await?; + + diesel::delete( + event_struct_package::table + .filter(event_struct_package::tx_sequence_number.between(min_tx, max_tx)), + ) + .execute(conn) + .await?; + + Ok(()) + } + .scope_boxed() + }) + .await } - fn prune_tx_indices_table(&self, min_tx: u64, max_tx: u64) -> Result<(), IndexerError> { + async fn prune_tx_indices_table(&self, min_tx: u64, max_tx: u64) -> Result<(), IndexerError> { + use diesel_async::RunQueryDsl; + let (min_tx, max_tx) = (min_tx as i64, max_tx as i64); - transactional_blocking_with_retry!( - &self.blocking_cp, - |conn| { - prune_tx_or_event_indice_table!( - tx_senders, - conn, - min_tx, - max_tx, - "Failed to prune tx_senders table" - ); - prune_tx_or_event_indice_table!( - tx_recipients, - conn, - min_tx, - max_tx, - "Failed to prune tx_recipients table" - ); - prune_tx_or_event_indice_table![ - tx_input_objects, - conn, - min_tx, - max_tx, - "Failed to prune tx_input_objects table" - ]; - prune_tx_or_event_indice_table![ - tx_changed_objects, - conn, - min_tx, - max_tx, - "Failed to prune tx_changed_objects table" - ]; - prune_tx_or_event_indice_table![ - tx_calls_pkg, - conn, - min_tx, - max_tx, - "Failed to prune tx_calls_pkg table" - ]; - prune_tx_or_event_indice_table![ - tx_calls_mod, - conn, - min_tx, - max_tx, - "Failed to prune tx_calls_mod table" - ]; - prune_tx_or_event_indice_table![ - tx_calls_fun, - conn, - min_tx, - max_tx, - "Failed to prune tx_calls_fun table" - ]; - prune_tx_or_event_indice_table![ - tx_digests, - conn, - min_tx, - max_tx, - "Failed 
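Each pruning statement above presumably compiles to the same SQL the deleted prune_tx_or_event_indice_table! macro emitted; between() is inclusive on both ends, matching the [min_tx, max_tx] range returned by get_transaction_range_for_checkpoint. For one table:

// Equivalent SQL:
//   DELETE FROM event_senders WHERE tx_sequence_number BETWEEN $1 AND $2
diesel::delete(
    event_senders::table
        .filter(event_senders::tx_sequence_number.between(min_tx, max_tx)),
)
.execute(conn)
.await?;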
to prune tx_digests table" - ]; - Ok::<(), IndexerError>(()) - }, - PG_DB_COMMIT_SLEEP_DURATION - ) + transaction_with_retry(&self.pool, PG_DB_COMMIT_SLEEP_DURATION, |conn| { + async { + diesel::delete( + tx_affected_addresses::table + .filter(tx_affected_addresses::tx_sequence_number.between(min_tx, max_tx)), + ) + .execute(conn) + .await?; + + diesel::delete( + tx_senders::table + .filter(tx_senders::tx_sequence_number.between(min_tx, max_tx)), + ) + .execute(conn) + .await?; + + diesel::delete( + tx_recipients::table + .filter(tx_recipients::tx_sequence_number.between(min_tx, max_tx)), + ) + .execute(conn) + .await?; + + diesel::delete( + tx_input_objects::table + .filter(tx_input_objects::tx_sequence_number.between(min_tx, max_tx)), + ) + .execute(conn) + .await?; + + diesel::delete( + tx_changed_objects::table + .filter(tx_changed_objects::tx_sequence_number.between(min_tx, max_tx)), + ) + .execute(conn) + .await?; + + diesel::delete( + tx_calls_pkg::table + .filter(tx_calls_pkg::tx_sequence_number.between(min_tx, max_tx)), + ) + .execute(conn) + .await?; + + diesel::delete( + tx_calls_mod::table + .filter(tx_calls_mod::tx_sequence_number.between(min_tx, max_tx)), + ) + .execute(conn) + .await?; + + diesel::delete( + tx_calls_fun::table + .filter(tx_calls_fun::tx_sequence_number.between(min_tx, max_tx)), + ) + .execute(conn) + .await?; + + diesel::delete( + tx_digests::table + .filter(tx_digests::tx_sequence_number.between(min_tx, max_tx)), + ) + .execute(conn) + .await?; + + Ok(()) + } + .scope_boxed() + }) + .await } - fn prune_cp_tx_table(&self, cp: u64) -> Result<(), IndexerError> { - transactional_blocking_with_retry!( - &self.blocking_cp, - |conn| { + async fn prune_cp_tx_table(&self, cp: u64) -> Result<(), IndexerError> { + use diesel_async::RunQueryDsl; + + transaction_with_retry(&self.pool, PG_DB_COMMIT_SLEEP_DURATION, |conn| { + async { diesel::delete( pruner_cp_watermark::table .filter(pruner_cp_watermark::checkpoint_sequence_number.eq(cp as i64)), ) .execute(conn) + .await .map_err(IndexerError::from) .context("Failed to prune pruner_cp_watermark table")?; - Ok::<(), IndexerError>(()) - }, - PG_DB_COMMIT_SLEEP_DURATION - ) + Ok(()) + } + .scope_boxed() + }) + .await } - fn get_network_total_transactions_by_end_of_epoch( + async fn get_network_total_transactions_by_end_of_epoch( &self, epoch: u64, ) -> Result { - read_only_blocking!(&self.blocking_cp, |conn| { - checkpoints::table - .filter(checkpoints::epoch.eq(epoch as i64)) - .select(checkpoints::network_total_transactions) - .order_by(checkpoints::sequence_number.desc()) - .first::(conn) - }) - .context("Failed to get network total transactions in epoch") - .map(|v| v as u64) - } - - async fn execute_in_blocking_worker(&self, f: F) -> Result - where - F: FnOnce(Self) -> Result + Send + 'static, - R: Send + 'static, - { - let this = self.clone(); - let current_span = tracing::Span::current(); - tokio::task::spawn_blocking(move || { - let _guard = current_span.enter(); - f(this) - }) - .await - .map_err(Into::into) - .and_then(std::convert::identity) - } + use diesel_async::RunQueryDsl; - fn spawn_blocking_task( - &self, - f: F, - ) -> tokio::task::JoinHandle> - where - F: FnOnce(Self) -> Result + Send + 'static, - R: Send + 'static, - { - let this = self.clone(); - let current_span = tracing::Span::current(); - let guard = self.metrics.tokio_blocking_task_wait_latency.start_timer(); - tokio::task::spawn_blocking(move || { - let _guard = current_span.enter(); - let _elapsed = guard.stop_and_record(); - f(this) - }) - } + 
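For contrast with the methods above, this is approximately the bridge that just got deleted, reconstructed from the removed lines: every trait call had to clone the store, hop to tokio's blocking pool, and re-enter the caller's tracing span before touching the sync connection pool. With an async store the indirection disappears wholesale:

// The removed pattern: run a sync closure against a cloned store on the
// blocking thread pool, preserving the caller's tracing span.
async fn execute_in_blocking_worker<F, R>(
    this: PgIndexerStore,
    f: F,
) -> Result<R, IndexerError>
where
    F: FnOnce(PgIndexerStore) -> Result<R, IndexerError> + Send + 'static,
    R: Send + 'static,
{
    let current_span = tracing::Span::current();
    tokio::task::spawn_blocking(move || {
        let _guard = current_span.enter();
        f(this)
    })
    .await
    .map_err(Into::into)              // JoinError -> IndexerError
    .and_then(std::convert::identity) // flatten Result<Result<R, _>, _>
}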
let mut connection = self.pool.get().await?; - fn spawn_task(&self, f: F) -> tokio::task::JoinHandle> - where - F: FnOnce(Self) -> Fut + Send + 'static, - Fut: std::future::Future> + Send + 'static, - R: Send + 'static, - { - let this = self.clone(); - tokio::task::spawn(async move { f(this).await }) + checkpoints::table + .filter(checkpoints::epoch.eq(epoch as i64)) + .select(checkpoints::network_total_transactions) + .order_by(checkpoints::sequence_number.desc()) + .first::(&mut connection) + .await + .map_err(Into::into) + .context("Failed to get network total transactions in epoch") + .map(|v| v as u64) } } #[async_trait] impl IndexerStore for PgIndexerStore { async fn get_latest_checkpoint_sequence_number(&self) -> Result, IndexerError> { - self.execute_in_blocking_worker(|this| this.get_latest_checkpoint_sequence_number()) - .await + self.get_latest_checkpoint_sequence_number().await } async fn get_available_epoch_range(&self) -> Result<(u64, u64), IndexerError> { - self.execute_in_blocking_worker(|this| this.get_prunable_epoch_range()) - .await + self.get_prunable_epoch_range().await } async fn get_available_checkpoint_range(&self) -> Result<(u64, u64), IndexerError> { - self.execute_in_blocking_worker(|this| this.get_available_checkpoint_range()) - .await + self.get_available_checkpoint_range().await } async fn get_chain_identifier(&self) -> Result>, IndexerError> { - self.execute_in_blocking_worker(|this| this.get_chain_identifier()) - .await + self.get_chain_identifier().await } async fn get_latest_object_snapshot_checkpoint_sequence_number( &self, ) -> Result, IndexerError> { - self.execute_in_blocking_worker(|this| { - this.get_latest_object_snapshot_checkpoint_sequence_number() - }) - .await + self.get_latest_object_snapshot_checkpoint_sequence_number() + .await } async fn persist_objects( @@ -1692,21 +1573,12 @@ impl IndexerStore for PgIndexerStore { chunk!(object_deletions, self.config.parallel_objects_chunk_size); let mutation_futures = object_mutation_chunks .into_iter() - .map(|c| self.spawn_blocking_task(move |this| this.persist_object_mutation_chunk(c))) + .map(|c| self.persist_object_mutation_chunk(c)) .collect::>(); futures::future::join_all(mutation_futures) .await .into_iter() .collect::, _>>() - .map_err(|e| { - tracing::error!( - "Failed to join persist_object_mutation_chunk futures: {}", - e - ); - IndexerError::from(e) - })? - .into_iter() - .collect::, _>>() .map_err(|e| { IndexerError::PostgresWriteError(format!( "Failed to persist all object mutation chunks: {:?}", @@ -1715,21 +1587,12 @@ impl IndexerStore for PgIndexerStore { })?; let deletion_futures = object_deletion_chunks .into_iter() - .map(|c| self.spawn_blocking_task(move |this| this.persist_object_deletion_chunk(c))) + .map(|c| self.persist_object_deletion_chunk(c)) .collect::>(); futures::future::join_all(deletion_futures) .await .into_iter() .collect::, _>>() - .map_err(|e| { - tracing::error!( - "Failed to join persist_object_deletion_chunk futures: {}", - e - ); - IndexerError::from(e) - })? 
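The trait implementations shrink for the same reason everywhere: the chunk futures are now ordinary async method calls rather than JoinHandles, so a single collect::<Result<Vec<_>, _>>() replaces the old two-stage unwrap (first the JoinError layer, then the inner Result). The resulting fan-out, in the small:

// Chunk futures borrow &self and run concurrently under join_all; one
// collect() surfaces the first write error.
let futures = object_mutation_chunks
    .into_iter()
    .map(|c| self.persist_object_mutation_chunk(c))
    .collect::<Vec<_>>();

futures::future::join_all(futures)
    .await
    .into_iter()
    .collect::<Result<Vec<_>, _>>()?;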
- .into_iter() - .collect::, _>>() .map_err(|e| { IndexerError::PostgresWriteError(format!( "Failed to persist all object deletion chunks: {:?}", @@ -1764,22 +1627,13 @@ impl IndexerStore for PgIndexerStore { let chunks = chunk!(objects, self.config.parallel_objects_chunk_size); let futures = chunks .into_iter() - .map(|c| self.spawn_blocking_task(move |this| this.backfill_objects_snapshot_chunk(c))) + .map(|c| self.backfill_objects_snapshot_chunk(c)) .collect::>(); futures::future::join_all(futures) .await .into_iter() .collect::, _>>() - .map_err(|e| { - tracing::error!( - "Failed to join backfill_objects_snapshot_chunk futures: {}", - e - ); - IndexerError::from(e) - })? - .into_iter() - .collect::, _>>() .map_err(|e| { IndexerError::PostgresWriteError(format!( "Failed to persist all objects snapshot chunks: {:?}", @@ -1816,22 +1670,13 @@ impl IndexerStore for PgIndexerStore { let chunks = chunk!(objects, self.config.parallel_objects_chunk_size); let futures = chunks .into_iter() - .map(|c| self.spawn_blocking_task(move |this| this.persist_objects_history_chunk(c))) + .map(|c| self.persist_objects_history_chunk(c)) .collect::>(); futures::future::join_all(futures) .await .into_iter() .collect::, _>>() - .map_err(|e| { - tracing::error!( - "Failed to join persist_objects_history_chunk futures: {}", - e - ); - IndexerError::from(e) - })? - .into_iter() - .collect::, _>>() .map_err(|e| { IndexerError::PostgresWriteError(format!( "Failed to persist all objects history chunks: {:?}", @@ -1882,9 +1727,7 @@ impl IndexerStore for PgIndexerStore { let chunks = chunk!(objects, self.config.parallel_objects_chunk_size); let futures = chunks .into_iter() - .map(|c| { - self.spawn_blocking_task(move |this| this.persist_full_objects_history_chunk(c)) - }) + .map(|c| self.persist_full_objects_history_chunk(c)) .collect::>(); futures::future::join_all(futures) @@ -1892,22 +1735,41 @@ impl IndexerStore for PgIndexerStore { .into_iter() .collect::, _>>() .map_err(|e| { - tracing::error!( - "Failed to join persist_full_objects_history_chunk futures: {}", + IndexerError::PostgresWriteError(format!( + "Failed to persist all full objects history chunks: {:?}", e - ); - IndexerError::from(e) - })? 
+ )) + })?; + let elapsed = guard.stop_and_record(); + info!(elapsed, "Persisted {} full objects history", len); + Ok(()) + } + + async fn persist_object_versions( + &self, + object_versions: Vec, + ) -> Result<(), IndexerError> { + if object_versions.is_empty() { + return Ok(()); + } + let object_versions_count = object_versions.len(); + let chunks = chunk!(object_versions, self.config.parallel_objects_chunk_size); + let futures = chunks + .into_iter() + .map(|c| self.persist_object_version_chunk(c)) + .collect::>(); + + futures::future::join_all(futures) + .await .into_iter() .collect::, _>>() .map_err(|e| { IndexerError::PostgresWriteError(format!( - "Failed to persist all full objects history chunks: {:?}", + "Failed to persist all object version chunks: {:?}", e )) })?; - let elapsed = guard.stop_and_record(); - info!(elapsed, "Persisted {} full objects history", len); + info!("Persisted {} object versions", object_versions_count); Ok(()) } @@ -1915,8 +1777,7 @@ impl IndexerStore for PgIndexerStore { &self, checkpoints: Vec, ) -> Result<(), IndexerError> { - self.execute_in_blocking_worker(move |this| this.persist_checkpoints(checkpoints)) - .await + self.persist_checkpoints(checkpoints).await } async fn persist_transactions( @@ -1932,19 +1793,13 @@ impl IndexerStore for PgIndexerStore { let chunks = chunk!(transactions, self.config.parallel_chunk_size); let futures = chunks .into_iter() - .map(|c| self.spawn_blocking_task(move |this| this.persist_transactions_chunk(c))) + .map(|c| self.persist_transactions_chunk(c)) .collect::>(); futures::future::join_all(futures) .await .into_iter() .collect::, _>>() - .map_err(|e| { - tracing::error!("Failed to join persist_transactions_chunk futures: {}", e); - IndexerError::from(e) - })? - .into_iter() - .collect::, _>>() .map_err(|e| { IndexerError::PostgresWriteError(format!( "Failed to persist all transactions chunks: {:?}", @@ -1968,19 +1823,13 @@ impl IndexerStore for PgIndexerStore { let chunks = chunk!(events, self.config.parallel_chunk_size); let futures = chunks .into_iter() - .map(|c| self.spawn_blocking_task(move |this| this.persist_events_chunk(c))) + .map(|c| self.persist_events_chunk(c)) .collect::>(); futures::future::join_all(futures) .await .into_iter() .collect::, _>>() - .map_err(|e| { - tracing::error!("Failed to join persist_events_chunk futures: {}", e); - IndexerError::from(e) - })? - .into_iter() - .collect::, _>>() .map_err(|e| { IndexerError::PostgresWriteError(format!( "Failed to persist all events chunks: {:?}", @@ -2000,16 +1849,14 @@ impl IndexerStore for PgIndexerStore { return Ok(()); } - self.spawn_blocking_task(move |this| this.persist_display_updates(display_updates)) - .await? 
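The new persist_object_versions follows the same template as the other bulk writers: split with the crate's chunk! macro, fan out, join. The macro's expansion is not shown in this diff; as an assumption based purely on how it is called, it behaves like an owned chunker along these lines:

// Assumed behavior of chunk!(items, size) - NOT the crate's actual
// expansion: split an owned Vec into owned Vecs of at most `size` items.
fn chunk<T>(mut items: Vec<T>, size: usize) -> Vec<Vec<T>> {
    let mut out = Vec::new();
    while items.len() > size {
        let tail = items.split_off(size); // `items` keeps the first `size`
        out.push(std::mem::replace(&mut items, tail));
    }
    if !items.is_empty() {
        out.push(items);
    }
    out
}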
+ self.persist_display_updates(display_updates).await } async fn persist_packages(&self, packages: Vec) -> Result<(), IndexerError> { if packages.is_empty() { return Ok(()); } - self.execute_in_blocking_worker(move |this| this.persist_packages(packages)) - .await + self.persist_packages(packages).await } async fn persist_event_indices(&self, indices: Vec) -> Result<(), IndexerError> { @@ -2025,22 +1872,12 @@ impl IndexerStore for PgIndexerStore { let futures = chunks .into_iter() - .map(|chunk| { - self.spawn_task(move |this: Self| async move { - this.persist_event_indices_chunk(chunk).await - }) - }) + .map(|chunk| self.persist_event_indices_chunk(chunk)) .collect::>(); futures::future::join_all(futures) .await .into_iter() .collect::, _>>() - .map_err(|e| { - tracing::error!("Failed to join persist_event_indices_chunk futures: {}", e); - IndexerError::from(e) - })? - .into_iter() - .collect::, _>>() .map_err(|e| { IndexerError::PostgresWriteError(format!( "Failed to persist all event_indices chunks: {:?}", @@ -2065,22 +1902,12 @@ impl IndexerStore for PgIndexerStore { let futures = chunks .into_iter() - .map(|chunk| { - self.spawn_task(move |this: Self| async move { - this.persist_tx_indices_chunk(chunk).await - }) - }) + .map(|chunk| self.persist_tx_indices_chunk(chunk)) .collect::>(); futures::future::join_all(futures) .await .into_iter() .collect::, _>>() - .map_err(|e| { - tracing::error!("Failed to join persist_tx_indices_chunk futures: {}", e); - IndexerError::from(e) - })? - .into_iter() - .collect::, _>>() .map_err(|e| { IndexerError::PostgresWriteError(format!( "Failed to persist all tx_indices chunks: {:?}", @@ -2093,17 +1920,15 @@ impl IndexerStore for PgIndexerStore { } async fn persist_epoch(&self, epoch: EpochToCommit) -> Result<(), IndexerError> { - self.execute_in_blocking_worker(move |this| this.persist_epoch(epoch)) - .await + self.persist_epoch(epoch).await } async fn advance_epoch(&self, epoch: EpochToCommit) -> Result<(), IndexerError> { - self.execute_in_blocking_worker(move |this| this.advance_epoch(epoch)) - .await + self.advance_epoch(epoch).await } async fn prune_epoch(&self, epoch: u64) -> Result<(), IndexerError> { - let (mut min_cp, max_cp) = match self.get_checkpoint_range_for_epoch(epoch)? { + let (mut min_cp, max_cp) = match self.get_checkpoint_range_for_epoch(epoch).await? { (min_cp, Some(max_cp)) => Ok((min_cp, max_cp)), _ => Err(IndexerError::PostgresReadError(format!( "Failed to get checkpoint range for epoch {}", @@ -2115,7 +1940,7 @@ impl IndexerStore for PgIndexerStore { // partially pruned already. min_prunable_cp is the min cp to be pruned. // By std::cmp::max, we will resume the pruning process from the next checkpoint, instead of // the first cp of the current epoch. 
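persist_event_indices and persist_tx_indices now put every chunk future in flight at once via join_all. If pool contention ever became a problem, a bounded variant (explicitly not what this PR does) would cap the number of in-flight chunk transactions:

use futures::stream::{self, StreamExt, TryStreamExt};

// Alternative sketch: at most 8 chunk transactions in flight at a time.
stream::iter(chunks)
    .map(|chunk| self.persist_tx_indices_chunk(chunk))
    .buffer_unordered(8)
    .try_collect::<Vec<_>>()
    .await?;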
- let min_prunable_cp = self.get_min_prunable_checkpoint()?; + let min_prunable_cp = self.get_min_prunable_checkpoint().await?; min_cp = std::cmp::max(min_cp, min_prunable_cp); for cp in min_cp..=max_cp { // NOTE: the order of pruning tables is crucial: @@ -2129,72 +1954,45 @@ impl IndexerStore for PgIndexerStore { "Pruning checkpoint {} of epoch {} (min_prunable_cp: {})", cp, epoch, min_prunable_cp ); - self.execute_in_blocking_worker(move |this| this.prune_checkpoints_table(cp)) - .await - .unwrap_or_else(|e| { - tracing::error!("Failed to prune checkpoint {}: {}", cp, e); - }); + self.prune_checkpoints_table(cp).await?; - let (min_tx, max_tx) = self.get_transaction_range_for_checkpoint(cp)?; - self.execute_in_blocking_worker(move |this| { - this.prune_tx_indices_table(min_tx, max_tx) - }) - .await - .unwrap_or_else(|e| { - tracing::error!("Failed to prune transactions for cp {}: {}", cp, e); - }); + let (min_tx, max_tx) = self.get_transaction_range_for_checkpoint(cp).await?; + self.prune_tx_indices_table(min_tx, max_tx).await?; info!( "Pruned transactions for checkpoint {} from tx {} to tx {}", cp, min_tx, max_tx ); - self.execute_in_blocking_worker(move |this| { - this.prune_event_indices_table(min_tx, max_tx) - }) - .await - .unwrap_or_else(|e| { - tracing::error!( - "Failed to prune events of transactions for cp {}: {}", - cp, - e - ); - }); + self.prune_event_indices_table(min_tx, max_tx).await?; info!( "Pruned events of transactions for checkpoint {} from tx {} to tx {}", cp, min_tx, max_tx ); self.metrics.last_pruned_transaction.set(max_tx as i64); - self.execute_in_blocking_worker(move |this| this.prune_cp_tx_table(cp)) - .await - .unwrap_or_else(|e| { - tracing::error!( - "Failed to prune pruner_cp_watermark table for cp {}: {}", - cp, - e - ); - }); + self.prune_cp_tx_table(cp).await?; info!("Pruned checkpoint {} of epoch {}", cp, epoch); self.metrics.last_pruned_checkpoint.set(cp as i64); } // NOTE: prune epochs table last, otherwise get_checkpoint_range_for_epoch would fail. - self.execute_in_blocking_worker(move |this| this.prune_epochs_table(epoch)) - .await - .unwrap_or_else(|e| { - tracing::error!("Failed to prune epoch table for epoch {}: {}", epoch, e); - }); + self.prune_epochs_table(epoch).await?; Ok(()) } async fn upload_display(&self, epoch_number: u64) -> Result<(), IndexerError> { + use diesel_async::RunQueryDsl; + + let mut connection = self.pool.get().await?; + let mut buffer = Cursor::new(Vec::new()); { let mut writer = Writer::from_writer(&mut buffer); - let displays = read_only_blocking!(&self.blocking_cp, |conn| { - display::table.load::(conn) - }) - .context("Failed to get display from database")?; + let displays = display::table + .load::(&mut connection) + .await + .map_err(Into::into) + .context("Failed to get display from database")?; info!("Read {} displays", displays.len()); writer @@ -2249,18 +2047,18 @@ impl IndexerStore for PgIndexerStore { &self, epoch: u64, ) -> Result { - self.execute_in_blocking_worker(move |this| { - this.get_network_total_transactions_by_end_of_epoch(epoch) - }) - .await + self.get_network_total_transactions_by_end_of_epoch(epoch) + .await } /// Persist protocol configs and feature flags until the protocol version for the latest epoch /// we have stored in the db, inclusive. 
- fn persist_protocol_configs_and_feature_flags( + async fn persist_protocol_configs_and_feature_flags( &self, chain_id: Vec, ) -> Result<(), IndexerError> { + use diesel_async::RunQueryDsl; + let chain_id = ChainIdentifier::from( CheckpointDigest::try_from(chain_id).expect("Unable to convert chain id"), ); @@ -2268,7 +2066,7 @@ impl IndexerStore for PgIndexerStore { let mut all_configs = vec![]; let mut all_flags = vec![]; - let (start_version, end_version) = self.get_protocol_version_index_range()?; + let (start_version, end_version) = self.get_protocol_version_index_range().await?; info!( "Persisting protocol configs with start_version: {}, end_version: {}", start_version, end_version @@ -2310,15 +2108,30 @@ impl IndexerStore for PgIndexerStore { // Now insert all of them into the db. // TODO: right now the size of these updates is manageable but later we may consider batching. - transactional_blocking_with_retry!( - &self.blocking_cp, - |conn| { - insert_or_ignore_into!(protocol_configs::table, all_configs.clone(), conn); - insert_or_ignore_into!(feature_flags::table, all_flags.clone(), conn); + transaction_with_retry(&self.pool, PG_DB_COMMIT_SLEEP_DURATION, |conn| { + async { + for config_chunk in all_configs.chunks(PG_COMMIT_CHUNK_SIZE_INTRA_DB_TX) { + diesel::insert_into(protocol_configs::table) + .values(config_chunk) + .on_conflict_do_nothing() + .execute(conn) + .await + .map_err(IndexerError::from) + .context("Failed to write to protocol_configs table")?; + } + + diesel::insert_into(feature_flags::table) + .values(all_flags.clone()) + .on_conflict_do_nothing() + .execute(conn) + .await + .map_err(IndexerError::from) + .context("Failed to write to feature_flags table")?; Ok::<(), IndexerError>(()) - }, - PG_DB_COMMIT_SLEEP_DURATION - )?; + } + .scope_boxed() + }) + .await?; Ok(()) } } diff --git a/crates/sui-indexer/src/store/pg_partition_manager.rs b/crates/sui-indexer/src/store/pg_partition_manager.rs index 011595a7cdd90..2dbc031cc5f07 100644 --- a/crates/sui-indexer/src/store/pg_partition_manager.rs +++ b/crates/sui-indexer/src/store/pg_partition_manager.rs @@ -2,16 +2,17 @@ // SPDX-License-Identifier: Apache-2.0 use diesel::sql_types::{BigInt, VarChar}; -use diesel::{QueryableByName, RunQueryDsl}; +use diesel::QueryableByName; +use diesel_async::scoped_futures::ScopedFutureExt; use std::collections::{BTreeMap, HashMap}; use std::time::Duration; use tracing::{error, info}; -use crate::db::ConnectionPool; +use crate::database::ConnectionPool; use crate::errors::IndexerError; use crate::handlers::EpochToCommit; use crate::models::epoch::StoredEpochInfo; -use crate::store::diesel_macro::*; +use crate::store::transaction_with_retry; const GET_PARTITION_SQL: &str = r" SELECT parent.relname AS table_name, @@ -26,18 +27,11 @@ WHERE parent.relkind = 'p' GROUP BY table_name; "; +#[derive(Clone)] pub struct PgPartitionManager { - cp: ConnectionPool, - partition_strategies: HashMap<&'static str, PgPartitionStrategy>, -} + pool: ConnectionPool, -impl Clone for PgPartitionManager { - fn clone(&self) -> PgPartitionManager { - Self { - cp: self.cp.clone(), - partition_strategies: self.partition_strategies.clone(), - } - } + partition_strategies: HashMap<&'static str, PgPartitionStrategy>, } #[derive(Clone, Copy)] @@ -92,25 +86,19 @@ impl EpochPartitionData { } impl PgPartitionManager { - pub fn new(cp: ConnectionPool) -> Result { + pub fn new(pool: ConnectionPool) -> Result { let mut partition_strategies = HashMap::new(); partition_strategies.insert("events", 
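The partition manager keeps its raw-SQL discovery query; only the execution path changes. For reference, the load side pairs sql_query with a QueryableByName struct whose fields carry explicit sql_type annotations, since raw SQL bypasses the typed schema DSL. GET_PARTITION_SQL is the crate's constant shown above:

use diesel::sql_types::{BigInt, VarChar};
use diesel::QueryableByName;

#[derive(QueryableByName, Debug)]
struct PartitionedTable {
    #[diesel(sql_type = VarChar)]
    table_name: String,
    #[diesel(sql_type = BigInt)]
    first_partition: i64,
    #[diesel(sql_type = BigInt)]
    last_partition: i64,
}

// Fully-qualified call form, as in the diff, to disambiguate from the sync
// diesel::RunQueryDsl trait of the same name.
let tables: Vec<PartitionedTable> =
    diesel_async::RunQueryDsl::load(diesel::sql_query(GET_PARTITION_SQL), &mut connection)
        .await?;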
PgPartitionStrategy::TxSequenceNumber); partition_strategies.insert("transactions", PgPartitionStrategy::TxSequenceNumber); partition_strategies.insert("objects_version", PgPartitionStrategy::ObjectId); let manager = Self { - cp, + pool, partition_strategies, }; - let tables = manager.get_table_partitions()?; - info!( - "Found {} tables with partitions : [{:?}]", - tables.len(), - tables - ); Ok(manager) } - pub fn get_table_partitions(&self) -> Result, IndexerError> { + pub async fn get_table_partitions(&self) -> Result, IndexerError> { #[derive(QueryableByName, Debug, Clone)] struct PartitionedTable { #[diesel(sql_type = VarChar)] @@ -121,19 +109,19 @@ impl PgPartitionManager { last_partition: i64, } + let mut connection = self.pool.get().await?; + Ok( - read_only_blocking!(&self.cp, |conn| diesel::RunQueryDsl::load( - diesel::sql_query(GET_PARTITION_SQL), - conn - ))? - .into_iter() - .map(|table: PartitionedTable| { - ( - table.table_name, - (table.first_partition as u64, table.last_partition as u64), - ) - }) - .collect(), + diesel_async::RunQueryDsl::load(diesel::sql_query(GET_PARTITION_SQL), &mut connection) + .await? + .into_iter() + .map(|table: PartitionedTable| { + ( + table.table_name, + (table.first_partition as u64, table.last_partition as u64), + ) + }) + .collect(), ) } @@ -163,7 +151,7 @@ impl PgPartitionManager { } } - pub fn advance_epoch( + pub async fn advance_epoch( &self, table: String, last_partition: u64, @@ -177,10 +165,9 @@ impl PgPartitionManager { return Ok(()); } if last_partition == data.last_epoch { - transactional_blocking_with_retry!( - &self.cp, - |conn| { - RunQueryDsl::execute( + transaction_with_retry(&self.pool, Duration::from_secs(10), |conn| { + async { + diesel_async::RunQueryDsl::execute( diesel::sql_query("CALL advance_partition($1, $2, $3, $4, $5)") .bind::(table.clone()) .bind::(data.last_epoch as i64) @@ -189,9 +176,13 @@ impl PgPartitionManager { .bind::(partition_range.1 as i64), conn, ) - }, - Duration::from_secs(10) - )?; + .await?; + + Ok(()) + } + .scope_boxed() + }) + .await?; info!( "Advanced epoch partition for table {} from {} to {}, prev partition upper bound {}", @@ -213,19 +204,25 @@ impl PgPartitionManager { Ok(()) } - pub fn drop_table_partition(&self, table: String, partition: u64) -> Result<(), IndexerError> { - transactional_blocking_with_retry!( - &self.cp, - |conn| { - RunQueryDsl::execute( + pub async fn drop_table_partition( + &self, + table: String, + partition: u64, + ) -> Result<(), IndexerError> { + transaction_with_retry(&self.pool, Duration::from_secs(10), |conn| { + async { + diesel_async::RunQueryDsl::execute( diesel::sql_query("CALL drop_partition($1, $2)") .bind::(table.clone()) .bind::(partition as i64), conn, ) - }, - Duration::from_secs(10) - )?; + .await?; + Ok(()) + } + .scope_boxed() + }) + .await?; Ok(()) } } diff --git a/crates/sui-indexer/src/test_utils.rs b/crates/sui-indexer/src/test_utils.rs index d8c629f081648..fb163b5398698 100644 --- a/crates/sui-indexer/src/test_utils.rs +++ b/crates/sui-indexer/src/test_utils.rs @@ -15,7 +15,7 @@ use crate::config::RestoreConfig; use crate::config::SnapshotLagConfig; use crate::database::Connection; use crate::database::ConnectionPool; -use crate::db::{new_connection_pool, ConnectionPoolConfig}; +use crate::db::ConnectionPoolConfig; use crate::errors::IndexerError; use crate::indexer::Indexer; use crate::store::PgIndexerStore; @@ -99,17 +99,11 @@ pub async fn start_test_indexer_impl( let indexer_metrics = IndexerMetrics::new(®istry); - let blocking_pool = 
new_connection_pool(&db_url, &pool_config).unwrap(); let pool = ConnectionPool::new(db_url.parse().unwrap(), pool_config) .await .unwrap(); let restore_config = RestoreConfig::default(); - let store = PgIndexerStore::new( - blocking_pool, - pool.clone(), - restore_config, - indexer_metrics.clone(), - ); + let store = PgIndexerStore::new(pool.clone(), restore_config, indexer_metrics.clone()); let handle = match reader_writer_config { ReaderWriterConfig::Reader { diff --git a/crates/sui-indexer/tests/ingestion_tests.rs b/crates/sui-indexer/tests/ingestion_tests.rs index 1295194b12dd6..1d8004fbce639 100644 --- a/crates/sui-indexer/tests/ingestion_tests.rs +++ b/crates/sui-indexer/tests/ingestion_tests.rs @@ -1,184 +1,169 @@ // Copyright (c) Mysten Labs, Inc. // SPDX-License-Identifier: Apache-2.0 -#[cfg(feature = "pg_integration")] -mod ingestion_tests { - use diesel::ExpressionMethods; - use diesel::{QueryDsl, RunQueryDsl}; - use simulacrum::Simulacrum; - use std::net::SocketAddr; - use std::path::PathBuf; - use std::sync::Arc; - use std::time::Duration; - use sui_indexer::db::get_pool_connection; - use sui_indexer::errors::Context; - use sui_indexer::errors::IndexerError; - use sui_indexer::models::{objects::StoredObject, transactions::StoredTransaction}; - use sui_indexer::schema::{objects, transactions}; - use sui_indexer::store::{indexer_store::IndexerStore, PgIndexerStore}; - use sui_indexer::tempdb::get_available_port; - use sui_indexer::tempdb::TempDb; - use sui_indexer::test_utils::{start_test_indexer, ReaderWriterConfig}; - use sui_types::base_types::SuiAddress; - use sui_types::effects::TransactionEffectsAPI; - use sui_types::gas_coin::GasCoin; - use sui_types::SUI_FRAMEWORK_PACKAGE_ID; - use tempfile::tempdir; - use tokio::task::JoinHandle; - - macro_rules! read_only_blocking { - ($pool:expr, $query:expr) => {{ - let mut pg_pool_conn = get_pool_connection($pool)?; - pg_pool_conn - .build_transaction() - .read_only() - .run($query) - .map_err(|e| IndexerError::PostgresReadError(e.to_string())) - }}; - } - - /// Set up a test indexer fetching from a REST endpoint served by the given Simulacrum. - async fn set_up( - sim: Arc, - data_ingestion_path: PathBuf, - ) -> ( - JoinHandle<()>, - PgIndexerStore, - JoinHandle>, - TempDb, - ) { - let database = TempDb::new().unwrap(); - let server_url: SocketAddr = format!("127.0.0.1:{}", get_available_port()) - .parse() - .unwrap(); - - let server_handle = tokio::spawn(async move { - sui_rest_api::RestService::new_without_version(sim) - .start_service(server_url) - .await; - }); - // Starts indexer - let (pg_store, pg_handle, _) = start_test_indexer( - database.database().url().as_str().to_owned(), - format!("http://{}", server_url), - ReaderWriterConfig::writer_mode(None, None), - data_ingestion_path, - ) - .await; - (server_handle, pg_store, pg_handle, database) - } - - /// Wait for the indexer to catch up to the given checkpoint sequence number. 
- async fn wait_for_checkpoint( - pg_store: &PgIndexerStore, - checkpoint_sequence_number: u64, - ) -> Result<(), IndexerError> { - tokio::time::timeout(Duration::from_secs(10), async { - while { - let cp_opt = pg_store - .get_latest_checkpoint_sequence_number() - .await - .unwrap(); - cp_opt.is_none() || (cp_opt.unwrap() < checkpoint_sequence_number) - } { - tokio::time::sleep(Duration::from_millis(100)).await; - } - }) +use diesel::ExpressionMethods; +use diesel::QueryDsl; +use diesel_async::RunQueryDsl; +use simulacrum::Simulacrum; +use std::net::SocketAddr; +use std::path::PathBuf; +use std::sync::Arc; +use std::time::Duration; +use sui_indexer::errors::IndexerError; +use sui_indexer::models::{objects::StoredObject, transactions::StoredTransaction}; +use sui_indexer::schema::{objects, transactions}; +use sui_indexer::store::{indexer_store::IndexerStore, PgIndexerStore}; +use sui_indexer::tempdb::get_available_port; +use sui_indexer::tempdb::TempDb; +use sui_indexer::test_utils::{start_test_indexer, ReaderWriterConfig}; +use sui_types::base_types::SuiAddress; +use sui_types::effects::TransactionEffectsAPI; +use sui_types::gas_coin::GasCoin; +use sui_types::SUI_FRAMEWORK_PACKAGE_ID; +use tempfile::tempdir; +use tokio::task::JoinHandle; + +/// Set up a test indexer fetching from a REST endpoint served by the given Simulacrum. +async fn set_up( + sim: Arc, + data_ingestion_path: PathBuf, +) -> ( + JoinHandle<()>, + PgIndexerStore, + JoinHandle>, + TempDb, +) { + let database = TempDb::new().unwrap(); + let server_url: SocketAddr = format!("127.0.0.1:{}", get_available_port()) + .parse() + .unwrap(); + + let server_handle = tokio::spawn(async move { + sui_rest_api::RestService::new_without_version(sim) + .start_service(server_url) + .await; + }); + // Starts indexer + let (pg_store, pg_handle, _) = start_test_indexer( + database.database().url().as_str().to_owned(), + format!("http://{}", server_url), + ReaderWriterConfig::writer_mode(None, None), + data_ingestion_path, + ) + .await; + (server_handle, pg_store, pg_handle, database) +} + +/// Wait for the indexer to catch up to the given checkpoint sequence number. +async fn wait_for_checkpoint( + pg_store: &PgIndexerStore, + checkpoint_sequence_number: u64, +) -> Result<(), IndexerError> { + tokio::time::timeout(Duration::from_secs(30), async { + while { + let cp_opt = pg_store + .get_latest_checkpoint_sequence_number() + .await + .unwrap(); + cp_opt.is_none() || (cp_opt.unwrap() < checkpoint_sequence_number) + } { + tokio::time::sleep(Duration::from_millis(100)).await; + } + }) + .await + .expect("Timeout waiting for indexer to catchup to checkpoint"); + Ok(()) +} + +#[tokio::test] +pub async fn test_transaction_table() -> Result<(), IndexerError> { + let tempdir = tempdir().unwrap(); + let mut sim = Simulacrum::new(); + let data_ingestion_path = tempdir.path().to_path_buf(); + sim.set_data_ingestion_path(data_ingestion_path.clone()); + + // Execute a simple transaction. + let transfer_recipient = SuiAddress::random_for_testing_only(); + let (transaction, _) = sim.transfer_txn(transfer_recipient); + let (effects, err) = sim.execute_transaction(transaction.clone()).unwrap(); + assert!(err.is_none()); + + // Create a checkpoint which should include the transaction we executed. + let checkpoint = sim.create_checkpoint(); + + let (_, pg_store, _, _database) = set_up(Arc::new(sim), data_ingestion_path).await; + + // Wait for the indexer to catch up to the checkpoint. 
+ wait_for_checkpoint(&pg_store, 1).await?; + + let digest = effects.transaction_digest(); + + // Read the transaction from the database directly. + let mut connection = pg_store.pool().dedicated_connection().await.unwrap(); + let db_txn: StoredTransaction = transactions::table + .filter(transactions::transaction_digest.eq(digest.inner().to_vec())) + .first::(&mut connection) + .await + .expect("Failed reading transaction from PostgresDB"); + + // Check that the transaction was stored correctly. + assert_eq!(db_txn.tx_sequence_number, 1); + assert_eq!(db_txn.transaction_digest, digest.inner().to_vec()); + assert_eq!( + db_txn.raw_transaction, + bcs::to_bytes(&transaction.data()).unwrap() + ); + assert_eq!(db_txn.raw_effects, bcs::to_bytes(&effects).unwrap()); + assert_eq!(db_txn.timestamp_ms, checkpoint.timestamp_ms as i64); + assert_eq!(db_txn.checkpoint_sequence_number, 1); + assert_eq!(db_txn.transaction_kind, 1); + assert_eq!(db_txn.success_command_count, 2); // split coin + transfer + Ok(()) +} + +#[tokio::test] +pub async fn test_object_type() -> Result<(), IndexerError> { + let tempdir = tempdir().unwrap(); + let mut sim = Simulacrum::new(); + let data_ingestion_path = tempdir.path().to_path_buf(); + sim.set_data_ingestion_path(data_ingestion_path.clone()); + + // Execute a simple transaction. + let transfer_recipient = SuiAddress::random_for_testing_only(); + let (transaction, _) = sim.transfer_txn(transfer_recipient); + let (_, err) = sim.execute_transaction(transaction.clone()).unwrap(); + assert!(err.is_none()); + + // Create a checkpoint which should include the transaction we executed. + let _ = sim.create_checkpoint(); + + let (_, pg_store, _, _database) = set_up(Arc::new(sim), data_ingestion_path).await; + + // Wait for the indexer to catch up to the checkpoint. + wait_for_checkpoint(&pg_store, 1).await?; + + let obj_id = transaction.gas()[0].0; + + // Read the transaction from the database directly. + let mut connection = pg_store.pool().dedicated_connection().await.unwrap(); + let db_object: StoredObject = objects::table + .filter(objects::object_id.eq(obj_id.to_vec())) + .first::(&mut connection) .await - .expect("Timeout waiting for indexer to catchup to checkpoint"); - Ok(()) - } - - #[tokio::test] - pub async fn test_transaction_table() -> Result<(), IndexerError> { - let tempdir = tempdir().unwrap(); - let mut sim = Simulacrum::new(); - let data_ingestion_path = tempdir.path().to_path_buf(); - sim.set_data_ingestion_path(data_ingestion_path.clone()); - - // Execute a simple transaction. - let transfer_recipient = SuiAddress::random_for_testing_only(); - let (transaction, _) = sim.transfer_txn(transfer_recipient); - let (effects, err) = sim.execute_transaction(transaction.clone()).unwrap(); - assert!(err.is_none()); - - // Create a checkpoint which should include the transaction we executed. - let checkpoint = sim.create_checkpoint(); - - let (_, pg_store, _, _database) = set_up(Arc::new(sim), data_ingestion_path).await; - - // Wait for the indexer to catch up to the checkpoint. - wait_for_checkpoint(&pg_store, 1).await?; - - let digest = effects.transaction_digest(); - - // Read the transaction from the database directly. - let db_txn: StoredTransaction = read_only_blocking!(&pg_store.blocking_cp(), |conn| { - transactions::table - .filter(transactions::transaction_digest.eq(digest.inner().to_vec())) - .first::(conn) - }) - .context("Failed reading transaction from PostgresDB")?; - - // Check that the transaction was stored correctly. 
- assert_eq!(db_txn.tx_sequence_number, 1); - assert_eq!(db_txn.transaction_digest, digest.inner().to_vec()); - assert_eq!( - db_txn.raw_transaction, - bcs::to_bytes(&transaction.data()).unwrap() - ); - assert_eq!(db_txn.raw_effects, bcs::to_bytes(&effects).unwrap()); - assert_eq!(db_txn.timestamp_ms, checkpoint.timestamp_ms as i64); - assert_eq!(db_txn.checkpoint_sequence_number, 1); - assert_eq!(db_txn.transaction_kind, 1); - assert_eq!(db_txn.success_command_count, 2); // split coin + transfer - Ok(()) - } - - #[tokio::test] - pub async fn test_object_type() -> Result<(), IndexerError> { - let tempdir = tempdir().unwrap(); - let mut sim = Simulacrum::new(); - let data_ingestion_path = tempdir.path().to_path_buf(); - sim.set_data_ingestion_path(data_ingestion_path.clone()); - - // Execute a simple transaction. - let transfer_recipient = SuiAddress::random_for_testing_only(); - let (transaction, _) = sim.transfer_txn(transfer_recipient); - let (_, err) = sim.execute_transaction(transaction.clone()).unwrap(); - assert!(err.is_none()); - - // Create a checkpoint which should include the transaction we executed. - let _ = sim.create_checkpoint(); - - let (_, pg_store, _, _database) = set_up(Arc::new(sim), data_ingestion_path).await; - - // Wait for the indexer to catch up to the checkpoint. - wait_for_checkpoint(&pg_store, 1).await?; - - let obj_id = transaction.gas()[0].0; - - // Read the transaction from the database directly. - let db_object: StoredObject = read_only_blocking!(&pg_store.blocking_cp(), |conn| { - objects::table - .filter(objects::object_id.eq(obj_id.to_vec())) - .first::(conn) - }) - .context("Failed reading object from PostgresDB")?; - - let obj_type_tag = GasCoin::type_(); - - // Check that the different components of the event type were stored correctly. - assert_eq!( - db_object.object_type, - Some(obj_type_tag.to_canonical_string(true)) - ); - assert_eq!( - db_object.object_type_package, - Some(SUI_FRAMEWORK_PACKAGE_ID.to_vec()) - ); - assert_eq!(db_object.object_type_module, Some("coin".to_string())); - assert_eq!(db_object.object_type_name, Some("Coin".to_string())); - Ok(()) - } + .expect("Failed reading object from PostgresDB"); + + let obj_type_tag = GasCoin::type_(); + + // Check that the different components of the event type were stored correctly. + assert_eq!( + db_object.object_type, + Some(obj_type_tag.to_canonical_string(true)) + ); + assert_eq!( + db_object.object_type_package, + Some(SUI_FRAMEWORK_PACKAGE_ID.to_vec()) + ); + assert_eq!(db_object.object_type_module, Some("coin".to_string())); + assert_eq!(db_object.object_type_name, Some("Coin".to_string())); + Ok(()) } diff --git a/crates/sui-json-rpc/src/error.rs b/crates/sui-json-rpc/src/error.rs index da735fdc7fbae..f56d0cde9ebf4 100644 --- a/crates/sui-json-rpc/src/error.rs +++ b/crates/sui-json-rpc/src/error.rs @@ -1,6 +1,8 @@ // Copyright (c) Mysten Labs, Inc. 
// SPDX-License-Identifier: Apache-2.0 +use crate::authority_state::StateReadError; +use crate::name_service::NameServiceError; use fastcrypto::error::FastCryptoError; use hyper::header::InvalidHeaderValue; use itertools::Itertools; @@ -9,14 +11,12 @@ use jsonrpsee::types::error::{CallError, INTERNAL_ERROR_CODE}; use jsonrpsee::types::ErrorObject; use std::collections::BTreeMap; use sui_json_rpc_api::{TRANSACTION_EXECUTION_CLIENT_ERROR_CODE, TRANSIENT_ERROR_CODE}; +use sui_types::committee::{QUORUM_THRESHOLD, TOTAL_VOTING_POWER}; use sui_types::error::{SuiError, SuiObjectResponseError, UserInputError}; use sui_types::quorum_driver_types::QuorumDriverError; use thiserror::Error; use tokio::task::JoinError; -use crate::authority_state::StateReadError; -use crate::name_service::NameServiceError; - pub type RpcInterimResult<T> = Result<T, Error>; #[derive(Debug, Error)] @@ -133,17 +133,9 @@ impl From<Error> for RpcError { Error::QuorumDriverError(err) => { match err { QuorumDriverError::InvalidUserSignature(err) => { - let inner_error_str = match err { - // TODO(wlmyng): update SuiError display trait to render UserInputError with display - SuiError::UserInputError { error } => error.to_string(), - _ => err.to_string(), - }; - - let error_message = format!("Invalid user signature: {inner_error_str}"); - let error_object = ErrorObject::owned( TRANSACTION_EXECUTION_CLIENT_ERROR_CODE, - error_message, + format!("Invalid user signature: {err}"), None::<()>, ); RpcError::Call(CallError::Custom(error_object)) @@ -164,14 +156,44 @@ impl From<Error> for RpcError { } QuorumDriverError::ObjectsDoubleUsed { conflicting_txes, - retried_tx, - retried_tx_success, + retried_tx_status, } => { + let weights: Vec<u64> = + conflicting_txes.values().map(|(_, stake)| *stake).collect(); + let remaining: u64 = TOTAL_VOTING_POWER - weights.iter().sum::<u64>(); + + // If no conflicting transaction can reach quorum even with all of the remaining stake, the objects stay locked (equivocated) until the end of the epoch; otherwise another transaction can still acquire them. + let reason = if weights.iter().all(|w| remaining + w < QUORUM_THRESHOLD) { + "equivocated until the next epoch" + } else { + "reserved for another transaction" + }; + + let retried_info = match retried_tx_status { + Some((digest, success)) => { + format!( + "Retried transaction {} ({}) because it was able to gather the necessary votes.", + digest, if success { "succeeded" } else { "failed" } + ) + } + None => "".to_string(), + }; + let error_message = format!( - "Failed to sign transaction by a quorum of validators because of locked objects. Retried a conflicting transaction {:?}, success: {:?}", - retried_tx, - retried_tx_success - ); + "Failed to sign transaction by a quorum of validators because one or more of its objects is {}. 
{} Other transactions locking these objects:\n{}", + reason, + retried_info, + conflicting_txes + .iter() + .sorted_by(|(_, (_, a)), (_, (_, b))| b.cmp(a)) + .map(|(digest, (_, stake))| format!( + "- {} (stake {}.{})", + digest, + stake / 100, + stake % 100, + )) + .join("\n"), + ); let new_map = conflicting_txes .into_iter() @@ -224,8 +246,13 @@ impl From for RpcError { "NonRecoverableTransactionError should have at least one non-retryable error" ); - let error_list = new_errors.join(", "); - let error_msg = format!("Transaction execution failed due to issues with transaction inputs, please review the errors and try again: {}.", error_list); + let mut error_list = vec![]; + + for err in new_errors.iter() { + error_list.push(format!("- {}", err)); + } + + let error_msg = format!("Transaction execution failed due to issues with transaction inputs, please review the errors and try again:\n{}", error_list.join("\n")); let error_object = ErrorObject::owned( TRANSACTION_EXECUTION_CLIENT_ERROR_CODE, @@ -382,16 +409,64 @@ mod tests { TransactionDigest, (Vec<(AuthorityName, ObjectRef)>, StakeUnit), > = BTreeMap::new(); - let tx_digest = TransactionDigest::default(); + let tx_digest = TransactionDigest::from([1; 32]); let object_ref = test_object_ref(); - let stake_unit: StakeUnit = 10; + + // 4vJ9JU1bJJE96FWSJKvHsmmFADCg4gpZQff4P3bkLKi has enough stake to escape equivocation + let stake_unit: StakeUnit = 8000; let authority_name = AuthorityPublicKeyBytes([0; AuthorityPublicKey::LENGTH]); conflicting_txes.insert(tx_digest, (vec![(authority_name, object_ref)], stake_unit)); + // 8qbHbw2BbbTHBW1sbeqakYXVKRQM8Ne7pLK7m6CVfeR stake below quorum threshold + let tx_digest = TransactionDigest::from([2; 32]); + let stake_unit: StakeUnit = 500; + let authority_name = AuthorityPublicKeyBytes([1; AuthorityPublicKey::LENGTH]); + conflicting_txes.insert(tx_digest, (vec![(authority_name, object_ref)], stake_unit)); + + let quorum_driver_error = QuorumDriverError::ObjectsDoubleUsed { + conflicting_txes, + retried_tx_status: Some((TransactionDigest::from([1; 32]), true)), + }; + + let rpc_error: RpcError = Error::QuorumDriverError(quorum_driver_error).into(); + + let error_object: ErrorObjectOwned = rpc_error.into(); + let expected_code = expect!["-32002"]; + expected_code.assert_eq(&error_object.code().to_string()); + let expected_message = expect!["Failed to sign transaction by a quorum of validators because one or more of its objects is reserved for another transaction. Retried transaction 4vJ9JU1bJJE96FWSJKvHsmmFADCg4gpZQff4P3bkLKi (succeeded) because it was able to gather the necessary votes. 
Other transactions locking these objects:\n- 4vJ9JU1bJJE96FWSJKvHsmmFADCg4gpZQff4P3bkLKi (stake 80.0)\n- 8qbHbw2BbbTHBW1sbeqakYXVKRQM8Ne7pLK7m6CVfeR (stake 5.0)"]; + expected_message.assert_eq(error_object.message()); + let expected_data = expect![[ + r#"{"4vJ9JU1bJJE96FWSJKvHsmmFADCg4gpZQff4P3bkLKi":[["0x0000000000000000000000000000000000000000000000000000000000000000",0,"11111111111111111111111111111111"]],"8qbHbw2BbbTHBW1sbeqakYXVKRQM8Ne7pLK7m6CVfeR":[["0x0000000000000000000000000000000000000000000000000000000000000000",0,"11111111111111111111111111111111"]]}"# + ]]; + let actual_data = error_object.data().unwrap().to_string(); + expected_data.assert_eq(&actual_data); + } + + #[test] + fn test_objects_double_used_equivocated() { + use sui_types::crypto::VerifyingKey; + let mut conflicting_txes: BTreeMap< + TransactionDigest, + (Vec<(AuthorityName, ObjectRef)>, StakeUnit), + > = BTreeMap::new(); + let tx_digest = TransactionDigest::from([1; 32]); + let object_ref = test_object_ref(); + + // 4vJ9JU1bJJE96FWSJKvHsmmFADCg4gpZQff4P3bkLKi has the lower stake, 4000 + let stake_unit: StakeUnit = 4000; + let authority_name = AuthorityPublicKeyBytes([0; AuthorityPublicKey::LENGTH]); + conflicting_txes.insert(tx_digest, (vec![(authority_name, object_ref)], stake_unit)); + + // 8qbHbw2BbbTHBW1sbeqakYXVKRQM8Ne7pLK7m6CVfeR has the higher stake and should be listed first + let tx_digest = TransactionDigest::from([2; 32]); + let stake_unit: StakeUnit = 5000; + let authority_name = AuthorityPublicKeyBytes([1; AuthorityPublicKey::LENGTH]); + conflicting_txes.insert(tx_digest, (vec![(authority_name, object_ref)], stake_unit)); + let quorum_driver_error = QuorumDriverError::ObjectsDoubleUsed { conflicting_txes, - retried_tx: Some(TransactionDigest::default()), - retried_tx_success: Some(true), + // No transaction is retried: the remaining stake is 10_000 - (4_000 + 5_000) = 1_000, and neither 1_000 + 4_000 nor 1_000 + 5_000 reaches the 6_667 quorum threshold. + retried_tx_status: None, }; let rpc_error: RpcError = Error::QuorumDriverError(quorum_driver_error).into(); @@ -399,10 +474,10 @@ mod tests { let error_object: ErrorObjectOwned = rpc_error.into(); let expected_code = expect!["-32002"]; expected_code.assert_eq(&error_object.code().to_string()); - let expected_message = expect!["Failed to sign transaction by a quorum of validators because of locked objects. Retried a conflicting transaction Some(TransactionDigest(11111111111111111111111111111111)), success: Some(true)"]; + let expected_message = expect!["Failed to sign transaction by a quorum of validators because one or more of its objects is equivocated until the next epoch. 
Other transactions locking these objects:\n- 8qbHbw2BbbTHBW1sbeqakYXVKRQM8Ne7pLK7m6CVfeR (stake 50.0)\n- 4vJ9JU1bJJE96FWSJKvHsmmFADCg4gpZQff4P3bkLKi (stake 40.0)"]; expected_message.assert_eq(error_object.message()); let expected_data = expect![[ - r#"{"11111111111111111111111111111111":[["0x0000000000000000000000000000000000000000000000000000000000000000",0,"11111111111111111111111111111111"]]}"# + r#"{"4vJ9JU1bJJE96FWSJKvHsmmFADCg4gpZQff4P3bkLKi":[["0x0000000000000000000000000000000000000000000000000000000000000000",0,"11111111111111111111111111111111"]],"8qbHbw2BbbTHBW1sbeqakYXVKRQM8Ne7pLK7m6CVfeR":[["0x0000000000000000000000000000000000000000000000000000000000000000",0,"11111111111111111111111111111111"]]}"# ]]; let actual_data = error_object.data().unwrap().to_string(); expected_data.assert_eq(&actual_data); @@ -441,7 +516,7 @@ mod tests { let expected_code = expect!["-32002"]; expected_code.assert_eq(&error_object.code().to_string()); let expected_message = - expect!["Transaction execution failed due to issues with transaction inputs, please review the errors and try again: Balance of gas object 10 is lower than the needed amount: 100, Object (0x0000000000000000000000000000000000000000000000000000000000000000, SequenceNumber(0), o#11111111111111111111111111111111) is not available for consumption, its current version: SequenceNumber(10)."]; + expect!["Transaction execution failed due to issues with transaction inputs, please review the errors and try again:\n- Balance of gas object 10 is lower than the needed amount: 100\n- Object ID 0x0000000000000000000000000000000000000000000000000000000000000000 Version 0x0 Digest 11111111111111111111111111111111 is not available for consumption, current version: 0xa"]; expected_message.assert_eq(error_object.message()); } @@ -473,7 +548,7 @@ mod tests { let expected_code = expect!["-32002"]; expected_code.assert_eq(&error_object.code().to_string()); let expected_message = - expect!["Transaction execution failed due to issues with transaction inputs, please review the errors and try again: Could not find the referenced object 0x0000000000000000000000000000000000000000000000000000000000000000 at version None."]; + expect!["Transaction execution failed due to issues with transaction inputs, please review the errors and try again:\n- Could not find the referenced object 0x0000000000000000000000000000000000000000000000000000000000000000 at version None"]; expected_message.assert_eq(error_object.message()); } diff --git a/crates/sui-json/src/lib.rs b/crates/sui-json/src/lib.rs index 4782ed9e57116..f012dcbbbc720 100644 --- a/crates/sui-json/src/lib.rs +++ b/crates/sui-json/src/lib.rs @@ -223,7 +223,7 @@ impl SuiJsonValue { with one field of address or u8 vector type" ), }, - MoveTypeLayout::Struct(MoveStructLayout { type_, .. }) if type_ == &ID::type_() => { + MoveTypeLayout::Struct(struct_layout) if struct_layout.type_ == ID::type_() => { Ok(R::MoveValue::Struct(R::MoveStruct(vec![ Self::to_move_value(val, &inner_vec[0].layout.clone())?, ]))) @@ -277,27 +277,26 @@ impl SuiJsonValue { R::MoveValue::U256(convert_string_to_u256(s.as_str())?) 
} // For ascii and utf8 strings - ( - JsonValue::String(s), - MoveTypeLayout::Struct(MoveStructLayout { type_, fields: _ }), - ) if is_move_string_type(type_) => { + (JsonValue::String(s), MoveTypeLayout::Struct(struct_layout)) + if is_move_string_type(&struct_layout.type_) => + { R::MoveValue::Vector(s.as_bytes().iter().copied().map(R::MoveValue::U8).collect()) } // For ID - (JsonValue::String(s), MoveTypeLayout::Struct(MoveStructLayout { type_, fields })) - if type_ == &ID::type_() => + (JsonValue::String(s), MoveTypeLayout::Struct(struct_layout)) + if struct_layout.type_ == ID::type_() => { - if fields.len() != 1 { + if struct_layout.fields.len() != 1 { bail!( - "Cannot convert string arg {s} to {type_} which is expected to be a struct with one field" + "Cannot convert string arg {s} to {} which is expected to be a struct with one field", struct_layout.type_ ); }; let addr = SuiAddress::from_str(s)?; R::MoveValue::Address(addr.into()) } - (JsonValue::Object(o), MoveTypeLayout::Struct(MoveStructLayout { fields, .. })) => { + (JsonValue::Object(o), MoveTypeLayout::Struct(struct_layout)) => { let mut field_values = vec![]; - for layout in fields { + for layout in struct_layout.fields.iter() { let field = o .get(layout.name.as_str()) .ok_or_else(|| anyhow!("Missing field {} for struct {ty}", layout.name))?; @@ -306,10 +305,8 @@ impl SuiJsonValue { R::MoveValue::Struct(R::MoveStruct(field_values)) } // Unnest fields - (value, MoveTypeLayout::Struct(MoveStructLayout { fields, .. })) - if fields.len() == 1 => - { - Self::to_move_value(value, &fields[0].layout)? + (value, MoveTypeLayout::Struct(struct_layout)) if struct_layout.fields.len() == 1 => { + Self::to_move_value(value, &struct_layout.fields[0].layout)? } (JsonValue::String(s), MoveTypeLayout::Vector(t)) => { match &**t { @@ -332,8 +329,8 @@ impl SuiJsonValue { }; R::MoveValue::Vector(vec.iter().copied().map(R::MoveValue::U8).collect()) } - MoveTypeLayout::Struct(MoveStructLayout { fields: inner, .. }) => { - Self::handle_inner_struct_layout(inner, val, ty, s)? + MoveTypeLayout::Struct(struct_layout) => { + Self::handle_inner_struct_layout(&struct_layout.fields, val, ty, s)? 
} _ => bail!("Cannot convert string arg {s} to {ty}"), } @@ -594,36 +591,36 @@ pub fn primitive_type( if resolved_struct == RESOLVED_ASCII_STR { ( true, - Some(MoveTypeLayout::Struct(MoveStructLayout { + Some(MoveTypeLayout::Struct(Box::new(MoveStructLayout { type_: resolved_to_struct(RESOLVED_ASCII_STR), - fields: vec![MoveFieldLayout::new( + fields: Box::new(vec![MoveFieldLayout::new( ident_str!("bytes").into(), MoveTypeLayout::Vector(Box::new(MoveTypeLayout::U8)), - )], - })), + )]), + }))), ) } else if resolved_struct == RESOLVED_UTF8_STR { // both structs structs representing strings have one field - a vector of type u8 ( true, - Some(MoveTypeLayout::Struct(MoveStructLayout { + Some(MoveTypeLayout::Struct(Box::new(MoveStructLayout { type_: resolved_to_struct(RESOLVED_UTF8_STR), - fields: vec![MoveFieldLayout::new( + fields: Box::new(vec![MoveFieldLayout::new( ident_str!("bytes").into(), MoveTypeLayout::Vector(Box::new(MoveTypeLayout::U8)), - )], - })), + )]), + }))), ) } else if resolved_struct == RESOLVED_SUI_ID { ( true, - Some(MoveTypeLayout::Struct(MoveStructLayout { + Some(MoveTypeLayout::Struct(Box::new(MoveStructLayout { type_: resolved_to_struct(RESOLVED_SUI_ID), - fields: vec![MoveFieldLayout::new( + fields: Box::new(vec![MoveFieldLayout::new( ident_str!("bytes").into(), MoveTypeLayout::Address, - )], - })), + )]), + }))), ) } else { (false, None) diff --git a/crates/sui-json/src/tests.rs b/crates/sui-json/src/tests.rs index a73e51e8cecfc..3d7b4e26e2667 100644 --- a/crates/sui-json/src/tests.rs +++ b/crates/sui-json/src/tests.rs @@ -618,18 +618,18 @@ fn test_from_str() { fn test_sui_call_arg_string_type() { let arg1 = bcs::to_bytes("Some String").unwrap(); - let string_layout = Some(MoveTypeLayout::Struct(MoveStructLayout { + let string_layout = Some(MoveTypeLayout::Struct(Box::new(MoveStructLayout { type_: StructTag { address: MOVE_STDLIB_ADDRESS, module: STD_ASCII_MODULE_NAME.into(), name: STD_ASCII_STRUCT_NAME.into(), type_params: vec![], }, - fields: vec![MoveFieldLayout { + fields: Box::new(vec![MoveFieldLayout { name: ident_str!("bytes").into(), layout: MoveTypeLayout::Vector(Box::new(MoveTypeLayout::U8)), - }], - })); + }]), + }))); let v = SuiJsonValue::from_bcs_bytes(string_layout.as_ref(), &arg1).unwrap(); assert_eq!(json! 
{"Some String"}, v.to_json_value()); @@ -639,31 +639,31 @@ fn test_sui_call_arg_string_type() { fn test_sui_call_arg_option_type() { let arg1 = bcs::to_bytes(&Some("Some String")).unwrap(); - let string_layout = MoveTypeLayout::Struct(MoveStructLayout { + let string_layout = MoveTypeLayout::Struct(Box::new(MoveStructLayout { type_: StructTag { address: MOVE_STDLIB_ADDRESS, module: STD_ASCII_MODULE_NAME.into(), name: STD_ASCII_STRUCT_NAME.into(), type_params: vec![], }, - fields: vec![MoveFieldLayout { + fields: Box::new(vec![MoveFieldLayout { name: ident_str!("bytes").into(), layout: MoveTypeLayout::Vector(Box::new(MoveTypeLayout::U8)), - }], - }); + }]), + })); - let option_layout = MoveTypeLayout::Struct(MoveStructLayout { + let option_layout = MoveTypeLayout::Struct(Box::new(MoveStructLayout { type_: StructTag { address: MOVE_STDLIB_ADDRESS, module: STD_OPTION_MODULE_NAME.into(), name: STD_OPTION_STRUCT_NAME.into(), type_params: vec![], }, - fields: vec![MoveFieldLayout { + fields: Box::new(vec![MoveFieldLayout { name: ident_str!("vec").into(), layout: MoveTypeLayout::Vector(Box::new(string_layout.clone())), - }], - }); + }]), + })); let v = SuiJsonValue::from_bcs_bytes(Some(option_layout).as_ref(), &arg1).unwrap(); @@ -680,7 +680,7 @@ fn test_sui_call_arg_option_type() { #[test] fn test_convert_struct() { - let layout = MoveTypeLayout::Struct(GasCoin::layout()); + let layout = MoveTypeLayout::Struct(Box::new(GasCoin::layout())); let value = json!({"id":"0xf1416fe18c7baa1673187375777a7606708481311cb3548509ec91a5871c6b9a", "balance": "1000000"}); let sui_json = SuiJsonValue::new(value).unwrap(); @@ -702,18 +702,18 @@ fn test_convert_struct() { fn test_convert_string_vec() { let test_vec = vec!["0xbbb", "test_str"]; let bcs = bcs::to_bytes(&test_vec).unwrap(); - let string_layout = MoveTypeLayout::Struct(MoveStructLayout { + let string_layout = MoveTypeLayout::Struct(Box::new(MoveStructLayout { type_: StructTag { address: MOVE_STDLIB_ADDRESS, module: STD_ASCII_MODULE_NAME.into(), name: STD_ASCII_STRUCT_NAME.into(), type_params: vec![], }, - fields: vec![MoveFieldLayout { + fields: Box::new(vec![MoveFieldLayout { name: ident_str!("bytes").into(), layout: MoveTypeLayout::Vector(Box::new(MoveTypeLayout::U8)), - }], - }); + }]), + })); let layout = MoveTypeLayout::Vector(Box::new(string_layout)); @@ -737,31 +737,31 @@ fn test_string_vec_df_name_child_id_eq() { ] }); - let string_layout = MoveTypeLayout::Struct(MoveStructLayout { + let string_layout = MoveTypeLayout::Struct(Box::new(MoveStructLayout { type_: StructTag { address: MOVE_STDLIB_ADDRESS, module: STD_ASCII_MODULE_NAME.into(), name: STD_ASCII_STRUCT_NAME.into(), type_params: vec![], }, - fields: vec![MoveFieldLayout { + fields: Box::new(vec![MoveFieldLayout { name: ident_str!("bytes").into(), layout: MoveTypeLayout::Vector(Box::new(MoveTypeLayout::U8)), - }], - }); + }]), + })); - let layout = MoveTypeLayout::Struct(MoveStructLayout { + let layout = MoveTypeLayout::Struct(Box::new(MoveStructLayout { type_: StructTag { address: MOVE_STDLIB_ADDRESS, module: STD_ASCII_MODULE_NAME.into(), name: STD_ASCII_STRUCT_NAME.into(), type_params: vec![], }, - fields: vec![MoveFieldLayout::new( + fields: Box::new(vec![MoveFieldLayout::new( Identifier::from_str("labels").unwrap(), MoveTypeLayout::Vector(Box::new(string_layout)), - )], - }); + )]), + })); let sui_json = SuiJsonValue::new(name).unwrap(); let bcs2 = sui_json.to_bcs_bytes(&layout).unwrap(); diff --git a/crates/sui-light-client/.gitignore b/crates/sui-light-client/.gitignore new file 
mode 100644 index 0000000000000..c2ec96c36f8b7 --- /dev/null +++ b/crates/sui-light-client/.gitignore @@ -0,0 +1 @@ +checkpoints_dir/* \ No newline at end of file diff --git a/crates/sui-light-client/Cargo.toml b/crates/sui-light-client/Cargo.toml index a18d7df4c49cd..7992b77d71bb0 100644 --- a/crates/sui-light-client/Cargo.toml +++ b/crates/sui-light-client/Cargo.toml @@ -27,8 +27,12 @@ serde_json.workspace = true sui-types.workspace = true sui-config.workspace = true sui-rest-api.workspace = true -sui-json.workspace = true sui-sdk.workspace = true move-binary-format.workspace = true sui-json-rpc-types.workspace = true sui-package-resolver.workspace = true +url.workspace = true +reqwest.workspace = true +object_store.workspace = true +env_logger = "0.11.5" +log = "0.4.22" diff --git a/crates/sui-light-client/example_config/light_client.yaml b/crates/sui-light-client/example_config/light_client.yaml index 8d3fc8688af3e..0f45d70bfafc9 100644 --- a/crates/sui-light-client/example_config/light_client.yaml +++ b/crates/sui-light-client/example_config/light_client.yaml @@ -1,3 +1,5 @@ -full_node_url: "http://ord-mnt-rpcbig-06.mainnet.sui.io:9000" +full_node_url: "https://fullnode.mainnet.sui.io:443" checkpoint_summary_dir: "checkpoints_dir" -genesis_filename: "genesis.blob" \ No newline at end of file +genesis_filename: "genesis.blob" +object_store_url: "https://checkpoints.mainnet.sui.io" +graphql_url: "https://sui-mainnet.mystenlabs.com/graphql" \ No newline at end of file diff --git a/crates/sui-light-client/src/main.rs b/crates/sui-light-client/src/main.rs index 0dc3f586f4ee5..f335e5cde7e94 100644 --- a/crates/sui-light-client/src/main.rs +++ b/crates/sui-light-client/src/main.rs @@ -4,9 +4,9 @@ use anyhow::anyhow; use async_trait::async_trait; use move_core_types::account_address::AccountAddress; -use sui_json_rpc_types::SuiTransactionBlockResponseOptions; +use sui_json_rpc_types::{SuiObjectDataOptions, SuiTransactionBlockResponseOptions}; -use sui_rest_api::{CheckpointData, Client}; +use sui_rest_api::CheckpointData; use sui_types::{ base_types::ObjectID, committee::Committee, @@ -15,20 +15,26 @@ use sui_types::{ effects::{TransactionEffects, TransactionEffectsAPI, TransactionEvents}, message_envelope::Envelope, messages_checkpoint::{CertifiedCheckpointSummary, CheckpointSummary, EndOfEpochData}, - object::{Data, Object}, + object::{bounded_visitor::BoundedVisitor, Data, Object}, }; use sui_config::genesis::Genesis; -use sui_json::SuiJsonValue; use sui_package_resolver::Result as ResolverResult; use sui_package_resolver::{Package, PackageStore, Resolver}; use sui_sdk::SuiClientBuilder; use clap::{Parser, Subcommand}; -use std::{fs, io::Write, path::PathBuf, str::FromStr}; +use std::{collections::HashMap, fs, io::Write, path::PathBuf, str::FromStr, sync::Mutex}; use std::{io::Read, sync::Arc}; +use log::info; +use object_store::parse_url; +use object_store::path::Path; +use serde_json::json; +use serde_json::Value; +use url::Url; + /// A light client for the Sui blockchain #[derive(Parser, Debug)] #[command(author, version, about, long_about = None)] @@ -43,11 +49,15 @@ struct Args { struct RemotePackageStore { config: Config, + cache: Mutex>>, } impl RemotePackageStore { pub fn new(config: Config) -> Self { - Self { config } + Self { + config, + cache: Mutex::new(HashMap::new()), + } } } @@ -56,9 +66,21 @@ impl PackageStore for RemotePackageStore { /// Read package contents. Fails if `id` is not an object, not a package, or is malformed in /// some way. 
async fn fetch(&self, id: AccountAddress) -> ResolverResult<Arc<Package>> { + // Check if we have it in the cache + if let Some(package) = self.cache.lock().unwrap().get(&id) { + info!("Fetch Package: {} cache hit", id); + return Ok(package.clone()); + } + + info!("Fetch Package: {}", id); + let object = get_verified_object(&self.config, id.into()).await.unwrap(); - let package = Package::read_from_object(&object).unwrap(); - Ok(Arc::new(package)) + let package = Arc::new(Package::read_from_object(&object).unwrap()); + + // Add to the cache + self.cache.lock().unwrap().insert(id, package.clone()); + + Ok(package) } } @@ -93,12 +115,41 @@ struct Config { // Genesis file name genesis_filename: PathBuf, + + /// Object store url + object_store_url: String, + + /// GraphQL endpoint + graphql_url: String, } -impl Config { - pub fn rest_url(&self) -> &str { - &self.full_node_url - } +async fn query_last_checkpoint_of_epoch(config: &Config, epoch_id: u64) -> anyhow::Result<u64> { + // GraphQL query to get the last checkpoint of an epoch + let query = json!({ + "query": "query ($epochID: Int) { epoch(id: $epochID) { checkpoints(last: 1) { nodes { sequenceNumber } } } }", + "variables": { "epochID": epoch_id } + }); + + // Submit the query by POSTing to the GraphQL endpoint + let client = reqwest::Client::new(); + let resp = client + .post(&config.graphql_url) + .header("Content-Type", "application/json") + .body(query.to_string()) + .send() + .await + .expect("Cannot connect to graphql") + .text() + .await + .expect("Cannot parse response"); + + // Parse the JSON response to get the last checkpoint of the epoch + let v: Value = serde_json::from_str(resp.as_str()).expect("Incorrect JSON response"); + let checkpoint_number = v["data"]["epoch"]["checkpoints"]["nodes"][0]["sequenceNumber"] + .as_u64() + .unwrap(); + + Ok(checkpoint_number) } // The list of checkpoints at the end of each epoch @@ -182,11 +233,19 @@ fn write_checkpoint_list( async fn download_checkpoint_summary( config: &Config, - seq: u64, + checkpoint_number: u64, ) -> anyhow::Result<CertifiedCheckpointSummary> { // Download the checkpoint from the server - let client = Client::new(config.rest_url()); - client.get_checkpoint_summary(seq).await.map_err(Into::into) + + let url = Url::parse(&config.object_store_url)?; + let (dyn_store, _store_path) = parse_url(&url).unwrap(); + let path = Path::from(format!("{}.chk", checkpoint_number)); + let response = dyn_store.get(&path).await?; + let bytes = response.bytes().await?; + let (_, blob) = bcs::from_bytes::<(u8, CheckpointData)>(&bytes)?; + + info!("Downloaded checkpoint summary: {}", checkpoint_number); + Ok(blob.checkpoint_summary) } /// Run binary search to find each end of epoch checkpoint that is missing @@ -202,73 +261,58 @@ async fn sync_checkpoint_list_to_latest(config: &Config) -> anyhow::Result<()> { // Download the latest in list checkpoint let summary = download_checkpoint_summary(config, *latest_in_list).await?; let mut last_epoch = summary.epoch(); - let mut last_checkpoint_seq = summary.sequence_number; // Download the very latest checkpoint - let client = Client::new(config.rest_url()); - let latest = client.get_latest_checkpoint().await?; - - // Binary search to find missing checkpoints + let client = SuiClientBuilder::default() + .build(config.full_node_url.as_str()) + .await + .expect("Cannot connect to full node"); + + let latest_seq = client + .read_api() + .get_latest_checkpoint_sequence_number() + .await?; + let latest = download_checkpoint_summary(config, latest_seq).await?; + + // Sequentially record all the missing end-of-epoch checkpoint numbers while last_epoch + 1 < latest.epoch() { - let mut start = last_checkpoint_seq; - let mut end = latest.sequence_number; let target_epoch = last_epoch + 1; - // Print target - println!("Target Epoch: {}", target_epoch); - let mut found_summary = None; - - while start < end { - let mid = (start + end) / 2; - let summary = download_checkpoint_summary(config, mid).await?; - - // print summary epoch and seq - println!( - "Epoch: {} Seq: {}: {}", - summary.epoch(), - summary.sequence_number, - summary.end_of_epoch_data.is_some() - ); - - if summary.epoch() == target_epoch && summary.end_of_epoch_data.is_some() { - found_summary = Some(summary); - break; - } + let target_last_checkpoint_number = + query_last_checkpoint_of_epoch(config, target_epoch).await?; - if summary.epoch() <= target_epoch { - start = mid + 1; - } else { - end = mid; - } - } + // Add to the list + checkpoints_list + .checkpoints + .push(target_last_checkpoint_number); + write_checkpoint_list(config, &checkpoints_list)?; - if let Some(summary) = found_summary { - // Note: Do not write summary to file, since we must only persist - // checkpoints that have been verified by the previous committee + // Update + last_epoch = target_epoch; - // Add to the list - checkpoints_list.checkpoints.push(summary.sequence_number); - write_checkpoint_list(config, &checkpoints_list)?; - - // Update - last_epoch = summary.epoch(); - last_checkpoint_seq = summary.sequence_number; - } + println!( + "Last Epoch: {} Last Checkpoint: {}", + target_epoch, target_last_checkpoint_number + ); } Ok(()) } async fn check_and_sync_checkpoints(config: &Config) -> anyhow::Result<()> { - sync_checkpoint_list_to_latest(config).await?; + sync_checkpoint_list_to_latest(config) + .await + .map_err(|e| anyhow!(format!("Cannot refresh list: {e}")))?; // Get the local checkpoint list - let checkpoints_list: CheckpointsList = read_checkpoint_list(config)?; + let checkpoints_list: CheckpointsList = read_checkpoint_list(config) + .map_err(|e| anyhow!(format!("Cannot read checkpoint list: {e}")))?; // Load the genesis committee let mut genesis_path = config.checkpoint_summary_dir.clone(); genesis_path.push(&config.genesis_filename); - let genesis_committee = Genesis::load(&genesis_path)?.committee()?; + let genesis_committee = Genesis::load(&genesis_path)? + .committee() + .map_err(|e| anyhow!(format!("Cannot load Genesis: {e}")))?; // Check the signatures of all checkpoints // And download any missing ones @@ -281,10 +325,13 @@ async fn check_and_sync_checkpoints(config: &Config) -> anyhow::Result<()> { // If file exists read the file otherwise download it from the server let summary = if checkpoint_path.exists() { - read_checkpoint(config, *ckp_id)? + read_checkpoint(config, *ckp_id) + .map_err(|e| anyhow!(format!("Cannot read checkpoint: {e}")))? 
} else { // Download the checkpoint from the server - let summary = download_checkpoint_summary(config, *ckp_id).await?; + let summary = download_checkpoint_summary(config, *ckp_id) + .await + .map_err(|e| anyhow!(format!("Cannot download summary: {e}")))?; summary.clone().try_into_verified(&prev_committee)?; // Write the checkpoint summary to a file write_checkpoint(config, &summary)?; @@ -317,11 +364,21 @@ async fn check_and_sync_checkpoints(config: &Config) -> anyhow::Result<()> { Ok(()) } -async fn get_full_checkpoint(config: &Config, seq: u64) -> anyhow::Result { - // Downloading the checkpoint from the server - let client: Client = Client::new(config.rest_url()); - let full_checkpoint = client.get_full_checkpoint(seq).await?; - +async fn get_full_checkpoint( + config: &Config, + checkpoint_number: u64, +) -> anyhow::Result { + let url = Url::parse(&config.object_store_url) + .map_err(|_| anyhow!("Cannot parse object store URL"))?; + let (dyn_store, _store_path) = parse_url(&url).unwrap(); + let path = Path::from(format!("{}.chk", checkpoint_number)); + info!("Request full checkpoint: {}", path); + let response = dyn_store + .get(&path) + .await + .map_err(|_| anyhow!("Cannot get full checkpoint from object store"))?; + let bytes = response.bytes().await?; + let (_, full_checkpoint) = bcs::from_bytes::<(u8, CheckpointData)>(&bytes)?; Ok(full_checkpoint) } @@ -363,24 +420,26 @@ async fn get_verified_effects_and_events( config: &Config, tid: TransactionDigest, ) -> anyhow::Result<(TransactionEffects, Option)> { - let sui_mainnet: Arc = Arc::new( - SuiClientBuilder::default() - .build(config.full_node_url.as_str()) - .await - .unwrap(), - ); + let sui_mainnet: sui_sdk::SuiClient = SuiClientBuilder::default() + .build(config.full_node_url.as_str()) + .await + .unwrap(); let read_api = sui_mainnet.read_api(); + info!("Getting effects and events for TID: {}", tid); // Lookup the transaction id and get the checkpoint sequence number let options = SuiTransactionBlockResponseOptions::new(); let seq = read_api .get_transaction_with_options(tid, options) - .await? + .await + .map_err(|e| anyhow!(format!("Cannot get transaction: {e}")))? .checkpoint .ok_or(anyhow!("Transaction not found"))?; // Download the full checkpoint for this sequence number - let full_check_point = get_full_checkpoint(config, seq).await?; + let full_check_point = get_full_checkpoint(config, seq) + .await + .map_err(|e| anyhow!(format!("Cannot get full checkpoint: {e}")))?; // Load the list of stored checkpoints let checkpoints_list: CheckpointsList = read_checkpoint_list(config)?; @@ -420,31 +479,57 @@ async fn get_verified_effects_and_events( // Since we did not find a small committee checkpoint we use the genesis let mut genesis_path = config.checkpoint_summary_dir.clone(); genesis_path.push(&config.genesis_filename); - Genesis::load(&genesis_path)?.committee()? + Genesis::load(&genesis_path)? + .committee() + .map_err(|e| anyhow!(format!("Cannot load Genesis: {e}")))? 
}; + info!("Extracting effects and events for TID: {}", tid); extract_verified_effects_and_events(&full_check_point, &committee, tid) + .map_err(|e| anyhow!(format!("Cannot extract effects and events: {e}"))) } async fn get_verified_object(config: &Config, id: ObjectID) -> anyhow::Result { - let client: Client = Client::new(config.rest_url()); - let object = client.get_object(id).await?; + let sui_client: Arc = Arc::new( + SuiClientBuilder::default() + .build(config.full_node_url.as_str()) + .await + .unwrap(), + ); + + info!("Getting object: {}", id); + + let read_api = sui_client.read_api(); + let object_json = read_api + .get_object_with_options(id, SuiObjectDataOptions::bcs_lossless()) + .await + .expect("Cannot get object"); + let object = object_json + .into_object() + .expect("Cannot make into object data"); + let object: Object = object.try_into().expect("Cannot reconstruct object"); // Need to authenticate this object - let (effects, _) = get_verified_effects_and_events(config, object.previous_transaction).await?; + let (effects, _) = get_verified_effects_and_events(config, object.previous_transaction) + .await + .expect("Cannot get effects and events"); // check that this object ID, version and hash is in the effects + let target_object_ref = object.compute_object_reference(); effects .all_changed_objects() .iter() - .find(|object_ref| object_ref.0 == object.compute_object_reference()) - .ok_or(anyhow!("Object not found"))?; + .find(|object_ref| object_ref.0 == target_object_ref) + .ok_or(anyhow!("Object not found")) + .expect("Object not found"); Ok(object) } #[tokio::main] pub async fn main() { + env_logger::init(); + // Command line arguments and config loading let args = Args::parse(); @@ -479,23 +564,27 @@ pub async fn main() { exec_digests.transaction, exec_digests.effects ); - for event in events.as_ref().unwrap().data.iter() { - let type_layout = resolver - .type_layout(event.type_.clone().into()) - .await - .unwrap(); - - let json_val = - SuiJsonValue::from_bcs_bytes(Some(&type_layout), &event.contents).unwrap(); + if events.is_some() { + for event in events.as_ref().unwrap().data.iter() { + let type_layout = resolver + .type_layout(event.type_.clone().into()) + .await + .unwrap(); - println!( - "Event:\n - Package: {}\n - Module: {}\n - Sender: {}\n - Type: {}\n{}", - event.package_id, - event.transaction_module, - event.sender, - event.type_, - serde_json::to_string_pretty(&json_val.to_json_value()).unwrap() - ); + let result = BoundedVisitor::deserialize_value(&event.contents, &type_layout) + .expect("Cannot deserialize"); + + println!( + "Event:\n - Package: {}\n - Module: {}\n - Sender: {}\n - Type: {}\n{}", + event.package_id, + event.transaction_module, + event.sender, + event.type_, + serde_json::to_string_pretty(&result).unwrap() + ); + } + } else { + println!("No events found"); } } Some(SCommands::Object { oid }) => { @@ -510,9 +599,9 @@ pub async fn main() { .await .unwrap(); - let json_val = - SuiJsonValue::from_bcs_bytes(Some(&type_layout), move_object.contents()) - .unwrap(); + let result = + BoundedVisitor::deserialize_value(move_object.contents(), &type_layout) + .expect("Cannot deserialize"); let (oid, version, hash) = object.compute_object_reference(); println!( @@ -522,7 +611,7 @@ pub async fn main() { hash, object.owner, object_type, - serde_json::to_string_pretty(&json_val.to_json_value()).unwrap() + serde_json::to_string_pretty(&result).unwrap() ); } } diff --git a/crates/sui-light-client/src/proof.rs b/crates/sui-light-client/src/proof.rs index 
8d846384e64bb..e337b9ffa346d 100644 --- a/crates/sui-light-client/src/proof.rs +++ b/crates/sui-light-client/src/proof.rs @@ -3,6 +3,7 @@ use anyhow::anyhow; +use serde::{Deserialize, Serialize}; use sui_types::{ base_types::ObjectRef, committee::Committee, @@ -13,8 +14,8 @@ use sui_types::{ transaction::Transaction, }; -/// Define aspect of Sui state that needs to be certified in a proof -#[derive(Default)] +/// Define aspects of Sui state that need to be certified in a proof +#[derive(Default, Debug, Serialize, Deserialize)] pub struct ProofTarget { /// Objects that need to be certified. pub objects: Vec<(ObjectRef, Object)>, @@ -58,6 +59,7 @@ impl ProofTarget { /// Part of a proof that provides evidence relating to a specific transaction to /// certify objects and events. +#[derive(Debug, Serialize, Deserialize)] pub struct TransactionProof { /// Checkpoint contents including this transaction. pub checkpoint_contents: CheckpointContents, @@ -74,6 +76,7 @@ pub struct TransactionProof { /// A proof for specific targets. It certifies a checkpoint summary and optionally includes /// transaction evidence to certify objects and events. +#[derive(Debug, Serialize, Deserialize)] pub struct Proof { /// Targets of the proof are a committee, objects, or events that need to be certified. pub targets: ProofTarget, diff --git a/crates/sui-move-build/src/lib.rs index 70bec1ebffddf..5d8e36a1b8520 100644 --- a/crates/sui-move-build/src/lib.rs +++ b/crates/sui-move-build/src/lib.rs @@ -15,7 +15,7 @@ use move_binary_format::{ normalized::{self, Type}, CompiledModule, }; -use move_bytecode_utils::{layout::SerdeLayoutBuilder, module_cache::GetModule}; +use move_bytecode_utils::{layout::SerdeLayoutBuilder, module_cache::GetModule, Modules}; use move_compiler::{ compiled_unit::AnnotatedCompiledModule, diagnostics::{report_diagnostics_to_buffer, report_warnings, Diagnostics}, @@ -33,6 +33,7 @@ use move_package::{ }, package_hooks::{PackageHooks, PackageIdentifier}, resolution::resolution_graph::ResolvedGraph, + source_package::parsed_manifest::PackageName, BuildConfig as MoveBuildConfig, }; use move_package::{ @@ -65,6 +66,9 @@ pub struct CompiledPackage { pub published_at: Result<ObjectID, PublishedAtError>, /// The dependency IDs of this package pub dependency_ids: PackageDependencies, + /// The bytecode modules that this package depends on (both directly and transitively), + /// i.e. on-chain dependencies. 
+ pub bytecode_deps: Vec<(PackageName, CompiledModule)>, } /// Wrapper around the core Move `BuildConfig` with some Sui-specific info @@ -234,6 +238,39 @@ pub fn build_from_resolution_graph( ) -> SuiResult { let (published_at, dependency_ids) = gather_published_ids(&resolution_graph, chain_id); + // collect bytecode dependencies as these are not returned as part of core + // `CompiledPackage` + let mut bytecode_deps = vec![]; + for (name, pkg) in resolution_graph.package_table.iter() { + if !pkg + .get_sources(&resolution_graph.build_options) + .unwrap() + .is_empty() + { + continue; + } + let modules = + pkg.get_bytecodes_bytes() + .map_err(|error| SuiError::ModuleDeserializationFailure { + error: format!( + "Deserializing bytecode dependency for package {}: {:?}", + name, error + ), + })?; + for module in modules { + let module = + CompiledModule::deserialize_with_defaults(module.as_ref()).map_err(|error| { + SuiError::ModuleDeserializationFailure { + error: format!( + "Deserializing bytecode dependency for package {}: {:?}", + name, error + ), + } + })?; + bytecode_deps.push((*name, module)); + } + } + let result = if print_diags_to_stderr { BuildConfig::compile_package(resolution_graph, &mut std::io::stderr()) } else { @@ -268,6 +305,7 @@ pub fn build_from_resolution_graph( package, published_at, dependency_ids, + bytecode_deps, }) } @@ -297,12 +335,16 @@ impl CompiledPackage { .deps_compiled_units .iter() .map(|(_, m)| &m.unit.module) + .chain(self.bytecode_deps.iter().map(|(_, m)| m)) } /// Return all of the bytecode modules in this package and the modules of its direct and transitive dependencies. /// Note: these are not topologically sorted by dependency. pub fn get_modules_and_deps(&self) -> impl Iterator { - self.package.all_modules().map(|m| &m.unit.module) + self.package + .all_modules() + .map(|m| &m.unit.module) + .chain(self.bytecode_deps.iter().map(|(_, m)| m)) } /// Return the bytecode modules in this package, topologically sorted in dependency order. 
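Note: the next hunk rewires `get_dependency_sorted_modules` to build its module set from `get_modules_and_deps()`, so the on-chain bytecode dependencies collected above take part in the topological sort instead of only source-built modules. A minimal sketch of that pattern, assuming the `move_bytecode_utils::Modules` API as it appears in this diff (`print_publish_order` is a hypothetical helper, not part of the change):

```rust
use move_binary_format::CompiledModule;
use move_bytecode_utils::Modules;

/// Hypothetical helper: topologically sort a package's own modules together
/// with its on-chain bytecode dependencies, so publish order respects deps.
fn print_publish_order<'a>(
    own_modules: impl Iterator<Item = &'a CompiledModule>,
    bytecode_deps: impl Iterator<Item = &'a CompiledModule>,
) {
    // `Modules` indexes modules by self-ID; chaining both sources mirrors
    // the `get_modules_and_deps` change in this file.
    let all = Modules::new(own_modules.chain(bytecode_deps));
    // Unwrapping is safe only if the package built successfully, i.e. the
    // module dependency graph is acyclic.
    for module in all.compute_topological_order().unwrap() {
        println!("{}", module.self_id());
    }
}
```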
@@ -313,11 +355,10 @@ impl CompiledPackage { &self, with_unpublished_deps: bool, ) -> Vec { - let all_modules = self.package.all_modules_map(); - let graph = all_modules.compute_dependency_graph(); + let all_modules = Modules::new(self.get_modules_and_deps()); // SAFETY: package built successfully - let modules = graph.compute_topological_order().unwrap(); + let modules = all_modules.compute_topological_order().unwrap(); if with_unpublished_deps { // For each transitive dependent module, if they are not to be published, they must have @@ -606,7 +647,9 @@ impl PackageHooks for SuiPackageHooks { &self, manifest: &SourceManifest, ) -> anyhow::Result { - if !cfg!(debug_assertions) && manifest.package.edition == Some(Edition::DEVELOPMENT) { + if (!cfg!(debug_assertions) || cfg!(test)) + && manifest.package.edition == Some(Edition::DEVELOPMENT) + { return Err(Edition::DEVELOPMENT.unknown_edition_error()); } Ok(manifest.package.name) diff --git a/crates/sui-move-build/src/unit_tests/build_tests.rs b/crates/sui-move-build/src/unit_tests/build_tests.rs index 6c3973b623098..482c70aff63f3 100644 --- a/crates/sui-move-build/src/unit_tests/build_tests.rs +++ b/crates/sui-move-build/src/unit_tests/build_tests.rs @@ -3,6 +3,8 @@ use std::path::Path; +use move_compiler::editions::Edition; + use crate::BuildConfig; #[test] @@ -37,5 +39,10 @@ fn development_mode_not_allowed() { .join("unit_tests") .join("data") .join("no_development_mode"); - assert!(BuildConfig::new_for_testing().build(&path).is_err()); + let err = BuildConfig::new_for_testing() + .build(&path) + .expect_err("Should have failed due to unsupported edition"); + assert!(err + .to_string() + .contains(&Edition::DEVELOPMENT.unknown_edition_error().to_string())); } diff --git a/crates/sui-move/src/unit_test.rs b/crates/sui-move/src/unit_test.rs index 78ae1e5260a52..a75f6b9b14910 100644 --- a/crates/sui-move/src/unit_test.rs +++ b/crates/sui-move/src/unit_test.rs @@ -10,11 +10,7 @@ use move_package::BuildConfig; use move_unit_test::{extensions::set_extension_hook, UnitTestingConfig}; use move_vm_runtime::native_extensions::NativeContextExtensions; use once_cell::sync::Lazy; -use std::{ - collections::BTreeMap, - path::Path, - sync::{Arc, RwLock}, -}; +use std::{cell::RefCell, collections::BTreeMap, path::Path, sync::Arc}; use sui_move_build::decorate_warnings; use sui_move_natives::test_scenario::InMemoryTestStore; use sui_move_natives::{object_runtime::ObjectRuntime, NativesCostTable}; @@ -58,8 +54,10 @@ impl Test { } } -static TEST_STORE_INNER: Lazy> = - Lazy::new(|| RwLock::new(InMemoryStorage::default())); +// Create a separate test store per-thread. +thread_local! { + static TEST_STORE_INNER: RefCell = RefCell::new(InMemoryStorage::default()); +} static TEST_STORE: Lazy = Lazy::new(|| InMemoryTestStore(&TEST_STORE_INNER)); diff --git a/crates/sui-node/Cargo.toml b/crates/sui-node/Cargo.toml index 4d46c42852e5a..a9342adbb5a7c 100644 --- a/crates/sui-node/Cargo.toml +++ b/crates/sui-node/Cargo.toml @@ -27,7 +27,6 @@ tower.workspace = true reqwest.workspace = true tap.workspace = true serde.workspace = true -snap.workspace = true bin-version.workspace = true url.workspace = true humantime.workspace = true diff --git a/crates/sui-node/src/metrics.rs b/crates/sui-node/src/metrics.rs index 7ea4b781b81a9..8cd5891d3e6f6 100644 --- a/crates/sui-node/src/metrics.rs +++ b/crates/sui-node/src/metrics.rs @@ -1,50 +1,16 @@ // Copyright (c) Mysten Labs, Inc. 
// SPDX-License-Identifier: Apache-2.0 +use mysten_common::metrics::{push_metrics, MetricsPushClient}; use mysten_network::metrics::MetricsCallbackProvider; use prometheus::{ register_histogram_vec_with_registry, register_int_counter_vec_with_registry, - register_int_gauge_vec_with_registry, Encoder, HistogramVec, IntCounterVec, IntGaugeVec, - Registry, PROTOBUF_FORMAT, + register_int_gauge_vec_with_registry, HistogramVec, IntCounterVec, IntGaugeVec, Registry, }; -use std::time::{Duration, SystemTime, UNIX_EPOCH}; +use std::time::Duration; use sui_network::tonic::Code; use mysten_metrics::RegistryService; -use tracing::error; - -pub struct MetricsPushClient { - certificate: std::sync::Arc, - client: reqwest::Client, -} - -impl MetricsPushClient { - pub fn new(network_key: sui_types::crypto::NetworkKeyPair) -> Self { - use fastcrypto::traits::KeyPair; - let certificate = std::sync::Arc::new(sui_tls::SelfSignedCertificate::new( - network_key.private(), - sui_tls::SUI_VALIDATOR_SERVER_NAME, - )); - let identity = certificate.reqwest_identity(); - let client = reqwest::Client::builder() - .identity(identity) - .build() - .unwrap(); - - Self { - certificate, - client, - } - } - - pub fn certificate(&self) -> &sui_tls::SelfSignedCertificate { - &self.certificate - } - - pub fn client(&self) -> &reqwest::Client { - &self.client - } -} /// Starts a task to periodically push metrics to a configured endpoint if a metrics push endpoint /// is configured. @@ -72,61 +38,6 @@ pub fn start_metrics_push_task(config: &sui_config::NodeConfig, registry: Regist let config_copy = config.clone(); let mut client = MetricsPushClient::new(config_copy.network_key_pair().copy()); - async fn push_metrics( - client: &MetricsPushClient, - url: &reqwest::Url, - registry: &RegistryService, - ) -> Result<(), anyhow::Error> { - // now represents a collection timestamp for all of the metrics we send to the proxy - let now = SystemTime::now() - .duration_since(UNIX_EPOCH) - .unwrap() - .as_millis() as i64; - - let mut metric_families = registry.gather_all(); - for mf in metric_families.iter_mut() { - for m in mf.mut_metric() { - m.set_timestamp_ms(now); - } - } - - let mut buf: Vec = vec![]; - let encoder = prometheus::ProtobufEncoder::new(); - encoder.encode(&metric_families, &mut buf)?; - - let mut s = snap::raw::Encoder::new(); - let compressed = s.compress_vec(&buf).map_err(|err| { - error!("unable to snappy encode; {err}"); - err - })?; - - let response = client - .client() - .post(url.to_owned()) - .header(reqwest::header::CONTENT_ENCODING, "snappy") - .header(reqwest::header::CONTENT_TYPE, PROTOBUF_FORMAT) - .body(compressed) - .send() - .await?; - - if !response.status().is_success() { - let status = response.status(); - let body = match response.text().await { - Ok(body) => body, - Err(error) => format!("couldn't decode response body; {error}"), - }; - return Err(anyhow::anyhow!( - "metrics push failed: [{}]:{}", - status, - body - )); - } - - tracing::debug!("successfully pushed metrics to {url}"); - - Ok(()) - } - tokio::spawn(async move { tracing::info!(push_url =% url, interval =? 
interval, "Started Metrics Push Service"); @@ -137,7 +48,7 @@ pub fn start_metrics_push_task(config: &sui_config::NodeConfig, registry: Regist interval.tick().await; if let Err(error) = push_metrics(&client, &url, ®istry).await { - tracing::warn!("unable to push metrics: {error}; new client will be created"); + tracing::error!("unable to push metrics: {error}; new client will be created"); // aggressively recreate our client connection if we hit an error // since our tick interval is only every min, this should not be racey client = MetricsPushClient::new(config_copy.network_key_pair().copy()); diff --git a/crates/sui-open-rpc/spec/openrpc.json b/crates/sui-open-rpc/spec/openrpc.json index fecb9d3fa8214..d3c63288b73e8 100644 --- a/crates/sui-open-rpc/spec/openrpc.json +++ b/crates/sui-open-rpc/spec/openrpc.json @@ -1293,7 +1293,7 @@ "name": "Result", "value": { "minSupportedProtocolVersion": "1", - "maxSupportedProtocolVersion": "58", + "maxSupportedProtocolVersion": "59", "protocolVersion": "6", "featureFlags": { "accept_zklogin_in_multisig": false, @@ -1304,7 +1304,9 @@ "ban_entry_init": false, "bridge": false, "commit_root_state_digest": false, + "consensus_distributed_vote_scoring_strategy": false, "consensus_order_end_of_epoch_last": true, + "consensus_round_prober": false, "disable_invariant_violation_check_in_swap_loc": false, "disallow_adding_abilities_on_upgrade": false, "disallow_change_struct_type_params_on_upgrade": false, @@ -1813,6 +1815,7 @@ "max_type_nodes": { "u64": "256" }, + "max_type_to_layout_nodes": null, "max_value_stack_size": { "u64": "1024" }, diff --git a/crates/sui-open-rpc/src/examples.rs b/crates/sui-open-rpc/src/examples.rs index 9c2f8d8ba6805..d7732756a7829 100644 --- a/crates/sui-open-rpc/src/examples.rs +++ b/crates/sui-open-rpc/src/examples.rs @@ -1206,7 +1206,7 @@ impl RpcExampleProvider { }, MoveStructLayout { type_: struct_tag, - fields: Vec::new(), + fields: Box::new(Vec::new()), }, ) .unwrap(), diff --git a/crates/sui-package-resolver/src/lib.rs b/crates/sui-package-resolver/src/lib.rs index a6c1f3d3c0043..ace9d8efc511e 100644 --- a/crates/sui-package-resolver/src/lib.rs +++ b/crates/sui-package-resolver/src/lib.rs @@ -1453,10 +1453,10 @@ impl<'l> ResolutionContext<'l> { } ( - MoveTypeLayout::Struct(MoveStructLayout { + MoveTypeLayout::Struct(Box::new(MoveStructLayout { type_, - fields: resolved_fields, - }), + fields: Box::new(resolved_fields), + })), field_depth + 1, ) } @@ -1481,10 +1481,10 @@ impl<'l> ResolutionContext<'l> { } ( - MoveTypeLayout::Enum(MoveEnumLayout { + MoveTypeLayout::Enum(Box::new(MoveEnumLayout { type_, variants: resolved_variants, - }), + })), field_depth + 1, ) } diff --git a/crates/sui-protocol-config-macros/src/lib.rs b/crates/sui-protocol-config-macros/src/lib.rs index 82484b76b67a0..08083e0192466 100644 --- a/crates/sui-protocol-config-macros/src/lib.rs +++ b/crates/sui-protocol-config-macros/src/lib.rs @@ -224,6 +224,64 @@ pub fn accessors_macro(input: TokenStream) -> TokenStream { TokenStream::from(output) } +#[proc_macro_derive(ProtocolConfigOverride)] +pub fn protocol_config_override_macro(input: TokenStream) -> TokenStream { + let ast = parse_macro_input!(input as DeriveInput); + + // Create a new struct name by appending "Optional". 
+ let struct_name = &ast.ident; + let optional_struct_name = + syn::Ident::new(&format!("{}Optional", struct_name), struct_name.span()); + + // Extract the fields from the struct + let fields = match &ast.data { + Data::Struct(data_struct) => match &data_struct.fields { + Fields::Named(fields_named) => &fields_named.named, + _ => panic!("ProtocolConfig must have named fields"), + }, + _ => panic!("ProtocolConfig must be a struct"), + }; + + // Create new fields with types wrapped in Option. + let optional_fields = fields.iter().map(|field| { + let field_name = &field.ident; + let field_type = &field.ty; + quote! { + #field_name: Option<#field_type> + } + }); + + // Generate the function to update the original struct. + let update_fields = fields.iter().map(|field| { + let field_name = &field.ident; + quote! { + if let Some(value) = self.#field_name { + tracing::warn!( + "ProtocolConfig field \"{}\" has been overridden with the value: {value:?}", + stringify!(#field_name), + ); + config.#field_name = value; + } + } + }); + + // Generate the new struct definition. + let output = quote! { + #[derive(serde::Deserialize, Debug)] + pub struct #optional_struct_name { + #(#optional_fields,)* + } + + impl #optional_struct_name { + pub fn apply_to(self, config: &mut #struct_name) { + #(#update_fields)* + } + } + }; + + TokenStream::from(output) +} + #[proc_macro_derive(ProtocolConfigFeatureFlagsGetters)] pub fn feature_flag_getters_macro(input: TokenStream) -> TokenStream { let ast = parse_macro_input!(input as DeriveInput); diff --git a/crates/sui-protocol-config/Cargo.toml b/crates/sui-protocol-config/Cargo.toml index f8a8dfec81551..17a80bebad216 100644 --- a/crates/sui-protocol-config/Cargo.toml +++ b/crates/sui-protocol-config/Cargo.toml @@ -18,6 +18,7 @@ schemars.workspace = true insta.workspace = true clap.workspace = true move-vm-config.workspace = true +serde-env.workspace = true [dev-dependencies] insta.workspace = true diff --git a/crates/sui-protocol-config/src/lib.rs b/crates/sui-protocol-config/src/lib.rs index 3c9a2e08bbc9e..f20f9a972f8cd 100644 --- a/crates/sui-protocol-config/src/lib.rs +++ b/crates/sui-protocol-config/src/lib.rs @@ -11,12 +11,14 @@ use clap::*; use move_vm_config::verifier::VerifierConfig; use serde::{Deserialize, Serialize}; use serde_with::skip_serializing_none; -use sui_protocol_config_macros::{ProtocolConfigAccessors, ProtocolConfigFeatureFlagsGetters}; +use sui_protocol_config_macros::{ + ProtocolConfigAccessors, ProtocolConfigFeatureFlagsGetters, ProtocolConfigOverride, +}; use tracing::{info, warn}; /// The minimum and maximum protocol versions supported by this build. const MIN_PROTOCOL_VERSION: u64 = 1; -const MAX_PROTOCOL_VERSION: u64 = 58; +const MAX_PROTOCOL_VERSION: u64 = 59; // Record history of protocol version allocations here: // @@ -174,6 +176,9 @@ const MAX_PROTOCOL_VERSION: u64 = 58; // Version 57: Reduce minimum number of random beacon shares. // Version 58: Optimize boolean binops // Finalize bridge committee on mainnet. +// Switch to distributed vote scoring in consensus in devnet +// Version 59: Validation of public inputs for Groth16 verification. +// Enable configuration of maximum number of type nodes in a type layout. #[derive(Copy, Clone, Debug, Hash, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord)] pub struct ProtocolVersion(u64); @@ -256,7 +261,7 @@ pub struct Error(pub String); // TODO: There are quite a few non boolean values in the feature flags. We should move them out. 
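To make the `ProtocolConfigOverride` derive introduced earlier in this diff concrete: for a config struct it generates an `...Optional` twin whose fields are all wrapped in `Option`, plus an `apply_to` method that copies over only the fields that were actually set. A hand-expanded sketch for a hypothetical two-field config follows (`TinyConfig` and its fields are made up for illustration; the real input is `ProtocolConfig`):

pub struct TinyConfig {
    version: u64,
    max_nodes: u64,
}

// Roughly what `#[derive(ProtocolConfigOverride)]` would emit for TinyConfig.
#[derive(serde::Deserialize, Debug)]
pub struct TinyConfigOptional {
    version: Option<u64>,
    max_nodes: Option<u64>,
}

impl TinyConfigOptional {
    pub fn apply_to(self, config: &mut TinyConfig) {
        if let Some(value) = self.version {
            tracing::warn!(
                "ProtocolConfig field \"{}\" has been overridden with the value: {value:?}",
                stringify!(version),
            );
            config.version = value;
        }
        if let Some(value) = self.max_nodes {
            tracing::warn!(
                "ProtocolConfig field \"{}\" has been overridden with the value: {value:?}",
                stringify!(max_nodes),
            );
            config.max_nodes = value;
        }
    }
}

Later in this diff, `get_for_version` populates such a struct from the environment via `serde_env::from_env_with_prefix("SUI_PROTOCOL_CONFIG_OVERRIDE")` once `SUI_PROTOCOL_CONFIG_OVERRIDE_ENABLE` is set; a variable named along the lines of `SUI_PROTOCOL_CONFIG_OVERRIDE_MAX_NODES=512` would then fill in `max_nodes` (the exact env-var name mapping is serde-env's convention and is assumed here, not taken from the diff).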
/// Records on/off feature flags that may vary at each protocol version. -#[derive(Default, Clone, Serialize, Debug, ProtocolConfigFeatureFlagsGetters)] +#[derive(Default, Clone, Serialize, Deserialize, Debug, ProtocolConfigFeatureFlagsGetters)] struct FeatureFlags { // Add feature flags here, e.g.: // new_protocol_feature: bool, @@ -516,6 +521,14 @@ struct FeatureFlags { // Rethrow type layout errors during serialization instead of trying to convert them. #[serde(skip_serializing_if = "is_false")] rethrow_serialization_type_layout_errors: bool, + + // Use distributed vote leader scoring strategy in consensus. + #[serde(skip_serializing_if = "is_false")] + consensus_distributed_vote_scoring_strategy: bool, + + // Probe rounds received by peers from every authority. + #[serde(skip_serializing_if = "is_false")] + consensus_round_prober: bool, } fn is_false(b: &bool) -> bool { @@ -527,7 +540,7 @@ fn is_empty(b: &BTreeSet) -> bool { } /// Ordering mechanism for transactions in one Narwhal consensus output. -#[derive(Default, Copy, Clone, PartialEq, Eq, Serialize, Debug)] +#[derive(Default, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Debug)] pub enum ConsensusTransactionOrdering { /// No ordering. Transactions are processed in the order they appear in the consensus output. #[default] @@ -543,7 +556,7 @@ impl ConsensusTransactionOrdering { } // The config for per object congestion control in consensus handler. -#[derive(Default, Copy, Clone, PartialEq, Eq, Serialize, Debug)] +#[derive(Default, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Debug)] pub enum PerObjectCongestionControlMode { #[default] None, // No congestion control. @@ -558,7 +571,7 @@ impl PerObjectCongestionControlMode { } // Configuration options for consensus algorithm. -#[derive(Default, Copy, Clone, PartialEq, Eq, Serialize, Debug)] +#[derive(Default, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Debug)] pub enum ConsensusChoice { #[default] Narwhal, @@ -573,7 +586,7 @@ impl ConsensusChoice { } // Configuration options for consensus network. -#[derive(Default, Copy, Clone, PartialEq, Eq, Serialize, Debug)] +#[derive(Default, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Debug)] pub enum ConsensusNetwork { #[default] Anemo, @@ -618,7 +631,7 @@ impl ConsensusNetwork { /// return `None` if the field is not defined at that version. /// - If you want a customized getter, you can add a method in the impl. #[skip_serializing_none] -#[derive(Clone, Serialize, Debug, ProtocolConfigAccessors)] +#[derive(Clone, Serialize, Debug, ProtocolConfigAccessors, ProtocolConfigOverride)] pub struct ProtocolConfig { pub version: ProtocolVersion, @@ -853,6 +866,9 @@ pub struct ProtocolConfig { // entire object, just consulting an ID -> tx digest map obj_access_cost_verify_per_byte: Option, + // Maximal nodes which are allowed when converting to a type layout. + max_type_to_layout_nodes: Option, + /// === Gas version. 
gas model === /// Gas model version, what code we are using to charge gas @@ -1562,6 +1578,15 @@ impl ProtocolConfig { pub fn rethrow_serialization_type_layout_errors(&self) -> bool { self.feature_flags.rethrow_serialization_type_layout_errors } + + pub fn consensus_distributed_vote_scoring_strategy(&self) -> bool { + self.feature_flags + .consensus_distributed_vote_scoring_strategy + } + + pub fn consensus_round_prober(&self) -> bool { + self.feature_flags.consensus_round_prober + } } #[cfg(not(msim))] @@ -1594,7 +1619,7 @@ impl ProtocolConfig { let mut ret = Self::get_for_version_impl(version, chain); ret.version = version; - CONFIG_OVERRIDE.with(|ovr| { + ret = CONFIG_OVERRIDE.with(|ovr| { if let Some(override_fn) = &*ovr.borrow() { warn!( "overriding ProtocolConfig settings with custom settings (you should not see this log outside of tests)" @@ -1603,7 +1628,17 @@ impl ProtocolConfig { } else { ret } - }) + }); + + if std::env::var("SUI_PROTOCOL_CONFIG_OVERRIDE_ENABLE").is_ok() { + warn!("overriding ProtocolConfig settings with custom settings; this may break non-local networks"); + let overrides: ProtocolConfigOptional = + serde_env::from_env_with_prefix("SUI_PROTOCOL_CONFIG_OVERRIDE") + .expect("failed to parse ProtocolConfig override env variables"); + overrides.apply_to(&mut ret); + } + + ret } /// Get the value ProtocolConfig that are in effect during the given protocol version. @@ -1743,6 +1778,7 @@ impl ProtocolConfig { max_num_transferred_move_object_ids_system_tx: Some(2048 * 16), max_event_emit_size: Some(250 * 1024), max_move_vector_len: Some(256 * 1024), + max_type_to_layout_nodes: None, max_back_edges_per_function: Some(10_000), max_back_edges_per_module: Some(10_000), @@ -2713,6 +2749,18 @@ impl ProtocolConfig { if chain == Chain::Mainnet { cfg.bridge_should_try_to_finalize_committee = Some(true); } + + if chain != Chain::Mainnet && chain != Chain::Testnet { + // Enable distributed vote scoring for devnet + cfg.feature_flags + .consensus_distributed_vote_scoring_strategy = true; + } + } + 59 => { + cfg.max_type_to_layout_nodes = Some(512); + + // Enable round prober in consensus. 
+ cfg.feature_flags.consensus_round_prober = true; } // Use this template when making changes: // @@ -2869,6 +2917,15 @@ impl ProtocolConfig { pub fn set_passkey_auth_for_testing(&mut self, val: bool) { self.feature_flags.passkey_auth = val } + + pub fn set_consensus_distributed_vote_scoring_strategy_for_testing(&mut self, val: bool) { + self.feature_flags + .consensus_distributed_vote_scoring_strategy = val; + } + + pub fn set_consensus_round_prober_for_testing(&mut self, val: bool) { + self.feature_flags.consensus_round_prober = val; + } } type OverrideFn = dyn Fn(ProtocolVersion, ProtocolConfig) -> ProtocolConfig + Send; diff --git a/crates/sui-protocol-config/src/snapshots/sui_protocol_config__test__Mainnet_version_59.snap b/crates/sui-protocol-config/src/snapshots/sui_protocol_config__test__Mainnet_version_59.snap new file mode 100644 index 0000000000000..d373846e0ab7e --- /dev/null +++ b/crates/sui-protocol-config/src/snapshots/sui_protocol_config__test__Mainnet_version_59.snap @@ -0,0 +1,325 @@ +--- +source: crates/sui-protocol-config/src/lib.rs +expression: "ProtocolConfig::get_for_version(cur, *chain_id)" +--- +version: 59 +feature_flags: + package_upgrades: true + commit_root_state_digest: true + advance_epoch_start_time_in_safe_mode: true + loaded_child_objects_fixed: true + missing_type_is_compatibility_error: true + scoring_decision_with_validity_cutoff: true + consensus_order_end_of_epoch_last: true + disallow_adding_abilities_on_upgrade: true + disable_invariant_violation_check_in_swap_loc: true + advance_to_highest_supported_protocol_version: true + ban_entry_init: true + package_digest_hash_module: true + disallow_change_struct_type_params_on_upgrade: true + no_extraneous_module_bytes: true + narwhal_versioned_metadata: true + zklogin_auth: true + consensus_transaction_ordering: ByGasPrice + simplified_unwrap_then_delete: true + upgraded_multisig_supported: true + txn_base_cost_as_multiplier: true + shared_object_deletion: true + narwhal_new_leader_election_schedule: true + loaded_child_object_format: true + enable_jwk_consensus_updates: true + end_of_epoch_transaction_supported: true + simple_conservation_checks: true + loaded_child_object_format_type: true + receive_objects: true + random_beacon: true + bridge: true + enable_effects_v2: true + narwhal_certificate_v2: true + verify_legacy_zklogin_address: true + recompute_has_public_transfer_in_execution: true + accept_zklogin_in_multisig: true + include_consensus_digest_in_prologue: true + hardened_otw_check: true + allow_receiving_object_id: true + enable_coin_deny_list: true + enable_group_ops_native_functions: true + reject_mutable_random_on_entry_functions: true + per_object_congestion_control_mode: TotalTxCount + consensus_choice: Mysticeti + consensus_network: Tonic + zklogin_max_epoch_upper_bound_delta: 30 + mysticeti_leader_scoring_and_schedule: true + reshare_at_same_initial_version: true + resolve_abort_locations_to_package_id: true + mysticeti_use_committed_subdag_digest: true + record_consensus_determined_version_assignments_in_prologue: true + fresh_vm_on_framework_upgrade: true + prepend_prologue_tx_in_consensus_commit_in_checkpoints: true + mysticeti_num_leaders_per_round: 1 + soft_bundle: true + enable_coin_deny_list_v2: true + rethrow_serialization_type_layout_errors: true + consensus_round_prober: true +max_tx_size_bytes: 131072 +max_input_objects: 2048 +max_size_written_objects: 5000000 +max_size_written_objects_system_tx: 50000000 +max_serialized_tx_effects_size_bytes: 524288 
+max_serialized_tx_effects_size_bytes_system_tx: 8388608 +max_gas_payment_objects: 256 +max_modules_in_publish: 64 +max_package_dependencies: 32 +max_arguments: 512 +max_type_arguments: 16 +max_type_argument_depth: 16 +max_pure_argument_size: 16384 +max_programmable_tx_commands: 1024 +move_binary_format_version: 7 +min_move_binary_format_version: 6 +binary_module_handles: 100 +binary_struct_handles: 300 +binary_function_handles: 1500 +binary_function_instantiations: 750 +binary_signatures: 1000 +binary_constant_pool: 4000 +binary_identifiers: 10000 +binary_address_identifiers: 100 +binary_struct_defs: 200 +binary_struct_def_instantiations: 100 +binary_function_defs: 1000 +binary_field_handles: 500 +binary_field_instantiations: 250 +binary_friend_decls: 100 +max_move_object_size: 256000 +max_move_package_size: 102400 +max_publish_or_upgrade_per_ptb: 5 +max_tx_gas: 50000000000 +max_gas_price: 100000 +max_gas_computation_bucket: 5000000 +gas_rounding_step: 1000 +max_loop_depth: 5 +max_generic_instantiation_length: 32 +max_function_parameters: 128 +max_basic_blocks: 1024 +max_value_stack_size: 1024 +max_type_nodes: 256 +max_push_size: 10000 +max_struct_definitions: 200 +max_function_definitions: 1000 +max_fields_in_struct: 32 +max_dependency_depth: 100 +max_num_event_emit: 1024 +max_num_new_move_object_ids: 2048 +max_num_new_move_object_ids_system_tx: 32768 +max_num_deleted_move_object_ids: 2048 +max_num_deleted_move_object_ids_system_tx: 32768 +max_num_transferred_move_object_ids: 2048 +max_num_transferred_move_object_ids_system_tx: 32768 +max_event_emit_size: 256000 +max_event_emit_size_total: 65536000 +max_move_vector_len: 262144 +max_move_identifier_len: 128 +max_move_value_depth: 128 +max_back_edges_per_function: 10000 +max_back_edges_per_module: 10000 +max_verifier_meter_ticks_per_function: 16000000 +max_meter_ticks_per_module: 16000000 +max_meter_ticks_per_package: 16000000 +object_runtime_max_num_cached_objects: 1000 +object_runtime_max_num_cached_objects_system_tx: 16000 +object_runtime_max_num_store_entries: 1000 +object_runtime_max_num_store_entries_system_tx: 16000 +base_tx_cost_fixed: 1000 +package_publish_cost_fixed: 1000 +base_tx_cost_per_byte: 0 +package_publish_cost_per_byte: 80 +obj_access_cost_read_per_byte: 15 +obj_access_cost_mutate_per_byte: 40 +obj_access_cost_delete_per_byte: 40 +obj_access_cost_verify_per_byte: 200 +max_type_to_layout_nodes: 512 +gas_model_version: 8 +obj_data_cost_refundable: 100 +obj_metadata_cost_non_refundable: 50 +storage_rebate_rate: 9900 +storage_fund_reinvest_rate: 500 +reward_slashing_rate: 10000 +storage_gas_price: 76 +max_transactions_per_checkpoint: 10000 +max_checkpoint_size_bytes: 31457280 +buffer_stake_for_protocol_upgrade_bps: 5000 +address_from_bytes_cost_base: 52 +address_to_u256_cost_base: 52 +address_from_u256_cost_base: 52 +config_read_setting_impl_cost_base: 100 +config_read_setting_impl_cost_per_byte: 40 +dynamic_field_hash_type_and_key_cost_base: 100 +dynamic_field_hash_type_and_key_type_cost_per_byte: 2 +dynamic_field_hash_type_and_key_value_cost_per_byte: 2 +dynamic_field_hash_type_and_key_type_tag_cost_per_byte: 2 +dynamic_field_add_child_object_cost_base: 100 +dynamic_field_add_child_object_type_cost_per_byte: 10 +dynamic_field_add_child_object_value_cost_per_byte: 10 +dynamic_field_add_child_object_struct_tag_cost_per_byte: 10 +dynamic_field_borrow_child_object_cost_base: 100 +dynamic_field_borrow_child_object_child_ref_cost_per_byte: 10 +dynamic_field_borrow_child_object_type_cost_per_byte: 10 
+dynamic_field_remove_child_object_cost_base: 100 +dynamic_field_remove_child_object_child_cost_per_byte: 2 +dynamic_field_remove_child_object_type_cost_per_byte: 2 +dynamic_field_has_child_object_cost_base: 100 +dynamic_field_has_child_object_with_ty_cost_base: 100 +dynamic_field_has_child_object_with_ty_type_cost_per_byte: 2 +dynamic_field_has_child_object_with_ty_type_tag_cost_per_byte: 2 +event_emit_cost_base: 52 +event_emit_value_size_derivation_cost_per_byte: 2 +event_emit_tag_size_derivation_cost_per_byte: 5 +event_emit_output_cost_per_byte: 10 +object_borrow_uid_cost_base: 52 +object_delete_impl_cost_base: 52 +object_record_new_uid_cost_base: 52 +transfer_transfer_internal_cost_base: 52 +transfer_freeze_object_cost_base: 52 +transfer_share_object_cost_base: 52 +transfer_receive_object_cost_base: 52 +tx_context_derive_id_cost_base: 52 +types_is_one_time_witness_cost_base: 52 +types_is_one_time_witness_type_tag_cost_per_byte: 2 +types_is_one_time_witness_type_cost_per_byte: 2 +validator_validate_metadata_cost_base: 52 +validator_validate_metadata_data_cost_per_byte: 2 +crypto_invalid_arguments_cost: 100 +bls12381_bls12381_min_sig_verify_cost_base: 52 +bls12381_bls12381_min_sig_verify_msg_cost_per_byte: 2 +bls12381_bls12381_min_sig_verify_msg_cost_per_block: 2 +bls12381_bls12381_min_pk_verify_cost_base: 52 +bls12381_bls12381_min_pk_verify_msg_cost_per_byte: 2 +bls12381_bls12381_min_pk_verify_msg_cost_per_block: 2 +ecdsa_k1_ecrecover_keccak256_cost_base: 52 +ecdsa_k1_ecrecover_keccak256_msg_cost_per_byte: 2 +ecdsa_k1_ecrecover_keccak256_msg_cost_per_block: 2 +ecdsa_k1_ecrecover_sha256_cost_base: 52 +ecdsa_k1_ecrecover_sha256_msg_cost_per_byte: 2 +ecdsa_k1_ecrecover_sha256_msg_cost_per_block: 2 +ecdsa_k1_decompress_pubkey_cost_base: 52 +ecdsa_k1_secp256k1_verify_keccak256_cost_base: 52 +ecdsa_k1_secp256k1_verify_keccak256_msg_cost_per_byte: 2 +ecdsa_k1_secp256k1_verify_keccak256_msg_cost_per_block: 2 +ecdsa_k1_secp256k1_verify_sha256_cost_base: 52 +ecdsa_k1_secp256k1_verify_sha256_msg_cost_per_byte: 2 +ecdsa_k1_secp256k1_verify_sha256_msg_cost_per_block: 2 +ecdsa_r1_ecrecover_keccak256_cost_base: 52 +ecdsa_r1_ecrecover_keccak256_msg_cost_per_byte: 2 +ecdsa_r1_ecrecover_keccak256_msg_cost_per_block: 2 +ecdsa_r1_ecrecover_sha256_cost_base: 52 +ecdsa_r1_ecrecover_sha256_msg_cost_per_byte: 2 +ecdsa_r1_ecrecover_sha256_msg_cost_per_block: 2 +ecdsa_r1_secp256r1_verify_keccak256_cost_base: 52 +ecdsa_r1_secp256r1_verify_keccak256_msg_cost_per_byte: 2 +ecdsa_r1_secp256r1_verify_keccak256_msg_cost_per_block: 2 +ecdsa_r1_secp256r1_verify_sha256_cost_base: 52 +ecdsa_r1_secp256r1_verify_sha256_msg_cost_per_byte: 2 +ecdsa_r1_secp256r1_verify_sha256_msg_cost_per_block: 2 +ecvrf_ecvrf_verify_cost_base: 52 +ecvrf_ecvrf_verify_alpha_string_cost_per_byte: 2 +ecvrf_ecvrf_verify_alpha_string_cost_per_block: 2 +ed25519_ed25519_verify_cost_base: 52 +ed25519_ed25519_verify_msg_cost_per_byte: 2 +ed25519_ed25519_verify_msg_cost_per_block: 2 +groth16_prepare_verifying_key_bls12381_cost_base: 52 +groth16_prepare_verifying_key_bn254_cost_base: 52 +groth16_verify_groth16_proof_internal_bls12381_cost_base: 52 +groth16_verify_groth16_proof_internal_bls12381_cost_per_public_input: 2 +groth16_verify_groth16_proof_internal_bn254_cost_base: 52 +groth16_verify_groth16_proof_internal_bn254_cost_per_public_input: 2 +groth16_verify_groth16_proof_internal_public_input_cost_per_byte: 2 +hash_blake2b256_cost_base: 52 +hash_blake2b256_data_cost_per_byte: 2 +hash_blake2b256_data_cost_per_block: 2 +hash_keccak256_cost_base: 52 
+hash_keccak256_data_cost_per_byte: 2 +hash_keccak256_data_cost_per_block: 2 +group_ops_bls12381_decode_scalar_cost: 52 +group_ops_bls12381_decode_g1_cost: 52 +group_ops_bls12381_decode_g2_cost: 52 +group_ops_bls12381_decode_gt_cost: 52 +group_ops_bls12381_scalar_add_cost: 52 +group_ops_bls12381_g1_add_cost: 52 +group_ops_bls12381_g2_add_cost: 52 +group_ops_bls12381_gt_add_cost: 52 +group_ops_bls12381_scalar_sub_cost: 52 +group_ops_bls12381_g1_sub_cost: 52 +group_ops_bls12381_g2_sub_cost: 52 +group_ops_bls12381_gt_sub_cost: 52 +group_ops_bls12381_scalar_mul_cost: 52 +group_ops_bls12381_g1_mul_cost: 52 +group_ops_bls12381_g2_mul_cost: 52 +group_ops_bls12381_gt_mul_cost: 52 +group_ops_bls12381_scalar_div_cost: 52 +group_ops_bls12381_g1_div_cost: 52 +group_ops_bls12381_g2_div_cost: 52 +group_ops_bls12381_gt_div_cost: 52 +group_ops_bls12381_g1_hash_to_base_cost: 52 +group_ops_bls12381_g2_hash_to_base_cost: 52 +group_ops_bls12381_g1_hash_to_cost_per_byte: 2 +group_ops_bls12381_g2_hash_to_cost_per_byte: 2 +group_ops_bls12381_g1_msm_base_cost: 52 +group_ops_bls12381_g2_msm_base_cost: 52 +group_ops_bls12381_g1_msm_base_cost_per_input: 52 +group_ops_bls12381_g2_msm_base_cost_per_input: 52 +group_ops_bls12381_msm_max_len: 32 +group_ops_bls12381_pairing_cost: 52 +hmac_hmac_sha3_256_cost_base: 52 +hmac_hmac_sha3_256_input_cost_per_byte: 2 +hmac_hmac_sha3_256_input_cost_per_block: 2 +check_zklogin_id_cost_base: 200 +check_zklogin_issuer_cost_base: 200 +bcs_per_byte_serialized_cost: 2 +bcs_legacy_min_output_size_cost: 1 +bcs_failure_cost: 52 +hash_sha2_256_base_cost: 52 +hash_sha2_256_per_byte_cost: 2 +hash_sha2_256_legacy_min_input_len_cost: 1 +hash_sha3_256_base_cost: 52 +hash_sha3_256_per_byte_cost: 2 +hash_sha3_256_legacy_min_input_len_cost: 1 +type_name_get_base_cost: 52 +type_name_get_per_byte_cost: 2 +string_check_utf8_base_cost: 52 +string_check_utf8_per_byte_cost: 2 +string_is_char_boundary_base_cost: 52 +string_sub_string_base_cost: 52 +string_sub_string_per_byte_cost: 2 +string_index_of_base_cost: 52 +string_index_of_per_byte_pattern_cost: 2 +string_index_of_per_byte_searched_cost: 2 +vector_empty_base_cost: 52 +vector_length_base_cost: 52 +vector_push_back_base_cost: 52 +vector_push_back_legacy_per_abstract_memory_unit_cost: 2 +vector_borrow_base_cost: 52 +vector_pop_back_base_cost: 52 +vector_destroy_empty_base_cost: 52 +vector_swap_base_cost: 52 +debug_print_base_cost: 52 +debug_print_stack_trace_base_cost: 52 +execution_version: 3 +consensus_bad_nodes_stake_threshold: 20 +max_jwk_votes_per_validator_per_epoch: 240 +max_age_of_jwk_in_epochs: 1 +random_beacon_reduction_allowed_delta: 800 +random_beacon_reduction_lower_bound: 800 +random_beacon_dkg_timeout_round: 3000 +random_beacon_min_round_interval_ms: 500 +random_beacon_dkg_version: 1 +consensus_max_transaction_size_bytes: 262144 +consensus_max_transactions_in_block_bytes: 524288 +consensus_max_num_transactions_in_block: 512 +max_accumulated_txn_cost_per_object_in_narwhal_commit: 100 +max_deferral_rounds_for_congestion_control: 10 +min_checkpoint_interval_ms: 200 +checkpoint_summary_version_specific_data: 1 +max_soft_bundle_size: 5 +bridge_should_try_to_finalize_committee: true +max_accumulated_txn_cost_per_object_in_mysticeti_commit: 10 diff --git a/crates/sui-protocol-config/src/snapshots/sui_protocol_config__test__Testnet_version_59.snap b/crates/sui-protocol-config/src/snapshots/sui_protocol_config__test__Testnet_version_59.snap new file mode 100644 index 0000000000000..d373846e0ab7e --- /dev/null +++ 
b/crates/sui-protocol-config/src/snapshots/sui_protocol_config__test__Testnet_version_59.snap @@ -0,0 +1,325 @@ +--- +source: crates/sui-protocol-config/src/lib.rs +expression: "ProtocolConfig::get_for_version(cur, *chain_id)" +--- +version: 59 +feature_flags: + package_upgrades: true + commit_root_state_digest: true + advance_epoch_start_time_in_safe_mode: true + loaded_child_objects_fixed: true + missing_type_is_compatibility_error: true + scoring_decision_with_validity_cutoff: true + consensus_order_end_of_epoch_last: true + disallow_adding_abilities_on_upgrade: true + disable_invariant_violation_check_in_swap_loc: true + advance_to_highest_supported_protocol_version: true + ban_entry_init: true + package_digest_hash_module: true + disallow_change_struct_type_params_on_upgrade: true + no_extraneous_module_bytes: true + narwhal_versioned_metadata: true + zklogin_auth: true + consensus_transaction_ordering: ByGasPrice + simplified_unwrap_then_delete: true + upgraded_multisig_supported: true + txn_base_cost_as_multiplier: true + shared_object_deletion: true + narwhal_new_leader_election_schedule: true + loaded_child_object_format: true + enable_jwk_consensus_updates: true + end_of_epoch_transaction_supported: true + simple_conservation_checks: true + loaded_child_object_format_type: true + receive_objects: true + random_beacon: true + bridge: true + enable_effects_v2: true + narwhal_certificate_v2: true + verify_legacy_zklogin_address: true + recompute_has_public_transfer_in_execution: true + accept_zklogin_in_multisig: true + include_consensus_digest_in_prologue: true + hardened_otw_check: true + allow_receiving_object_id: true + enable_coin_deny_list: true + enable_group_ops_native_functions: true + reject_mutable_random_on_entry_functions: true + per_object_congestion_control_mode: TotalTxCount + consensus_choice: Mysticeti + consensus_network: Tonic + zklogin_max_epoch_upper_bound_delta: 30 + mysticeti_leader_scoring_and_schedule: true + reshare_at_same_initial_version: true + resolve_abort_locations_to_package_id: true + mysticeti_use_committed_subdag_digest: true + record_consensus_determined_version_assignments_in_prologue: true + fresh_vm_on_framework_upgrade: true + prepend_prologue_tx_in_consensus_commit_in_checkpoints: true + mysticeti_num_leaders_per_round: 1 + soft_bundle: true + enable_coin_deny_list_v2: true + rethrow_serialization_type_layout_errors: true + consensus_round_prober: true +max_tx_size_bytes: 131072 +max_input_objects: 2048 +max_size_written_objects: 5000000 +max_size_written_objects_system_tx: 50000000 +max_serialized_tx_effects_size_bytes: 524288 +max_serialized_tx_effects_size_bytes_system_tx: 8388608 +max_gas_payment_objects: 256 +max_modules_in_publish: 64 +max_package_dependencies: 32 +max_arguments: 512 +max_type_arguments: 16 +max_type_argument_depth: 16 +max_pure_argument_size: 16384 +max_programmable_tx_commands: 1024 +move_binary_format_version: 7 +min_move_binary_format_version: 6 +binary_module_handles: 100 +binary_struct_handles: 300 +binary_function_handles: 1500 +binary_function_instantiations: 750 +binary_signatures: 1000 +binary_constant_pool: 4000 +binary_identifiers: 10000 +binary_address_identifiers: 100 +binary_struct_defs: 200 +binary_struct_def_instantiations: 100 +binary_function_defs: 1000 +binary_field_handles: 500 +binary_field_instantiations: 250 +binary_friend_decls: 100 +max_move_object_size: 256000 +max_move_package_size: 102400 +max_publish_or_upgrade_per_ptb: 5 +max_tx_gas: 50000000000 +max_gas_price: 100000 
+max_gas_computation_bucket: 5000000 +gas_rounding_step: 1000 +max_loop_depth: 5 +max_generic_instantiation_length: 32 +max_function_parameters: 128 +max_basic_blocks: 1024 +max_value_stack_size: 1024 +max_type_nodes: 256 +max_push_size: 10000 +max_struct_definitions: 200 +max_function_definitions: 1000 +max_fields_in_struct: 32 +max_dependency_depth: 100 +max_num_event_emit: 1024 +max_num_new_move_object_ids: 2048 +max_num_new_move_object_ids_system_tx: 32768 +max_num_deleted_move_object_ids: 2048 +max_num_deleted_move_object_ids_system_tx: 32768 +max_num_transferred_move_object_ids: 2048 +max_num_transferred_move_object_ids_system_tx: 32768 +max_event_emit_size: 256000 +max_event_emit_size_total: 65536000 +max_move_vector_len: 262144 +max_move_identifier_len: 128 +max_move_value_depth: 128 +max_back_edges_per_function: 10000 +max_back_edges_per_module: 10000 +max_verifier_meter_ticks_per_function: 16000000 +max_meter_ticks_per_module: 16000000 +max_meter_ticks_per_package: 16000000 +object_runtime_max_num_cached_objects: 1000 +object_runtime_max_num_cached_objects_system_tx: 16000 +object_runtime_max_num_store_entries: 1000 +object_runtime_max_num_store_entries_system_tx: 16000 +base_tx_cost_fixed: 1000 +package_publish_cost_fixed: 1000 +base_tx_cost_per_byte: 0 +package_publish_cost_per_byte: 80 +obj_access_cost_read_per_byte: 15 +obj_access_cost_mutate_per_byte: 40 +obj_access_cost_delete_per_byte: 40 +obj_access_cost_verify_per_byte: 200 +max_type_to_layout_nodes: 512 +gas_model_version: 8 +obj_data_cost_refundable: 100 +obj_metadata_cost_non_refundable: 50 +storage_rebate_rate: 9900 +storage_fund_reinvest_rate: 500 +reward_slashing_rate: 10000 +storage_gas_price: 76 +max_transactions_per_checkpoint: 10000 +max_checkpoint_size_bytes: 31457280 +buffer_stake_for_protocol_upgrade_bps: 5000 +address_from_bytes_cost_base: 52 +address_to_u256_cost_base: 52 +address_from_u256_cost_base: 52 +config_read_setting_impl_cost_base: 100 +config_read_setting_impl_cost_per_byte: 40 +dynamic_field_hash_type_and_key_cost_base: 100 +dynamic_field_hash_type_and_key_type_cost_per_byte: 2 +dynamic_field_hash_type_and_key_value_cost_per_byte: 2 +dynamic_field_hash_type_and_key_type_tag_cost_per_byte: 2 +dynamic_field_add_child_object_cost_base: 100 +dynamic_field_add_child_object_type_cost_per_byte: 10 +dynamic_field_add_child_object_value_cost_per_byte: 10 +dynamic_field_add_child_object_struct_tag_cost_per_byte: 10 +dynamic_field_borrow_child_object_cost_base: 100 +dynamic_field_borrow_child_object_child_ref_cost_per_byte: 10 +dynamic_field_borrow_child_object_type_cost_per_byte: 10 +dynamic_field_remove_child_object_cost_base: 100 +dynamic_field_remove_child_object_child_cost_per_byte: 2 +dynamic_field_remove_child_object_type_cost_per_byte: 2 +dynamic_field_has_child_object_cost_base: 100 +dynamic_field_has_child_object_with_ty_cost_base: 100 +dynamic_field_has_child_object_with_ty_type_cost_per_byte: 2 +dynamic_field_has_child_object_with_ty_type_tag_cost_per_byte: 2 +event_emit_cost_base: 52 +event_emit_value_size_derivation_cost_per_byte: 2 +event_emit_tag_size_derivation_cost_per_byte: 5 +event_emit_output_cost_per_byte: 10 +object_borrow_uid_cost_base: 52 +object_delete_impl_cost_base: 52 +object_record_new_uid_cost_base: 52 +transfer_transfer_internal_cost_base: 52 +transfer_freeze_object_cost_base: 52 +transfer_share_object_cost_base: 52 +transfer_receive_object_cost_base: 52 +tx_context_derive_id_cost_base: 52 +types_is_one_time_witness_cost_base: 52 
+types_is_one_time_witness_type_tag_cost_per_byte: 2 +types_is_one_time_witness_type_cost_per_byte: 2 +validator_validate_metadata_cost_base: 52 +validator_validate_metadata_data_cost_per_byte: 2 +crypto_invalid_arguments_cost: 100 +bls12381_bls12381_min_sig_verify_cost_base: 52 +bls12381_bls12381_min_sig_verify_msg_cost_per_byte: 2 +bls12381_bls12381_min_sig_verify_msg_cost_per_block: 2 +bls12381_bls12381_min_pk_verify_cost_base: 52 +bls12381_bls12381_min_pk_verify_msg_cost_per_byte: 2 +bls12381_bls12381_min_pk_verify_msg_cost_per_block: 2 +ecdsa_k1_ecrecover_keccak256_cost_base: 52 +ecdsa_k1_ecrecover_keccak256_msg_cost_per_byte: 2 +ecdsa_k1_ecrecover_keccak256_msg_cost_per_block: 2 +ecdsa_k1_ecrecover_sha256_cost_base: 52 +ecdsa_k1_ecrecover_sha256_msg_cost_per_byte: 2 +ecdsa_k1_ecrecover_sha256_msg_cost_per_block: 2 +ecdsa_k1_decompress_pubkey_cost_base: 52 +ecdsa_k1_secp256k1_verify_keccak256_cost_base: 52 +ecdsa_k1_secp256k1_verify_keccak256_msg_cost_per_byte: 2 +ecdsa_k1_secp256k1_verify_keccak256_msg_cost_per_block: 2 +ecdsa_k1_secp256k1_verify_sha256_cost_base: 52 +ecdsa_k1_secp256k1_verify_sha256_msg_cost_per_byte: 2 +ecdsa_k1_secp256k1_verify_sha256_msg_cost_per_block: 2 +ecdsa_r1_ecrecover_keccak256_cost_base: 52 +ecdsa_r1_ecrecover_keccak256_msg_cost_per_byte: 2 +ecdsa_r1_ecrecover_keccak256_msg_cost_per_block: 2 +ecdsa_r1_ecrecover_sha256_cost_base: 52 +ecdsa_r1_ecrecover_sha256_msg_cost_per_byte: 2 +ecdsa_r1_ecrecover_sha256_msg_cost_per_block: 2 +ecdsa_r1_secp256r1_verify_keccak256_cost_base: 52 +ecdsa_r1_secp256r1_verify_keccak256_msg_cost_per_byte: 2 +ecdsa_r1_secp256r1_verify_keccak256_msg_cost_per_block: 2 +ecdsa_r1_secp256r1_verify_sha256_cost_base: 52 +ecdsa_r1_secp256r1_verify_sha256_msg_cost_per_byte: 2 +ecdsa_r1_secp256r1_verify_sha256_msg_cost_per_block: 2 +ecvrf_ecvrf_verify_cost_base: 52 +ecvrf_ecvrf_verify_alpha_string_cost_per_byte: 2 +ecvrf_ecvrf_verify_alpha_string_cost_per_block: 2 +ed25519_ed25519_verify_cost_base: 52 +ed25519_ed25519_verify_msg_cost_per_byte: 2 +ed25519_ed25519_verify_msg_cost_per_block: 2 +groth16_prepare_verifying_key_bls12381_cost_base: 52 +groth16_prepare_verifying_key_bn254_cost_base: 52 +groth16_verify_groth16_proof_internal_bls12381_cost_base: 52 +groth16_verify_groth16_proof_internal_bls12381_cost_per_public_input: 2 +groth16_verify_groth16_proof_internal_bn254_cost_base: 52 +groth16_verify_groth16_proof_internal_bn254_cost_per_public_input: 2 +groth16_verify_groth16_proof_internal_public_input_cost_per_byte: 2 +hash_blake2b256_cost_base: 52 +hash_blake2b256_data_cost_per_byte: 2 +hash_blake2b256_data_cost_per_block: 2 +hash_keccak256_cost_base: 52 +hash_keccak256_data_cost_per_byte: 2 +hash_keccak256_data_cost_per_block: 2 +group_ops_bls12381_decode_scalar_cost: 52 +group_ops_bls12381_decode_g1_cost: 52 +group_ops_bls12381_decode_g2_cost: 52 +group_ops_bls12381_decode_gt_cost: 52 +group_ops_bls12381_scalar_add_cost: 52 +group_ops_bls12381_g1_add_cost: 52 +group_ops_bls12381_g2_add_cost: 52 +group_ops_bls12381_gt_add_cost: 52 +group_ops_bls12381_scalar_sub_cost: 52 +group_ops_bls12381_g1_sub_cost: 52 +group_ops_bls12381_g2_sub_cost: 52 +group_ops_bls12381_gt_sub_cost: 52 +group_ops_bls12381_scalar_mul_cost: 52 +group_ops_bls12381_g1_mul_cost: 52 +group_ops_bls12381_g2_mul_cost: 52 +group_ops_bls12381_gt_mul_cost: 52 +group_ops_bls12381_scalar_div_cost: 52 +group_ops_bls12381_g1_div_cost: 52 +group_ops_bls12381_g2_div_cost: 52 +group_ops_bls12381_gt_div_cost: 52 +group_ops_bls12381_g1_hash_to_base_cost: 52 
+group_ops_bls12381_g2_hash_to_base_cost: 52 +group_ops_bls12381_g1_hash_to_cost_per_byte: 2 +group_ops_bls12381_g2_hash_to_cost_per_byte: 2 +group_ops_bls12381_g1_msm_base_cost: 52 +group_ops_bls12381_g2_msm_base_cost: 52 +group_ops_bls12381_g1_msm_base_cost_per_input: 52 +group_ops_bls12381_g2_msm_base_cost_per_input: 52 +group_ops_bls12381_msm_max_len: 32 +group_ops_bls12381_pairing_cost: 52 +hmac_hmac_sha3_256_cost_base: 52 +hmac_hmac_sha3_256_input_cost_per_byte: 2 +hmac_hmac_sha3_256_input_cost_per_block: 2 +check_zklogin_id_cost_base: 200 +check_zklogin_issuer_cost_base: 200 +bcs_per_byte_serialized_cost: 2 +bcs_legacy_min_output_size_cost: 1 +bcs_failure_cost: 52 +hash_sha2_256_base_cost: 52 +hash_sha2_256_per_byte_cost: 2 +hash_sha2_256_legacy_min_input_len_cost: 1 +hash_sha3_256_base_cost: 52 +hash_sha3_256_per_byte_cost: 2 +hash_sha3_256_legacy_min_input_len_cost: 1 +type_name_get_base_cost: 52 +type_name_get_per_byte_cost: 2 +string_check_utf8_base_cost: 52 +string_check_utf8_per_byte_cost: 2 +string_is_char_boundary_base_cost: 52 +string_sub_string_base_cost: 52 +string_sub_string_per_byte_cost: 2 +string_index_of_base_cost: 52 +string_index_of_per_byte_pattern_cost: 2 +string_index_of_per_byte_searched_cost: 2 +vector_empty_base_cost: 52 +vector_length_base_cost: 52 +vector_push_back_base_cost: 52 +vector_push_back_legacy_per_abstract_memory_unit_cost: 2 +vector_borrow_base_cost: 52 +vector_pop_back_base_cost: 52 +vector_destroy_empty_base_cost: 52 +vector_swap_base_cost: 52 +debug_print_base_cost: 52 +debug_print_stack_trace_base_cost: 52 +execution_version: 3 +consensus_bad_nodes_stake_threshold: 20 +max_jwk_votes_per_validator_per_epoch: 240 +max_age_of_jwk_in_epochs: 1 +random_beacon_reduction_allowed_delta: 800 +random_beacon_reduction_lower_bound: 800 +random_beacon_dkg_timeout_round: 3000 +random_beacon_min_round_interval_ms: 500 +random_beacon_dkg_version: 1 +consensus_max_transaction_size_bytes: 262144 +consensus_max_transactions_in_block_bytes: 524288 +consensus_max_num_transactions_in_block: 512 +max_accumulated_txn_cost_per_object_in_narwhal_commit: 100 +max_deferral_rounds_for_congestion_control: 10 +min_checkpoint_interval_ms: 200 +checkpoint_summary_version_specific_data: 1 +max_soft_bundle_size: 5 +bridge_should_try_to_finalize_committee: true +max_accumulated_txn_cost_per_object_in_mysticeti_commit: 10 diff --git a/crates/sui-protocol-config/src/snapshots/sui_protocol_config__test__version_58.snap b/crates/sui-protocol-config/src/snapshots/sui_protocol_config__test__version_58.snap index 8ffc7e45a17e7..617af2983b009 100644 --- a/crates/sui-protocol-config/src/snapshots/sui_protocol_config__test__version_58.snap +++ b/crates/sui-protocol-config/src/snapshots/sui_protocol_config__test__version_58.snap @@ -65,6 +65,7 @@ feature_flags: passkey_auth: true authority_capabilities_v2: true rethrow_serialization_type_layout_errors: true + consensus_distributed_vote_scoring_strategy: true max_tx_size_bytes: 131072 max_input_objects: 2048 max_size_written_objects: 5000000 diff --git a/crates/sui-protocol-config/src/snapshots/sui_protocol_config__test__version_59.snap b/crates/sui-protocol-config/src/snapshots/sui_protocol_config__test__version_59.snap new file mode 100644 index 0000000000000..e8b511eccbdf9 --- /dev/null +++ b/crates/sui-protocol-config/src/snapshots/sui_protocol_config__test__version_59.snap @@ -0,0 +1,335 @@ +--- +source: crates/sui-protocol-config/src/lib.rs +expression: "ProtocolConfig::get_for_version(cur, *chain_id)" +--- +version: 59 
+feature_flags: + package_upgrades: true + commit_root_state_digest: true + advance_epoch_start_time_in_safe_mode: true + loaded_child_objects_fixed: true + missing_type_is_compatibility_error: true + scoring_decision_with_validity_cutoff: true + consensus_order_end_of_epoch_last: true + disallow_adding_abilities_on_upgrade: true + disable_invariant_violation_check_in_swap_loc: true + advance_to_highest_supported_protocol_version: true + ban_entry_init: true + package_digest_hash_module: true + disallow_change_struct_type_params_on_upgrade: true + no_extraneous_module_bytes: true + narwhal_versioned_metadata: true + zklogin_auth: true + consensus_transaction_ordering: ByGasPrice + simplified_unwrap_then_delete: true + upgraded_multisig_supported: true + txn_base_cost_as_multiplier: true + shared_object_deletion: true + narwhal_new_leader_election_schedule: true + loaded_child_object_format: true + enable_jwk_consensus_updates: true + end_of_epoch_transaction_supported: true + simple_conservation_checks: true + loaded_child_object_format_type: true + receive_objects: true + random_beacon: true + bridge: true + enable_effects_v2: true + narwhal_certificate_v2: true + verify_legacy_zklogin_address: true + recompute_has_public_transfer_in_execution: true + accept_zklogin_in_multisig: true + include_consensus_digest_in_prologue: true + hardened_otw_check: true + allow_receiving_object_id: true + enable_poseidon: true + enable_coin_deny_list: true + enable_group_ops_native_functions: true + enable_group_ops_native_function_msm: true + reject_mutable_random_on_entry_functions: true + per_object_congestion_control_mode: TotalTxCount + consensus_choice: Mysticeti + consensus_network: Tonic + zklogin_max_epoch_upper_bound_delta: 30 + mysticeti_leader_scoring_and_schedule: true + reshare_at_same_initial_version: true + resolve_abort_locations_to_package_id: true + mysticeti_use_committed_subdag_digest: true + enable_vdf: true + record_consensus_determined_version_assignments_in_prologue: true + fresh_vm_on_framework_upgrade: true + prepend_prologue_tx_in_consensus_commit_in_checkpoints: true + mysticeti_num_leaders_per_round: 1 + soft_bundle: true + enable_coin_deny_list_v2: true + passkey_auth: true + authority_capabilities_v2: true + rethrow_serialization_type_layout_errors: true + consensus_distributed_vote_scoring_strategy: true + consensus_round_prober: true +max_tx_size_bytes: 131072 +max_input_objects: 2048 +max_size_written_objects: 5000000 +max_size_written_objects_system_tx: 50000000 +max_serialized_tx_effects_size_bytes: 524288 +max_serialized_tx_effects_size_bytes_system_tx: 8388608 +max_gas_payment_objects: 256 +max_modules_in_publish: 64 +max_package_dependencies: 32 +max_arguments: 512 +max_type_arguments: 16 +max_type_argument_depth: 16 +max_pure_argument_size: 16384 +max_programmable_tx_commands: 1024 +move_binary_format_version: 7 +min_move_binary_format_version: 6 +binary_module_handles: 100 +binary_struct_handles: 300 +binary_function_handles: 1500 +binary_function_instantiations: 750 +binary_signatures: 1000 +binary_constant_pool: 4000 +binary_identifiers: 10000 +binary_address_identifiers: 100 +binary_struct_defs: 200 +binary_struct_def_instantiations: 100 +binary_function_defs: 1000 +binary_field_handles: 500 +binary_field_instantiations: 250 +binary_friend_decls: 100 +max_move_object_size: 256000 +max_move_package_size: 102400 +max_publish_or_upgrade_per_ptb: 5 +max_tx_gas: 50000000000 +max_gas_price: 100000 +max_gas_computation_bucket: 5000000 +gas_rounding_step: 1000 
+max_loop_depth: 5 +max_generic_instantiation_length: 32 +max_function_parameters: 128 +max_basic_blocks: 1024 +max_value_stack_size: 1024 +max_type_nodes: 256 +max_push_size: 10000 +max_struct_definitions: 200 +max_function_definitions: 1000 +max_fields_in_struct: 32 +max_dependency_depth: 100 +max_num_event_emit: 1024 +max_num_new_move_object_ids: 2048 +max_num_new_move_object_ids_system_tx: 32768 +max_num_deleted_move_object_ids: 2048 +max_num_deleted_move_object_ids_system_tx: 32768 +max_num_transferred_move_object_ids: 2048 +max_num_transferred_move_object_ids_system_tx: 32768 +max_event_emit_size: 256000 +max_event_emit_size_total: 65536000 +max_move_vector_len: 262144 +max_move_identifier_len: 128 +max_move_value_depth: 128 +max_back_edges_per_function: 10000 +max_back_edges_per_module: 10000 +max_verifier_meter_ticks_per_function: 16000000 +max_meter_ticks_per_module: 16000000 +max_meter_ticks_per_package: 16000000 +object_runtime_max_num_cached_objects: 1000 +object_runtime_max_num_cached_objects_system_tx: 16000 +object_runtime_max_num_store_entries: 1000 +object_runtime_max_num_store_entries_system_tx: 16000 +base_tx_cost_fixed: 1000 +package_publish_cost_fixed: 1000 +base_tx_cost_per_byte: 0 +package_publish_cost_per_byte: 80 +obj_access_cost_read_per_byte: 15 +obj_access_cost_mutate_per_byte: 40 +obj_access_cost_delete_per_byte: 40 +obj_access_cost_verify_per_byte: 200 +max_type_to_layout_nodes: 512 +gas_model_version: 8 +obj_data_cost_refundable: 100 +obj_metadata_cost_non_refundable: 50 +storage_rebate_rate: 9900 +storage_fund_reinvest_rate: 500 +reward_slashing_rate: 10000 +storage_gas_price: 76 +max_transactions_per_checkpoint: 10000 +max_checkpoint_size_bytes: 31457280 +buffer_stake_for_protocol_upgrade_bps: 5000 +address_from_bytes_cost_base: 52 +address_to_u256_cost_base: 52 +address_from_u256_cost_base: 52 +config_read_setting_impl_cost_base: 100 +config_read_setting_impl_cost_per_byte: 40 +dynamic_field_hash_type_and_key_cost_base: 100 +dynamic_field_hash_type_and_key_type_cost_per_byte: 2 +dynamic_field_hash_type_and_key_value_cost_per_byte: 2 +dynamic_field_hash_type_and_key_type_tag_cost_per_byte: 2 +dynamic_field_add_child_object_cost_base: 100 +dynamic_field_add_child_object_type_cost_per_byte: 10 +dynamic_field_add_child_object_value_cost_per_byte: 10 +dynamic_field_add_child_object_struct_tag_cost_per_byte: 10 +dynamic_field_borrow_child_object_cost_base: 100 +dynamic_field_borrow_child_object_child_ref_cost_per_byte: 10 +dynamic_field_borrow_child_object_type_cost_per_byte: 10 +dynamic_field_remove_child_object_cost_base: 100 +dynamic_field_remove_child_object_child_cost_per_byte: 2 +dynamic_field_remove_child_object_type_cost_per_byte: 2 +dynamic_field_has_child_object_cost_base: 100 +dynamic_field_has_child_object_with_ty_cost_base: 100 +dynamic_field_has_child_object_with_ty_type_cost_per_byte: 2 +dynamic_field_has_child_object_with_ty_type_tag_cost_per_byte: 2 +event_emit_cost_base: 52 +event_emit_value_size_derivation_cost_per_byte: 2 +event_emit_tag_size_derivation_cost_per_byte: 5 +event_emit_output_cost_per_byte: 10 +object_borrow_uid_cost_base: 52 +object_delete_impl_cost_base: 52 +object_record_new_uid_cost_base: 52 +transfer_transfer_internal_cost_base: 52 +transfer_freeze_object_cost_base: 52 +transfer_share_object_cost_base: 52 +transfer_receive_object_cost_base: 52 +tx_context_derive_id_cost_base: 52 +types_is_one_time_witness_cost_base: 52 +types_is_one_time_witness_type_tag_cost_per_byte: 2 +types_is_one_time_witness_type_cost_per_byte: 2 
+validator_validate_metadata_cost_base: 52
+validator_validate_metadata_data_cost_per_byte: 2
+crypto_invalid_arguments_cost: 100
+bls12381_bls12381_min_sig_verify_cost_base: 52
+bls12381_bls12381_min_sig_verify_msg_cost_per_byte: 2
+bls12381_bls12381_min_sig_verify_msg_cost_per_block: 2
+bls12381_bls12381_min_pk_verify_cost_base: 52
+bls12381_bls12381_min_pk_verify_msg_cost_per_byte: 2
+bls12381_bls12381_min_pk_verify_msg_cost_per_block: 2
+ecdsa_k1_ecrecover_keccak256_cost_base: 52
+ecdsa_k1_ecrecover_keccak256_msg_cost_per_byte: 2
+ecdsa_k1_ecrecover_keccak256_msg_cost_per_block: 2
+ecdsa_k1_ecrecover_sha256_cost_base: 52
+ecdsa_k1_ecrecover_sha256_msg_cost_per_byte: 2
+ecdsa_k1_ecrecover_sha256_msg_cost_per_block: 2
+ecdsa_k1_decompress_pubkey_cost_base: 52
+ecdsa_k1_secp256k1_verify_keccak256_cost_base: 52
+ecdsa_k1_secp256k1_verify_keccak256_msg_cost_per_byte: 2
+ecdsa_k1_secp256k1_verify_keccak256_msg_cost_per_block: 2
+ecdsa_k1_secp256k1_verify_sha256_cost_base: 52
+ecdsa_k1_secp256k1_verify_sha256_msg_cost_per_byte: 2
+ecdsa_k1_secp256k1_verify_sha256_msg_cost_per_block: 2
+ecdsa_r1_ecrecover_keccak256_cost_base: 52
+ecdsa_r1_ecrecover_keccak256_msg_cost_per_byte: 2
+ecdsa_r1_ecrecover_keccak256_msg_cost_per_block: 2
+ecdsa_r1_ecrecover_sha256_cost_base: 52
+ecdsa_r1_ecrecover_sha256_msg_cost_per_byte: 2
+ecdsa_r1_ecrecover_sha256_msg_cost_per_block: 2
+ecdsa_r1_secp256r1_verify_keccak256_cost_base: 52
+ecdsa_r1_secp256r1_verify_keccak256_msg_cost_per_byte: 2
+ecdsa_r1_secp256r1_verify_keccak256_msg_cost_per_block: 2
+ecdsa_r1_secp256r1_verify_sha256_cost_base: 52
+ecdsa_r1_secp256r1_verify_sha256_msg_cost_per_byte: 2
+ecdsa_r1_secp256r1_verify_sha256_msg_cost_per_block: 2
+ecvrf_ecvrf_verify_cost_base: 52
+ecvrf_ecvrf_verify_alpha_string_cost_per_byte: 2
+ecvrf_ecvrf_verify_alpha_string_cost_per_block: 2
+ed25519_ed25519_verify_cost_base: 52
+ed25519_ed25519_verify_msg_cost_per_byte: 2
+ed25519_ed25519_verify_msg_cost_per_block: 2
+groth16_prepare_verifying_key_bls12381_cost_base: 52
+groth16_prepare_verifying_key_bn254_cost_base: 52
+groth16_verify_groth16_proof_internal_bls12381_cost_base: 52
+groth16_verify_groth16_proof_internal_bls12381_cost_per_public_input: 2
+groth16_verify_groth16_proof_internal_bn254_cost_base: 52
+groth16_verify_groth16_proof_internal_bn254_cost_per_public_input: 2
+groth16_verify_groth16_proof_internal_public_input_cost_per_byte: 2
+hash_blake2b256_cost_base: 52
+hash_blake2b256_data_cost_per_byte: 2
+hash_blake2b256_data_cost_per_block: 2
+hash_keccak256_cost_base: 52
+hash_keccak256_data_cost_per_byte: 2
+hash_keccak256_data_cost_per_block: 2
+poseidon_bn254_cost_base: 260
+poseidon_bn254_cost_per_block: 10
+group_ops_bls12381_decode_scalar_cost: 52
+group_ops_bls12381_decode_g1_cost: 52
+group_ops_bls12381_decode_g2_cost: 52
+group_ops_bls12381_decode_gt_cost: 52
+group_ops_bls12381_scalar_add_cost: 52
+group_ops_bls12381_g1_add_cost: 52
+group_ops_bls12381_g2_add_cost: 52
+group_ops_bls12381_gt_add_cost: 52
+group_ops_bls12381_scalar_sub_cost: 52
+group_ops_bls12381_g1_sub_cost: 52
+group_ops_bls12381_g2_sub_cost: 52
+group_ops_bls12381_gt_sub_cost: 52
+group_ops_bls12381_scalar_mul_cost: 52
+group_ops_bls12381_g1_mul_cost: 52
+group_ops_bls12381_g2_mul_cost: 52
+group_ops_bls12381_gt_mul_cost: 52
+group_ops_bls12381_scalar_div_cost: 52
+group_ops_bls12381_g1_div_cost: 52
+group_ops_bls12381_g2_div_cost: 52
+group_ops_bls12381_gt_div_cost: 52
+group_ops_bls12381_g1_hash_to_base_cost: 52
+group_ops_bls12381_g2_hash_to_base_cost: 52
+group_ops_bls12381_g1_hash_to_cost_per_byte: 2
+group_ops_bls12381_g2_hash_to_cost_per_byte: 2
+group_ops_bls12381_g1_msm_base_cost: 52
+group_ops_bls12381_g2_msm_base_cost: 52
+group_ops_bls12381_g1_msm_base_cost_per_input: 52
+group_ops_bls12381_g2_msm_base_cost_per_input: 52
+group_ops_bls12381_msm_max_len: 32
+group_ops_bls12381_pairing_cost: 52
+hmac_hmac_sha3_256_cost_base: 52
+hmac_hmac_sha3_256_input_cost_per_byte: 2
+hmac_hmac_sha3_256_input_cost_per_block: 2
+check_zklogin_id_cost_base: 200
+check_zklogin_issuer_cost_base: 200
+vdf_verify_vdf_cost: 1500
+vdf_hash_to_input_cost: 100
+bcs_per_byte_serialized_cost: 2
+bcs_legacy_min_output_size_cost: 1
+bcs_failure_cost: 52
+hash_sha2_256_base_cost: 52
+hash_sha2_256_per_byte_cost: 2
+hash_sha2_256_legacy_min_input_len_cost: 1
+hash_sha3_256_base_cost: 52
+hash_sha3_256_per_byte_cost: 2
+hash_sha3_256_legacy_min_input_len_cost: 1
+type_name_get_base_cost: 52
+type_name_get_per_byte_cost: 2
+string_check_utf8_base_cost: 52
+string_check_utf8_per_byte_cost: 2
+string_is_char_boundary_base_cost: 52
+string_sub_string_base_cost: 52
+string_sub_string_per_byte_cost: 2
+string_index_of_base_cost: 52
+string_index_of_per_byte_pattern_cost: 2
+string_index_of_per_byte_searched_cost: 2
+vector_empty_base_cost: 52
+vector_length_base_cost: 52
+vector_push_back_base_cost: 52
+vector_push_back_legacy_per_abstract_memory_unit_cost: 2
+vector_borrow_base_cost: 52
+vector_pop_back_base_cost: 52
+vector_destroy_empty_base_cost: 52
+vector_swap_base_cost: 52
+debug_print_base_cost: 52
+debug_print_stack_trace_base_cost: 52
+execution_version: 3
+consensus_bad_nodes_stake_threshold: 20
+max_jwk_votes_per_validator_per_epoch: 240
+max_age_of_jwk_in_epochs: 1
+random_beacon_reduction_allowed_delta: 800
+random_beacon_reduction_lower_bound: 800
+random_beacon_dkg_timeout_round: 3000
+random_beacon_min_round_interval_ms: 500
+random_beacon_dkg_version: 1
+consensus_max_transaction_size_bytes: 262144
+consensus_max_transactions_in_block_bytes: 524288
+consensus_max_num_transactions_in_block: 512
+max_accumulated_txn_cost_per_object_in_narwhal_commit: 100
+max_deferral_rounds_for_congestion_control: 10
+min_checkpoint_interval_ms: 200
+checkpoint_summary_version_specific_data: 1
+max_soft_bundle_size: 5
+bridge_should_try_to_finalize_committee: true
+max_accumulated_txn_cost_per_object_in_mysticeti_commit: 10
diff --git a/crates/sui-proxy/Cargo.toml b/crates/sui-proxy/Cargo.toml
index 2a05739c8491a..fde1f38f2670d 100644
--- a/crates/sui-proxy/Cargo.toml
+++ b/crates/sui-proxy/Cargo.toml
@@ -14,6 +14,8 @@ anyhow.workspace = true
 bytes.workspace = true
 clap.workspace = true
 protobuf.workspace = true
+futures.workspace = true
+url.workspace = true
 tokio = { workspace = true, features = ["full"] }
 tracing.workspace = true
 const-str.workspace = true
diff --git a/crates/sui-proxy/src/admin.rs b/crates/sui-proxy/src/admin.rs
index d6f149d043a51..d9e12f1409ee3 100644
--- a/crates/sui-proxy/src/admin.rs
+++ b/crates/sui-proxy/src/admin.rs
@@ -3,11 +3,10 @@
 use crate::config::{DynamicPeerValidationConfig, RemoteWriteConfig, StaticPeerValidationConfig};
 use crate::handlers::publish_metrics;
 use crate::histogram_relay::HistogramRelay;
-use crate::ip::{is_private, to_multiaddr};
 use crate::middleware::{
     expect_content_length, expect_mysten_proxy_header, expect_valid_public_key,
 };
-use crate::peers::{SuiNodeProvider, SuiPeer};
+use crate::peers::{AllowedPeer, SuiNodeProvider};
 use crate::var;
 use anyhow::Error;
 use anyhow::Result;
@@ -16,7 +15,7 @@ use fastcrypto::ed25519::{Ed25519KeyPair, Ed25519PublicKey};
 use fastcrypto::traits::{KeyPair, ToFromBytes};
 use std::fs;
 use std::io::BufReader;
-use std::net::{IpAddr, SocketAddr};
+use std::net::SocketAddr;
 use std::sync::Arc;
 use std::time::Duration;
 use sui_tls::SUI_VALIDATOR_SERVER_NAME;
@@ -29,7 +28,7 @@ use tower_http::{
     trace::{DefaultOnResponse, TraceLayer},
     LatencyUnit,
 };
-use tracing::{error, info, Level};
+use tracing::{info, Level};
 
 /// Configure our graceful shutdown scenarios
 pub async fn shutdown_signal(h: axum_server::Handle) {
@@ -200,30 +199,27 @@ fn load_private_key(filename: &str) -> rustls::pki_types::PrivateKeyDer<'static>
 /// load the static keys we'll use to allow external non-validator nodes to push metrics
 fn load_static_peers(
     static_peers: Option<StaticPeerValidationConfig>,
-) -> Result<Vec<SuiPeer>, Error> {
+) -> Result<Vec<AllowedPeer>, Error> {
     let Some(static_peers) = static_peers else {
         return Ok(vec![]);
     };
-    let static_keys = static_peers.pub_keys.into_iter().filter_map(|spk|{
-        let p2p_address: IpAddr = spk.p2p_address.parse().unwrap();
-        if is_private(p2p_address) {
-            error!("{} appears to be a private address. We only allow 169.254.0.0/16 addresses to be private; ignoring this entry", p2p_address);
-            dbg!("skipping {}", spk);
-            return None;
-        }
-        Some(spk)
-    }).map(|spk|{
-        let peer_id = hex::decode(spk.peer_id).unwrap();
-        let public_key = Ed25519PublicKey::from_bytes(peer_id.as_ref()).unwrap();
-        let p2p_address: IpAddr = spk.p2p_address.parse().unwrap();
-        let s = SuiPeer{
-            name:spk.name.clone(),
-            p2p_address: to_multiaddr(p2p_address),
-            public_key,
-        };
-        info!("loaded static peer: {} public key: {} p2p address: {}", &s.name, &s.public_key, &s.p2p_address);
-        s
-    }).collect();
+    let static_keys = static_peers
+        .pub_keys
+        .into_iter()
+        .map(|spk| {
+            let peer_id = hex::decode(spk.peer_id).unwrap();
+            let public_key = Ed25519PublicKey::from_bytes(peer_id.as_ref()).unwrap();
+            let s = AllowedPeer {
+                name: spk.name.clone(),
+                public_key,
+            };
+            info!(
+                "loaded static peer: {} public key: {}",
+                &s.name, &s.public_key,
+            );
+            s
+        })
+        .collect();
     Ok(static_keys)
 }
diff --git a/crates/sui-proxy/src/config.rs b/crates/sui-proxy/src/config.rs
index f3aad0e955f3e..05efd8160b30b 100644
--- a/crates/sui-proxy/src/config.rs
+++ b/crates/sui-proxy/src/config.rs
@@ -38,7 +38,7 @@ pub struct RemoteWriteConfig {
     pub pool_max_idle_per_host: usize,
 }
 
-/// DynamicPeerValidationConfig controls what sui-node binaries that are functioning as a validator that we'll speak with.
+/// DynamicPeerValidationConfig controls which sui-node & sui-bridge binaries, functioning as validators, we'll speak with.
 /// Peers in this case are peers within the consensus committee, for each epoch. This membership is determined dynamically
 /// for each epoch via json-rpc calls to a full node.
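+/// A minimal usage sketch of the dynamic side (the `SuiNodeProvider::new` signature and
+/// `poll_peer_list` appear in this diff's peers.rs; the literal url and interval values
+/// below are illustrative assumptions only):
+///
+/// ```ignore
+/// let provider = SuiNodeProvider::new(
+///     "http://localhost:9000".into(),     // json-rpc url of a full node (assumed value)
+///     std::time::Duration::from_secs(60), // rpc_poll_interval (assumed value)
+///     vec![],                             // static peers, e.g. from load_static_peers
+/// );
+/// provider.poll_peer_list(); // spawns the refresh loop for the sui + bridge allow lists
+/// ```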
 #[serde_as]
@@ -79,8 +79,6 @@ pub struct StaticPeerValidationConfig
 pub struct StaticPubKey {
     /// friendly name we will see in metrics
     pub name: String,
-    /// friendly ip address we may see in metrics
-    pub p2p_address: String,
     /// the peer_id from a node config file (Ed25519 PublicKey)
     pub peer_id: String,
 }
diff --git a/crates/sui-proxy/src/handlers.rs b/crates/sui-proxy/src/handlers.rs
index 03587e4225cf3..0e735a474e7e7 100644
--- a/crates/sui-proxy/src/handlers.rs
+++ b/crates/sui-proxy/src/handlers.rs
@@ -4,7 +4,7 @@ use crate::admin::{Labels, ReqwestClient};
 use crate::consumer::{convert_to_remote_write, populate_labels, NodeMetric};
 use crate::histogram_relay::HistogramRelay;
 use crate::middleware::LenDelimProtobuf;
-use crate::peers::SuiPeer;
+use crate::peers::AllowedPeer;
 use axum::{
     extract::{ConnectInfo, Extension},
     http::StatusCode,
@@ -45,9 +45,7 @@ pub async fn publish_metrics(
     Extension(labels): Extension<Labels>,
     Extension(client): Extension<ReqwestClient>,
     ConnectInfo(addr): ConnectInfo<SocketAddr>,
-    Extension(SuiPeer {
-        name, public_key, ..
-    }): Extension<SuiPeer>,
+    Extension(AllowedPeer { name, public_key }): Extension<AllowedPeer>,
     Extension(relay): Extension<HistogramRelay>,
     LenDelimProtobuf(data): LenDelimProtobuf,
 ) -> (StatusCode, &'static str) {
diff --git a/crates/sui-proxy/src/ip.rs b/crates/sui-proxy/src/ip.rs
deleted file mode 100644
index cdfa65bcb64be..0000000000000
--- a/crates/sui-proxy/src/ip.rs
+++ /dev/null
@@ -1,77 +0,0 @@
-// Copyright (c) Mysten Labs, Inc.
-// SPDX-License-Identifier: Apache-2.0
-
-use ipnetwork::IpNetwork;
-use multiaddr::Multiaddr;
-use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};
-
-pub fn to_multiaddr(addr: IpAddr) -> Multiaddr {
-    match addr {
-        IpAddr::V4(a) => Multiaddr::from(a),
-        IpAddr::V6(a) => Multiaddr::from(a),
-    }
-}
-
-/// is_private makes a decent guess at determining of an addr is publicly routable.
-pub fn is_private(addr: IpAddr) -> bool {
-    match addr {
-        IpAddr::V4(a) => is_private_v4(a),
-        IpAddr::V6(a) => is_private_v6(a),
-    }
-}
-
-/// is_private_v4 will say just that, is it private? we ignore 169.254.0.0/16 in this consideration
-fn is_private_v4(addr: Ipv4Addr) -> bool {
-    // special case we will allow
-    let allowed_private: IpNetwork = "169.254.0.0/16".parse().unwrap();
-    if allowed_private.contains(IpAddr::V4(addr)) {
-        // intentional
-        return false;
-    }
-    addr.is_private()
-}
-
-/// is_private_v6 and the funcs below are based on an unstable const fn in core. yoinked it.
-/// taken from https://github.com/rust-lang/rust/blob/340bb19fea20fd5f9357bbfac542fad84fc7ea2b/library/core/src/net/ip_addr.rs#L691-L783
-#[allow(clippy::manual_range_contains)]
-fn is_private_v6(addr: Ipv6Addr) -> bool {
-    addr.is_unspecified()
-        || addr.is_loopback()
-        // IPv4-mapped Address (`::ffff:0:0/96`)
-        || matches!(addr.segments(), [0, 0, 0, 0, 0, 0xffff, _, _])
-        // IPv4-IPv6 Translat. (`64:ff9b:1::/48`)
-        || matches!(addr.segments(), [0x64, 0xff9b, 1, _, _, _, _, _])
-        // Discard-Only Address Block (`100::/64`)
-        || matches!(addr.segments(), [0x100, 0, 0, 0, _, _, _, _])
-        // IETF Protocol Assignments (`2001::/23`)
-        || (matches!(addr.segments(), [0x2001, b, _, _, _, _, _, _] if b < 0x200)
-            && !(
-                // Port Control Protocol Anycast (`2001:1::1`)
-                u128::from_be_bytes(addr.octets()) == 0x2001_0001_0000_0000_0000_0000_0000_0001
-                // Traversal Using Relays around NAT Anycast (`2001:1::2`)
-                || u128::from_be_bytes(addr.octets()) == 0x2001_0001_0000_0000_0000_0000_0000_0002
-                // AMT (`2001:3::/32`)
-                || matches!(addr.segments(), [0x2001, 3, _, _, _, _, _, _])
-                // AS112-v6 (`2001:4:112::/48`)
-                || matches!(addr.segments(), [0x2001, 4, 0x112, _, _, _, _, _])
-                // ORCHIDv2 (`2001:20::/28`)
-                // Drone Remote ID Protocol Entity Tags (DETs) Prefix (`2001:30::/28`)`
-                || matches!(addr.segments(), [0x2001, b, _, _, _, _, _, _] if b >= 0x20 && b <= 0x3F)
-            ))
-        // 6to4 (`2002::/16`) – it's not explicitly documented as globally reachable,
-        // IANA says N/A.
-        || matches!(addr.segments(), [0x2002, _, _, _, _, _, _, _])
-        || is_documentation(&addr)
-        || is_unique_local(&addr)
-        || is_unicast_link_local(&addr)
-}
-
-fn is_documentation(addr: &Ipv6Addr) -> bool {
-    (addr.segments()[0] == 0x2001) && (addr.segments()[1] == 0xdb8)
-}
-fn is_unique_local(addr: &Ipv6Addr) -> bool {
-    (addr.segments()[0] & 0xfe00) == 0xfc00
-}
-fn is_unicast_link_local(addr: &Ipv6Addr) -> bool {
-    (addr.segments()[0] & 0xffc0) == 0xfe80
-}
diff --git a/crates/sui-proxy/src/lib.rs b/crates/sui-proxy/src/lib.rs
index 149eba1a31156..273c06c1b6b63 100644
--- a/crates/sui-proxy/src/lib.rs
+++ b/crates/sui-proxy/src/lib.rs
@@ -5,7 +5,6 @@ pub mod config;
 pub mod consumer;
 pub mod handlers;
 pub mod histogram_relay;
-mod ip;
 pub mod metrics;
 pub mod middleware;
 pub mod peers;
@@ -43,7 +42,6 @@ mod tests {
     use axum::http::StatusCode;
     use axum::routing::post;
     use axum::Router;
-    use multiaddr::Multiaddr;
     use prometheus::Encoder;
     use prometheus::PROTOBUF_FORMAT;
     use protobuf::RepeatedField;
@@ -143,11 +141,10 @@ mod tests {
         client.get(&server_url).send().await.unwrap_err();
 
         // Insert the client's public key into the allowlist and verify the request is successful
-        allower.get_mut().write().unwrap().insert(
+        allower.get_sui_mut().write().unwrap().insert(
            client_pub_key.to_owned(),
-            peers::SuiPeer {
+            peers::AllowedPeer {
                 name: "some-node".into(),
-                p2p_address: Multiaddr::empty(),
                 public_key: client_pub_key.to_owned(),
             },
         );
diff --git a/crates/sui-proxy/src/main.rs b/crates/sui-proxy/src/main.rs
index 99be24e78b158..8c7f203bb916e 100644
--- a/crates/sui-proxy/src/main.rs
+++ b/crates/sui-proxy/src/main.rs
@@ -58,7 +58,7 @@ async fn main() -> Result<()> {
     let listener = std::net::TcpListener::bind(config.listen_address).unwrap();
 
     let (tls_config, allower) =
-        // we'll only use the dynamic peers in some cases - it makes little sense to run with the statics
+        // we'll only use the dynamic peers in some cases - it makes little sense to run with the static peers
         // since this first mode allows all.
         if config.dynamic_peers.certificate_file.is_none()
             || config.dynamic_peers.private_key.is_none()
         {
            (
diff --git a/crates/sui-proxy/src/peers.rs b/crates/sui-proxy/src/peers.rs
index 250ba4b3bcc50..e00a090947a87 100644
--- a/crates/sui-proxy/src/peers.rs
+++ b/crates/sui-proxy/src/peers.rs
@@ -2,20 +2,24 @@
 // SPDX-License-Identifier: Apache-2.0
 use anyhow::{bail, Context, Result};
 use fastcrypto::ed25519::Ed25519PublicKey;
+use fastcrypto::encoding::Base64;
+use fastcrypto::encoding::Encoding;
 use fastcrypto::traits::ToFromBytes;
-use multiaddr::Multiaddr;
+use futures::stream::{self, StreamExt};
 use once_cell::sync::Lazy;
 use prometheus::{register_counter_vec, register_histogram_vec};
 use prometheus::{CounterVec, HistogramVec};
 use serde::Deserialize;
-use std::time::Duration;
 use std::{
     collections::HashMap,
     sync::{Arc, RwLock},
+    time::Duration,
 };
 use sui_tls::Allower;
+use sui_types::bridge::BridgeSummary;
 use sui_types::sui_system_state::sui_system_state_summary::SuiSystemStateSummary;
 use tracing::{debug, error, info};
+use url::Url;
 
 static JSON_RPC_STATE: Lazy<CounterVec> = Lazy::new(|| {
     register_counter_vec!(
@@ -38,14 +42,14 @@ static JSON_RPC_DURATION: Lazy<HistogramVec> = Lazy::new(|| {
     .unwrap()
 });
 
-/// SuiNods a mapping of public key to SuiPeer data
-pub type SuiPeers = Arc<RwLock<HashMap<Ed25519PublicKey, SuiPeer>>>;
+/// AllowedPeers is a mapping of public key to AllowedPeer data
+pub type AllowedPeers = Arc<RwLock<HashMap<Ed25519PublicKey, AllowedPeer>>>;
+
+type MetricsPubKeys = Arc<RwLock<HashMap<String, Ed25519PublicKey>>>;
 
-/// A SuiPeer is the collated sui chain data we have about validators
 #[derive(Hash, PartialEq, Eq, Debug, Clone)]
-pub struct SuiPeer {
+pub struct AllowedPeer {
     pub name: String,
-    pub p2p_address: Multiaddr,
     pub public_key: Ed25519PublicKey,
 }
 
@@ -55,8 +59,9 @@ pub struct SuiPeer
 /// Handlers also use this data in an Extractor extension to check incoming clients on the http api against known keys.
 #[derive(Debug, Clone)]
 pub struct SuiNodeProvider {
-    nodes: SuiPeers,
-    static_nodes: SuiPeers,
+    sui_nodes: AllowedPeers,
+    bridge_nodes: AllowedPeers,
+    static_nodes: AllowedPeers,
     rpc_url: String,
     rpc_poll_interval: Duration,
 }
@@ -64,21 +69,28 @@ impl Allower for SuiNodeProvider {
     fn allowed(&self, key: &Ed25519PublicKey) -> bool {
         self.static_nodes.read().unwrap().contains_key(key)
-            || self.nodes.read().unwrap().contains_key(key)
+            || self.sui_nodes.read().unwrap().contains_key(key)
+            || self.bridge_nodes.read().unwrap().contains_key(key)
     }
 }
 
 impl SuiNodeProvider {
-    pub fn new(rpc_url: String, rpc_poll_interval: Duration, static_peers: Vec<SuiPeer>) -> Self {
+    pub fn new(
+        rpc_url: String,
+        rpc_poll_interval: Duration,
+        static_peers: Vec<AllowedPeer>,
+    ) -> Self {
         // build our hashmap with the static pub keys. we only do this one time at binary startup.
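+        // Note: `allowed` consults this static map first, before falling back to the
+        // dynamically refreshed sui and bridge validator maps below.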
-        let static_nodes: HashMap<Ed25519PublicKey, SuiPeer> = static_peers
+        let static_nodes: HashMap<Ed25519PublicKey, AllowedPeer> = static_peers
             .into_iter()
             .map(|v| (v.public_key.clone(), v))
             .collect();
         let static_nodes = Arc::new(RwLock::new(static_nodes));
-        let nodes = Arc::new(RwLock::new(HashMap::new()));
+        let sui_nodes = Arc::new(RwLock::new(HashMap::new()));
+        let bridge_nodes = Arc::new(RwLock::new(HashMap::new()));
         Self {
-            nodes,
+            sui_nodes,
+            bridge_nodes,
             static_nodes,
             rpc_url,
             rpc_poll_interval,
@@ -86,34 +98,35 @@ impl SuiNodeProvider {
     }
 
     /// get is used to retrieve peer info in our handlers
-    pub fn get(&self, key: &Ed25519PublicKey) -> Option<SuiPeer> {
+    pub fn get(&self, key: &Ed25519PublicKey) -> Option<AllowedPeer> {
         debug!("look for {:?}", key);
         // check static nodes first
         if let Some(v) = self.static_nodes.read().unwrap().get(key) {
-            return Some(SuiPeer {
+            return Some(AllowedPeer {
+                name: v.name.to_owned(),
+                public_key: v.public_key.to_owned(),
+            });
+        }
+        // check sui validators
+        if let Some(v) = self.sui_nodes.read().unwrap().get(key) {
+            return Some(AllowedPeer {
                 name: v.name.to_owned(),
-                p2p_address: v.p2p_address.to_owned(),
                 public_key: v.public_key.to_owned(),
             });
         }
-        // check dynamic nodes
-        if let Some(v) = self.nodes.read().unwrap().get(key) {
-            return Some(SuiPeer {
+        // check bridge validators
+        if let Some(v) = self.bridge_nodes.read().unwrap().get(key) {
+            return Some(AllowedPeer {
                 name: v.name.to_owned(),
-                p2p_address: v.p2p_address.to_owned(),
                 public_key: v.public_key.to_owned(),
             });
         }
         None
     }
 
-    /// Get a reference to the inner service
-    pub fn get_ref(&self) -> &SuiPeers {
-        &self.nodes
-    }
-
-    /// Get a mutable reference to the inner service
-    pub fn get_mut(&mut self) -> &mut SuiPeers {
-        &mut self.nodes
+    /// Get a mutable reference to the allowed sui validator map
+    pub fn get_sui_mut(&mut self) -> &mut AllowedPeers {
+        &mut self.sui_nodes
     }
 
     /// get_validators will retrieve known validators
@@ -180,13 +193,108 @@ impl SuiNodeProvider {
         Ok(body.result)
     }
 
+    /// get_bridge_validators will retrieve known bridge validators
+    async fn get_bridge_validators(url: String) -> Result<BridgeSummary> {
+        let rpc_method = "suix_getLatestBridge";
+        let _timer = JSON_RPC_DURATION
+            .with_label_values(&[rpc_method])
+            .start_timer();
+        let client = reqwest::Client::builder().build().unwrap();
+        let request = serde_json::json!({
+            "jsonrpc": "2.0",
+            "method": rpc_method,
+            "id": 1,
+        });
+        let response = client
+            .post(url)
+            .header(reqwest::header::CONTENT_TYPE, "application/json")
+            .body(request.to_string())
+            .send()
+            .await
+            .with_context(|| {
+                JSON_RPC_STATE
+                    .with_label_values(&[rpc_method, "failed_get"])
+                    .inc();
+                "unable to perform json rpc"
+            })?;
+
+        let raw = response.bytes().await.with_context(|| {
+            JSON_RPC_STATE
+                .with_label_values(&[rpc_method, "failed_body_extract"])
+                .inc();
+            "unable to extract body bytes from json rpc"
+        })?;
+
+        #[derive(Debug, Deserialize)]
+        struct ResponseBody {
+            result: BridgeSummary,
+        }
+        let summary: BridgeSummary = match serde_json::from_slice::<ResponseBody>(&raw) {
+            Ok(b) => b.result,
+            Err(error) => {
+                JSON_RPC_STATE
+                    .with_label_values(&[rpc_method, "failed_json_decode"])
+                    .inc();
+                bail!(
+                    "unable to decode json: {error} response from json rpc: {:?}",
+                    raw
+                )
+            }
+        };
+        JSON_RPC_STATE
+            .with_label_values(&[rpc_method, "success"])
+            .inc();
+        Ok(summary)
+    }
+
+    async fn update_sui_validator_set(&self) {
+        match Self::get_validators(self.rpc_url.to_owned()).await {
+            Ok(summary) => {
+                let validators = extract(summary);
+                let mut allow = self.sui_nodes.write().unwrap();
+                allow.clear();
+                allow.extend(validators);
+                info!(
+                    "{} sui validators managed to make it on the allow list",
+                    allow.len()
+                );
+            }
+            Err(error) => {
+                JSON_RPC_STATE
+                    .with_label_values(&["update_peer_count", "failed"])
+                    .inc();
+                error!("unable to refresh peer list: {error}");
+            }
+        };
+    }
+
+    async fn update_bridge_validator_set(&self, metrics_keys: MetricsPubKeys) {
+        match Self::get_bridge_validators(self.rpc_url.to_owned()).await {
+            Ok(summary) => {
+                let validators = extract_bridge(summary, metrics_keys).await;
+                let mut allow = self.bridge_nodes.write().unwrap();
+                allow.clear();
+                allow.extend(validators);
+                info!(
+                    "{} bridge validators managed to make it on the allow list",
+                    allow.len()
+                );
+            }
+            Err(error) => {
+                JSON_RPC_STATE
+                    .with_label_values(&["update_bridge_peer_count", "failed"])
+                    .inc();
+                error!("unable to refresh sui bridge peer list: {error}");
+            }
+        };
+    }
 
     /// poll_peer_list will act as a refresh interval for our cache
     pub fn poll_peer_list(&self) {
         info!("Started polling for peers using rpc: {}", self.rpc_url);
         let rpc_poll_interval = self.rpc_poll_interval;
-        let rpc_url = self.rpc_url.to_owned();
-        let nodes = self.nodes.clone();
+        let cloned_self = self.clone();
+        let bridge_metrics_keys: MetricsPubKeys = Arc::new(RwLock::new(HashMap::new()));
         tokio::spawn(async move {
             let mut interval = tokio::time::interval(rpc_poll_interval);
             interval.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Skip);
@@ -194,25 +302,10 @@ impl SuiNodeProvider {
             loop {
                 interval.tick().await;
-                match Self::get_validators(rpc_url.to_owned()).await {
-                    Ok(summary) => {
-                        let peers = extract(summary);
-                        // maintain the tls acceptor set
-                        let mut allow = nodes.write().unwrap();
-                        allow.clear();
-                        allow.extend(peers);
-                        info!("{} peers managed to make it on the allow list", allow.len());
-                        JSON_RPC_STATE
-                            .with_label_values(&["update_peer_count", "success"])
-                            .inc_by(allow.len() as f64);
-                    }
-                    Err(error) => {
-                        JSON_RPC_STATE
-                            .with_label_values(&["update_peer_count", "failed"])
-                            .inc();
-                        error!("unable to refresh peer list: {error}")
-                    }
-                }
+                cloned_self.update_sui_validator_set().await;
+                cloned_self
+                    .update_bridge_validator_set(bridge_metrics_keys.clone())
+                    .await;
             }
         });
     }
@@ -221,26 +314,20 @@ impl SuiNodeProvider {
 /// extract will get the network pubkey bytes from a SuiValidatorSummary type. This type comes from a
 /// full node rpc result. See get_validators for details. The key here, if extracted successfully, will
 /// ultimately be stored in the allow list and let us communicate with those actual peers via tls.
-fn extract(summary: SuiSystemStateSummary) -> impl Iterator<Item = (Ed25519PublicKey, SuiPeer)> {
+fn extract(
+    summary: SuiSystemStateSummary,
+) -> impl Iterator<Item = (Ed25519PublicKey, AllowedPeer)> {
     summary.active_validators.into_iter().filter_map(|vm| {
         match Ed25519PublicKey::from_bytes(&vm.network_pubkey_bytes) {
             Ok(public_key) => {
-                let Ok(p2p_address) = Multiaddr::try_from(vm.p2p_address) else {
-                    error!(
-                        "refusing to add peer to allow list; unable to decode multiaddr for {}",
-                        vm.name
-                    );
-                    return None; // scoped to filter_map
-                };
                 debug!(
-                    "adding public key {:?} for address {:?}",
-                    public_key, p2p_address
+                    "adding public key {:?} for sui validator {:?}",
+                    public_key, vm.name
                 );
                 Some((
                     public_key.clone(),
-                    SuiPeer {
+                    AllowedPeer {
                         name: vm.name,
-                        p2p_address,
                         public_key,
                     },
                 )) // scoped to filter_map
@@ -255,12 +342,158 @@ fn extract(summary: SuiSystemStateSummary) -> impl Iterator
+async fn extract_bridge(
+    summary: BridgeSummary,
+    metrics_keys: MetricsPubKeys,
+) -> Vec<(Ed25519PublicKey, AllowedPeer)> {
+    {
+        // Clean up the cache: retain only the metrics keys of the up-to-date bridge validator set
+        let mut metrics_keys_write = metrics_keys.write().unwrap();
+        metrics_keys_write.retain(|url, _| {
+            summary.committee.members.iter().any(|(_, cm)| {
+                String::from_utf8(cm.http_rest_url.clone()).ok().as_ref() == Some(url)
+            })
+        });
+    }
+
+    let client = reqwest::Client::builder().build().unwrap();
+    let committee_members = summary.committee.members.clone();
+    let results: Vec<_> = stream::iter(committee_members)
+        .filter_map(|(_, cm)| {
+            let client = client.clone();
+            let metrics_keys = metrics_keys.clone();
+            async move {
+                debug!(
+                    address =% cm.sui_address,
+                    "Extracting metrics public key for bridge node",
+                );
+
+                // Convert the Vec<u8> to a String and handle errors properly
+                let url_str = match String::from_utf8(cm.http_rest_url) {
+                    Ok(url) => url,
+                    Err(_) => {
+                        error!(
+                            address =% cm.sui_address,
+                            "Invalid UTF-8 sequence in http_rest_url for bridge node",
+                        );
+                        return None;
+                    }
+                };
+
+                // Parse the URL
+                let mut bridge_url = match Url::parse(&url_str) {
+                    Ok(url) => url,
+                    Err(_) => {
+                        error!(url_str, "Unable to parse http_rest_url");
+                        return None;
+                    }
+                };
+                bridge_url.set_path("/metrics_pub_key");
+
+                // Use the host portion of the http_rest_url as the "name"
+                let bridge_host = match bridge_url.host_str() {
+                    Some(host) => host,
+                    None => {
+                        error!(url_str, "Hostname is missing from http_rest_url");
+                        return None;
+                    }
+                };
+                let bridge_name = String::from(bridge_host);
+                let bridge_request_url = bridge_url.as_str();
+
+                let metrics_pub_key = match client.get(bridge_request_url).send().await {
+                    Ok(response) => {
+                        let raw = response.bytes().await.ok()?;
+                        let metrics_pub_key: String = match serde_json::from_slice(&raw) {
+                            Ok(key) => key,
+                            Err(error) => {
+                                error!(?error, url_str, "Failed to deserialize response");
+                                return fallback_to_cached_key(
+                                    &metrics_keys,
+                                    &url_str,
+                                    &bridge_name,
+                                );
+                            }
+                        };
+                        let metrics_bytes = match Base64::decode(&metrics_pub_key) {
+                            Ok(pubkey_bytes) => pubkey_bytes,
+                            Err(error) => {
+                                error!(
+                                    ?error,
+                                    bridge_name, "unable to decode public key for bridge node",
+                                );
+                                return None;
+                            }
+                        };
+                        match Ed25519PublicKey::from_bytes(&metrics_bytes) {
+                            Ok(pubkey) => {
+                                // Successfully fetched the key, update the cache
+                                let mut metrics_keys_write = metrics_keys.write().unwrap();
+                                metrics_keys_write.insert(url_str.clone(), pubkey.clone());
+                                pubkey
+                            }
+                            Err(error) => {
+                                error!(
+                                    ?error,
+                                    bridge_request_url,
+                                    "unable to decode public key for bridge node",
+                                );
+                                return None;
+                            }
+                        }
+                    }
+                    Err(_) => {
+                        return fallback_to_cached_key(&metrics_keys, &url_str, &bridge_name);
+                    }
+                };
+                Some((
+                    metrics_pub_key.clone(),
+                    AllowedPeer {
+                        public_key: metrics_pub_key,
+                        name: bridge_name,
+                    },
+                ))
+            }
+        })
+        .collect()
+        .await;
+
+    results
+}
+
+fn fallback_to_cached_key(
+    metrics_keys: &MetricsPubKeys,
+    url_str: &str,
+    bridge_name: &str,
+) -> Option<(Ed25519PublicKey, AllowedPeer)> {
+    let metrics_keys_read = metrics_keys.read().unwrap();
+    if let Some(cached_key) = metrics_keys_read.get(url_str) {
+        debug!(
+            url_str,
+            "Using cached metrics public key after request failure"
+        );
+        Some((
+            cached_key.clone(),
+            AllowedPeer {
+                public_key: cached_key.clone(),
+                name: bridge_name.to_string(),
+            },
+        ))
+    } else {
+        error!(
+            url_str,
+            "Failed to fetch public key and no cached key available"
+        );
+        None
+    }
+}
 
 #[cfg(test)]
 mod tests {
     use super::*;
     use crate::admin::{generate_self_cert, CertKeyPair};
     use serde::Serialize;
+    use sui_types::base_types::SuiAddress;
+    use sui_types::bridge::{BridgeCommitteeSummary, BridgeSummary, MoveTypeCommitteeMember};
     use sui_types::sui_system_state::sui_system_state_summary::{
         SuiSystemStateSummary, SuiValidatorSummary,
     };
@@ -271,15 +504,11 @@ mod tests {
     #[test]
     fn depend_on_sui_sui_system_state_summary() {
         let CertKeyPair(_, client_pub_key) = generate_self_cert("sui".into());
-        let p2p_address: Multiaddr = "/ip4/127.0.0.1/tcp/10000"
-            .parse()
-            .expect("expected a multiaddr value");
         // all fields here just satisfy the field types, with exception to active_validators, we use
         // some of those.
         let depends_on = SuiSystemStateSummary {
             active_validators: vec![SuiValidatorSummary {
                 network_pubkey_bytes: Vec::from(client_pub_key.as_bytes()),
-                p2p_address: format!("{p2p_address}"),
                 primary_address: "empty".into(),
                 worker_address: "empty".into(),
                 ..Default::default()
@@ -301,4 +530,84 @@ mod tests {
         let peers = extract(deserialized.result);
         assert_eq!(peers.count(), 1, "peers should have been a length of 1");
     }
+
+    #[tokio::test]
+    async fn test_extract_bridge_invalid_bridge_url() {
+        let summary = BridgeSummary {
+            committee: BridgeCommitteeSummary {
+                members: vec![(
+                    vec![],
+                    MoveTypeCommitteeMember {
+                        sui_address: SuiAddress::ZERO,
+                        http_rest_url: "invalid_bridge_url".as_bytes().to_vec(),
+                        ..Default::default()
+                    },
+                )],
+                ..Default::default()
+            },
+            ..Default::default()
+        };
+
+        let metrics_keys = Arc::new(RwLock::new(HashMap::new()));
+        {
+            let mut cache = metrics_keys.write().unwrap();
+            cache.insert(
+                "invalid_bridge_url".to_string(),
+                Ed25519PublicKey::from_bytes(&[1u8; 32]).unwrap(),
+            );
+        }
+        let result = extract_bridge(summary, metrics_keys.clone()).await;
+
+        assert_eq!(
+            result.len(),
+            0,
+            "Should not fall back on cache if invalid bridge url is set"
+        );
+    }
+
+    #[tokio::test]
+    async fn test_extract_bridge_interrupted_response() {
+        let summary = BridgeSummary {
+            committee: BridgeCommitteeSummary {
+                members: vec![(
+                    vec![],
+                    MoveTypeCommitteeMember {
+                        sui_address: SuiAddress::ZERO,
+                        http_rest_url: "https://unresponsive_bridge_url".as_bytes().to_vec(),
+                        ..Default::default()
+                    },
+                )],
+                ..Default::default()
+            },
+            ..Default::default()
+        };
+
+        let metrics_keys = Arc::new(RwLock::new(HashMap::new()));
+        {
+            let mut cache = metrics_keys.write().unwrap();
+            cache.insert(
+                "https://unresponsive_bridge_url".to_string(),
+                Ed25519PublicKey::from_bytes(&[1u8; 32]).unwrap(),
+            );
+        }
+        let result = extract_bridge(summary, metrics_keys.clone()).await;
+
+        assert_eq!(
+            result.len(),
+            1,
+            "Should fall back on cache if invalid response occurs"
+        );
+        let allowed_peer = &result[0].1;
+        assert_eq!(
+            allowed_peer.public_key.as_bytes(),
+            &[1u8; 32],
+            "Should fall back to the cached public key"
+        );
+
+        let cache = metrics_keys.read().unwrap();
+        assert!(
+            cache.contains_key("https://unresponsive_bridge_url"),
+            "Cache should still contain the original key"
+        );
+    }
 }
diff --git a/crates/sui-rest-api/src/error.rs b/crates/sui-rest-api/src/error.rs
index 3d8fab535a48b..51432315d0d37 100644
--- a/crates/sui-rest-api/src/error.rs
+++ b/crates/sui-rest-api/src/error.rs
@@ -70,8 +70,7 @@ impl From<QuorumDriverError> for RestError {
         }
         ObjectsDoubleUsed {
             conflicting_txes,
-            retried_tx,
-            retried_tx_success,
+            retried_tx_status,
         } => {
             let new_map = conflicting_txes
                 .into_iter()
                 .map(
@@ -85,8 +84,8 @@ let message = format!(
                 "Failed to sign transaction by a quorum of validators because of locked objects. Retried a conflicting transaction {:?}, success: {:?}. Conflicting Transactions:\n{:#?}",
-                retried_tx,
-                retried_tx_success,
+                retried_tx_status.map(|(tx, _)| tx),
+                retried_tx_status.map(|(_, success)| success),
                 new_map,
             );
diff --git a/crates/sui-rest-api/src/response.rs b/crates/sui-rest-api/src/response.rs
index cc30c9935cd1c..1863bc6c45d54 100644
--- a/crates/sui-rest-api/src/response.rs
+++ b/crates/sui-rest-api/src/response.rs
@@ -129,61 +129,43 @@ pub async fn append_info_headers(
     State(state): State<RestService>,
     response: Response,
 ) -> impl IntoResponse {
-    let latest_checkpoint = state.reader.inner().get_latest_checkpoint().unwrap();
-    let lowest_available_checkpoint = state
-        .reader
-        .inner()
-        .get_lowest_available_checkpoint()
-        .unwrap();
+    let mut headers = HeaderMap::new();
+
+    if let Ok(chain_id) = state.chain_id().to_string().try_into() {
+        headers.insert(X_SUI_CHAIN_ID, chain_id);
+    }
+
+    if let Ok(chain) = state.chain_id().chain().as_str().try_into() {
+        headers.insert(X_SUI_CHAIN, chain);
+    }
+
+    if let Ok(latest_checkpoint) = state.reader.inner().get_latest_checkpoint() {
+        headers.insert(X_SUI_EPOCH, latest_checkpoint.epoch().into());
+        headers.insert(
+            X_SUI_CHECKPOINT_HEIGHT,
+            latest_checkpoint.sequence_number.into(),
+        );
+        headers.insert(X_SUI_TIMESTAMP_MS, latest_checkpoint.timestamp_ms.into());
+    }
+
+    if let Ok(lowest_available_checkpoint) = state.reader.inner().get_lowest_available_checkpoint()
+    {
+        headers.insert(
+            X_SUI_LOWEST_AVAILABLE_CHECKPOINT,
+            lowest_available_checkpoint.into(),
+        );
+    }
 
-    let lowest_available_checkpoint_objects = state
+    if let Ok(lowest_available_checkpoint_objects) = state
         .reader
         .inner()
         .get_lowest_available_checkpoint_objects()
-        .unwrap();
-
-    let mut headers = HeaderMap::new();
-
-    headers.insert(
-        X_SUI_CHAIN_ID,
-        state.chain_id().to_string().try_into().unwrap(),
-    );
-    headers.insert(
-        X_SUI_CHAIN,
-        state.chain_id().chain().as_str().try_into().unwrap(),
-    );
-    headers.insert(
-        X_SUI_EPOCH,
-        latest_checkpoint.epoch().to_string().try_into().unwrap(),
-    );
-    headers.insert(
-        X_SUI_CHECKPOINT_HEIGHT,
-        latest_checkpoint
-            .sequence_number()
-            .to_string()
-            .try_into()
-            .unwrap(),
-    );
-    headers.insert(
-        X_SUI_TIMESTAMP_MS,
-        latest_checkpoint
-            .timestamp_ms
-            .to_string()
-            .try_into()
-            .unwrap(),
-    );
-    headers.insert(
-        X_SUI_LOWEST_AVAILABLE_CHECKPOINT,
-        lowest_available_checkpoint.to_string().try_into().unwrap(),
-    );
-
-    headers.insert(
-        X_SUI_LOWEST_AVAILABLE_CHECKPOINT_OBJECTS,
-        lowest_available_checkpoint_objects
-            .to_string()
-            .try_into()
-            .unwrap(),
-    );
+    {
+        headers.insert(
+            X_SUI_LOWEST_AVAILABLE_CHECKPOINT_OBJECTS,
+            lowest_available_checkpoint_objects.into(),
+        );
+    }
 
     (headers, response)
 }
diff --git a/crates/sui-rosetta/Cargo.toml b/crates/sui-rosetta/Cargo.toml
index 7c3a1b5f925cf..4915e29c17732 100644
--- a/crates/sui-rosetta/Cargo.toml
+++ b/crates/sui-rosetta/Cargo.toml
@@ -49,3 +49,4 @@ test-cluster.workspace = true
 tempfile.workspace = true
 rand.workspace = true
 reqwest.workspace = true
+move-cli.workspace = true
diff --git a/crates/sui-rosetta/src/account.rs b/crates/sui-rosetta/src/account.rs
index 9d366ee88d3e5..9c5a80e8057b6 100644
--- a/crates/sui-rosetta/src/account.rs
+++ b/crates/sui-rosetta/src/account.rs
@@ -4,7 +4,7 @@ use axum::extract::State;
 use axum::{Extension, Json};
 use axum_extra::extract::WithRejection;
-use futures::StreamExt;
+use futures::{future::join_all, StreamExt};
 use sui_sdk::rpc_types::StakeStatus;
 use sui_sdk::{SuiClient, SUI_COIN_TYPE};
@@ -14,10 +14,12 @@ use tracing::info;
 use crate::errors::Error;
 use crate::types::{
     AccountBalanceRequest, AccountBalanceResponse, AccountCoinsRequest, AccountCoinsResponse,
-    Amount, Coin, SubAccount, SubAccountType, SubBalance,
+    Amount, Coin, Currencies, Currency, SubAccountType, SubBalance,
 };
 use crate::{OnlineServerContext, SuiEnv};
 use std::time::Duration;
+use sui_sdk::error::SuiRpcResult;
+use sui_types::messages_checkpoint::CheckpointSequenceNumber;
 
 /// Get an array of all AccountBalances for an AccountIdentifier and the BlockIdentifier
 /// at which the balance lookup was performed.
@@ -29,108 +31,97 @@ pub async fn balance(
 ) -> Result<AccountBalanceResponse, Error> {
     env.check_network_identifier(&request.network_identifier)?;
     let address = request.account_identifier.address;
+    let currencies = &request.currencies;
     let mut retry_attempts = 5;
-    if let Some(SubAccount { account_type }) = request.account_identifier.sub_account {
-        while retry_attempts > 0 {
-            let balances_first =
-                get_sub_account_balances(account_type.clone(), &ctx.client, address).await?;
-            let checkpoint1 = ctx
-                .client
-                .read_api()
-                .get_latest_checkpoint_sequence_number()
-                .await?;
-            // Get another checkpoint which is greater than current
-            let mut checkpoint2 = ctx
-                .client
-                .read_api()
-                .get_latest_checkpoint_sequence_number()
-                .await?;
-
-            while checkpoint2 <= checkpoint1 {
-                checkpoint2 = ctx
-                    .client
-                    .read_api()
-                    .get_latest_checkpoint_sequence_number()
-                    .await?;
-                tokio::time::sleep(Duration::from_secs(1)).await;
-            }
-            let balances_second =
-                get_sub_account_balances(account_type.clone(), &ctx.client, address).await?;
-            if balances_first.eq(&balances_second) {
-                return Ok(AccountBalanceResponse {
-                    block_identifier: ctx.blocks().create_block_identifier(checkpoint2).await?,
-                    balances: balances_first,
-                });
-            } else {
-                // retry logic needs to be aaded
-                retry_attempts -= 1;
-            }
+    while retry_attempts > 0 {
+        let balances_first = get_balances(&ctx, &request, address, currencies.clone()).await?;
+        let checkpoint1 = get_checkpoint(&ctx).await?;
+        let mut checkpoint2 = get_checkpoint(&ctx).await?;
+        while checkpoint2 <= checkpoint1 {
+            checkpoint2 = get_checkpoint(&ctx).await?;
+            tokio::time::sleep(Duration::from_secs(1)).await;
         }
-        Err(Error::RetryExhausted(String::from("retry")))
-    } else {
-        // Get current live balance
-        while retry_attempts > 0 {
-            let balances_first = ctx
-                .client
-                .coin_read_api()
-                .get_balance(address, Some(SUI_COIN_TYPE.to_string()))
-                .await?
-                .total_balance as i128;
-
-            // Get current latest checkpoint
-            let checkpoint1 = ctx
-                .client
-                .read_api()
-                .get_latest_checkpoint_sequence_number()
-                .await?;
+        let balances_second = get_balances(&ctx, &request, address, currencies.clone()).await?;
+        if balances_first.eq(&balances_second) {
+            info!(
+                "same balance for account {} at checkpoint {}",
+                address, checkpoint2
+            );
+            return Ok(AccountBalanceResponse {
+                block_identifier: ctx.blocks().create_block_identifier(checkpoint2).await?,
+                balances: balances_first,
+            });
+        } else {
+            info!(
+                "different balance for account {} at checkpoint {}",
+                address, checkpoint2
+            );
+            retry_attempts -= 1;
+        }
+    }
+    Err(Error::RetryExhausted(String::from("retry")))
+}
 
-            // Get another checkpoint which is greater than current
-            let mut checkpoint2 = ctx
-                .client
-                .read_api()
-                .get_latest_checkpoint_sequence_number()
-                .await?;
+async fn get_checkpoint(ctx: &OnlineServerContext) -> SuiRpcResult<CheckpointSequenceNumber> {
+    ctx.client
+        .read_api()
+        .get_latest_checkpoint_sequence_number()
+        .await
+}
 
-            while checkpoint2 <= checkpoint1 {
-                checkpoint2 = ctx
-                    .client
-                    .read_api()
-                    .get_latest_checkpoint_sequence_number()
-                    .await?;
-                tokio::time::sleep(Duration::from_secs(1)).await;
+async fn get_balances(
+    ctx: &OnlineServerContext,
+    request: &AccountBalanceRequest,
+    address: SuiAddress,
+    currencies: Currencies,
+) -> Result<Vec<Amount>, Error> {
+    if let Some(sub_account) = &request.account_identifier.sub_account {
+        let account_type = sub_account.account_type.clone();
+        get_sub_account_balances(account_type, &ctx.client, address).await
+    } else if !currencies.0.is_empty() {
+        let balance_futures = currencies.0.iter().map(|currency| {
+            let coin_type = currency.metadata.clone().coin_type.clone();
+            async move {
+                (
+                    currency.clone(),
+                    get_account_balances(ctx, address, &coin_type).await,
+                )
             }
-
-            // Get live balance again
-            let balances_second = ctx
-                .client
-                .coin_read_api()
-                .get_balance(address, Some(SUI_COIN_TYPE.to_string()))
-                .await?
-                .total_balance as i128;
-
-            // if those two live balances are equal then that is the current balance for checkpoint2
-            if balances_first.eq(&balances_second) {
-                info!(
-                    "same balance for account {} at checkpoint {}",
-                    address, checkpoint2
-                );
-                return Ok(AccountBalanceResponse {
-                    block_identifier: ctx.blocks().create_block_identifier(checkpoint2).await?,
-                    balances: vec![Amount::new(balances_first)],
-                });
-            } else {
-                // balances are different so we need to try again.
-                info!(
-                    "different balance for account {} at checkpoint {}",
-                    address, checkpoint2
-                );
-                retry_attempts -= 1;
+        });
+        let balances: Vec<(Currency, Result<i128, Error>)> = join_all(balance_futures).await;
+        let mut amounts = Vec::new();
+        for (currency, balance_result) in balances {
+            match balance_result {
+                Ok(value) => amounts.push(Amount::new(value, Some(currency))),
+                Err(_e) => {
+                    return Err(Error::InvalidInput(format!(
+                        "{:?}",
+                        currency.metadata.coin_type
+                    )))
+                }
             }
         }
-        Err(Error::RetryExhausted(String::from("retry")))
+        Ok(amounts)
+    } else {
+        Err(Error::InvalidInput(
+            "Coin type is required for this request".to_string(),
+        ))
     }
 }
 
+async fn get_account_balances(
+    ctx: &OnlineServerContext,
+    address: SuiAddress,
+    coin_type: &String,
+) -> Result<i128, Error> {
+    Ok(ctx
+        .client
+        .coin_read_api()
+        .get_balance(address, Some(coin_type.to_string()))
+        .await?
+        .total_balance as i128)
+}
+
 async fn get_sub_account_balances(
     account_type: SubAccountType,
     client: &SuiClient,
@@ -187,7 +178,7 @@ async fn get_sub_account_balances(
 
     // Make sure there is always one amount returned
     Ok(if amounts.is_empty() {
-        vec![Amount::new(0)]
+        vec![Amount::new(0, None)]
     } else {
         vec![Amount::new_from_sub_balances(amounts)]
     })
diff --git a/crates/sui-rosetta/src/block.rs b/crates/sui-rosetta/src/block.rs
index a7397c8137a4b..547d28383b6b2 100644
--- a/crates/sui-rosetta/src/block.rs
+++ b/crates/sui-rosetta/src/block.rs
@@ -6,6 +6,7 @@ use axum::{Extension, Json};
 use axum_extra::extract::WithRejection;
 use tracing::debug;
 
+use crate::operations::Operations;
 use crate::types::{
     BlockRequest, BlockResponse, BlockTransactionRequest, BlockTransactionResponse, Transaction,
     TransactionIdentifier,
@@ -57,7 +58,7 @@ pub async fn transaction(
         .await?;
     let hash = response.digest;
 
-    let operations = response.try_into()?;
+    let operations = Operations::try_from_response(response, &context.coin_metadata_cache).await?;
 
     let transaction = Transaction {
         transaction_identifier: TransactionIdentifier { hash },
diff --git a/crates/sui-rosetta/src/construction.rs b/crates/sui-rosetta/src/construction.rs
index d67c37ce34a94..97265f9ab62cc 100644
--- a/crates/sui-rosetta/src/construction.rs
+++ b/crates/sui-rosetta/src/construction.rs
@@ -16,7 +16,7 @@ use sui_json_rpc_types::{
     SuiTransactionBlockResponseOptions,
 };
 use sui_sdk::rpc_types::SuiExecutionStatus;
-use sui_types::base_types::SuiAddress;
+use sui_types::base_types::{ObjectRef, SuiAddress};
 use sui_types::crypto::{DefaultHash, SignatureScheme, ToFromBytes};
 use sui_types::error::SuiError;
 use sui_types::signature::{GenericSignature, VerifyParams};
@@ -198,7 +198,6 @@ pub async fn preprocess(
     let internal_operation = request.operations.into_internal()?;
     let sender = internal_operation.sender();
     let budget = request.metadata.and_then(|m| m.budget);
-
     Ok(ConstructionPreprocessResponse {
         options: Some(MetadataOptions {
             internal_operation,
@@ -239,6 +238,12 @@ pub async fn metadata(
     let option = request.options.ok_or(Error::MissingMetadata)?;
     let budget = option.budget;
     let sender = option.internal_operation.sender();
+    let currency = match &option.internal_operation {
+        InternalOperation::PayCoin { currency, .. } => Some(currency.clone()),
+        _ => None,
+    };
+    let coin_type = currency.as_ref().map(|c| c.metadata.coin_type.clone());
+
     let mut gas_price = context
         .client
         .governance_api()
@@ -253,6 +258,20 @@ pub async fn metadata(
             let amount = amounts.iter().sum::<u64>();
             (Some(amount), vec![])
         }
+        InternalOperation::PayCoin { amounts, .. } => {
+            let amount = amounts.iter().sum::<u64>();
+            let coin_objs: Vec<ObjectRef> = context
+                .client
+                .coin_read_api()
+                .select_coins(sender, coin_type, amount.into(), vec![])
+                .await
+                .ok()
+                .unwrap_or_default()
+                .iter()
+                .map(|coin| coin.object_ref())
+                .collect();
+            (Some(0), coin_objs) // amount is 0 for gas coin
+        }
         InternalOperation::Stake { amount, .. } => (*amount, vec![]),
         InternalOperation::WithdrawStake { sender, stake_ids } => {
             let stake_ids = if stake_ids.is_empty() {
@@ -313,6 +332,7 @@ pub async fn metadata(
         gas_price,
         // MAX BUDGET
         budget: 50_000_000_000,
+        currency: currency.clone(),
     })?;
 
     let dry_run = context
@@ -329,7 +349,7 @@ pub async fn metadata(
         }
     };
 
-    // Try select coins for required amounts
+    // Try to select gas coins for the required amounts
     let coins = if let Some(amount) = total_required_amount {
         let total_amount = amount + budget;
         context
@@ -369,8 +389,9 @@ pub async fn metadata(
             total_coin_value,
             gas_price,
             budget,
+            currency,
         },
-        suggested_fee: vec![Amount::new(budget as i128)],
+        suggested_fee: vec![Amount::new(budget as i128, None)],
     })
 }
diff --git a/crates/sui-rosetta/src/lib.rs b/crates/sui-rosetta/src/lib.rs
index 9887a8b95a8ca..27f6117f9f1a0 100644
--- a/crates/sui-rosetta/src/lib.rs
+++ b/crates/sui-rosetta/src/lib.rs
@@ -1,19 +1,24 @@
 // Copyright (c) Mysten Labs, Inc.
 // SPDX-License-Identifier: Apache-2.0
 
+use std::collections::HashMap;
 use std::net::SocketAddr;
+use std::string::ToString;
 use std::sync::Arc;
 
 use axum::routing::post;
 use axum::{Extension, Router};
+use move_core_types::language_storage::TypeTag;
 use once_cell::sync::Lazy;
+use tokio::sync::Mutex;
 use tracing::info;
 
-use sui_sdk::SuiClient;
+use sui_sdk::{SuiClient, SUI_COIN_TYPE};
 
 use crate::errors::Error;
+use crate::errors::Error::MissingMetadata;
 use crate::state::{CheckpointBlockProvider, OnlineServerContext};
-use crate::types::{Currency, SuiEnv};
+use crate::types::{Currency, CurrencyMetadata, SuiEnv};
 
 /// This lib implements the Rosetta online and offline server defined by the [Rosetta API Spec](https://www.rosetta-api.org/docs/Reference.html)
 mod account;
@@ -28,6 +33,9 @@ pub mod types;
 pub static SUI: Lazy<Currency> = Lazy::new(|| Currency {
     symbol: "SUI".to_string(),
     decimals: 9,
+    metadata: CurrencyMetadata {
+        coin_type: SUI_COIN_TYPE.to_string(),
+    },
 });
 
 pub struct RosettaOnlineServer {
@@ -37,10 +45,14 @@ pub struct RosettaOnlineServer {
 impl RosettaOnlineServer {
     pub fn new(env: SuiEnv, client: SuiClient) -> Self {
-        let blocks = Arc::new(CheckpointBlockProvider::new(client.clone()));
+        let coin_cache = CoinMetadataCache::new(client.clone());
+        let blocks = Arc::new(CheckpointBlockProvider::new(
+            client.clone(),
+            coin_cache.clone(),
+        ));
         Self {
             env,
-            context: OnlineServerContext::new(client, blocks),
+            context: OnlineServerContext::new(client, blocks, coin_cache),
         }
     }
 
@@ -99,3 +111,40 @@ impl RosettaOfflineServer {
         axum::serve(listener, app).await.unwrap();
     }
 }
+
+#[derive(Clone)]
+pub struct CoinMetadataCache {
+    client: SuiClient,
+    metadata: Arc<Mutex<HashMap<TypeTag, Currency>>>,
+}
+
+impl CoinMetadataCache {
+    pub fn new(client: SuiClient) -> Self {
+        CoinMetadataCache {
+            client,
+            metadata: Default::default(),
+        }
+    }
+
+    pub async fn get_currency(&self, type_tag: &TypeTag) -> Result<Currency, Error> {
+        let mut cache = self.metadata.lock().await;
+        if !cache.contains_key(type_tag) {
+            let metadata = self
+                .client
+                .coin_read_api()
+                .get_coin_metadata(type_tag.to_string())
+                .await?
+                .ok_or(MissingMetadata)?;
+
+            let ccy = Currency {
+                symbol: metadata.symbol,
+                decimals: metadata.decimals as u64,
+                metadata: CurrencyMetadata {
+                    coin_type: type_tag.to_string(),
+                },
+            };
+            cache.insert(type_tag.clone(), ccy);
+        }
+        cache.get(type_tag).cloned().ok_or(MissingMetadata)
+    }
+}
diff --git a/crates/sui-rosetta/src/operations.rs b/crates/sui-rosetta/src/operations.rs
index 5f7a27381b070..38ea2cdca2e21 100644
--- a/crates/sui-rosetta/src/operations.rs
+++ b/crates/sui-rosetta/src/operations.rs
@@ -22,7 +22,7 @@ use sui_sdk::rpc_types::{
     SuiTransactionBlockKind, SuiTransactionBlockResponse,
 };
 use sui_types::base_types::{ObjectID, SequenceNumber, SuiAddress};
-use sui_types::gas_coin::{GasCoin, GAS};
+use sui_types::gas_coin::GasCoin;
 use sui_types::governance::{ADD_STAKE_FUN_NAME, WITHDRAW_STAKE_FUN_NAME};
 use sui_types::object::Owner;
 use sui_types::sui_system_state::SUI_SYSTEM_MODULE_NAME;
@@ -30,10 +30,10 @@ use sui_types::transaction::TransactionData;
 use sui_types::{SUI_SYSTEM_ADDRESS, SUI_SYSTEM_PACKAGE_ID};
 
 use crate::types::{
-    AccountIdentifier, Amount, CoinAction, CoinChange, CoinID, CoinIdentifier, InternalOperation,
-    OperationIdentifier, OperationStatus, OperationType,
+    AccountIdentifier, Amount, CoinAction, CoinChange, CoinID, CoinIdentifier, Currency,
+    InternalOperation, OperationIdentifier, OperationStatus, OperationType,
 };
-use crate::Error;
+use crate::{CoinMetadataCache, Error, SUI};
 
 #[cfg(test)]
 #[path = "unit_tests/operations_tests.rs"]
@@ -101,6 +101,7 @@ impl Operations {
             .ok_or_else(|| Error::MissingInput("Operation type".into()))?;
         match type_ {
             OperationType::PaySui => self.pay_sui_ops_to_internal(),
+            OperationType::PayCoin => self.pay_coin_ops_to_internal(),
             OperationType::Stake => self.stake_ops_to_internal(),
             OperationType::WithdrawStake => self.withdraw_stake_ops_to_internal(),
             op => Err(Error::UnsupportedOperation(op)),
@@ -135,6 +136,38 @@ impl Operations {
         })
     }
 
+    fn pay_coin_ops_to_internal(self) -> Result<InternalOperation, Error> {
+        let mut recipients = vec![];
+        let mut amounts = vec![];
+        let mut sender = None;
+        let mut currency = None;
+        for op in self {
+            if let (Some(amount), Some(account)) = (op.amount.clone(), op.account.clone()) {
+                currency = currency.or(Some(amount.currency));
+                if amount.value.is_negative() {
+                    sender = Some(account.address)
+                } else {
+                    recipients.push(account.address);
+                    let amount = amount.value.abs();
+                    if amount > u64::MAX as i128 {
+                        return Err(Error::InvalidInput(
+                            "Input amount exceeds u64::MAX".to_string(),
+                        ));
+                    }
+                    amounts.push(amount as u64)
+                }
+            }
+        }
+        let sender = sender.ok_or_else(|| Error::MissingInput("Sender address".to_string()))?;
+        let currency = currency.ok_or_else(|| Error::MissingInput("Currency".to_string()))?;
+        Ok(InternalOperation::PayCoin {
+            sender,
+            recipients,
+            amounts,
+            currency,
+        })
+    }
+
     fn stake_ops_to_internal(self) -> Result<InternalOperation, Error> {
         let mut ops = self
             .0
@@ -259,7 +292,7 @@ impl Operations {
             }
             SuiArgument::GasCoin => (),
             // Might not be a SUI coin
-            SuiArgument::Input(_) => return None,
+            SuiArgument::Input(_) => (),
         };
         let amounts = amounts
             .iter()
@@ -366,6 +399,7 @@ impl Operations {
         let mut needs_generic = false;
         let mut operations = vec![];
         let mut stake_ids = vec![];
+        let mut currency: Option<Currency> = None;
         for command in commands {
             let result = match command {
                 SuiCommand::SplitCoins(coin, amounts) => {
@@ -380,7 +414,7 @@ impl Operations {
                 ),
                 SuiCommand::MoveCall(m) if Self::is_stake_call(m) => {
                     stake_call(inputs, &known_results, m)?.map(|(amount, validator)| {
-                        let amount = amount.map(|amount| Amount::new(-(amount as i128)));
+                        let amount = amount.map(|amount| Amount::new(-(amount as i128), None));
                         operations.push(Operation {
                             operation_identifier: Default::default(),
                             type_: OperationType::Stake,
@@ -414,10 +448,43 @@ impl Operations {
                 aggregated_recipients
                     .into_iter()
                     .map(|(recipient, amount)| {
-                        Operation::pay_sui(status, recipient, amount.into())
+                        currency = inputs.iter().last().and_then(|arg| {
+                            if let SuiCallArg::Pure(value) = arg {
+                                let bytes = value
+                                    .value()
+                                    .to_json_value()
+                                    .as_array()?
+                                    .clone()
+                                    .into_iter()
+                                    .map(|v| v.as_u64().map(|n| n as u8))
+                                    .collect::<Option<Vec<u8>>>()?;
+                                bcs::from_bytes::<String>(&bytes)
+                                    .ok()
+                                    .and_then(|bcs_str| serde_json::from_str(&bcs_str).ok())
+                            } else {
+                                None
+                            }
+                        });
+                        match currency {
+                            Some(_) => Operation::pay_coin(
+                                status,
+                                recipient,
+                                amount.into(),
+                                currency.clone(),
+                            ),
+                            None => Operation::pay_sui(status, recipient, amount.into()),
+                        }
                     }),
             );
-            operations.push(Operation::pay_sui(status, sender, -(total_paid as i128)));
+            match currency {
+                Some(_) => operations.push(Operation::pay_coin(
+                    status,
+                    sender,
+                    -(total_paid as i128),
+                    currency.clone(),
+                )),
+                _ => operations.push(Operation::pay_sui(status, sender, -(total_paid as i128))),
+            }
         } else if !stake_ids.is_empty() {
             let stake_ids = stake_ids.into_iter().flatten().collect::<Vec<_>>();
             let metadata = stake_ids
@@ -458,28 +525,28 @@ impl Operations {
     fn process_balance_change(
         gas_owner: SuiAddress,
         gas_used: i128,
-        balance_changes: &[BalanceChange],
+        balance_changes: Vec<(BalanceChange, Currency)>,
         status: Option<OperationStatus>,
-        balances: HashMap<SuiAddress, i128>,
+        balances: HashMap<(SuiAddress, Currency), i128>,
     ) -> impl Iterator<Item = Operation> {
-        let mut balances = balance_changes
-            .iter()
-            .fold(balances, |mut balances, balance_change| {
-                // Rosetta only care about address owner
-                if let Owner::AddressOwner(owner) = balance_change.owner {
-                    if balance_change.coin_type == GAS::type_tag() {
-                        *balances.entry(owner).or_default() += balance_change.amount;
+        let mut balances =
+            balance_changes
+                .iter()
+                .fold(balances, |mut balances, (balance_change, ccy)| {
+                    // Rosetta only cares about the address owner
+                    if let Owner::AddressOwner(owner) = balance_change.owner {
+                        *balances.entry((owner, ccy.clone())).or_default() +=
+                            balance_change.amount;
                     }
-                }
-                balances
-            });
+                    balances
+                });
 
         // separate gas from balances
-        *balances.entry(gas_owner).or_default() -= gas_used;
+        *balances.entry((gas_owner, SUI.clone())).or_default() -= gas_used;
 
-        let balance_change = balances
-            .into_iter()
-            .filter(|(_, amount)| *amount != 0)
-            .map(move |(addr, amount)| Operation::balance_change(status, addr, amount));
+        let balance_change = balances.into_iter().filter(|(_, amount)| *amount != 0).map(
+            move |((addr, currency), amount)| {
+                Operation::balance_change(status, addr, amount, currency)
+            },
+        );
 
         let gas = if gas_used != 0 {
             vec![Operation::gas(gas_owner, gas_used)]
@@ -502,10 +569,11 @@ impl TryFrom<TransactionData> for Operations {
         )?))
     }
 }
-
-impl TryFrom<SuiTransactionBlockResponse> for Operations {
-    type Error = Error;
-    fn try_from(response: SuiTransactionBlockResponse) -> Result<Self, Self::Error> {
+impl Operations {
+    pub async fn try_from_response(
+        response: SuiTransactionBlockResponse,
+        cache: &CoinMetadataCache,
+    ) -> Result<Self, Error> {
         let tx = response
             .transaction
             .ok_or_else(|| anyhow!("Response input should not be empty"))?;
@@ -532,7 +600,9 @@ impl TryFrom<SuiTransactionBlockResponse> for Operations {
             if let (Some(acc), Some(amount), Some(OperationStatus::Success)) =
                 (&op.account, &op.amount, &op.status)
             {
-                *balances.entry(acc.address).or_default() -= amount.value;
+                *balances
+                    .entry((acc.address, amount.clone().currency))
+                    .or_default() -= amount.value;
             }
             balances
         });
@@ -564,8 +634,8 @@
         }
     }
     let staking_balance = if principal_amounts != 0 {
-        *accounted_balances.entry(sender).or_default() -= principal_amounts;
-        *accounted_balances.entry(sender).or_default() -= reward_amounts;
+        *accounted_balances.entry((sender, SUI.clone())).or_default() -= principal_amounts;
+        *accounted_balances.entry((sender, SUI.clone())).or_default() -= reward_amounts;
         vec![
             Operation::stake_principle(status, sender, principal_amounts),
             Operation::stake_reward(status, sender, reward_amounts),
@@ -574,22 +644,75 @@
         vec![]
    };
 
+    let mut balance_changes = vec![];
+
+    for balance_change in &response
+        .balance_changes
+        .ok_or_else(|| anyhow!("Response balance changes should not be empty."))?
+    {
+        if let Ok(currency) = cache.get_currency(&balance_change.coin_type).await {
+            balance_changes.push((balance_change.clone(), currency));
+        }
+    }
+
     // Extract coin change operations from balance changes
     let coin_change_operations = Self::process_balance_change(
         gas_owner,
         gas_used,
-        &response
-            .balance_changes
-            .ok_or_else(|| anyhow!("Response balance changes should not be empty."))?,
+        balance_changes,
         status,
         accounted_balances,
     );
 
-    Ok(ops
+    let ops: Operations = ops
         .into_iter()
         .chain(coin_change_operations)
         .chain(staking_balance)
-        .collect())
+        .collect();
+
+    // This is a workaround for the PayCoin cases that are mistakenly considered to be PaySui
+    // operations. In this case we remove any irrelevant, SUI-specific operation entries that
+    // sum to a zero balance change per address, and keep only the actual entries for the right
+    // coin type transfers, as they have been extracted from the transaction's balance changes
+    // section.
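+    // For example (hypothetical amounts): a PayCoin transfer of 100 MY_COIN can be
+    // mistakenly rendered above as a PaySui operation of -100 denominated in SUI for
+    // the sender, while the compensating accounting entry derived from the actual
+    // balance changes is +100 SUI; that pair sums to zero for (sender, SUI) and is
+    // filtered out below, whereas the MY_COIN balance-change operations and the Gas
+    // operation survive the filter.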
+ let mutually_cancelling_balances: HashMap<_, _> = ops + .clone() + .into_iter() + .fold( + HashMap::new(), + |mut balances: HashMap<(SuiAddress, Currency), i128>, op| { + if let (Some(acc), Some(amount), Some(OperationStatus::Success)) = + (&op.account, &op.amount, &op.status) + { + if op.type_ != OperationType::Gas { + *balances + .entry((acc.address, amount.clone().currency)) + .or_default() += amount.value; + } + } + balances + }, + ) + .into_iter() + .filter(|balance| { + let (_, amount) = balance; + *amount == 0 + }) + .collect(); + + let ops: Operations = ops + .clone() + .into_iter() + .filter(|op| { + if let (Some(acc), Some(amount)) = (&op.account, &op.amount) { + return op.type_ == OperationType::Gas + || !mutually_cancelling_balances + .contains_key(&(acc.address, amount.clone().currency)); + } + true + }) + .collect(); + + Ok(ops) } } @@ -672,7 +795,7 @@ impl Operation { type_: OperationType::Genesis, status: Some(OperationStatus::Success), account: Some(sender.into()), - amount: Some(Amount::new(coin.value().into())), + amount: Some(Amount::new(coin.value().into(), None)), coin_change: Some(CoinChange { coin_identifier: CoinIdentifier { identifier: CoinID { @@ -692,19 +815,41 @@ impl Operation { type_: OperationType::PaySui, status, account: Some(address.into()), - amount: Some(Amount::new(amount)), + amount: Some(Amount::new(amount, None)), coin_change: None, metadata: None, } } - fn balance_change(status: Option, addr: SuiAddress, amount: i128) -> Self { + fn pay_coin( + status: Option, + address: SuiAddress, + amount: i128, + currency: Option, + ) -> Self { + Operation { + operation_identifier: Default::default(), + type_: OperationType::PayCoin, + status, + account: Some(address.into()), + amount: Some(Amount::new(amount, currency)), + coin_change: None, + metadata: None, + } + } + + fn balance_change( + status: Option, + addr: SuiAddress, + amount: i128, + currency: Currency, + ) -> Self { Self { operation_identifier: Default::default(), type_: OperationType::SuiBalanceChange, status, account: Some(addr.into()), - amount: Some(Amount::new(amount)), + amount: Some(Amount::new(amount, Some(currency))), coin_change: None, metadata: None, } @@ -715,7 +860,7 @@ impl Operation { type_: OperationType::Gas, status: Some(OperationStatus::Success), account: Some(addr.into()), - amount: Some(Amount::new(amount)), + amount: Some(Amount::new(amount, None)), coin_change: None, metadata: None, } @@ -726,7 +871,7 @@ impl Operation { type_: OperationType::StakeReward, status, account: Some(addr.into()), - amount: Some(Amount::new(amount)), + amount: Some(Amount::new(amount, None)), coin_change: None, metadata: None, } @@ -737,7 +882,7 @@ impl Operation { type_: OperationType::StakePrinciple, status, account: Some(addr.into()), - amount: Some(Amount::new(amount)), + amount: Some(Amount::new(amount, None)), coin_change: None, metadata: None, } diff --git a/crates/sui-rosetta/src/state.rs b/crates/sui-rosetta/src/state.rs index 932cf5f0f9539..c7d479c078d58 100644 --- a/crates/sui-rosetta/src/state.rs +++ b/crates/sui-rosetta/src/state.rs @@ -1,18 +1,21 @@ // Copyright (c) Mysten Labs, Inc. 
// SPDX-License-Identifier: Apache-2.0 -use crate::operations::Operations; -use crate::types::{ - Block, BlockHash, BlockIdentifier, BlockResponse, Transaction, TransactionIdentifier, -}; -use crate::Error; -use async_trait::async_trait; use std::sync::Arc; + +use async_trait::async_trait; + use sui_json_rpc_types::SuiTransactionBlockResponseOptions; use sui_sdk::rpc_types::Checkpoint; use sui_sdk::SuiClient; use sui_types::messages_checkpoint::CheckpointSequenceNumber; +use crate::operations::Operations; +use crate::types::{ + Block, BlockHash, BlockIdentifier, BlockResponse, Transaction, TransactionIdentifier, +}; +use crate::{CoinMetadataCache, Error}; + #[cfg(test)] #[path = "unit_tests/balance_changing_tx_tests.rs"] mod balance_changing_tx_tests; @@ -20,14 +23,20 @@ mod balance_changing_tx_tests; #[derive(Clone)] pub struct OnlineServerContext { pub client: SuiClient, + pub coin_metadata_cache: CoinMetadataCache, block_provider: Arc, } impl OnlineServerContext { - pub fn new(client: SuiClient, block_provider: Arc) -> Self { + pub fn new( + client: SuiClient, + block_provider: Arc, + coin_metadata_cache: CoinMetadataCache, + ) -> Self { Self { - client, + client: client.clone(), block_provider, + coin_metadata_cache, } } @@ -53,6 +62,7 @@ pub trait BlockProvider { #[derive(Clone)] pub struct CheckpointBlockProvider { client: SuiClient, + coin_metadata_cache: CoinMetadataCache, } #[async_trait] @@ -103,8 +113,11 @@ impl BlockProvider for CheckpointBlockProvider { } impl CheckpointBlockProvider { - pub fn new(client: SuiClient) -> Self { - Self { client } + pub fn new(client: SuiClient, coin_metadata_cache: CoinMetadataCache) -> Self { + Self { + client, + coin_metadata_cache, + } } async fn create_block_response(&self, checkpoint: Checkpoint) -> Result { @@ -127,7 +140,8 @@ impl CheckpointBlockProvider { for tx in transaction_responses.into_iter() { transactions.push(Transaction { transaction_identifier: TransactionIdentifier { hash: tx.digest }, - operations: Operations::try_from(tx)?, + operations: Operations::try_from_response(tx, &self.coin_metadata_cache) + .await?, related_transactions: vec![], metadata: None, }) diff --git a/crates/sui-rosetta/src/types.rs b/crates/sui-rosetta/src/types.rs index 837d3c20d6249..57d906efd9a2b 100644 --- a/crates/sui-rosetta/src/types.rs +++ b/crates/sui-rosetta/src/types.rs @@ -29,6 +29,10 @@ use crate::errors::{Error, ErrorType}; use crate::operations::Operations; use crate::SUI; +#[cfg(test)] +#[path = "unit_tests/types_tests.rs"] +mod types_tests; + pub type BlockHeight = u64; #[derive(Serialize, Deserialize, Clone, Debug)] @@ -95,20 +99,63 @@ impl From for AccountIdentifier { } } -#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)] +#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq, Hash)] pub struct Currency { pub symbol: String, pub decimals: u64, + #[serde(default)] + pub metadata: CurrencyMetadata, } -#[derive(Serialize, Deserialize)] + +#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq, Hash)] +pub struct CurrencyMetadata { + pub coin_type: String, +} + +impl Default for CurrencyMetadata { + fn default() -> Self { + SUI.metadata.clone() + } +} + +impl Default for Currency { + fn default() -> Self { + SUI.clone() + } +} + +#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)] +#[serde(transparent)] +pub struct Currencies(pub Vec); + +impl Default for Currencies { + fn default() -> Self { + Currencies(vec![Currency::default()]) + } +} + +fn deserialize_or_default_currencies<'de, D>(deserializer: D) 
-> Result +where + D: Deserializer<'de>, +{ + let opt: Option> = Option::deserialize(deserializer)?; + match opt { + Some(vec) if vec.is_empty() => Ok(Currencies::default()), + Some(vec) => Ok(Currencies(vec)), + None => Ok(Currencies::default()), + } +} + +#[derive(Serialize, Deserialize, Debug)] pub struct AccountBalanceRequest { pub network_identifier: NetworkIdentifier, pub account_identifier: AccountIdentifier, #[serde(default)] pub block_identifier: PartialBlockIdentifier, - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub currencies: Vec, + #[serde(default, deserialize_with = "deserialize_or_default_currencies")] + pub currencies: Currencies, } + #[derive(Serialize, Deserialize, Debug)] pub struct AccountBalanceResponse { pub block_identifier: BlockIdentifier, @@ -133,6 +180,7 @@ pub type BlockHash = CheckpointDigest; pub struct Amount { #[serde(with = "str_format")] pub value: i128, + #[serde(default)] pub currency: Currency, #[serde(default, skip_serializing_if = "Option::is_none")] pub metadata: Option, @@ -152,10 +200,10 @@ pub struct SubBalance { } impl Amount { - pub fn new(value: i128) -> Self { + pub fn new(value: i128, currency: Option) -> Self { Self { value, - currency: SUI.clone(), + currency: currency.unwrap_or_default(), metadata: None, } } @@ -164,7 +212,7 @@ impl Amount { Self { value, - currency: SUI.clone(), + currency: Currency::default(), metadata: Some(AmountMetadata { sub_balances }), } } @@ -409,6 +457,7 @@ pub enum OperationType { StakePrinciple, // sui-rosetta supported operation type PaySui, + PayCoin, Stake, WithdrawStake, // All other Sui transaction types, readonly @@ -617,6 +666,7 @@ pub struct ConstructionMetadata { pub total_coin_value: u64, pub gas_price: u64, pub budget: u64, + pub currency: Option, } impl IntoResponse for ConstructionMetadataResponse { @@ -867,6 +917,12 @@ pub enum InternalOperation { recipients: Vec, amounts: Vec, }, + PayCoin { + sender: SuiAddress, + recipients: Vec, + amounts: Vec, + currency: Currency, + }, Stake { sender: SuiAddress, validator: SuiAddress, @@ -883,6 +939,7 @@ impl InternalOperation { pub fn sender(&self) -> SuiAddress { match self { InternalOperation::PaySui { sender, .. } + | InternalOperation::PayCoin { sender, .. } | InternalOperation::Stake { sender, .. } | InternalOperation::WithdrawStake { sender, .. } => *sender, } @@ -899,6 +956,24 @@ impl InternalOperation { builder.pay_sui(recipients, amounts)?; builder.finish() } + Self::PayCoin { + recipients, + amounts, + .. + } => { + let mut builder = ProgrammableTransactionBuilder::new(); + builder.pay(metadata.objects.clone(), recipients, amounts)?; + let currency_str = serde_json::to_string(&metadata.currency.unwrap()).unwrap(); + // This is a workaround in order to have the currency info available during the process + // of constructing back the Operations object from the transaction data. A process that + // takes place upon the request to the construction's /parse endpoint. The pure value is + // not actually being used in any on-chain transaction execution and its sole purpose + // is to act as a bearer of the currency info between the various steps of the flow. + // See also the value is being later accessed within the operations.rs file's + // parse_programmable_transaction function. + builder.pure(currency_str)?; + builder.finish() + } InternalOperation::Stake { validator, amount, .. 
} => { diff --git a/crates/sui-rosetta/src/unit_tests/balance_changing_tx_tests.rs b/crates/sui-rosetta/src/unit_tests/balance_changing_tx_tests.rs index 3fcf3044a3ee7..a5bff2c3edee9 100644 --- a/crates/sui-rosetta/src/unit_tests/balance_changing_tx_tests.rs +++ b/crates/sui-rosetta/src/unit_tests/balance_changing_tx_tests.rs @@ -3,6 +3,7 @@ use crate::operations::Operations; use crate::types::{ConstructionMetadata, OperationStatus, OperationType}; +use crate::CoinMetadataCache; use anyhow::anyhow; use move_core_types::identifier::Identifier; use move_core_types::language_storage::StructTag; @@ -26,7 +27,7 @@ use sui_sdk::rpc_types::{ }; use sui_sdk::SuiClient; use sui_types::base_types::{ObjectID, ObjectRef, SuiAddress}; -use sui_types::gas_coin::GasCoin; +use sui_types::gas_coin::{GasCoin, GAS}; use sui_types::programmable_transaction_builder::ProgrammableTransactionBuilder; use sui_types::quorum_driver_types::ExecuteTransactionRequestType; use sui_types::transaction::{ @@ -629,6 +630,7 @@ async fn test_delegation_parsing() -> Result<(), anyhow::Error> { total_coin_value: 0, gas_price: rgp, budget: rgp * TEST_ONLY_GAS_UNIT_FOR_STAKING, + currency: None, }; let parsed_data = ops.clone().into_internal()?.try_into_data(metadata)?; assert_eq!(ops, Operations::try_from(parsed_data)?); @@ -748,8 +750,10 @@ async fn test_transaction( SuiExecutionStatus::Failure { .. } )); } - - let ops = response.clone().try_into().unwrap(); + let coin_cache = CoinMetadataCache::new(client.clone()); + let ops = Operations::try_from_response(response.clone(), &coin_cache) + .await + .unwrap(); let balances_from_ops = extract_balance_changes_from_ops(ops); // get actual balance changed after transaction @@ -775,11 +779,15 @@ fn extract_balance_changes_from_ops(ops: Operations) -> HashMap<SuiAddress, i128> { if let (Some(addr), Some(amount)) = (op.account, op.amount) { + // TODO: amend this method and tests to cover other coin types too (e.g.
test_publish_and_move_call also mints MY_COIN) + if amount.currency.metadata.coin_type == GAS::type_().to_string() { + *changes.entry(addr.address).or_default() += amount.value + } } } _ => {} diff --git a/crates/sui-rosetta/src/unit_tests/operations_tests.rs b/crates/sui-rosetta/src/unit_tests/operations_tests.rs index 2d7d838fca13e..4eb4ee71d4192 100644 --- a/crates/sui-rosetta/src/unit_tests/operations_tests.rs +++ b/crates/sui-rosetta/src/unit_tests/operations_tests.rs @@ -8,10 +8,11 @@ use sui_types::programmable_transaction_builder::ProgrammableTransactionBuilder; use sui_types::transaction::{CallArg, TransactionData, TEST_ONLY_GAS_UNIT_FOR_TRANSFER}; use crate::operations::Operations; -use crate::types::ConstructionMetadata; +use crate::types::{ConstructionMetadata, OperationType}; +use crate::SUI; #[tokio::test] -async fn test_operation_data_parsing() -> Result<(), anyhow::Error> { +async fn test_operation_data_parsing_pay_sui() -> Result<(), anyhow::Error> { let gas = ( ObjectID::random(), SequenceNumber::new(), @@ -37,6 +38,9 @@ async fn test_operation_data_parsing() -> Result<(), anyhow::Error> { ); let ops: Operations = data.clone().try_into()?; + ops.0 + .iter() + .for_each(|op| assert_eq!(op.type_, OperationType::PaySui)); let metadata = ConstructionMetadata { sender, coins: vec![gas], @@ -44,6 +48,63 @@ async fn test_operation_data_parsing() -> Result<(), anyhow::Error> { total_coin_value: 0, gas_price, budget: TEST_ONLY_GAS_UNIT_FOR_TRANSFER * gas_price, + currency: None, + }; + let parsed_data = ops.into_internal()?.try_into_data(metadata)?; + assert_eq!(data, parsed_data); + + Ok(()) +} +#[tokio::test] +async fn test_operation_data_parsing_pay_coin() -> Result<(), anyhow::Error> { + let gas = ( + ObjectID::random(), + SequenceNumber::new(), + ObjectDigest::random(), + ); + + let coin = ( + ObjectID::random(), + SequenceNumber::new(), + ObjectDigest::random(), + ); + + let sender = SuiAddress::random_for_testing_only(); + + let pt = { + let mut builder = ProgrammableTransactionBuilder::new(); + builder + .pay( + vec![coin], + vec![SuiAddress::random_for_testing_only()], + vec![10000], + ) + .unwrap(); + // the following is important in order to be able to transfer the coin type info between the various flow steps + builder.pure(serde_json::to_string(&SUI.clone())?)?; + builder.finish() + }; + let gas_price = 10; + let data = TransactionData::new_programmable( + sender, + vec![gas], + pt, + TEST_ONLY_GAS_UNIT_FOR_TRANSFER * gas_price, + gas_price, + ); + + let ops: Operations = data.clone().try_into()?; + ops.0 + .iter() + .for_each(|op| assert_eq!(op.type_, OperationType::PayCoin)); + let metadata = ConstructionMetadata { + sender, + coins: vec![gas], + objects: vec![coin], + total_coin_value: 0, + gas_price, + budget: TEST_ONLY_GAS_UNIT_FOR_TRANSFER * gas_price, + currency: Some(SUI.clone()), }; let parsed_data = ops.into_internal()?.try_into_data(metadata)?; assert_eq!(data, parsed_data); diff --git a/crates/sui-rosetta/src/unit_tests/types_tests.rs b/crates/sui-rosetta/src/unit_tests/types_tests.rs new file mode 100644 index 0000000000000..72b66b986c309 --- /dev/null +++ b/crates/sui-rosetta/src/unit_tests/types_tests.rs @@ -0,0 +1,67 @@ +// Copyright (c) Mysten Labs, Inc. 
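(The types_tests module opening here pins down the defaulting behavior wired into types.rs: a Currency without metadata picks up CurrencyMetadata::default() (the SUI coin type), an Amount without a currency becomes Currency::default(), and an omitted or empty currencies array on AccountBalanceRequest collapses to the one-element SUI default. A minimal sketch of the first case:

    let c: Currency = serde_json::from_value(json!({"symbol": "SUI", "decimals": 9}))?;
    assert_eq!(c.metadata.coin_type, "0x2::sui::SUI"); // metadata filled in by Default
)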
+// SPDX-License-Identifier: Apache-2.0 +use crate::types::{AccountBalanceRequest, Amount, Currency, CurrencyMetadata}; +use serde_json::json; + +#[tokio::test] +async fn test_currency_defaults() { + let expected = Currency { + symbol: "SUI".to_string(), + decimals: 9, + metadata: CurrencyMetadata { + coin_type: "0x2::sui::SUI".to_string(), + }, + }; + + let currency: Currency = serde_json::from_value(json!( + { + "symbol": "SUI", + "decimals": 9, + } + )) + .unwrap(); + assert_eq!(expected, currency); + + let amount: Amount = serde_json::from_value(json!( + { + "value": "1000000000", + } + )) + .unwrap(); + assert_eq!(expected, amount.currency); + + let account_balance_request: AccountBalanceRequest = serde_json::from_value(json!( + { + "network_identifier": { + "blockchain": "sui", + "network": "mainnet" + }, + "account_identifier": { + "address": "0xadc3a0bb21840f732435f8b649e99df6b29cd27854dfa4b020e3bee07ea09b96" + } + } + )) + .unwrap(); + assert_eq!( + expected, + account_balance_request.currencies.0.clone().pop().unwrap() + ); + + let account_balance_request: AccountBalanceRequest = serde_json::from_value(json!( + { + "network_identifier": { + "blockchain": "sui", + "network": "mainnet" + }, + "account_identifier": { + "address": "0xadc3a0bb21840f732435f8b649e99df6b29cd27854dfa4b020e3bee07ea09b96" + }, + "currencies": [] + } + )) + .unwrap(); + assert_eq!( + expected, + account_balance_request.currencies.0.clone().pop().unwrap() + ); +} diff --git a/crates/sui-rosetta/tests/custom_coins/test_coin/Move.toml b/crates/sui-rosetta/tests/custom_coins/test_coin/Move.toml new file mode 100644 index 0000000000000..3f5e73de1f5c7 --- /dev/null +++ b/crates/sui-rosetta/tests/custom_coins/test_coin/Move.toml @@ -0,0 +1,9 @@ +[package] +name = "test_coin" +edition = "2024.beta" # edition = "legacy" to use legacy (pre-2024) Move + +[dependencies] +Sui = { git = "https://github.com/MystenLabs/sui.git", subdir = "crates/sui-framework/packages/sui-framework", rev = "framework/testnet" } + +[addresses] +test_coin = "0x0" \ No newline at end of file diff --git a/crates/sui-rosetta/tests/custom_coins/test_coin/sources/test_coin.move b/crates/sui-rosetta/tests/custom_coins/test_coin/sources/test_coin.move new file mode 100644 index 0000000000000..2c0aa96202fb6 --- /dev/null +++ b/crates/sui-rosetta/tests/custom_coins/test_coin/sources/test_coin.move @@ -0,0 +1,24 @@ +// Copyright (c) Mysten Labs, Inc. +// SPDX-License-Identifier: Apache-2.0 + +/// Module: test_coin +module test_coin::test_coin { + + use sui::coin; + + public struct TEST_COIN has drop {} + + fun init(witness: TEST_COIN, ctx: &mut TxContext) { + let (treasury, metadata) = coin::create_currency( + witness, + 6, + b"TEST_COIN", + b"", + b"", + option::none(), + ctx + ); + transfer::public_freeze_object(metadata); + transfer::public_transfer(treasury, ctx.sender()) + } +} diff --git a/crates/sui-rosetta/tests/custom_coins/test_coin_utils.rs b/crates/sui-rosetta/tests/custom_coins/test_coin_utils.rs new file mode 100644 index 0000000000000..c7e640b268fc1 --- /dev/null +++ b/crates/sui-rosetta/tests/custom_coins/test_coin_utils.rs @@ -0,0 +1,297 @@ +// Copyright (c) Mysten Labs, Inc. 
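(The Move module above follows the standard one-time-witness coin pattern: init consumes the TEST_COIN witness in coin::create_currency with 6 decimals, the symbol TEST_COIN, empty name and description, and no icon URL; it then freezes the CoinMetadata object so it can never change and transfers the TreasuryCap to the publisher. That TreasuryCap is exactly what the init_package helper below goes looking for.)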
+// SPDX-License-Identifier: Apache-2.0 + +use std::path::Path; +use std::str::FromStr; + +use anyhow::{anyhow, Result}; + +use move_cli::base; +use shared_crypto::intent::Intent; +use sui_json_rpc_types::{ + ObjectChange, SuiObjectDataFilter, SuiObjectDataOptions, SuiObjectResponseQuery, SuiRawData, + SuiTransactionBlockResponse, SuiTransactionBlockResponseOptions, +}; +use sui_keys::keystore::{AccountKeystore, Keystore}; +use sui_move_build::BuildConfig as MoveBuildConfig; + +use sui_sdk::SuiClient; +use sui_types::base_types::{ObjectID, ObjectRef, SuiAddress}; +use sui_types::coin::COIN_MODULE_NAME; +use sui_types::gas_coin::GasCoin; +use sui_types::object::Owner; +use sui_types::programmable_transaction_builder::ProgrammableTransactionBuilder; +use sui_types::quorum_driver_types::ExecuteTransactionRequestType; +use sui_types::transaction::{ + Command, ObjectArg, Transaction, TransactionData, TransactionDataAPI, +}; +use sui_types::{Identifier, TypeTag, SUI_FRAMEWORK_PACKAGE_ID}; +use test_cluster::TestClusterBuilder; + +use tracing::debug; + +const DEFAULT_GAS_BUDGET: u64 = 900_000_000; + +pub struct GasRet { + pub object: ObjectRef, + pub budget: u64, + pub price: u64, +} + +pub async fn select_gas( + client: &SuiClient, + signer_addr: SuiAddress, + input_gas: Option<ObjectID>, + budget: Option<u64>, + exclude_objects: Vec<ObjectID>, + gas_price: Option<u64>, +) -> Result<GasRet> { + let price = match gas_price { + Some(p) => p, + None => { + debug!("No gas price given, fetching from fullnode"); + client.read_api().get_reference_gas_price().await? + } + }; + let budget = budget.unwrap_or_else(|| { + debug!("No gas budget given, defaulting to {DEFAULT_GAS_BUDGET}"); + debug_assert!(DEFAULT_GAS_BUDGET > price); + DEFAULT_GAS_BUDGET + }); + if budget < price { + return Err(anyhow!( + "Gas budget {budget} is less than the reference gas price {price}. + The gas budget must be at least the current reference gas price of {price}." + )); + } + + if let Some(gas) = input_gas { + let read_api = client.read_api(); + let object = read_api + .get_object_with_options(gas, SuiObjectDataOptions::new()) + .await? + .object_ref_if_exists() + .ok_or(anyhow!("No object-ref"))?; + return Ok(GasRet { + object, + budget, + price, + }); + } + + let read_api = client.read_api(); + let gas_objs = read_api + .get_owned_objects( + signer_addr, + Some(SuiObjectResponseQuery { + filter: Some(SuiObjectDataFilter::StructType(GasCoin::type_())), + options: Some(SuiObjectDataOptions::new().with_bcs()), + }), + None, + None, + ) + .await? + .data; + + for obj in gas_objs { + let SuiRawData::MoveObject(raw_obj) = &obj + .data + .as_ref() + .ok_or_else(|| anyhow!("data field is unexpectedly empty"))? + .bcs + .as_ref() + .ok_or_else(|| anyhow!("bcs field is unexpectedly empty"))?
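// Note on the surrounding let-else: it keeps only objects whose response carries raw
// Move-object BCS (requested via .with_bcs() above). A missing `data` or `bcs` field is
// treated as an error, while a package payload (the other SuiRawData variant) simply
// falls through to the next owned coin via `continue`.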
+ else { + continue; + }; + + let gas: GasCoin = bcs::from_bytes(&raw_obj.bcs_bytes)?; + + let Some(obj_ref) = obj.object_ref_if_exists() else { + continue; + }; + if !exclude_objects.contains(&obj_ref.0) && gas.value() >= budget { + return Ok(GasRet { + object: obj_ref, + budget, + price, + }); + } + } + Err(anyhow!("Cannot find gas coin for signer address [{signer_addr}] with amount sufficient for the required gas amount [{budget}].")) +} + +#[derive(Debug)] +pub struct InitRet { + pub owner: SuiAddress, + pub treasury_cap: ObjectRef, + pub coin_tag: TypeTag, +} +pub async fn init_package( + client: &SuiClient, + keystore: &Keystore, + sender: SuiAddress, +) -> Result { + let path = Path::new("tests/custom_coins/test_coin"); + let path_buf = base::reroot_path(Some(path))?; + + let move_build_config = MoveBuildConfig::default(); + let compiled_modules = move_build_config.build(path_buf.as_path())?; + let modules_bytes = compiled_modules.get_package_bytes(false); + + let tx_kind = client + .transaction_builder() + .publish_tx_kind( + sender, + modules_bytes, + vec![ + ObjectID::from_hex_literal("0x1").unwrap(), + ObjectID::from_hex_literal("0x2").unwrap(), + ], + ) + .await?; + + let gas_data = select_gas(client, sender, None, None, vec![], None).await?; + let tx_data = client + .transaction_builder() + .tx_data( + sender, + tx_kind, + gas_data.budget, + gas_data.price, + vec![gas_data.object.0], + None, + ) + .await?; + + let sig = keystore.sign_secure(&tx_data.sender(), &tx_data, Intent::sui_transaction())?; + + let res = client + .quorum_driver_api() + .execute_transaction_block( + Transaction::from_data(tx_data, vec![sig]), + SuiTransactionBlockResponseOptions::new() + .with_effects() + .with_object_changes() + .with_input(), + Some(ExecuteTransactionRequestType::WaitForLocalExecution), + ) + .await?; + + let treasury_cap = res.object_changes.unwrap().into_iter().find_map(|change| { + if let ObjectChange::Created { + object_type, + object_id, + version, + digest, + owner, + .. 
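// Context for the surrounding ObjectChange::Created match: publishing the package runs
// test_coin's init, which creates a TreasuryCap<T> owned by the publisher. The cap's
// single type parameter is the new coin's TypeTag (e.g. 0x..::test_coin::TEST_COIN), so
// spotting the created TreasuryCap is enough to learn both the cap's ObjectRef and the
// coin type without knowing the package id in advance.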
+ } = change + { + if object_type.to_string().contains("2::coin::TreasuryCap") { + let Owner::AddressOwner(owner) = owner else { + return None; + }; + let coin_tag = object_type.type_params.into_iter().next().unwrap(); + return Some(InitRet { + owner, + treasury_cap: (object_id, version, digest), + coin_tag, + }); + } + } + None + }); + + Ok(treasury_cap.unwrap()) +} + +pub async fn mint( + client: &SuiClient, + keystore: &Keystore, + init_ret: InitRet, + balances_to: Vec<(u64, SuiAddress)>, +) -> Result<SuiTransactionBlockResponse> { + let treasury_cap_owner = init_ret.owner; + let gas_data = select_gas(client, treasury_cap_owner, None, None, vec![], None).await?; + + let mut ptb = ProgrammableTransactionBuilder::new(); + + let treasury_cap = ptb.obj(ObjectArg::ImmOrOwnedObject(init_ret.treasury_cap))?; + for (balance, to) in balances_to { + let balance = ptb.pure(balance)?; + let coin = ptb.command(Command::move_call( + SUI_FRAMEWORK_PACKAGE_ID, + Identifier::from(COIN_MODULE_NAME), + Identifier::from_str("mint")?, + vec![init_ret.coin_tag.clone()], + vec![treasury_cap, balance], + )); + ptb.transfer_arg(to, coin); + } + let builder = ptb.finish(); + + // Sign transaction + let tx_data = TransactionData::new_programmable( + treasury_cap_owner, + vec![gas_data.object], + builder, + gas_data.budget, + gas_data.price, + ); + + let sig = keystore.sign_secure(&tx_data.sender(), &tx_data, Intent::sui_transaction())?; + + let res = client + .quorum_driver_api() + .execute_transaction_block( + Transaction::from_data(tx_data, vec![sig]), + SuiTransactionBlockResponseOptions::new() + .with_effects() + .with_object_changes() + .with_input(), + Some(ExecuteTransactionRequestType::WaitForLocalExecution), + ) + .await?; + + Ok(res) +} + +#[tokio::test] +async fn test_mint() { + const COIN1_BALANCE: u64 = 100_000_000; + const COIN2_BALANCE: u64 = 200_000_000; + let test_cluster = TestClusterBuilder::new().build().await; + let client = test_cluster.wallet.get_client().await.unwrap(); + let keystore = &test_cluster.wallet.config.keystore; + + let sender = test_cluster.get_address_0(); + let init_ret = init_package(&client, keystore, sender).await.unwrap(); + + let address1 = test_cluster.get_address_1(); + let address2 = test_cluster.get_address_2(); + let balances_to = vec![(COIN1_BALANCE, address1), (COIN2_BALANCE, address2)]; + + let mint_res = mint(&client, keystore, init_ret, balances_to) + .await + .unwrap(); + let coins = mint_res + .object_changes + .unwrap() + .into_iter() + .filter_map(|change| { + if let ObjectChange::Created { + object_type, owner, .. + } = change + { + Some((object_type, owner)) + } else { + None + } + }) + .collect::<Vec<_>>(); + let coin1 = coins.iter().find(|coin| coin.1 == address1).unwrap(); + let coin2 = coins.iter().find(|coin| coin.1 == address2).unwrap(); + assert!(coin1.0.to_string().contains("::test_coin::TEST_COIN")); + assert!(coin2.0.to_string().contains("::test_coin::TEST_COIN")); +} diff --git a/crates/sui-rosetta/tests/custom_coins_tests.rs b/crates/sui-rosetta/tests/custom_coins_tests.rs new file mode 100644 index 0000000000000..ca991c40b66b4 --- /dev/null +++ b/crates/sui-rosetta/tests/custom_coins_tests.rs @@ -0,0 +1,235 @@ +// Copyright (c) Mysten Labs, Inc.
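(The tests in this new file lean on the two helpers just defined. Their setup boils down to the following sketch, where `amount` and `recipient` stand in for each test's constants:

    let init_ret = init_package(&client, keystore, sender).await?; // publish, grab the TreasuryCap
    let coin_type = init_ret.coin_tag.to_canonical_string(true);   // "0x..::test_coin::TEST_COIN"
    mint(&client, keystore, init_ret, vec![(amount, recipient)]).await?;
)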
+// SPDX-License-Identifier: Apache-2.0 + +#[allow(dead_code)] +mod rosetta_client; +#[path = "custom_coins/test_coin_utils.rs"] +mod test_coin_utils; + +use serde_json::json; +use sui_json_rpc_types::{ + SuiExecutionStatus, SuiTransactionBlockEffectsAPI, SuiTransactionBlockResponseOptions, +}; +use sui_rosetta::operations::Operations; +use sui_rosetta::types::Currencies; +use sui_rosetta::types::{ + AccountBalanceRequest, AccountBalanceResponse, AccountIdentifier, Currency, CurrencyMetadata, + NetworkIdentifier, SuiEnv, +}; +use sui_rosetta::CoinMetadataCache; +use sui_rosetta::SUI; +use test_cluster::TestClusterBuilder; +use test_coin_utils::{init_package, mint}; + +use crate::rosetta_client::{start_rosetta_test_server, RosettaEndpoint}; + +#[tokio::test] +async fn test_custom_coin_balance() { + // mint coins to `test_cluster.get_address_1()` and `test_cluster.get_address_2()` + const SUI_BALANCE: u64 = 150_000_000_000_000_000; + const COIN1_BALANCE: u64 = 100_000_000; + const COIN2_BALANCE: u64 = 200_000_000; + let test_cluster = TestClusterBuilder::new().build().await; + let client = test_cluster.wallet.get_client().await.unwrap(); + let keystore = &test_cluster.wallet.config.keystore; + + let (rosetta_client, _handle) = start_rosetta_test_server(client.clone()).await; + + let sender = test_cluster.get_address_0(); + let init_ret = init_package(&client, keystore, sender).await.unwrap(); + + let address1 = test_cluster.get_address_1(); + let address2 = test_cluster.get_address_2(); + let balances_to = vec![(COIN1_BALANCE, address1), (COIN2_BALANCE, address2)]; + let coin_type = init_ret.coin_tag.to_canonical_string(true); + + let _mint_res = mint(&client, keystore, init_ret, balances_to) + .await + .unwrap(); + + // setup AccountBalanceRequest + let network_identifier = NetworkIdentifier { + blockchain: "sui".to_string(), + network: SuiEnv::LocalNet, + }; + + let sui_currency = SUI.clone(); + let test_coin_currency = Currency { + symbol: "TEST_COIN".to_string(), + decimals: 6, + metadata: CurrencyMetadata { + coin_type: coin_type.clone(), + }, + }; + + // Verify balances for both currencies + let request = AccountBalanceRequest { + network_identifier: network_identifier.clone(), + account_identifier: AccountIdentifier { + address: address1, + sub_account: None, + }, + block_identifier: Default::default(), + currencies: Currencies(vec![sui_currency, test_coin_currency]), + }; + + println!( + "request: {}", + serde_json::to_string_pretty(&request).unwrap() + ); + let response: AccountBalanceResponse = rosetta_client + .call(RosettaEndpoint::Balance, &request) + .await; + println!( + "response: {}", + serde_json::to_string_pretty(&response).unwrap() + ); + assert_eq!(response.balances.len(), 2); + assert_eq!(response.balances[0].value, SUI_BALANCE as i128); + assert_eq!( + response.balances[0].currency.clone().metadata.coin_type, + "0x2::sui::SUI" + ); + assert_eq!(response.balances[1].value, COIN1_BALANCE as i128); + assert_eq!( + response.balances[1].currency.clone().metadata.coin_type, + coin_type + ); +} + +#[tokio::test] +async fn test_default_balance() { + // check the default (SUI) balance of `test_cluster.get_address_0()` + const SUI_BALANCE: u64 = 150_000_000_000_000_000; + let test_cluster = TestClusterBuilder::new().build().await; + let client = test_cluster.wallet.get_client().await.unwrap(); + + let (rosetta_client, _handles) = start_rosetta_test_server(client.clone()).await; + + let request: AccountBalanceRequest = serde_json::from_value(json!( + {
"network_identifier": { + "blockchain": "sui", + "network": "localnet" + }, + "account_identifier": { + "address": test_cluster.get_address_0() + } + } + )) + .unwrap(); + let response: AccountBalanceResponse = rosetta_client + .call(RosettaEndpoint::Balance, &request) + .await; + println!( + "response: {}", + serde_json::to_string_pretty(&response).unwrap() + ); + assert_eq!(response.balances.len(), 1); + assert_eq!(response.balances[0].value, SUI_BALANCE as i128); + + // Keep server running for testing with bash/curl + // To test with curl, + // 1. Uncomment the following lines + // 2. use `cargo test -- --nocapture` to print the server and + // 3. run curl 'localhost:/account/balance' --header 'Content-Type: application/json' \ + // --data-raw '{ + // "network_identifier": { + // "blockchain": "sui", + // "network": "localnet" + // }, + // "account_identifier": { + // "address": "" + // } + // }' + // println!("port: {}", rosetta_client.online_port()); + // println!("address0: {}", test_cluster.get_address_0()); + // for handle in _handles.into_iter() { + // handle.await.unwrap(); + // } +} + +#[tokio::test] +async fn test_custom_coin_transfer() { + const COIN1_BALANCE: u64 = 100_000_000_000_000_000; + let test_cluster = TestClusterBuilder::new().build().await; + let sender = test_cluster.get_address_0(); + let recipient = test_cluster.get_address_1(); + let client = test_cluster.wallet.get_client().await.unwrap(); + let keystore = &test_cluster.wallet.config.keystore; + + // TEST_COIN setup and mint + let init_ret = init_package(&client, keystore, sender).await.unwrap(); + let balances_to = vec![(COIN1_BALANCE, sender)]; + let coin_type = init_ret.coin_tag.to_canonical_string(true); + let _mint_res = mint(&client, keystore, init_ret, balances_to) + .await + .unwrap(); + + let (rosetta_client, _handle) = start_rosetta_test_server(client.clone()).await; + + let ops = serde_json::from_value(json!( + [{ + "operation_identifier":{"index":0}, + "type":"PayCoin", + "account": { "address" : recipient.to_string() }, + "amount" : { + "value": "30000000", + "currency": { + "symbol": "TEST_COIN", + "decimals": 6, + "metadata": { + "coin_type": coin_type.clone(), + } + } + }, + }, + { + "operation_identifier":{"index":1}, + "type":"PayCoin", + "account": { "address" : sender.to_string() }, + "amount" : { + "value": "-30000000", + "currency": { + "symbol": "TEST_COIN", + "decimals": 6, + "metadata": { + "coin_type": coin_type.clone(), + } + } + }, + }] + )) + .unwrap(); + + let response = rosetta_client.rosetta_flow(&ops, keystore).await; + + let tx = client + .read_api() + .get_transaction_with_options( + response.transaction_identifier.hash, + SuiTransactionBlockResponseOptions::new() + .with_input() + .with_effects() + .with_balance_changes() + .with_events(), + ) + .await + .unwrap(); + + assert_eq!( + &SuiExecutionStatus::Success, + tx.effects.as_ref().unwrap().status() + ); + println!("Sui TX: {tx:?}"); + let coin_cache = CoinMetadataCache::new(client); + let ops2 = Operations::try_from_response(tx, &coin_cache) + .await + .unwrap(); + assert!( + ops2.contains(&ops), + "Operation mismatch. 
expecting:{}, got:{}", + serde_json::to_string(&ops).unwrap(), + serde_json::to_string(&ops2).unwrap() + ); +} diff --git a/crates/sui-rosetta/tests/end_to_end_tests.rs b/crates/sui-rosetta/tests/end_to_end_tests.rs index f75dc853f6043..b7f8eec73057a 100644 --- a/crates/sui-rosetta/tests/end_to_end_tests.rs +++ b/crates/sui-rosetta/tests/end_to_end_tests.rs @@ -1,18 +1,19 @@ // Copyright (c) Mysten Labs, Inc. // SPDX-License-Identifier: Apache-2.0 -use std::time::Duration; - use serde_json::json; +use std::time::Duration; use rosetta_client::start_rosetta_test_server; use sui_json_rpc_types::SuiTransactionBlockResponseOptions; use sui_keys::keystore::AccountKeystore; use sui_rosetta::operations::Operations; +use sui_rosetta::types::Currencies; use sui_rosetta::types::{ - AccountBalanceRequest, AccountBalanceResponse, AccountIdentifier, NetworkIdentifier, + AccountBalanceRequest, AccountBalanceResponse, AccountIdentifier, Currency, NetworkIdentifier, SubAccount, SubAccountType, SuiEnv, }; +use sui_rosetta::CoinMetadataCache; use sui_sdk::rpc_types::{SuiExecutionStatus, SuiTransactionBlockEffectsAPI}; use sui_swarm_config::genesis_config::{DEFAULT_GAS_AMOUNT, DEFAULT_NUMBER_OF_OBJECT_PER_ACCOUNT}; use sui_types::quorum_driver_types::ExecuteTransactionRequestType; @@ -46,7 +47,7 @@ async fn test_get_staked_sui() { sub_account: None, }, block_identifier: Default::default(), - currencies: vec![], + currencies: Currencies(vec![Currency::default()]), }; let response: AccountBalanceResponse = rosetta_client @@ -67,7 +68,7 @@ async fn test_get_staked_sui() { }), }, block_identifier: Default::default(), - currencies: vec![], + currencies: Currencies(vec![Currency::default()]), }; let response: AccountBalanceResponse = rosetta_client .call(RosettaEndpoint::Balance, &request) @@ -146,7 +147,7 @@ async fn test_stake() { "operation_identifier":{"index":0}, "type":"Stake", "account": { "address" : sender.to_string() }, - "amount" : { "value": "-1000000000" , "currency": { "symbol": "SUI", "decimals": 9}}, + "amount" : { "value": "-1000000000" }, "metadata": { "Stake" : {"validator": validator.to_string()} } }] )) @@ -174,7 +175,8 @@ async fn test_stake() { tx.effects.as_ref().unwrap().status() ); - let ops2 = Operations::try_from(tx).unwrap(); + let con_cache = CoinMetadataCache::new(client); + let ops2 = Operations::try_from_response(tx, &con_cache).await.unwrap(); assert!( ops2.contains(&ops), "Operation mismatch. expecting:{}, got:{}", @@ -234,7 +236,10 @@ async fn test_stake_all() { tx.effects.as_ref().unwrap().status() ); - let ops2 = Operations::try_from(tx).unwrap(); + let coin_cache = CoinMetadataCache::new(client); + let ops2 = Operations::try_from_response(tx, &coin_cache) + .await + .unwrap(); assert!( ops2.contains(&ops), "Operation mismatch. expecting:{}, got:{}", @@ -273,7 +278,7 @@ async fn test_withdraw_stake() { "operation_identifier":{"index":0}, "type":"Stake", "account": { "address" : sender.to_string() }, - "amount" : { "value": "-1000000000" , "currency": { "symbol": "SUI", "decimals": 9}}, + "amount" : { "value": "-1000000000" }, "metadata": { "Stake" : {"validator": validator.to_string()} } }] )) @@ -349,8 +354,10 @@ async fn test_withdraw_stake() { tx.effects.as_ref().unwrap().status() ); println!("Sui TX: {tx:?}"); - - let ops2 = Operations::try_from(tx).unwrap(); + let coin_cache = CoinMetadataCache::new(client); + let ops2 = Operations::try_from_response(tx, &coin_cache) + .await + .unwrap(); assert!( ops2.contains(&ops), "Operation mismatch. 
expecting:{}, got:{}", @@ -388,12 +395,12 @@ async fn test_pay_sui() { "operation_identifier":{"index":0}, "type":"PaySui", "account": { "address" : recipient.to_string() }, - "amount" : { "value": "1000000000" , "currency": { "symbol": "SUI", "decimals": 9}} + "amount" : { "value": "1000000000" } },{ "operation_identifier":{"index":1}, "type":"PaySui", "account": { "address" : sender.to_string() }, - "amount" : { "value": "-1000000000" , "currency": { "symbol": "SUI", "decimals": 9}} + "amount" : { "value": "-1000000000" } }] )) .unwrap(); @@ -418,8 +425,10 @@ async fn test_pay_sui() { tx.effects.as_ref().unwrap().status() ); println!("Sui TX: {tx:?}"); - - let ops2 = Operations::try_from(tx).unwrap(); + let coin_cache = CoinMetadataCache::new(client); + let ops2 = Operations::try_from_response(tx, &coin_cache) + .await + .unwrap(); assert!( ops2.contains(&ops), "Operation mismatch. expecting:{}, got:{}", @@ -440,6 +449,7 @@ async fn test_pay_sui_multiple_times() { let keystore = &test_cluster.wallet.config.keystore; let (rosetta_client, _handle) = start_rosetta_test_server(client.clone()).await; + let coin_cache = CoinMetadataCache::new(client.clone()); for i in 1..20 { println!("Iteration: {}", i); @@ -448,12 +458,12 @@ async fn test_pay_sui_multiple_times() { "operation_identifier":{"index":0}, "type":"PaySui", "account": { "address" : recipient.to_string() }, - "amount" : { "value": "1000000000" , "currency": { "symbol": "SUI", "decimals": 9}} + "amount" : { "value": "1000000000" } },{ "operation_identifier":{"index":1}, "type":"PaySui", "account": { "address" : sender.to_string() }, - "amount" : { "value": "-1000000000" , "currency": { "symbol": "SUI", "decimals": 9}} + "amount" : { "value": "-1000000000" } }] )) .unwrap(); @@ -477,8 +487,9 @@ async fn test_pay_sui_multiple_times() { &SuiExecutionStatus::Success, tx.effects.as_ref().unwrap().status() ); - - let ops2 = Operations::try_from(tx).unwrap(); + let ops2 = Operations::try_from_response(tx, &coin_cache) + .await + .unwrap(); assert!( ops2.contains(&ops), "Operation mismatch. 
expecting:{}, got:{}", diff --git a/crates/sui-rosetta/tests/gas_budget_tests.rs b/crates/sui-rosetta/tests/gas_budget_tests.rs index f5666298bdd58..5a10c7dc31a70 100644 --- a/crates/sui-rosetta/tests/gas_budget_tests.rs +++ b/crates/sui-rosetta/tests/gas_budget_tests.rs @@ -73,7 +73,13 @@ async fn pay_with_gas_budget(budget: u64) -> TransactionIdentifierResponseResult "operation_identifier":{"index":1}, "type":"PaySui", "account": { "address" : sender.to_string() }, - "amount" : { "value": "-1000000000" , "currency": { "symbol": "SUI", "decimals": 9}} + "amount" : { + "value": "-1000000000", + "currency": { + "symbol": "SUI", + "decimals": 9, + } + }, }] )) .unwrap(); diff --git a/crates/sui-rosetta/tests/rosetta_client.rs b/crates/sui-rosetta/tests/rosetta_client.rs index 05c3703c0a584..e1f63bae47646 100644 --- a/crates/sui-rosetta/tests/rosetta_client.rs +++ b/crates/sui-rosetta/tests/rosetta_client.rs @@ -20,8 +20,8 @@ use sui_rosetta::types::{ AccountBalanceRequest, AccountBalanceResponse, AccountIdentifier, ConstructionCombineRequest, ConstructionCombineResponse, ConstructionMetadataRequest, ConstructionMetadataResponse, ConstructionPayloadsRequest, ConstructionPayloadsResponse, ConstructionPreprocessRequest, - ConstructionPreprocessResponse, ConstructionSubmitRequest, NetworkIdentifier, Signature, - SignatureType, SubAccount, SubAccountType, SuiEnv, TransactionIdentifierResponse, + ConstructionPreprocessResponse, ConstructionSubmitRequest, Currencies, NetworkIdentifier, + Signature, SignatureType, SubAccount, SubAccountType, SuiEnv, TransactionIdentifierResponse, }; use sui_rosetta::{RosettaOfflineServer, RosettaOnlineServer}; use sui_sdk::SuiClient; @@ -70,6 +70,13 @@ impl RosettaClient { offline_port: offline, } } + + // Used to print port, when keeping test running by waiting for online server handle. + #[allow(dead_code)] + pub fn online_port(&self) -> u16 { + self.online_port + } + pub async fn call( &self, endpoint: RosettaEndpoint, @@ -192,7 +199,7 @@ impl RosettaClient { sub_account, }, block_identifier: Default::default(), - currencies: vec![], + currencies: Currencies(vec![]), }; self.call(RosettaEndpoint::Balance, &request).await } diff --git a/crates/sui-source-validation/src/lib.rs b/crates/sui-source-validation/src/lib.rs index a692974c68a3f..fdff015dc299c 100644 --- a/crates/sui-source-validation/src/lib.rs +++ b/crates/sui-source-validation/src/lib.rs @@ -217,6 +217,7 @@ impl ValidationMode { /// modules from the root package will be expected at address `0x0` and this address will be /// substituted with the specified address. fn local(&self, package: &CompiledPackage) -> Result { + let sui_package = package; let package = &package.package; let root_package = package.compiled_package_info.package_name; let mut map = LocalModules::new(); @@ -243,6 +244,19 @@ impl ValidationMode { map.insert((address, module), (package, m.module.clone())); } + + // Include bytecode dependencies. + for (package, module) in sui_package.bytecode_deps.iter() { + let address = *module.address(); + if address == AccountAddress::ZERO { + continue; + } + + map.insert( + (address, Symbol::from(module.name().as_str())), + (*package, module.clone()), + ); + } } let Self::Root { at, .. 
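// With the loop added above, bytecode dependencies (packages vendored only as compiled
// modules, without sources) land in the same LocalModules map as source dependencies,
// keyed by their on-chain address, so verification treats both kinds of dependency
// uniformly. Modules still at address 0x0 are unpublished and deliberately skipped.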
} = self else { diff --git a/crates/sui-source-validation/src/tests.rs b/crates/sui-source-validation/src/tests.rs index 620b209e43d5b..9d908817839a7 100644 --- a/crates/sui-source-validation/src/tests.rs +++ b/crates/sui-source-validation/src/tests.rs @@ -667,6 +667,66 @@ async fn successful_versioned_dependency_verification() -> anyhow::Result<()> { Ok(()) } +#[tokio::test] +async fn successful_verification_with_bytecode_dep() -> anyhow::Result<()> { + let mut cluster = TestClusterBuilder::new().build().await; + let context = &mut cluster.wallet; + + let tempdir = tempfile::tempdir()?; + + { + // publish b + fs::create_dir_all(tempdir.path().join("publish"))?; + let b_src = + copy_published_package(&tempdir.path().join("publish"), "b", SuiAddress::ZERO).await?; + let b_ref = publish_package(context, b_src).await.0; + + // setup b as a bytecode package + let pkg_path = copy_published_package(&tempdir, "b", b_ref.0.into()).await?; + + move_package::package_hooks::register_package_hooks(Box::new(SuiPackageHooks)); + BuildConfig::default().build(&pkg_path).unwrap(); + + fs::remove_dir_all(pkg_path.join("sources"))?; + }; + + let (a_pkg, a_ref) = { + let a_src = copy_published_package(&tempdir, "a", SuiAddress::ZERO).await?; + ( + compile_package(a_src.clone()), + publish_package(context, a_src).await.0, + ) + }; + + assert!( + !a_pkg.bytecode_deps.is_empty(), + "Invalid test setup: expected bytecode deps to be present." + ); + + let client = context.get_client().await?; + let verifier = BytecodeSourceVerifier::new(client.read_api()); + + // Verify deps but skip root + verifier + .verify(&a_pkg, ValidationMode::deps()) + .await + .unwrap(); + + // Skip deps but verify root + verifier + .verify(&a_pkg, ValidationMode::root_at(a_ref.0.into())) + .await + .unwrap(); + + // Verify both deps and root + verifier + .verify(&a_pkg, ValidationMode::root_and_deps_at(a_ref.0.into())) + .await + .unwrap(); + + Ok(()) +} + /// Compile the package at absolute path `package`. fn compile_package(package: impl AsRef) -> CompiledPackage { move_package::package_hooks::register_package_hooks(Box::new(SuiPackageHooks)); diff --git a/crates/sui-storage/src/indexes.rs b/crates/sui-storage/src/indexes.rs index 898bba45a45d5..6302045a55f88 100644 --- a/crates/sui-storage/src/indexes.rs +++ b/crates/sui-storage/src/indexes.rs @@ -1461,7 +1461,7 @@ impl IndexStore { metrics.all_balance_lookup_from_db.inc(); let mut balances: HashMap = HashMap::new(); let coins = Self::get_owned_coins_iterator(&coin_index, owner, None)? 
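// The group_by -> chunk_by renames in this and the next hunk track itertools 0.13,
// which deprecated `group_by` in favor of the clearer name. Semantics are unchanged:
// only consecutive elements with equal keys share a group, so the input must already
// be sorted by the key. A quick illustration:
//
//     use itertools::Itertools;
//     let chunks = [1, 3, 2, 4, 5].into_iter().chunk_by(|x| x % 2);
//     let mut groups: Vec<Vec<i32>> = Vec::new();
//     for (_parity, chunk) in &chunks {
//         groups.push(chunk.collect());
//     }
//     assert_eq!(groups, vec![vec![1, 3], vec![2, 4], vec![5]]); // 5 starts a new group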
- .group_by(|(coin_type, _obj_id, _coin)| coin_type.clone()); + .chunk_by(|(coin_type, _obj_id, _coin)| coin_type.clone()); for (coin_type, coins) in &coins { let mut total_balance = 0i128; let mut coin_object_count = 0; diff --git a/crates/sui-swarm-config/tests/snapshots/snapshot_tests__genesis_config_snapshot_matches.snap b/crates/sui-swarm-config/tests/snapshots/snapshot_tests__genesis_config_snapshot_matches.snap index cc22ab99c3024..1ab457a462cf7 100644 --- a/crates/sui-swarm-config/tests/snapshots/snapshot_tests__genesis_config_snapshot_matches.snap +++ b/crates/sui-swarm-config/tests/snapshots/snapshot_tests__genesis_config_snapshot_matches.snap @@ -6,7 +6,7 @@ ssfn_config_info: ~ validator_config_info: ~ parameters: chain_start_timestamp_ms: 0 - protocol_version: 58 + protocol_version: 59 allow_insertion_of_extra_objects: true epoch_duration_ms: 86400000 stake_subsidy_start_epoch: 0 diff --git a/crates/sui-swarm-config/tests/snapshots/snapshot_tests__network_config_snapshot_matches.snap b/crates/sui-swarm-config/tests/snapshots/snapshot_tests__network_config_snapshot_matches.snap index 993c53011a274..f5567e12e24d8 100644 --- a/crates/sui-swarm-config/tests/snapshots/snapshot_tests__network_config_snapshot_matches.snap +++ b/crates/sui-swarm-config/tests/snapshots/snapshot_tests__network_config_snapshot_matches.snap @@ -120,12 +120,16 @@ validator_configs: - "AwsTenant-region:us-east-1-tenant_id:us-east-1_qPsZxYqd8" - Credenza3 - Facebook + - FanTV - Google - Kakao - KarrierOne - Microsoft + - Onefc + - Playtron - Slack - TestIssuer + - Threedos - Twitch authority-overload-config: max-txn-age-in-queue: @@ -272,12 +276,16 @@ validator_configs: - "AwsTenant-region:us-east-1-tenant_id:us-east-1_qPsZxYqd8" - Credenza3 - Facebook + - FanTV - Google - Kakao - KarrierOne - Microsoft + - Onefc + - Playtron - Slack - TestIssuer + - Threedos - Twitch authority-overload-config: max-txn-age-in-queue: @@ -424,12 +432,16 @@ validator_configs: - "AwsTenant-region:us-east-1-tenant_id:us-east-1_qPsZxYqd8" - Credenza3 - Facebook + - FanTV - Google - Kakao - KarrierOne - Microsoft + - Onefc + - Playtron - Slack - TestIssuer + - Threedos - Twitch authority-overload-config: max-txn-age-in-queue: @@ -576,12 +588,16 @@ validator_configs: - "AwsTenant-region:us-east-1-tenant_id:us-east-1_qPsZxYqd8" - Credenza3 - Facebook + - FanTV - Google - Kakao - KarrierOne - Microsoft + - Onefc + - Playtron - Slack - TestIssuer + - Threedos - Twitch authority-overload-config: max-txn-age-in-queue: @@ -728,12 +744,16 @@ validator_configs: - "AwsTenant-region:us-east-1-tenant_id:us-east-1_qPsZxYqd8" - Credenza3 - Facebook + - FanTV - Google - Kakao - KarrierOne - Microsoft + - Onefc + - Playtron - Slack - TestIssuer + - Threedos - Twitch authority-overload-config: max-txn-age-in-queue: @@ -880,12 +900,16 @@ validator_configs: - "AwsTenant-region:us-east-1-tenant_id:us-east-1_qPsZxYqd8" - Credenza3 - Facebook + - FanTV - Google - Kakao - KarrierOne - Microsoft + - Onefc + - Playtron - Slack - TestIssuer + - Threedos - Twitch authority-overload-config: max-txn-age-in-queue: @@ -1032,12 +1056,16 @@ validator_configs: - "AwsTenant-region:us-east-1-tenant_id:us-east-1_qPsZxYqd8" - Credenza3 - Facebook + - FanTV - Google - Kakao - KarrierOne - Microsoft + - Onefc + - Playtron - Slack - TestIssuer + - Threedos - Twitch authority-overload-config: max-txn-age-in-queue: @@ -1074,4 +1102,3 @@ account_keys: - mfPjCoE6SX0Sl84MnmNS/LS+tfPpkn7I8tziuk2g0WM= - 5RWlYF22jS9i76zLl8jP2D3D8GC5ht+IP1dWUBGZxi8= genesis: "[fake genesis]" - diff 
--git a/crates/sui-swarm-config/tests/snapshots/snapshot_tests__populated_genesis_snapshot_matches-2.snap b/crates/sui-swarm-config/tests/snapshots/snapshot_tests__populated_genesis_snapshot_matches-2.snap index 69a4cdf546adf..6e09f14c217bb 100644 --- a/crates/sui-swarm-config/tests/snapshots/snapshot_tests__populated_genesis_snapshot_matches-2.snap +++ b/crates/sui-swarm-config/tests/snapshots/snapshot_tests__populated_genesis_snapshot_matches-2.snap @@ -3,7 +3,7 @@ source: crates/sui-swarm-config/tests/snapshot_tests.rs expression: genesis.sui_system_object().into_genesis_version_for_tooling() --- epoch: 0 -protocol_version: 58 +protocol_version: 59 system_state_version: 1 validators: total_stake: 20000000000000000 @@ -240,13 +240,13 @@ validators: next_epoch_worker_address: ~ extra_fields: id: - id: "0x5fc92798fb05f6b4688c86746c6d3b7afe8bda616f4037638257f40a3d520a8e" + id: "0xf740d77b450cbd0a531eb3b573f35e6b21069dc749de720fc8c887e4bf91bd76" size: 0 voting_power: 10000 - operation_cap_id: "0x15319750e590d9aa5131ccf6d33ef52222b128cc51680b0bc2d26fb4bc661433" + operation_cap_id: "0x4f9d31d822ec13c45a1f77898c8410e1686d8b01a1e63865074992e2856b3cfe" gas_price: 1000 staking_pool: - id: "0x2fb1a8a7a99843b35191061d324e4d5eb808321efec57ac50b32d31ac9196fe6" + id: "0xb71bd5e5fc7db9fc330c655949fcaa8d269a06abe8eefb81306e789e06e39615" activation_epoch: 0 deactivation_epoch: ~ sui_balance: 20000000000000000 @@ -254,14 +254,14 @@ validators: value: 0 pool_token_balance: 20000000000000000 exchange_rates: - id: "0x44455057de5845a7bdd5038d9a17546a0b9d69b70e699d6b4c756e2611bd24ee" + id: "0x29797e161432eb7bffca4f03fc4dcf158460e64a3fb73fe2c58a9fb16899bd8d" size: 1 pending_stake: 0 pending_total_sui_withdraw: 0 pending_pool_token_withdraw: 0 extra_fields: id: - id: "0x73e6b469559e26d738cc0104fca64026e9f5154a0ce46b14ad248d4453013c3d" + id: "0x58fa63a2e5102a1d265d7b0fbee422bffcff5ef22e07b8337dca9b0656bb4fb3" size: 0 commission_rate: 200 next_epoch_stake: 20000000000000000 @@ -269,27 +269,27 @@ validators: next_epoch_commission_rate: 200 extra_fields: id: - id: "0xe9cae7b79620a6f8e84e3e09cddf253fa74436d9406b0da03af76b9ee4d77541" + id: "0xe8774814b22df59ae0930352d022e3b7e76d00ed6cb6a8534ccd370ab19162e2" size: 0 pending_active_validators: contents: - id: "0x00b086f5c9df1a01b4bc79e968bcfeaf4dc2d3db13edc06b8c24e4a2e468b861" + id: "0x986a1f233f6d82168b6de38e9bf83c8b24d382cd182b09dd17883d83889660ef" size: 0 pending_removals: [] staking_pool_mappings: - id: "0x100022e6b6cbf1597bc1f7e4b2a962aadc73e9c8e62552006c1c74d0edf18ced" + id: "0x464e6965816cea41b355d56a1aabedb9f37e8c0218d8c26aa964ee603a1ec4b9" size: 1 inactive_validators: - id: "0xc68c518dd907ca00db7ba7e5a822bef1a54947c6a7d59bd727f0356afc2ca9c9" + id: "0xc0a279eeb003db01998a6bf54aa8a7a3a583f3912db21d67c0f93bd4b101f2fc" size: 0 validator_candidates: - id: "0xeed2293c4aaa350ac747a098592c575f569a76aacd3efa2a3b8453bb97da13a2" + id: "0xab0af005295f6191b585383491fa12084024cc692445f2093c1f241c7d246211" size: 0 at_risk_validators: contents: [] extra_fields: id: - id: "0x4c89c796decc9a8423616883da124ed5d016f4cbdf70109ce0536ffdb2f4126f" + id: "0x75dbd2de4df6d7361316a5aa763510825fe305b5e43fe315ebcdbe2f3d614b3c" size: 0 storage_fund: total_object_storage_rebates: @@ -306,7 +306,7 @@ parameters: validator_low_stake_grace_period: 7 extra_fields: id: - id: "0x0dcff821419de846a6b6c524facbe3a89dd9566bbae975718ef39defc13ee085" + id: "0x74d93fd0bd4b8f55ce82e4976ed02b1403620c5b618f336a2e116bf6550c71ad" size: 0 reference_gas_price: 1000 validator_report_records: @@ -320,7 +320,7 @@ 
stake_subsidy: stake_subsidy_decrease_rate: 1000 extra_fields: id: - id: "0xc000927d246f1d68734f925d0f82c7b189b3a57236270971aa9ee3b5f19d6389" + id: "0x17c5551bb386c3b4e0d2ac8321819a3eaf1f14ac3c25cd1b794c0090639d2869" size: 0 safe_mode: false safe_mode_storage_rewards: @@ -332,5 +332,6 @@ safe_mode_non_refundable_storage_fee: 0 epoch_start_timestamp_ms: 10 extra_fields: id: - id: "0x042cfdec0c8903dc6d4033e0090c3a6081e9b7b3b57bcbe04511543e97d44b84" + id: "0x1599e9402c642857d6f303efaa7d4e898bdf6a8babc0cd37d6e6e0a929ed8269" size: 0 + diff --git a/crates/sui-tool/src/lib.rs b/crates/sui-tool/src/lib.rs index c748e6c76f49a..853445cf9f921 100644 --- a/crates/sui-tool/src/lib.rs +++ b/crates/sui-tool/src/lib.rs @@ -406,7 +406,7 @@ pub async fn get_transaction_block( .sorted_by(|(k1, err1, _), (k2, err2, _)| { Ord::cmp(k1, k2).then_with(|| Ord::cmp(err1, err2)) }) - .group_by(|(_, _err, r)| { + .chunk_by(|(_, _err, r)| { r.2.as_ref().map(|ok_result| match &ok_result.status { TransactionStatus::Signed(_) => None, TransactionStatus::Executed(_, effects, _) => Some(( diff --git a/crates/sui-transaction-checks/src/lib.rs b/crates/sui-transaction-checks/src/lib.rs index 8fe96a4b159cd..e55d62ebd859f 100644 --- a/crates/sui-transaction-checks/src/lib.rs +++ b/crates/sui-transaction-checks/src/lib.rs @@ -588,7 +588,7 @@ mod checked { .try_for_each(|module_bytes| { verifier.meter_module_bytes(protocol_config, module_bytes, meter.as_mut()) }) - .map_err(|e| UserInputError::PackageVerificationTimedout { err: e.to_string() }); + .map_err(|e| UserInputError::PackageVerificationTimeout { err: e.to_string() }); match verifier_status { Ok(_) => { diff --git a/crates/sui-types/src/balance.rs b/crates/sui-types/src/balance.rs index 85c19dbaa8424..0f5094b91af5f 100644 --- a/crates/sui-types/src/balance.rs +++ b/crates/sui-types/src/balance.rs @@ -78,10 +78,10 @@ impl Balance { pub fn layout(type_param: TypeTag) -> MoveStructLayout { MoveStructLayout { type_: Self::type_(type_param), - fields: vec![MoveFieldLayout::new( + fields: Box::new(vec![MoveFieldLayout::new( ident_str!("value").to_owned(), MoveTypeLayout::U64, - )], + )]), } } } diff --git a/crates/sui-types/src/bridge.rs b/crates/sui-types/src/bridge.rs index 84e65ed53fdd5..661a05cc052de 100644 --- a/crates/sui-types/src/bridge.rs +++ b/crates/sui-types/src/bridge.rs @@ -169,6 +169,22 @@ pub struct BridgeSummary { // TODO: add treasury } +impl Default for BridgeSummary { + fn default() -> Self { + BridgeSummary { + bridge_version: 1, + message_version: 1, + chain_id: 1, + sequence_nums: vec![], + committee: BridgeCommitteeSummary::default(), + treasury: BridgeTreasurySummary::default(), + bridge_records_id: ObjectID::random(), + limiter: BridgeLimiterSummary::default(), + is_frozen: false, + } + } +} + pub fn get_bridge_wrapper(object_store: &dyn ObjectStore) -> Result { let wrapper = object_store .get_object(&SUI_BRIDGE_OBJECT_ID)? 
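One subtlety in the Default impl above: bridge_records_id is drawn from ObjectID::random(), so every defaulted summary gets a fresh id. That makes the impl suited to test scaffolding rather than to canonical values; a minimal sketch of the consequence:

    let (a, b) = (BridgeSummary::default(), BridgeSummary::default());
    assert_ne!(a.bridge_records_id, b.bridge_records_id); // random id on every call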
diff --git a/crates/sui-types/src/coin.rs b/crates/sui-types/src/coin.rs index c03a93e47d910..d1f19defc6cc3 100644 --- a/crates/sui-types/src/coin.rs +++ b/crates/sui-types/src/coin.rs @@ -96,16 +96,16 @@ impl Coin { pub fn layout(type_param: TypeTag) -> MoveStructLayout { MoveStructLayout { type_: Self::type_(type_param.clone()), - fields: vec![ + fields: Box::new(vec![ MoveFieldLayout::new( ident_str!("id").to_owned(), - MoveTypeLayout::Struct(UID::layout()), + MoveTypeLayout::Struct(Box::new(UID::layout())), ), MoveFieldLayout::new( ident_str!("balance").to_owned(), - MoveTypeLayout::Struct(Balance::layout(type_param)), + MoveTypeLayout::Struct(Box::new(Balance::layout(type_param))), ), - ], + ]), } } diff --git a/crates/sui-types/src/dynamic_field.rs b/crates/sui-types/src/dynamic_field.rs index 357b7d840c46a..3f6068e9fa22e 100644 --- a/crates/sui-types/src/dynamic_field.rs +++ b/crates/sui-types/src/dynamic_field.rs @@ -240,7 +240,7 @@ fn extract_object_id(value: &MoveStruct) -> Option { extract_id_value(id_value) } -fn extract_id_value(id_value: &MoveValue) -> Option { +pub fn extract_id_value(id_value: &MoveValue) -> Option { // the id struct has a single bytes field let id_bytes_value = match id_value { MoveValue::Struct(MoveStruct { fields, .. }) => &fields.first()?.1, diff --git a/crates/sui-types/src/error.rs b/crates/sui-types/src/error.rs index bbf89ee82d91b..a1fc1f9a7ea56 100644 --- a/crates/sui-types/src/error.rs +++ b/crates/sui-types/src/error.rs @@ -98,13 +98,16 @@ pub enum UserInputError { object_id: ObjectID, version: Option, }, - #[error("Object {provided_obj_ref:?} is not available for consumption, its current version: {current_version:?}")] + #[error( + "Object ID {} Version {} Digest {} is not available for consumption, current version: {current_version}", + .provided_obj_ref.0, .provided_obj_ref.1, .provided_obj_ref.2 + )] ObjectVersionUnavailableForConsumption { provided_obj_ref: ObjectRef, current_version: SequenceNumber, }, #[error("Package verification failed: {err:?}")] - PackageVerificationTimedout { err: String }, + PackageVerificationTimeout { err: String }, #[error("Dependent package not found on-chain: {package_id:?}")] DependentPackageNotFound { package_id: ObjectID }, #[error("Mutable parameter provided, immutable parameter expected")] @@ -269,7 +272,7 @@ pub enum UserInputError { #[error("Transaction {:?} in Soft Bundle has already been executed", digest)] AlreadyExecutedError { digest: TransactionDigest }, #[error("At least one certificate in Soft Bundle has already been processed")] - CeritificateAlreadyProcessed, + CertificateAlreadyProcessed, #[error( "Gas price for transaction {:?} in Soft Bundle mismatch: want {:?}, have {:?}", digest, diff --git a/crates/sui-types/src/id.rs b/crates/sui-types/src/id.rs index a4410f1f1c3a3..ffb6314465145 100644 --- a/crates/sui-types/src/id.rs +++ b/crates/sui-types/src/id.rs @@ -61,10 +61,10 @@ impl UID { pub fn layout() -> MoveStructLayout { MoveStructLayout { type_: Self::type_(), - fields: vec![MoveFieldLayout::new( + fields: Box::new(vec![MoveFieldLayout::new( ident_str!("id").to_owned(), - MoveTypeLayout::Struct(ID::layout()), - )], + MoveTypeLayout::Struct(Box::new(ID::layout())), + )]), } } } @@ -86,10 +86,10 @@ impl ID { pub fn layout() -> MoveStructLayout { MoveStructLayout { type_: Self::type_(), - fields: vec![MoveFieldLayout::new( + fields: Box::new(vec![MoveFieldLayout::new( ident_str!("bytes").to_owned(), MoveTypeLayout::Address, - )], + )]), } } } diff --git 
a/crates/sui-types/src/layout_resolver.rs b/crates/sui-types/src/layout_resolver.rs index 372e0fd86eb14..cc551ed5aa540 100644 --- a/crates/sui-types/src/layout_resolver.rs +++ b/crates/sui-types/src/layout_resolver.rs @@ -38,7 +38,7 @@ pub fn get_layout_from_struct_tag( pub fn into_struct_layout(layout: A::MoveDatatypeLayout) -> Result { match layout { - A::MoveDatatypeLayout::Struct(s) => Ok(s), + A::MoveDatatypeLayout::Struct(s) => Ok(*s), A::MoveDatatypeLayout::Enum(e) => Err(SuiError::ObjectSerializationError { error: format!("Expected struct layout but got an enum {e:?}"), }), diff --git a/crates/sui-types/src/object.rs b/crates/sui-types/src/object.rs index 97f0f7a7e2c7b..04146c2699e08 100644 --- a/crates/sui-types/src/object.rs +++ b/crates/sui-types/src/object.rs @@ -314,7 +314,7 @@ impl MoveObject { } })?; match layout { - MoveTypeLayout::Struct(l) => Ok(l), + MoveTypeLayout::Struct(l) => Ok(*l), _ => unreachable!( "We called build_with_types on Struct type, should get a struct layout" ), @@ -1204,73 +1204,83 @@ impl Display for PastObjectRead { } } -// Ensure that object digest computation and bcs serialized format are not inadvertently changed. -#[test] -fn test_object_digest_and_serialized_format() { - let g = GasCoin::new_for_testing_with_id(ObjectID::ZERO, 123).to_object(OBJECT_START_VERSION); - let o = Object::new_move( - g, - Owner::AddressOwner(SuiAddress::ZERO), - TransactionDigest::ZERO, - ); - let bytes = bcs::to_bytes(&o).unwrap(); - - assert_eq!( - bytes, - [ - 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 40, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 123, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 32, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 - ] - ); - - let objref = format!("{:?}", o.compute_object_reference()); - assert_eq!(objref, "(0x0000000000000000000000000000000000000000000000000000000000000000, SequenceNumber(1), o#59tZq65HVqZjUyNtD7BCGLTD87N5cpayYwEFrtwR4aMz)"); -} +#[cfg(test)] +mod tests { + use crate::object::{Object, Owner, OBJECT_START_VERSION}; + use crate::{ + base_types::{ObjectID, SuiAddress, TransactionDigest}, + gas_coin::GasCoin, + }; + + // Ensure that object digest computation and bcs serialized format are not inadvertently changed. 
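// (On the Box::new(..) wrappers and the Ok(*s) / Ok(*l) unboxing in the hunks above:
// MoveTypeLayout now carries its Struct payload, and MoveStructLayout its field list,
// behind a Box, presumably to keep the layout enum's inline size small. Call sites only
// gain a Box::new at construction and a deref when unpacking; behavior is unchanged.)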
+ #[test] + fn test_object_digest_and_serialized_format() { + let g = + GasCoin::new_for_testing_with_id(ObjectID::ZERO, 123).to_object(OBJECT_START_VERSION); + let o = Object::new_move( + g, + Owner::AddressOwner(SuiAddress::ZERO), + TransactionDigest::ZERO, + ); + let bytes = bcs::to_bytes(&o).unwrap(); + + assert_eq!( + bytes, + [ + 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 40, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 123, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 + ] + ); -#[test] -fn test_get_coin_value_unsafe() { - fn test_for_value(v: u64) { - let g = GasCoin::new_for_testing(v).to_object(OBJECT_START_VERSION); - assert_eq!(g.get_coin_value_unsafe(), v); - assert_eq!(GasCoin::try_from(&g).unwrap().value(), v); - } - - test_for_value(0); - test_for_value(1); - test_for_value(8); - test_for_value(9); - test_for_value(u8::MAX as u64); - test_for_value(u8::MAX as u64 + 1); - test_for_value(u16::MAX as u64); - test_for_value(u16::MAX as u64 + 1); - test_for_value(u32::MAX as u64); - test_for_value(u32::MAX as u64 + 1); - test_for_value(u64::MAX); -} + let objref = format!("{:?}", o.compute_object_reference()); + assert_eq!(objref, "(0x0000000000000000000000000000000000000000000000000000000000000000, SequenceNumber(1), o#59tZq65HVqZjUyNtD7BCGLTD87N5cpayYwEFrtwR4aMz)"); + } -#[test] -fn test_set_coin_value_unsafe() { - fn test_for_value(v: u64) { - let mut g = GasCoin::new_for_testing(u64::MAX).to_object(OBJECT_START_VERSION); - g.set_coin_value_unsafe(v); - assert_eq!(g.get_coin_value_unsafe(), v); - assert_eq!(GasCoin::try_from(&g).unwrap().value(), v); - assert_eq!(g.version(), OBJECT_START_VERSION); - assert_eq!(g.contents().len(), 40); - } - - test_for_value(0); - test_for_value(1); - test_for_value(8); - test_for_value(9); - test_for_value(u8::MAX as u64); - test_for_value(u8::MAX as u64 + 1); - test_for_value(u16::MAX as u64); - test_for_value(u16::MAX as u64 + 1); - test_for_value(u32::MAX as u64); - test_for_value(u32::MAX as u64 + 1); - test_for_value(u64::MAX); + #[test] + fn test_get_coin_value_unsafe() { + fn test_for_value(v: u64) { + let g = GasCoin::new_for_testing(v).to_object(OBJECT_START_VERSION); + assert_eq!(g.get_coin_value_unsafe(), v); + assert_eq!(GasCoin::try_from(&g).unwrap().value(), v); + } + + test_for_value(0); + test_for_value(1); + test_for_value(8); + test_for_value(9); + test_for_value(u8::MAX as u64); + test_for_value(u8::MAX as u64 + 1); + test_for_value(u16::MAX as u64); + test_for_value(u16::MAX as u64 + 1); + test_for_value(u32::MAX as u64); + test_for_value(u32::MAX as u64 + 1); + test_for_value(u64::MAX); + } + + #[test] + fn test_set_coin_value_unsafe() { + fn test_for_value(v: u64) { + let mut g = GasCoin::new_for_testing(u64::MAX).to_object(OBJECT_START_VERSION); + g.set_coin_value_unsafe(v); + assert_eq!(g.get_coin_value_unsafe(), v); + assert_eq!(GasCoin::try_from(&g).unwrap().value(), v); + assert_eq!(g.version(), OBJECT_START_VERSION); + assert_eq!(g.contents().len(), 40); + } + + test_for_value(0); + test_for_value(1); + test_for_value(8); + test_for_value(9); + test_for_value(u8::MAX as u64); + test_for_value(u8::MAX as u64 + 1); + test_for_value(u16::MAX as u64); + test_for_value(u16::MAX as u64 + 1); + test_for_value(u32::MAX as u64); + test_for_value(u32::MAX as u64 + 1); + 
test_for_value(u64::MAX); + } } diff --git a/crates/sui-types/src/object/balance_traversal.rs b/crates/sui-types/src/object/balance_traversal.rs index 81b055d62caa5..cbbcc49236d74 100644 --- a/crates/sui-types/src/object/balance_traversal.rs +++ b/crates/sui-types/src/object/balance_traversal.rs @@ -255,7 +255,7 @@ mod tests { layout_( &format!("0x2::coin::Coin<{tag}>"), vec![ - ("id", A::MoveTypeLayout::Struct(UID::layout())), + ("id", A::MoveTypeLayout::Struct(Box::new(UID::layout()))), ("balance", bal_t(tag)), ], ) @@ -285,7 +285,10 @@ mod tests { .map(|(name, layout)| A::MoveFieldLayout::new(Identifier::new(name).unwrap(), layout)) .collect(); - A::MoveTypeLayout::Struct(A::MoveStructLayout { type_, fields }) + A::MoveTypeLayout::Struct(Box::new(A::MoveStructLayout { + type_, + fields: Box::new(fields), + })) } /// BCS encode Move value. diff --git a/crates/sui-types/src/object/bounded_visitor.rs b/crates/sui-types/src/object/bounded_visitor.rs index c4f654a01cc94..54efcbccf7407 100644 --- a/crates/sui-types/src/object/bounded_visitor.rs +++ b/crates/sui-types/src/object/bounded_visitor.rs @@ -205,7 +205,7 @@ impl Visitor for BoundedVisitor { let tag = driver.struct_layout().type_.clone().into(); self.debit_type_size(&tag)?; - for field in &driver.struct_layout().fields { + for field in driver.struct_layout().fields.iter() { self.debit(field.name.len())?; } @@ -469,7 +469,10 @@ mod tests { .map(|(name, layout)| A::MoveFieldLayout::new(Identifier::new(name).unwrap(), layout)) .collect(); - A::MoveTypeLayout::Struct(A::MoveStructLayout { type_, fields }) + A::MoveTypeLayout::Struct(Box::new(A::MoveStructLayout { + type_, + fields: Box::new(fields), + })) } /// BCS encode Move value. diff --git a/crates/sui-types/src/quorum_driver_types.rs b/crates/sui-types/src/quorum_driver_types.rs index 252ede2c5b810..6a0b6e39d1a07 100644 --- a/crates/sui-types/src/quorum_driver_types.rs +++ b/crates/sui-types/src/quorum_driver_types.rs @@ -31,20 +31,19 @@ pub const NON_RECOVERABLE_ERROR_MSG: &str = /// Every invariant needs detailed documents to instruct client handling. 
#[derive(Eq, PartialEq, Clone, Debug, Serialize, Deserialize, Error, Hash, AsRefStr)] pub enum QuorumDriverError { - #[error("QuorumDriver internal error: {0:?}.")] + #[error("QuorumDriver internal error: {0}.")] QuorumDriverInternalError(SuiError), - #[error("Invalid user signature: {0:?}.")] + #[error("Invalid user signature: {0}.")] InvalidUserSignature(SuiError), #[error( "Failed to sign transaction by a quorum of validators because of locked objects: {:?}, retried a conflicting transaction {:?}, success: {:?}", conflicting_txes, - retried_tx, - retried_tx_success + .retried_tx_status.map(|(tx, success)| tx), + .retried_tx_status.map(|(tx, success)| success), )] ObjectsDoubleUsed { conflicting_txes: BTreeMap, StakeUnit)>, - retried_tx: Option, - retried_tx_success: Option, + retried_tx_status: Option<(TransactionDigest, bool)>, }, #[error("Transaction timed out before reaching finality")] TimeoutBeforeFinality, diff --git a/crates/sui/src/keytool.rs b/crates/sui/src/keytool.rs index a2a02b531574e..88f70f8718f91 100644 --- a/crates/sui/src/keytool.rs +++ b/crates/sui/src/keytool.rs @@ -951,7 +951,7 @@ impl KeyToolCommand { ) .await .unwrap(); - let (_, aud) = parse_and_validate_jwt(&parsed_token).unwrap(); + let (_, aud, _) = parse_and_validate_jwt(&parsed_token).unwrap(); let address_seed = gen_address_seed(user_salt, "sub", sub, &aud).unwrap(); let zk_login_inputs = ZkLoginInputs::from_reader(reader, &address_seed.to_string()).unwrap(); @@ -1114,6 +1114,7 @@ impl KeyToolCommand { "https://api.ambrus.studio/callback", &jwt_randomness, )?; + // This is only for CLI testing. If frontend apps will be built, no need to add anything here. println!("Visit URL (Google): {url}"); println!("Visit URL (Twitch): {url_2}"); println!("Visit URL (Facebook): {url_3}"); diff --git a/crates/sui/src/sui_commands.rs b/crates/sui/src/sui_commands.rs index 569007d9c2d5e..93af3d3623e42 100644 --- a/crates/sui/src/sui_commands.rs +++ b/crates/sui/src/sui_commands.rs @@ -7,7 +7,7 @@ use crate::fire_drill::{run_fire_drill, FireDrill}; use crate::genesis_ceremony::{run, Ceremony}; use crate::keytool::KeyToolCommand; use crate::validator_commands::SuiValidatorCommand; -use anyhow::{anyhow, bail, ensure}; +use anyhow::{anyhow, bail, ensure, Context}; use clap::*; use fastcrypto::traits::KeyPair; use move_analyzer::analyzer; @@ -20,6 +20,7 @@ use std::path::{Path, PathBuf}; use std::sync::Arc; use std::{fs, io}; use sui_bridge::config::BridgeCommitteeConfig; +use sui_bridge::metrics::BridgeMetrics; use sui_bridge::sui_client::SuiBridgeClient; use sui_bridge::sui_transaction_builder::build_committee_register_transaction; use sui_config::node::Genesis; @@ -504,7 +505,8 @@ impl SuiCommand { let rgp = context.get_reference_gas_price().await?; let rpc_url = &context.config.get_active_env()?.rpc; println!("rpc_url: {}", rpc_url); - let sui_bridge_client = SuiBridgeClient::new(rpc_url).await?; + let bridge_metrics = Arc::new(BridgeMetrics::new_for_testing()); + let sui_bridge_client = SuiBridgeClient::new(rpc_url, bridge_metrics).await?; let bridge_arg = sui_bridge_client .get_mutable_bridge_object_arg_must_succeed() .await; @@ -1138,10 +1140,26 @@ async fn prompt_if_no_config( }; if let Some(env) = env { - let keystore_path = wallet_conf_path - .parent() - .unwrap_or(&sui_config_dir()?) - .join(SUI_KEYSTORE_FILENAME); + let keystore_path = match wallet_conf_path.parent() { + // Wallet config was created in the current directory as a relative path. 
+ Some(parent) if parent.as_os_str().is_empty() => { + std::env::current_dir().context("Couldn't find current directory")? + } + + // Wallet config was given a path with some parent (could be relative or absolute). + Some(parent) => parent + .canonicalize() + .context("Could not find sui config directory")?, + + // No parent component and the wallet config was the empty string, use the default + // config. + None if wallet_conf_path.as_os_str().is_empty() => sui_config_dir()?, + + // Wallet config was requested at the root of the file system ...for some reason. + None => wallet_conf_path.to_owned(), + } + .join(SUI_KEYSTORE_FILENAME); + let mut keystore = Keystore::from(FileBasedKeystore::new(&keystore_path)?); let key_scheme = if accept_defaults { SignatureScheme::ED25519 diff --git a/crates/sui/src/validator_commands.rs b/crates/sui/src/validator_commands.rs index 1034aa3b7a317..0e4465c844c92 100644 --- a/crates/sui/src/validator_commands.rs +++ b/crates/sui/src/validator_commands.rs @@ -8,6 +8,7 @@ use std::{ fmt::{self, Debug, Display, Formatter, Write}, fs, path::PathBuf, + sync::Arc, }; use sui_genesis_builder::validator_info::GenesisValidatorInfo; use url::{ParseError, Url}; @@ -35,6 +36,7 @@ use fastcrypto::{ }; use serde::Serialize; use shared_crypto::intent::{Intent, IntentMessage, IntentScope}; +use sui_bridge::metrics::BridgeMetrics; use sui_bridge::sui_client::SuiClient as SuiBridgeClient; use sui_bridge::sui_transaction_builder::{ build_committee_register_transaction, build_committee_update_url_transaction, @@ -543,7 +545,8 @@ impl SuiValidatorCommand { } println!("Starting bridge committee registration for Sui validator: {address}, with bridge public key: {} and url: {}", ecdsa_keypair.public, bridge_authority_url); let sui_rpc_url = &context.config.get_active_env().unwrap().rpc; - let bridge_client = SuiBridgeClient::new(sui_rpc_url).await?; + let bridge_metrics = Arc::new(BridgeMetrics::new_for_testing()); + let bridge_client = SuiBridgeClient::new(sui_rpc_url, bridge_metrics).await?; let bridge = bridge_client .get_mutable_bridge_object_arg_must_succeed() .await; @@ -604,7 +607,8 @@ impl SuiValidatorCommand { print_unsigned_transaction_only, )?; let sui_rpc_url = &context.config.get_active_env().unwrap().rpc; - let bridge_client = SuiBridgeClient::new(sui_rpc_url).await?; + let bridge_metrics = Arc::new(BridgeMetrics::new_for_testing()); + let bridge_client = SuiBridgeClient::new(sui_rpc_url, bridge_metrics).await?; let committee_members = bridge_client .get_bridge_summary() .await diff --git a/crates/sui/src/zklogin_commands_util.rs b/crates/sui/src/zklogin_commands_util.rs index 53b3720824509..01acf6b9f8c53 100644 --- a/crates/sui/src/zklogin_commands_util.rs +++ b/crates/sui/src/zklogin_commands_util.rs @@ -95,7 +95,7 @@ pub async fn perform_zk_login_test_tx( println!("ZkLogin inputs:"); println!("{:?}", serde_json::to_string(&reader).unwrap()); - let (sub, aud) = parse_and_validate_jwt(parsed_token)?; + let (sub, aud, _) = parse_and_validate_jwt(parsed_token)?; let address_seed = gen_address_seed(&user_salt, "sub", &sub, &aud)?; let zk_login_inputs = ZkLoginInputs::from_reader(reader, &address_seed)?; diff --git a/crates/suiop-cli/Cargo.toml b/crates/suiop-cli/Cargo.toml index 65d4763c07d5a..43eb0695c4d46 100644 --- a/crates/suiop-cli/Cargo.toml +++ b/crates/suiop-cli/Cargo.toml @@ -29,6 +29,7 @@ docker-api.workspace = true field_names.workspace = true include_dir.workspace = true inquire.workspace = true +itertools.workspace = true open = "5.1.2" 
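The rewritten `keystore_path` logic above is driven by the edge cases of `std::path::Path::parent`, which are easy to get wrong. A minimal standalone sketch of the behaviour the new match arms account for (plain `std`, independent of the Sui CLI):

```rust
use std::path::Path;

fn main() {
    // A bare file name in the current directory: the parent is Some(""),
    // which is why the code falls back to std::env::current_dir() when the
    // parent is the empty path.
    assert_eq!(Path::new("client.yaml").parent(), Some(Path::new("")));

    // A path with a directory component: the parent is that directory and
    // can be canonicalized.
    assert_eq!(
        Path::new("conf/client.yaml").parent(),
        Some(Path::new("conf"))
    );

    // The filesystem root has no parent at all, matching the final None arm.
    assert_eq!(Path::new("/").parent(), None);
}
```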
prettytable-rs.workspace = true rand.workspace = true @@ -50,6 +51,8 @@ toml_edit.workspace = true tracing-subscriber.workspace = true tracing.workspace = true once_cell.workspace = true +futures.workspace = true +strsim = "0.11.1" [dev-dependencies] tempfile.workspace = true diff --git a/crates/suiop-cli/src/cli/incidents/incident.rs b/crates/suiop-cli/src/cli/incidents/incident.rs new file mode 100644 index 0000000000000..4d6d30056371f --- /dev/null +++ b/crates/suiop-cli/src/cli/incidents/incident.rs @@ -0,0 +1,152 @@ +// Copyright (c) Mysten Labs, Inc. +// SPDX-License-Identifier: Apache-2.0 + +use anyhow::Result; +use chrono::NaiveDateTime; +use chrono::Utc; +use colored::{ColoredString, Colorize}; +use serde::{Deserialize, Serialize}; + +use super::pd::PagerDutyIncident; +use super::pd::Priority; +use crate::cli::slack::{Channel, User}; + +const DATE_FORMAT_IN: &str = "%Y-%m-%dT%H:%M:%SZ"; +const DATE_FORMAT_OUT: &str = "%m/%d/%Y %H:%M"; +const DATE_FORMAT_OUT_SHORT: &str = "%m/%d/%y"; + +#[derive(Debug, Deserialize, Serialize, Clone, Default)] +pub struct Incident { + pub number: u64, + pub title: String, + pub created_at: Option<String>, + pub resolved_at: Option<String>, + pub html_url: String, + /// The slack users responsible for reporting + #[serde(skip_deserializing)] + pub poc_users: Option<Vec<User>>, + pub priority: Option<Priority>, + pub slack_channel: Option<Channel>, +} + +impl From<PagerDutyIncident> for Incident { + fn from(p: PagerDutyIncident) -> Self { + Self { + number: p.number, + title: p.title, + created_at: p.created_at, + resolved_at: p.resolved_at, + html_url: p.html_url, + poc_users: None, + priority: p.priority, + slack_channel: None, + } + } +} + +impl Incident { + pub fn print(&self, long_output: bool) -> Result<()> { + let priority = self.priority(); + if long_output { + println!( + "Incident #: {} {}", + self.number.to_string().bright_purple(), + if priority.is_empty() { + "".to_string() + } else { + format!("({})", priority) + } + ); + println!("Title: {}", self.title.green()); + if let Some(created_at) = self.created_at.clone() { + println!( + "Created at: {}", + NaiveDateTime::parse_from_str(&created_at, DATE_FORMAT_IN)? + .format(DATE_FORMAT_OUT) + .to_string() + .yellow() + ); + } + if let Some(resolved_at) = self.resolved_at.clone() { + println!( + "Resolved at: {}", + NaiveDateTime::parse_from_str(&resolved_at, DATE_FORMAT_IN)? + .format(DATE_FORMAT_OUT) + .to_string() + .yellow() + ); + } + println!("URL: {}", self.html_url.bright_purple()); + if let Some(channel) = self.slack_channel.clone() { + println!("Predicted Slack channel: {}", channel.url().bright_purple()); + } + println!("---"); + } else { + let resolved_at = if let Some(resolved_at) = self.resolved_at.clone() { + let now = Utc::now().naive_utc(); + + Some(now - NaiveDateTime::parse_from_str(&resolved_at, DATE_FORMAT_IN)?)
+ } else { + None + }; + println!( + "{}:{}{} {} ({})", + self.number.to_string().bright_purple(), + resolved_at + .map(|v| (v.num_days().to_string() + "d").yellow()) + .unwrap_or("".to_string().yellow()), + if priority.is_empty() { + " ".to_string() + } else { + format!(" {} ", priority) + }, + self.title.green(), + if let Some(channel) = self.slack_channel.clone() { + format!("({})", channel.url().bright_magenta()) + } else { + self.html_url.bright_purple().to_string() + } + ); + } + Ok(()) + } + + pub fn priority(&self) -> ColoredString { + // println!("{}", self.priority.as_ref().unwrap_or(&"none".to_string())); + match self.priority.clone().map(|p| p.name).as_deref() { + Some("P0") => "P0".red(), + Some("P1") => "P1".magenta(), + Some("P2") => "P2".truecolor(255, 165, 0), + Some("P3") => "P3".yellow(), + Some("P4") => "P4".white(), + _ => "".white(), + } + } + + pub fn short_fmt(&self) -> String { + format!( + "• {} {} {} {}", + if let Some(channel) = self.slack_channel.clone() { + format!("{} (<#{}>)", self.number, channel.id) + } else { + self.number.to_string() + }, + self.resolved_at + .clone() + .map(|c| NaiveDateTime::parse_from_str(&c, DATE_FORMAT_IN) + .expect("parsing closed date") + .format(DATE_FORMAT_OUT_SHORT) + .to_string()) + .unwrap_or("".to_owned()), + self.title, + self.poc_users.as_ref().map_or_else( + || "".to_string(), + |u| u + .iter() + .map(|u| { format!("<@{}>", u.id) }) + .collect::<Vec<_>>() + .join(", ") + ) + ) + } +} diff --git a/crates/suiop-cli/src/cli/incidents/mod.rs b/crates/suiop-cli/src/cli/incidents/mod.rs index 1a583c1ec1ab1..657dcd3d624e0 100644 --- a/crates/suiop-cli/src/cli/incidents/mod.rs +++ b/crates/suiop-cli/src/cli/incidents/mod.rs @@ -1,16 +1,21 @@ // Copyright (c) Mysten Labs, Inc. // SPDX-License-Identifier: Apache-2.0 +mod incident; mod jira; mod pd; -mod slack; +mod selection; +use crate::cli::slack::Slack; use anyhow::Result; use chrono::{Duration, Local}; use clap::Parser; +use incident::Incident; use jira::generate_follow_up_tasks; -use pd::{fetch_incidents, print_recent_incidents, review_recent_incidents}; +use pd::print_recent_incidents; +use selection::review_recent_incidents; use std::path::PathBuf; +use tracing::debug; #[derive(Parser, Debug, Clone)] pub struct IncidentsArgs { @@ -47,6 +52,28 @@ pub enum IncidentsAction { }, } +/// - Fetch incidents from the PagerDuty API. +/// - Associate slack channels when they exist. +/// - Return the combined incident list. +async fn get_incidents(limit: &usize, days: &usize) -> Result<Vec<Incident>> { + let current_time = Local::now(); + let start_time = current_time - Duration::days(*days as i64); + let slack = Slack::new().await; + Ok(pd::fetch_incidents(*limit, start_time, current_time) + .await?
+ .into_iter() + // Change into more robust Incident type + .map(incident::Incident::from) + .map(|mut incident| { + // Add associated slack channel if it exists + debug!("Checking if incidents list contains {}", incident.number); + incident.slack_channel = selection::get_channel_for(&incident, &slack).cloned(); + debug!("Found channel: {:?}", incident.slack_channel); + incident + }) + .collect()) +} + pub async fn incidents_cmd(args: &IncidentsArgs) -> Result<()> { match &args.action { IncidentsAction::GetRecentIncidents { @@ -56,10 +83,7 @@ pub async fn incidents_cmd(args: &IncidentsArgs) -> Result<()> { with_priority, interactive, } => { - let current_time = Local::now(); - let start_time = current_time - Duration::days(*days as i64); - - let incidents = fetch_incidents(*limit, start_time, current_time).await?; + let incidents = get_incidents(limit, days).await?; if *interactive { review_recent_incidents(incidents).await? } else { diff --git a/crates/suiop-cli/src/cli/incidents/pd.rs b/crates/suiop-cli/src/cli/incidents/pd.rs deleted file mode 100644 index 2434f775e9a70..0000000000000 --- a/crates/suiop-cli/src/cli/incidents/pd.rs +++ /dev/null @@ -1,323 +0,0 @@ -// Copyright (c) Mysten Labs, Inc. -// SPDX-License-Identifier: Apache-2.0 - -use anyhow::Result; -use chrono::Utc; -use chrono::{DateTime, Local, NaiveDateTime}; -use colored::{ColoredString, Colorize}; -use inquire::Confirm; -use reqwest; -use reqwest::header::HeaderMap; -use reqwest::header::ACCEPT; -use reqwest::header::AUTHORIZATION; -use serde::{Deserialize, Serialize}; -use serde_json::Value as JsonValue; -use std::env; -use tracing::debug; - -use crate::cli::incidents::slack::Channel; -use crate::cli::lib::utils::day_of_week; -use crate::DEBUG_MODE; - -#[derive(Debug, Deserialize, Serialize, Clone)] -pub struct Priority { - name: String, - id: String, - color: String, -} - -#[derive(Debug, Deserialize, Serialize)] -pub struct Incident { - #[serde(rename = "incident_number")] - number: u64, - title: String, - created_at: Option, - resolved_at: Option, - html_url: String, - priority: Option, - slack_channel: Option, -} -const DATE_FORMAT_IN: &str = "%Y-%m-%dT%H:%M:%SZ"; -const DATE_FORMAT_OUT: &str = "%m/%d/%Y %H:%M"; -const DATE_FORMAT_OUT_SHORT: &str = "%m/%d/%y"; - -impl Incident { - fn print(&self, long_output: bool) -> Result<()> { - let priority = self.priority(); - if long_output { - println!( - "Incident #: {} {}", - self.number.to_string().bright_purple(), - if priority.is_empty() { - "".to_string() - } else { - format!("({})", priority) - } - ); - println!("Title: {}", self.title.green()); - if let Some(created_at) = self.created_at.clone() { - println!( - "Created at: {}", - NaiveDateTime::parse_from_str(&created_at, DATE_FORMAT_IN)? - .format(DATE_FORMAT_OUT) - .to_string() - .yellow() - ); - } - if let Some(resolved_at) = self.resolved_at.clone() { - println!( - "Resolved at: {}", - NaiveDateTime::parse_from_str(&resolved_at, DATE_FORMAT_IN)? - .format(DATE_FORMAT_OUT) - .to_string() - .yellow() - ); - } - println!("URL: {}", self.html_url.bright_purple()); - if let Some(channel) = self.slack_channel.clone() { - println!("Predicted Slack channel: {}", channel.url().bright_purple()); - } - println!("---"); - } else { - let resolved_at = if let Some(resolved_at) = self.resolved_at.clone() { - let now = Utc::now().naive_utc(); - - Some(now - NaiveDateTime::parse_from_str(&resolved_at, DATE_FORMAT_IN)?) 
- } else { - None - }; - println!( - "{}:{}{} {} ({})", - self.number.to_string().bright_purple(), - resolved_at - .map(|v| (v.num_days().to_string() + "d").yellow()) - .unwrap_or("".to_string().yellow()), - if priority.is_empty() { - " ".to_string() - } else { - format!(" {} ", priority) - }, - self.title.green(), - if let Some(channel) = self.slack_channel.clone() { - format!("({})", channel.url().bright_magenta()) - } else { - self.html_url.bright_purple().to_string() - } - ); - } - Ok(()) - } - - fn priority(&self) -> ColoredString { - // println!("{}", self.priority.as_ref().unwrap_or(&"none".to_string())); - match self.priority.clone().map(|p| p.name).as_deref() { - Some("P0") => "P0".red(), - Some("P1") => "P1".magenta(), - Some("P2") => "P2".truecolor(255, 165, 0), - Some("P3") => "P3".yellow(), - Some("P4") => "P4".white(), - _ => "".white(), - } - } -} - -/// Fetch incidents from the API using the given parameters until {limit} incidents have been received. -pub async fn fetch_incidents( - limit: usize, - start_time: DateTime, - _end_time: DateTime, -) -> Result> { - let slack = super::slack::Slack::new().await; - let url = "https://api.pagerduty.com/incidents"; - - let mut headers = HeaderMap::new(); - headers.insert( - AUTHORIZATION, - format!( - "Token token={}", - env::var("PD_API_KEY").expect("please set the PD_API_KEY env var") - ) - .parse() - .expect("header parsing"), - ); - headers.insert( - ACCEPT, - "application/vnd.pagerduty+json;version=2" - .parse() - .expect("header parsing"), - ); - - let mut more_records = true; - let mut all_incidents = vec![]; - let mut offset = 0; - while more_records { - let params = [ - ("offset", offset.to_string()), - ("limit", limit.to_string()), - ("sort_by", "resolved_at:desc".to_owned()), - ("date_range", "all".to_owned()), - ("statuses[]", "resolved".to_owned()), - ]; - let client = reqwest::Client::new(); - let response = client - .get(url) - .headers(headers.clone()) - .query(¶ms) - .send() - .await?; - let response = &response.json::().await?; - let incidents_received: Vec = - serde_json::from_value(response["incidents"].clone())?; - let count_received = incidents_received.len(); - - offset += count_received; - more_records = response["more"].as_bool().expect("'more' was not a bool"); - - let truncated_incidents_received: Vec<_> = incidents_received - .clone() - .into_iter() - .take_while(|i| { - let latest_resolved_at: DateTime = - serde_json::from_value(i["resolved_at"].clone()).unwrap(); - latest_resolved_at > start_time - }) - .collect(); - let num_truncated_incidents = truncated_incidents_received.len(); - all_incidents.extend(truncated_incidents_received); - if all_incidents.len() >= limit { - // don't need any more incidents. 
- all_incidents.truncate(limit); - break; - } - if num_truncated_incidents < incidents_received.len() { - // we already got all incidents that were resolved in the given time - break; - } - } - Ok(all_incidents - .into_iter() - .map(serde_json::from_value) - .filter_map(|i| i.ok()) - .map(|mut i: Incident| { - debug!("Checking if incidents list contains {}", i.number); - i.slack_channel = slack - .channels - .iter() - .find(|c| c.name.contains(&i.number.to_string())) - .cloned(); - debug!("Found channel: {:?}", i.slack_channel); - i - }) - .collect()) -} - -pub async fn print_recent_incidents( - incidents: Vec, - long_output: bool, - with_priority: bool, -) -> Result<()> { - for incident in &incidents { - if with_priority && incident.priority() == " ".white() { - // skip incidents without priority - continue; - } - incident.print(long_output)?; - } - Ok(()) -} - -pub async fn review_recent_incidents(incidents: Vec) -> Result<()> { - let mut to_review = vec![]; - let mut excluded = vec![]; - for incident in incidents { - incident.print(false)?; - let ans = Confirm::new("Keep this incident for review?") - .with_default(false) - .prompt() - .expect("Unexpected response"); - if ans { - to_review.push(incident); - } else { - excluded.push(incident); - } - } - println!( - "Incidents marked for review: {}", - to_review - .iter() - .map(|i| i.number.to_string()) - .collect::>() - .join(", ") - ); - - fn short_print(i: &Incident) -> String { - format!( - "• {} {} {} {}", - if let Some(channel) = i.slack_channel.clone() { - format!("{} ({})", i.number, channel.url()) - } else { - i.number.to_string() - }, - i.resolved_at - .clone() - .map(|c| NaiveDateTime::parse_from_str(&c, DATE_FORMAT_IN) - .expect("parsing closed date") - .format(DATE_FORMAT_OUT_SHORT) - .to_string()) - .unwrap_or("".to_owned()), - i.title, - i.slack_channel - .clone() - .map(|c| c.name) - .unwrap_or("".to_string()), - ) - } - - let message = format!( - " -Hello everyone and happy {}! - -We have selected the following incidents for review: -{} - -and the following incidents have been excluded from review: -{} - -Please comment in the thread to request an adjustment to the list.", - day_of_week(), - to_review - .iter() - .map(short_print) - .collect::>() - .join("\n"), - excluded - .iter() - .map(short_print) - .collect::>() - .join("\n") - ); - println!( - "Here is the message to send in the channel: - {} - ", - message - ); - let slack_channel = if *DEBUG_MODE { - "test-notifications" - } else { - "incident-postmortems" - }; - let ans = Confirm::new(&format!( - "Send this message to the #{} channel?", - slack_channel - )) - .with_default(false) - .prompt() - .expect("Unexpected response"); - if ans { - let slack = super::slack::Slack::new().await; - slack.send_message(slack_channel, &message).await?; - } - // post to https://slack.com/api/chat.postMessage with message - Ok(()) -} diff --git a/crates/suiop-cli/src/cli/incidents/pd/mod.rs b/crates/suiop-cli/src/cli/incidents/pd/mod.rs new file mode 100644 index 0000000000000..ff96fcd4ece71 --- /dev/null +++ b/crates/suiop-cli/src/cli/incidents/pd/mod.rs @@ -0,0 +1,136 @@ +// Copyright (c) Mysten Labs, Inc. 
+// SPDX-License-Identifier: Apache-2.0 + +use anyhow::Result; +use chrono::{DateTime, Local}; +use colored::Colorize; +use reqwest; +use reqwest::header::HeaderMap; +use reqwest::header::ACCEPT; +use reqwest::header::AUTHORIZATION; +use serde::{Deserialize, Serialize}; +use serde_json::Value as JsonValue; +use std::env; + +use super::incident::Incident; + +#[derive(Debug, Deserialize, Serialize, Clone, Default)] +pub struct Priority { + pub name: String, + id: String, + color: String, +} + +impl Priority { + pub fn u8(&self) -> u8 { + self.name + .trim_start_matches("P") + .parse() + .expect("Parsing priority") + } +} + +#[derive(Debug, Deserialize, Serialize, Clone, Default)] +pub(crate) struct PagerDutyIncident { + #[serde(rename = "incident_number")] + pub number: u64, + pub title: String, + pub created_at: Option<String>, + pub resolved_at: Option<String>, + pub html_url: String, + pub priority: Option<Priority>, +} + +/// Fetch incidents from the API using the given parameters until {limit} incidents have been received. +pub async fn fetch_incidents( + limit: usize, + start_time: DateTime<Local>, + _end_time: DateTime<Local>, +) -> Result<Vec<PagerDutyIncident>> { + let url = "https://api.pagerduty.com/incidents"; + + let mut headers = HeaderMap::new(); + headers.insert( + AUTHORIZATION, + format!( + "Token token={}", + env::var("PD_API_KEY").expect("please set the PD_API_KEY env var") + ) + .parse() + .expect("header parsing"), + ); + headers.insert( + ACCEPT, + "application/vnd.pagerduty+json;version=2" + .parse() + .expect("header parsing"), + ); + + let mut more_records = true; + let mut all_incidents = vec![]; + let mut offset = 0; + while more_records { + let params = [ + ("offset", offset.to_string()), + ("limit", limit.to_string()), + ("sort_by", "resolved_at:desc".to_owned()), + ("date_range", "all".to_owned()), + ("statuses[]", "resolved".to_owned()), + ]; + let client = reqwest::Client::new(); + let response = client + .get(url) + .headers(headers.clone()) + .query(&params) + .send() + .await?; + let response = &response.json::<JsonValue>().await?; + let incidents_received: Vec<JsonValue> = + serde_json::from_value(response["incidents"].clone())?; + let count_received = incidents_received.len(); + + offset += count_received; + more_records = response["more"].as_bool().expect("'more' was not a bool"); + + let truncated_incidents_received: Vec<_> = incidents_received + .clone() + .into_iter() + .take_while(|i| { + let latest_resolved_at: DateTime<Local> = + serde_json::from_value(i["resolved_at"].clone()).unwrap(); + latest_resolved_at > start_time + }) + .collect(); + let num_truncated_incidents = truncated_incidents_received.len(); + all_incidents.extend(truncated_incidents_received); + if all_incidents.len() >= limit { + // don't need any more incidents. + all_incidents.truncate(limit); + break; + } + if num_truncated_incidents < incidents_received.len() { + // we already got all incidents that were resolved in the given time + break; + } + } + Ok(all_incidents + .into_iter() + .map(serde_json::from_value) + .filter_map(|i| i.ok()) + .collect()) +}
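The `fetch_incidents` loop above pages through the PagerDuty API with an offset and the response's `more` flag, stopping early once `limit` incidents have been collected. A simplified, self-contained sketch of the same pattern, with a hypothetical in-memory `fetch_page` standing in for the HTTP request:

```rust
/// Hypothetical stand-in for one PagerDuty request: returns a page of items
/// starting at `offset`, plus a flag saying whether more pages remain.
fn fetch_page(offset: usize) -> (Vec<u64>, bool) {
    let data: Vec<u64> = (0..25).collect();
    let page: Vec<u64> = data.iter().skip(offset).take(10).copied().collect();
    let more = offset + page.len() < data.len();
    (page, more)
}

fn main() {
    let limit = 18;
    let mut all = Vec::new();
    let mut offset = 0;
    let mut more_records = true;
    while more_records {
        let (page, more) = fetch_page(offset);
        offset += page.len();
        more_records = more;
        all.extend(page);
        if all.len() >= limit {
            // Collected enough; truncate to the limit and stop early.
            all.truncate(limit);
            break;
        }
    }
    assert_eq!(all.len(), limit);
}
```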
+ +pub async fn print_recent_incidents( + incidents: Vec<Incident>, + long_output: bool, + with_priority: bool, +) -> Result<()> { + for incident in &incidents { + if with_priority && incident.priority() == " ".white() { + // skip incidents without priority + continue; + } + incident.print(long_output)?; + } + Ok(()) +} diff --git a/crates/suiop-cli/src/cli/incidents/selection.rs b/crates/suiop-cli/src/cli/incidents/selection.rs new file mode 100644 index 0000000000000..ebeca0f532504 --- /dev/null +++ b/crates/suiop-cli/src/cli/incidents/selection.rs @@ -0,0 +1,300 @@ +// Copyright (c) Mysten Labs, Inc. +// SPDX-License-Identifier: Apache-2.0 + +use anyhow::Result; +use inquire::{Confirm, MultiSelect}; +use std::collections::HashMap; +use strsim::normalized_damerau_levenshtein; +use tracing::debug; + +use crate::cli::lib::utils::day_of_week; +use crate::cli::slack::{Channel, Slack, User}; +use crate::DEBUG_MODE; + +use super::incident::Incident; + +fn request_pocs(slack: &Slack) -> Result<Vec<User>> { + MultiSelect::new( + "Please select the users who are POCs for this incident", + slack.users.clone(), + ) + .with_default(&[]) + .prompt() + .map_err(|e| anyhow::anyhow!(e)) +} + +/// Filter incidents based on whether they have <= min_priority priority or any slack +/// channel associated. +fn filter_incidents_for_review(incidents: Vec<Incident>, min_priority: &str) -> Vec<Incident> { + let min_priority_u = min_priority + .trim_start_matches("P") + .parse::<u8>() + .expect("Parsing priority"); + println!("min_priority_u: {}", min_priority_u); + incidents + .into_iter() + // filter on priority <= min_priority and any slack channel association + .filter(|i| { + i.priority + .clone() + .filter(|p| !p.name.is_empty() && p.u8() <= min_priority_u) + .is_some() + || i.slack_channel.is_some() + }) + .collect() +} + +pub async fn review_recent_incidents(incidents: Vec<Incident>) -> Result<()> { + let slack = Slack::new().await; + let filtered_incidents = filter_incidents_for_review(incidents, "P2"); + let mut group_map = group_by_similar_title(filtered_incidents, 0.9); + let mut to_review = vec![]; + let mut excluded = vec![]; + for (title, incident_group) in group_map.iter_mut() { + let treat_as_one = if incident_group.len() > 1 { + println!( + "There are {} incidents with a title similar to this: {}", + &incident_group.len(), + title + ); + println!("All incidents with a similar title:"); + for i in incident_group.iter() { + i.print(false)?; + } + Confirm::new("Treat them as one?") + .with_default(true) + .prompt() + .expect("Unexpected response") + } else { + false + }; + if treat_as_one { + let ans = Confirm::new("Keep these incidents for review?") + .with_default(false) + .prompt() + .expect("Unexpected response"); + if ans { + let poc_users = request_pocs(&slack)?; + incident_group + .iter_mut() + .for_each(|i| i.poc_users = Some(poc_users.clone())); + to_review.extend(incident_group.clone()); + } else { + excluded.extend(incident_group.clone()); + } + } else { + for incident in incident_group.iter_mut() { + incident.print(false)?; + let ans = Confirm::new("Keep this incident for review?") + .with_default(false) + .prompt() + .expect("Unexpected response"); + if ans { + let poc_users = request_pocs(&slack)?; + incident.poc_users =
Some(poc_users.clone()); + to_review.push(incident.clone()); + } else { + excluded.push(incident.clone()); + } + } + } + } + println!( + "Incidents marked for review: {}", + to_review + .iter() + .map(|i| i.number.to_string()) + .collect::<Vec<_>>() + .join(", ") + ); + + let message = format!( + " +Hello everyone and happy {}! + +We have selected the following incidents for review: +{} + +and the following incidents have been excluded from review: +{} + +Please comment in the thread to request an adjustment to the list.", + day_of_week(), + to_review + .iter() + .map(Incident::short_fmt) + .collect::<Vec<_>>() + .join("\n"), + excluded + .iter() + .map(Incident::short_fmt) + .collect::<Vec<_>>() + .join("\n") + ); + println!( + "Here is the message to send in the channel: + {} + ", + message + ); + let slack_channel = if *DEBUG_MODE { + "test-notifications" + } else { + "incident-postmortems" + }; + let ans = Confirm::new(&format!( + "Send this message to the #{} channel?", + slack_channel + )) + .with_default(false) + .prompt() + .expect("Unexpected response"); + if ans { + slack.send_message(slack_channel, &message).await?; + } + // post to https://slack.com/api/chat.postMessage with message + Ok(()) +} + +fn group_by_similar_title( + incidents: Vec<Incident>, + threshold: f64, +) -> HashMap<String, Vec<Incident>> { + if !(0.0..=1.0).contains(&threshold) { + panic!("Threshold must be between 0.0 and 1.0"); + } + + let mut groups: HashMap<String, Vec<Incident>> = HashMap::new(); + + for incident in incidents { + // Try to find an existing title that is similar enough + let mut found = false; + for (existing_title, group) in groups.iter_mut() { + if normalized_damerau_levenshtein( + &incident.title.chars().take(20).collect::<String>(), + &existing_title.chars().take(20).collect::<String>(), + ) >= threshold + { + // If similar, add it to this group + group.push(incident.clone()); + found = true; + break; + } + } + + // If no similar title found, add a new group + if !found { + groups + .entry(incident.title.clone()) + .or_default() + .push(incident); + } + } + + debug!( + "map: {:#?}", + groups.iter().map(|(k, v)| (k, v.len())).collect::<Vec<_>>() + ); + groups +} + +pub fn get_channel_for<'a>(incident: &Incident, slack: &'a Slack) -> Option<&'a Channel> { + slack + .channels + .iter() + .find(|c| c.name.contains(&incident.number.to_string())) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_group_by_similar_title() { + let incidents = vec![ + Incident { + title: "Incident 1".to_string(), + ..Default::default() + }, + Incident { + title: "Incident 2".to_string(), + ..Default::default() + }, + Incident { + title: "Another thing entirely".to_string(), + ..Default::default() + }, + Incident { + title: "Another thing entirely 2".to_string(), + ..Default::default() + }, + Incident { + title: "A third thing that doesn't look the same".to_string(), + ..Default::default() + }, + ]; + + let groups = group_by_similar_title(incidents, 0.8); + println!("{:#?}", groups); + + assert_eq!(groups.len(), 3); + assert_eq!(groups.get("Incident 1").unwrap().len(), 2); + assert!(groups.get("Incident 2").is_none()); + assert_eq!(groups.get("Another thing entirely").unwrap().len(), 2); + assert_eq!( + groups + .get("A third thing that doesn't look the same") + .unwrap() + .len(), + 1 + ); + } + + #[test] + fn test_group_by_similar_title_with_similar_titles() { + let incidents = vec![ + Incident { + title: "Incident 1".to_string(), + ..Default::default() + }, + Incident { + title: "Incident 1".to_string(), + ..Default::default() + }, + Incident { + title: "Incident 2".to_string(),
..Default::default() + }, + Incident { + title: "Incident 2".to_string(), + ..Default::default() + }, + Incident { + title: "Incident 3".to_string(), + ..Default::default() + }, + ]; + + let groups = group_by_similar_title(incidents, 0.8); + + assert_eq!(groups.len(), 1); + assert_eq!(groups.get("Incident 1").unwrap().len(), 5); + } + + #[test] + #[should_panic(expected = "Threshold must be between 0.0 and 1.0")] + fn test_group_by_similar_title_with_invalid_threshold() { + let incidents = vec![ + Incident { + title: "Incident 1".to_string(), + ..Default::default() + }, + Incident { + title: "Incident 2".to_string(), + ..Default::default() + }, + ]; + + group_by_similar_title(incidents, -0.5); + } +} diff --git a/crates/suiop-cli/src/cli/incidents/slack.rs b/crates/suiop-cli/src/cli/incidents/slack.rs deleted file mode 100644 index e58cfb7df4896..0000000000000 --- a/crates/suiop-cli/src/cli/incidents/slack.rs +++ /dev/null @@ -1,164 +0,0 @@ -// Copyright (c) Mysten Labs, Inc. -// SPDX-License-Identifier: Apache-2.0 - -use anyhow::anyhow; -use anyhow::Context; -use anyhow::Result; -use once_cell::sync::Lazy; -use reqwest::{header, Client}; -use serde::Deserialize; -use serde::Serialize; -use std::fs::File; -use std::path::PathBuf; -use std::time::{SystemTime, UNIX_EPOCH}; -use tracing::debug; - -const CHANNELS_URL: &str = "https://slack.com/api/conversations.list"; -static CHANNELS_FILEPATH: Lazy = Lazy::new(|| { - dirs::home_dir() - .expect("HOME env var not set") - .join(".suiop") - .join("channels") -}); - -pub struct Slack { - client: Client, - pub channels: Vec, -} - -#[derive(Debug, Deserialize, Serialize, Clone)] -pub struct Channel { - pub id: String, - pub name: String, -} - -#[derive(Debug, Deserialize, Serialize, Clone)] -struct ResponseMetadata { - next_cursor: Option, -} - -#[derive(Debug, Deserialize, Serialize, Clone)] -struct ConversationsResponse { - ok: bool, - error: Option, - channels: Option>, - response_metadata: ResponseMetadata, -} - -#[derive(Debug, Deserialize, Serialize, Clone)] -struct SendMessageBody { - channel: String, - text: String, - ts: String, - mrkdwn: bool, -} - -impl Slack { - pub async fn new() -> Self { - let token = std::env::var("SLACK_BOT_TOKEN").expect( - "Please set SLACK_BOT_TOKEN env var ('slack bot token (incidentbot)' in 1password)", - ); - let mut headers = header::HeaderMap::new(); - headers.insert( - header::AUTHORIZATION, - header::HeaderValue::from_str(format!("Bearer {}", token).as_str()) - .expect("failed to add Bearer token for slack client"), - ); - let client = reqwest::ClientBuilder::new() - .default_headers(headers) - .build() - .expect("failed to build reqwest client"); - let mut s = Self { - client, - channels: vec![], - }; - s.get_channels().await.expect("Failed to get channels"); - s - } - - pub async fn serialize_channels(&self) -> Result<()> { - let file = File::create(CHANNELS_FILEPATH.as_path())?; - serde_json::to_writer(file, &self.channels)?; - Ok(()) - } - - pub async fn get_channels(&mut self) -> Result> { - let mut channels: Vec = vec![]; - let file_path = CHANNELS_FILEPATH.as_path(); - if let Ok(metadata) = file_path.metadata() { - if let Ok(modified) = metadata.modified() { - if let Ok(elapsed) = modified.elapsed() { - // 1 day - if elapsed.as_secs() < 24 * 60 * 60 { - if let Ok(file) = File::open(file_path) { - if let Ok(channels) = serde_json::from_reader::<_, Vec>(file) { - debug!("Using cached channels"); - self.channels = channels; - return Ok(self.channels.iter().map(|c| c.name.clone()).collect()); - } - } - 
} - } - } - - let mut result: ConversationsResponse = self - .client - .get(CHANNELS_URL) - .send() - .await - .map_err(|e| anyhow!(e))? - .json() - .await?; - let new_channels = result.channels.expect("Expected channels to exist").clone(); - channels.extend(new_channels.into_iter()); - while let Some(cursor) = result.response_metadata.next_cursor { - if cursor.is_empty() { - break; - } - result = self - .client - .get(CHANNELS_URL) - .query(&[("cursor", cursor)]) - .send() - .await - .map_err(|e| anyhow!(e))? - .json() - .await - .context("parsing json from channels api")?; - let extra_channels = result.channels.expect("Expected channels to exist").clone(); - channels.extend(extra_channels.into_iter()); - } - self.channels = channels.iter().map(|c| (*c).clone()).collect(); - self.serialize_channels().await?; - let names = self.channels.iter().map(|c| c.name.clone()).collect(); - Ok(names) - } - - pub async fn send_message(&self, channel: &str, message: &str) -> Result<()> { - let timestamp = SystemTime::now() - .duration_since(UNIX_EPOCH) - .expect("Time went backwards") - .as_millis(); - let message_body = SendMessageBody { - channel: channel.to_owned(), - text: message.to_owned(), - ts: timestamp.to_string(), - mrkdwn: true, - }; - let url = "https://slack.com/api/chat.postMessage"; - let response = self.client.post(url).json(&message_body).send().await?; - let response = response.json::<serde_json::Value>().await?; - if response["ok"].as_bool().expect("ok was not a bool") { - Ok(()) - } else { - Err(anyhow!("Failed to send message: {}", response)) - } - } -} - -impl Channel { - pub fn url(self) -> String { - format!("https://mysten-labs.slack.com/archives/{}", self.id) - } -} diff --git a/crates/suiop-cli/src/cli/mod.rs b/crates/suiop-cli/src/cli/mod.rs index c3df0a537c679..fceb7ded495be 100644 --- a/crates/suiop-cli/src/cli/mod.rs +++ b/crates/suiop-cli/src/cli/mod.rs @@ -8,6 +8,7 @@ mod incidents; pub mod lib; pub mod pulumi; pub mod service; +mod slack; pub use ci::{ci_cmd, CIArgs}; pub use docker::{docker_cmd, DockerArgs}; diff --git a/crates/suiop-cli/src/cli/slack/mod.rs b/crates/suiop-cli/src/cli/slack/mod.rs new file mode 100644 index 0000000000000..bf7907a07e393 --- /dev/null +++ b/crates/suiop-cli/src/cli/slack/mod.rs @@ -0,0 +1,121 @@ +// Copyright (c) Mysten Labs, Inc. +// SPDX-License-Identifier: Apache-2.0 + +mod slack_api; + +use anyhow::Result; +use futures::future::Either; +use reqwest::{header, Client}; +use serde::de::DeserializeOwned; +use serde::Serialize; +use std::fs::File; +use std::path::PathBuf; +use tracing::debug; + +/// Reexport for convenience +pub use slack_api::*; + +#[derive(Debug, Default)] +pub struct Slack { + client: Client, + pub channels: Vec<Channel>, + pub users: Vec<User>, +} + +fn get_serialize_filepath(subname: &str) -> PathBuf { + dirs::home_dir() + .expect("HOME env var not set") + .join(".suiop") + .join(subname) +} + +/// Serialize the obj into ~/.suiop/{subname} so we can cache it across +/// executions +pub fn serialize_to_file<T: Serialize>(subname: &str, obj: &Vec<T>) -> Result<()> { + let file = File::create(get_serialize_filepath(subname).as_path())?; + serde_json::to_writer(file, obj)?; + Ok(()) +} + +/// Check if the file in ~/.suiop/{subname} is less than 1 day old +/// and if so, deserialize the value from it.
+/// +/// Otherwise return None +pub fn deserialize_from_file<T: DeserializeOwned>(subname: &str) -> Option<Vec<T>> { + let mut result = None; + let file_path = get_serialize_filepath(subname); + if let Ok(metadata) = file_path.metadata() { + if let Ok(modified) = metadata.modified() { + if let Ok(elapsed) = modified.elapsed() { + // 1 day + if elapsed.as_secs() < 24 * 60 * 60 { + if let Ok(file) = File::open(file_path) { + if let Ok(obj) = serde_json::from_reader::<_, Vec<T>>(file) { + debug!("Using cached {}", subname); + result = Some(obj); + } + } + } + } + } + } + result +} + +impl Slack { + pub async fn new() -> Self { + let token = std::env::var("SLACK_BOT_TOKEN").expect( + "Please set SLACK_BOT_TOKEN env var ('slack bot token (incidentbot)' in 1password)", + ); + let mut headers = header::HeaderMap::new(); + headers.insert( + header::AUTHORIZATION, + header::HeaderValue::from_str(format!("Bearer {}", token).as_str()) + .expect("failed to add Bearer token for slack client"), + ); + let client = reqwest::ClientBuilder::new() + .default_headers(headers) + .build() + .expect("failed to build reqwest client"); + let channels = deserialize_from_file("channels") + .map_or_else( + || { + Either::Left(async { + let channels = get_channels(&client).await.expect("Failed to get channels"); + serialize_to_file("channels", &channels) + .expect("Failed to serialize channels"); + channels + }) + }, + |v| Either::Right(async { v }), + ) + .await; + let users = deserialize_from_file("users") + .map_or_else( + || { + Either::Left(async { + let users = get_users(&client).await.expect("Failed to get users"); + serialize_to_file("users", &users).expect("Failed to serialize users"); + users + }) + }, + |u| Either::Right(async { u }), + ) + .await; + Self { + client, + channels, + users, + } + } + + pub async fn send_message(self, channel: &str, message: &str) -> Result<()> { + slack_api::send_message(&self.client, channel, message).await + } +} + +impl Channel { + pub fn url(self) -> String { + format!("https://mysten-labs.slack.com/archives/{}", self.id) + } +}
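`Slack::new` above uses `futures::future::Either` to unify the two branches of the cache lookup: the cached path and the fetch path are different anonymous future types, and `Either` gives them one common type that can be awaited. A minimal sketch of that pattern under the same assumption (a cached value may or may not be present):

```rust
use futures::future::Either;

// The two async blocks below have distinct anonymous types; wrapping them in
// Either::Left / Either::Right produces a single awaitable type, as in
// Slack::new above.
async fn cached_or_fetch(cached: Option<Vec<String>>) -> Vec<String> {
    let fut = match cached {
        // Stand-in for the real network fetch.
        None => Either::Left(async { vec!["incident-123".to_string()] }),
        Some(v) => Either::Right(async { v }),
    };
    fut.await
}

fn main() {
    let channels = futures::executor::block_on(cached_or_fetch(None));
    assert_eq!(channels, vec!["incident-123".to_string()]);
}
```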
diff --git a/crates/suiop-cli/src/cli/slack/slack_api.rs b/crates/suiop-cli/src/cli/slack/slack_api.rs new file mode 100644 index 0000000000000..633c5f652987d --- /dev/null +++ b/crates/suiop-cli/src/cli/slack/slack_api.rs @@ -0,0 +1,128 @@ +// Copyright (c) Mysten Labs, Inc. +// SPDX-License-Identifier: Apache-2.0 + +use anyhow::anyhow; +use anyhow::Context; +use anyhow::Result; +use reqwest::Client; +use serde::Deserialize; +use serde::Serialize; +use std::fmt::Display; +use std::fmt::Formatter; +use std::time::{SystemTime, UNIX_EPOCH}; + +const CHANNELS_URL: &str = "https://slack.com/api/conversations.list"; + +#[derive(Debug, Deserialize, Serialize, Clone)] +pub struct UsersResponse { + ok: bool, + members: Option<Vec<User>>, +} + +#[derive(Debug, Deserialize, Serialize, Clone)] +pub struct User { + pub id: String, + pub name: String, +} + +impl Display for User { + fn fmt(&self, f: &mut Formatter) -> std::fmt::Result { + write!(f, "{}", self.name) + } +} + +#[derive(Debug, Deserialize, Serialize, Clone)] +pub struct Channel { + pub id: String, + pub name: String, +} + +#[derive(Debug, Deserialize, Serialize, Clone)] +struct ResponseMetadata { + next_cursor: Option<String>, +} + +#[derive(Debug, Deserialize, Serialize, Clone)] +struct ConversationsResponse { + ok: bool, + error: Option<String>, + channels: Option<Vec<Channel>>, + response_metadata: ResponseMetadata, +} + +#[derive(Debug, Deserialize, Serialize, Clone)] +struct SendMessageBody { + channel: String, + text: String, + ts: String, + mrkdwn: bool, +} + +pub async fn get_channels(client: &Client) -> Result<Vec<Channel>> { + let mut channels: Vec<Channel> = vec![]; + + let mut result: ConversationsResponse = client + .get(CHANNELS_URL) + .send() + .await + .map_err(|e| anyhow!(e))? + .json() + .await?; + let new_channels = result.channels.expect("Expected channels to exist").clone(); + channels.extend(new_channels.into_iter()); + while let Some(cursor) = result.response_metadata.next_cursor { + if cursor.is_empty() { + break; + } + result = client + .get(CHANNELS_URL) + .query(&[("cursor", cursor)]) + .send() + .await + .map_err(|e| anyhow!(e))? + .json() + .await + .context("parsing json from channels api")?; + let extra_channels = result.channels.expect("Expected channels to exist").clone(); + channels.extend(extra_channels.into_iter()); + } + channels = channels.iter().map(|c| (*c).clone()).collect(); + Ok(channels) +} + +pub async fn get_users(client: &Client) -> Result<Vec<User>> { + let url = "https://slack.com/api/users.list"; + let response = client + .get(url) + .send() + .await + .map_err(|e| anyhow!(e))?
+ .json::<UsersResponse>() + .await?; + + if !response.ok { + panic!("Failed to get users"); + } + response.members.ok_or(anyhow::anyhow!("missing members")) +} + +pub async fn send_message(client: &Client, channel: &str, message: &str) -> Result<()> { + let timestamp = SystemTime::now() + .duration_since(UNIX_EPOCH) + .expect("Time went backwards") + .as_millis(); + let message_body = SendMessageBody { + channel: channel.to_owned(), + text: message.to_owned(), + ts: timestamp.to_string(), + mrkdwn: true, + }; + let url = "https://slack.com/api/chat.postMessage"; + let response = client.post(url).json(&message_body).send().await?; + let response = response.json::<serde_json::Value>().await?; + if response["ok"].as_bool().expect("ok was not a bool") { + Ok(()) + } else { + Err(anyhow!("Failed to send message: {}", response)) + } +} diff --git a/crates/x/src/lint.rs b/crates/x/src/lint.rs index 0e510044e550e..cd9bcfb9b4e38 100644 --- a/crates/x/src/lint.rs +++ b/crates/x/src/lint.rs @@ -62,6 +62,13 @@ pub fn run(args: Args) -> crate::Result<()> { type_: BannedDepType::Always, }, ), + ( + "pq-sys".to_owned(), + BannedDepConfig { + message: "use diesel_async for asynchronous database connections instead".to_owned(), + type_: BannedDepType::Always, + }, + ), ] .into_iter() .collect(), diff --git a/docker/sui-tools/Dockerfile b/docker/sui-tools/Dockerfile index 4f800e03b01e6..f6470d12a4783 100644 --- a/docker/sui-tools/Dockerfile +++ b/docker/sui-tools/Dockerfile @@ -21,6 +21,7 @@ RUN cargo build --profile ${PROFILE} \ --bin stress \ --bin sui-bridge \ --bin bridge-indexer \ + --bin deepbook-indexer \ --bin sui-bridge-cli \ --bin sui-analytics-indexer \ --bin sui-proxy \ @@ -40,6 +41,7 @@ COPY --from=builder /sui/target/release/sui-node /usr/local/bin COPY --from=builder /sui/target/release/stress /usr/local/bin COPY --from=builder /sui/target/release/sui-bridge /usr/local/bin COPY --from=builder /sui/target/release/bridge-indexer /usr/local/bin +COPY --from=builder /sui/target/release/deepbook-indexer /usr/local/bin COPY --from=builder /sui/target/release/sui-bridge-cli /usr/local/bin COPY --from=builder /sui/target/release/sui-analytics-indexer /usr/local/bin COPY --from=builder /sui/target/release/sui-proxy /usr/local/bin diff --git a/docs/content/guides/developer/cryptography/groth16.mdx b/docs/content/guides/developer/cryptography/groth16.mdx index e9b89ac4a9d53..5ffa5263dca38 100644 --- a/docs/content/guides/developer/cryptography/groth16.mdx +++ b/docs/content/guides/developer/cryptography/groth16.mdx @@ -18,23 +18,21 @@ The following example demonstrates how to create a Groth16 proof from a statemen ### Create circuit -The proof demonstrates that we know a secret input to a hash function which gives a certain public output. +In this example, we create a proof which demonstrates that we know a factorisation `a * b = c` of a publicly known number `c` without revealing `a` and `b`. + ```circom pragma circom 2.1.5; -include "node_modules/circomlib/circuits/poseidon.circom"; - template Main() { - component poseidon = Poseidon(1); - signal input in; - signal output digest; - poseidon.inputs[0] <== in; - digest <== poseidon.out; -} + signal input a; + signal input b; + signal output c; + c <== a * b; +} component main = Main(); ``` -We use the [Poseidon hash function](https://www.poseidon-hash.info) which is a ZK-friendly hash function.
Assuming that the [circom compiler has been installed](https://docs.circom.io/getting-started/installation/), the above circuit is compiled using the following command: ```shell circom main.circom --r1cs --wasm ``` @@ -48,41 +46,66 @@ To generate a proof verifiable in Sui, you need to generate a witness. This exam use ark_bn254::Bn254; use ark_circom::CircomBuilder; use ark_circom::CircomConfig; -use ark_groth16::Groth16; +use ark_groth16::{Groth16, prepare_verifying_key}; +use ark_serialize::CanonicalSerialize; use ark_snark::SNARK; +use rand::rngs::StdRng; +use rand::SeedableRng; fn main() { // Load the WASM and R1CS for witness and proof generation - let cfg = CircomConfig::<Bn254>::new("main.wasm", "main.r1cs").unwrap(); - - // Insert our secret inputs as key value pairs. We insert a single input, namely the input to the hash function. + let cfg = CircomConfig::<Bn254>::new("../circuit/main_js/main.wasm", "../circuit/main.r1cs").unwrap(); let mut builder = CircomBuilder::new(cfg); - builder.push_input("in", 7); - // Create an empty instance for setting it up - let circom = builder.setup(); + // Private inputs: A factorisation of a number + builder.push_input("a", 641); + builder.push_input("b", 6_700_417); - // WARNING: The code below is just for debugging, and should instead use a verification key generated from a trusted setup. - // See for example https://docs.circom.io/getting-started/proving-circuits/#powers-of-tau. - let mut rng = rand::thread_rng(); - let params = - Groth16::<Bn254>::generate_random_parameters_with_reduction(circom, &mut rng).unwrap(); + let circuit = builder.setup(); - let circom = builder.build().unwrap(); + // Generate a random proving key. WARNING: This is not secure. A proving key generated from a ceremony should be used in production. + let mut rng: StdRng = SeedableRng::from_seed([0; 32]); + let pk = + Groth16::<Bn254>::generate_random_parameters_with_reduction(circuit, &mut rng).unwrap(); - // There's only one public input, namely the hash digest. - let inputs = circom.get_public_inputs().unwrap(); + let circuit = builder.build().unwrap(); + let public_inputs = circuit.get_public_inputs().unwrap(); - // Generate the proof - let proof = Groth16::<Bn254>::prove(&params, circom, &mut rng).unwrap(); + // Create proof + let proof = Groth16::<Bn254>::prove(&pk, circuit, &mut rng).unwrap(); - // Check that the proof is valid - let pvk = Groth16::<Bn254>::process_vk(&params.vk).unwrap(); - let verified = Groth16::<Bn254>::verify_with_processed_vk(&pvk, &inputs, &proof).unwrap(); + // Verify proof + let pvk = prepare_verifying_key(&pk.vk); + let verified = Groth16::<Bn254>::verify_with_processed_vk(&pvk, &public_inputs, &proof).unwrap(); assert!(verified); + + // Print verifying key + let mut pk_bytes = Vec::new(); + pk.vk.serialize_compressed(&mut pk_bytes).unwrap(); + println!("Verifying key: {}", hex::encode(pk_bytes)); + + // Print proof + let mut proof_serialized = Vec::new(); + proof.serialize_compressed(&mut proof_serialized).unwrap(); + println!("Proof: {}", hex::encode(proof_serialized)); + + // Print public inputs. Note that they are concatenated.
+ let mut public_inputs_serialized = Vec::new(); + public_inputs.iter().for_each(|input| { + input.serialize_compressed(&mut public_inputs_serialized).unwrap(); + }); + println!("Public inputs: {}", hex::encode(public_inputs_serialized)); } ``` -The proof shows that an input (7) which, when hashed with the Poseidon hash function, gives a certain output (which in this case is `inputs[0].to_string() = 7061949393491957813657776856458368574501817871421526214197139795307327923534`). +Recall that this creates a proof that the prover knows a factorisation, in this case of the [5th Fermat number](https://en.wikipedia.org/wiki/Fermat_number#Factorization) (2<sup>32</sup> + 1 = 4294967297 = 641 * 6700417). + +The output of the above function will be +``` +Verifying key: 94d781ec65145ed90beca1859d5f38ec4d1e30d4123424bb7b0c6fc618257b1551af0374b50e5da874ed3abbc80822e4378fdef9e72c423a66095361dacad8243d1a043fc217ea306d7c3dcab877be5f03502c824833fc4301ef8b712711c49ebd491d7424efffd121baf85244404bded1fe26bdf6ef5962a3361cef3ed1661d897d6654c60dca3d648ce82fa91dc737f35aa798fb52118bb20fd9ee1f84a7aabef505258940dc3bc9de41472e20634f311e5b6f7a17d82f2f2fcec06553f71e5cd295f9155e0f93cb7ed6f212d0ccddb01ebe7dd924c97a3f1fc9d03a9eb915020000000000000072548cb052d61ed254de62618c797853ad3b8a96c60141c2bfc12236638f1b0faf9ecf024817d8964c4b2fed6537bcd70600a85cdec0ca4b0435788dbffd81ab +Proof: 212d4457550f258654a24a6871522797ab262dee4d7d1f89af7da90dc0904eac57ce183e6f7caca9a98755904c1398ff6288cec9877f98f2d3c776c448b9ad166839e09d77967b66129c4942eee6d3eaf4a0ce2a841acc873a46ae35e40f0088288d038857c70a1415300544d7cf376949a372049679afa35ee5206b58266184 +Public inputs: 0100000001000000000000000000000000000000000000000000000000000000 +``` +All these outputs are needed to verify the proof.
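It is worth decoding the `Public inputs` hex printed above: the only public signal in this circuit is the product `c`, serialized as one 32-byte little-endian field element. A small standalone check (our own illustration, independent of the arkworks code above) that the printed bytes really encode the 5th Fermat number and its factorisation:

```rust
fn main() {
    let hex = "0100000001000000000000000000000000000000000000000000000000000000";
    let bytes: Vec<u8> = (0..hex.len())
        .step_by(2)
        .map(|i| u8::from_str_radix(&hex[i..i + 2], 16).unwrap())
        .collect();
    // The value fits in the low 8 bytes; the remaining 24 bytes must be zero.
    assert!(bytes[8..].iter().all(|&b| b == 0));
    let mut low = [0u8; 8];
    low.copy_from_slice(&bytes[..8]);
    let c = u64::from_le_bytes(low);
    assert_eq!(c, 4_294_967_297); // 2^32 + 1, the 5th Fermat number
    assert_eq!(c, 641 * 6_700_417); // the factorisation being proven
}
```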
### Verification in Sui @@ -90,38 +113,17 @@ The API in Sui for verifying a proof expects a special processed verification key The output of the `prepare_verifying_key` function is a vector with four byte arrays, which corresponds to the `vk_gamma_abc_g1_bytes`, `alpha_g1_beta_g2_bytes`, `gamma_g2_neg_pc_bytes`, `delta_g2_neg_pc_bytes`. -To verify a proof, you also need two more inputs, `public_inputs_bytes` and `proof_points_bytes`, which contain the public inputs and the proof respectively. These are serializations of the `inputs` and `proof` values from the previous example, which you can compute in Rust as follows: - -```rust -let mut vk_bytes = Vec::new(); -params.vk.serialize_compressed(&mut vk_bytes).unwrap(); - -let mut public_inputs_bytes = Vec::new(); -for i in 0..inputs.len() { // if there is more than one public input, serialize one by one - inputs[i].serialize_compressed(&mut inputs_bytes).unwrap(); -} - -let mut proof_points_bytes = Vec::new(); -proof.serialize_compressed(&mut proof_points_bytes).unwrap(); -``` +To verify a proof, you also need two more inputs, `public_inputs_bytes` and `proof_points_bytes`, which are printed by the program above. -The following example smart contract prepares a verification key and verifies the corresponding proof. This example uses the BN254 elliptic curve construction, which is given as the first parameter to the `prepare_verifying_key` and `verify_groth16_proof` functions. You can use the `bls12381` function instead for BLS12-381 construction. +The following example smart contract uses the output from the program above. It first prepares a verification key and verifies the corresponding proof. This example uses the BN254 elliptic curve construction, which is given as the first parameter to the `prepare_verifying_key` and `verify_groth16_proof` functions. You can use the `bls12381` function instead for BLS12-381 construction. ```rust -module test::groth16_test { - use sui::groth16; - use sui::event; - - /// Event on whether the proof is verified - struct VerifiedEvent has copy, drop { - is_verified: bool, - } - - public fun verify_proof(vk_bytes: vector<u8>, public_inputs_bytes: vector<u8>, proof_points_bytes: vector<u8>) { - let pvk = groth16::prepare_verifying_key(&groth16::bn254(), &vk_bytes); - let public_inputs = groth16::public_proof_inputs_from_bytes(public_inputs_bytes); - let proof_points = groth16::proof_points_from_bytes(proof_points_bytes); - event::emit(VerifiedEvent {is_verified: groth16::verify_groth16_proof(&groth16::bn254(), &pvk, &public_inputs, &proof_points)}); - } +use sui::groth16; + +public fun groth16_bn254_test() { + let pvk = groth16::prepare_verifying_key(&groth16::bn254(), &x"94d781ec65145ed90beca1859d5f38ec4d1e30d4123424bb7b0c6fc618257b1551af0374b50e5da874ed3abbc80822e4378fdef9e72c423a66095361dacad8243d1a043fc217ea306d7c3dcab877be5f03502c824833fc4301ef8b712711c49ebd491d7424efffd121baf85244404bded1fe26bdf6ef5962a3361cef3ed1661d897d6654c60dca3d648ce82fa91dc737f35aa798fb52118bb20fd9ee1f84a7aabef505258940dc3bc9de41472e20634f311e5b6f7a17d82f2f2fcec06553f71e5cd295f9155e0f93cb7ed6f212d0ccddb01ebe7dd924c97a3f1fc9d03a9eb915020000000000000072548cb052d61ed254de62618c797853ad3b8a96c60141c2bfc12236638f1b0faf9ecf024817d8964c4b2fed6537bcd70600a85cdec0ca4b0435788dbffd81ab"); + let proof_points = groth16::proof_points_from_bytes(x"212d4457550f258654a24a6871522797ab262dee4d7d1f89af7da90dc0904eac57ce183e6f7caca9a98755904c1398ff6288cec9877f98f2d3c776c448b9ad166839e09d77967b66129c4942eee6d3eaf4a0ce2a841acc873a46ae35e40f0088288d038857c70a1415300544d7cf376949a372049679afa35ee5206b58266184"); + let public_inputs = groth16::public_proof_inputs_from_bytes(x"0100000001000000000000000000000000000000000000000000000000000000"); + assert!(groth16::verify_groth16_proof(&groth16::bn254(), &pvk, &public_inputs, &proof_points)); } ``` diff --git a/docs/content/guides/developer/first-app/write-package.mdx b/docs/content/guides/developer/first-app/write-package.mdx index c3754cd1fbac5..a574c62673b8d 100644 --- a/docs/content/guides/developer/first-app/write-package.mdx +++ b/docs/content/guides/developer/first-app/write-package.mdx @@ -49,3 +49,4 @@ After you save the file, you have a complete Move package. - [Build and Test Packages](./build-test.mdx): Continue this example to build and test your package to get it ready for publishing. - [Sui Move CLI](../../../references/cli/move.mdx): Available Move commands the CLI provides. +- [Sui Move Book](https://move-book.com/): A comprehensive guide to the Move programming language and the Sui blockchain.
\ No newline at end of file diff --git a/docs/content/snippets/openid-providers.mdx b/docs/content/snippets/openid-providers.mdx index f66f9b014960b..a8373daf8764d 100644 --- a/docs/content/snippets/openid-providers.mdx +++ b/docs/content/snippets/openid-providers.mdx @@ -9,11 +9,13 @@ The following table lists the OpenID providers that can support zkLogin or are c | Slack | Yes | Yes | No | No | | Kakao | Yes | Yes | No | No | | Microsoft | Yes | Yes | No | No | -| AWS (Tenant) | Yes | Yes | No | No | -| Karrier One | Yes | Yes | No | No | -| Credenza3 | Yes | Yes | No | No | +| AWS (Tenant)*| Yes | Yes | Yes | Yes | +| Karrier One | Yes | Yes | Yes | Yes | +| Credenza3 | Yes | Yes | Yes | Yes | | RedBull | Under review | No | No | No | | Amazon | Under review | No | No | No | | WeChat | Under review | No | No | No | | Auth0 | Under review | No | No | No | -| Okta | Under review | No | No | No | \ No newline at end of file +| Okta | Under review | No | No | No | + +* Sui supports AWS (Tenant) but the provider is enabled per tenant. Contact us for more information. \ No newline at end of file diff --git a/docs/site/docusaurus.config.js b/docs/site/docusaurus.config.js index 5760ff48882b1..d007ea0a7bd06 100644 --- a/docs/site/docusaurus.config.js +++ b/docs/site/docusaurus.config.js @@ -89,6 +89,7 @@ const config = { }, path.resolve(__dirname, `./src/plugins/descriptions`), path.resolve(__dirname, `./src/plugins/framework`), + path.resolve(__dirname, `./src/plugins/askcookbook`), ], presets: [ [ diff --git a/docs/site/src/plugins/askcookbook/index.js b/docs/site/src/plugins/askcookbook/index.js new file mode 100644 index 0000000000000..5a4b7e81dfaee --- /dev/null +++ b/docs/site/src/plugins/askcookbook/index.js @@ -0,0 +1,44 @@ +// Copyright (c) Mysten Labs, Inc. +// SPDX-License-Identifier: Apache-2.0 + +// It is going to be exposed in HTTP requests anyway, so it's fine to just hardcode it here. +const COOKBOOK_PUBLIC_API_KEY = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiI2NjU5ODBiNDAwZTliZDQ2MzcwZDlhNzYiLCJpYXQiOjE3MTcxNDE2ODQsImV4cCI6MjAzMjcxNzY4NH0.0JCgi4bJ_f6ILFgEyYAP-KeCm1dzOKwH30tC3jEs2_A"; + +async function askCookbookPlugin() { + return { + name: "askCookbook", + injectHtmlTags() { + return { + postBodyTags: [ + { + tagName: "div", + attributes: { + id: "__cookbook", + "data-api-key": COOKBOOK_PUBLIC_API_KEY, + }, + }, + ` + + `, + ], + }; + }, + }; +}; + +module.exports = askCookbookPlugin; diff --git a/docs/site/src/theme/SearchBar/index.js b/docs/site/src/theme/SearchBar/index.js new file mode 100644 index 0000000000000..7f23d172fd1a3 --- /dev/null +++ b/docs/site/src/theme/SearchBar/index.js @@ -0,0 +1,24 @@ +// Copyright (c) Mysten Labs, Inc. 
+// SPDX-License-Identifier: Apache-2.0 + +import React from "react"; +import SearchBar from "@theme-original/SearchBar"; + +export default class SearchBarWrapper extends React.Component { + componentDidMount() { + try { + window.initCookbook(); + } catch (e) { + // Gracefully ignore errors if something goes wrong + console.error("Error initializing Ask Cookbook", e); + } + } + + render() { + return ( + <> + <SearchBar {...this.props} /> + </> + ); + } +} diff --git a/examples/move/vdf/tests/lottery_tests.move b/examples/move/vdf/tests/lottery_tests.move index 850af7e49091e..b9b10fc5b3ae4 100644 --- a/examples/move/vdf/tests/lottery_tests.move +++ b/examples/move/vdf/tests/lottery_tests.move @@ -8,10 +8,10 @@ module vdf::lottery_tests { use vdf::lottery::{Self, Game, GameWinner}; const OUTPUT: vector<u8> = - x"c0014d00b5e624fe10d1cc1e593c0ffb8c3084e49bb70efc4337640a73990bb29dfb430b55710475bcc7524c77627d8067415fffa63e0e84b1204225520fea384999719c66dbdc6e91863d99c64674af971631b56e22b7cc780765bf12d53edea1dadf566f80a62769e287a1e195596d4894b2e1360e451cbf06864762275b4d5063871d45627dea2e42ab93d5345bf172b9724216627abbf295b35a8e64e13e585bca54848a90212c9f7a3adffc25c3b87eefa7d4ab1660b523bf6410b9a9ea0e00c001327d73bebc768d150beb2a1b0de9e80c69ed594ae7787d548af44eb1e0a03616100133146c9c1202ea3a35c331864f3bfe59ffa3c88d6acb8af7a4b1b5ea842c4c4c88d415539389e614876d738d80217a3ad16d001f2de60f62b04a9d8de7ccb4716c3368f0d42e3e719dbb07bdb4355f0e5569714fbcc130f30ac7b49a5b207a444c7e00a0c27edae10c28b05f87545f337283f90c4e4ed69639683154d6a89e6589db4d18702d3a29705b434dc32e10fcbd3c62d1da20b45dba511bcecdf7c101009db7911c39f3937785dd8bd0e9db56c94777bdd867897493f82e0931e0e5facb730c3aa400f815f3d2f61de94373209dcbf9210c5e1f179b675adce2be7159238cb5f89c568004ccfc75d1b3de99a060255bcd6c9dd9a674844251ec27c48119b870f4ce63cac2081541c4269bbfa60148f9dbf2c60c76099a6538a33e88c24ce092e6aad9bdfff330470ffb87b01b666dd98e44843b41896f2be688361fe062692b441b4dd8f8ecfa96948d13daf486259bb8934cab7e9d9788da0eac7edf56"; + x"c00139196755e0b52e791d3d6bc79cae46ddb8538d78b41a5e960d161748a144268da911c3cd252be589324289fa5f15cca87ce5f0721fad3ac4a63ba8e46512bd5d1deb0878b4de69a11b34f825434b468f461ae1463f969bd4ebcb14ad41bfd2a648fb70ff1ba30d6924b8a29dce3369c247890f2dfc5b636dbaf3e57f7245cc1e278aef91aa4fac88dde0e91874817e0b306116ce8e9e8fbe20ff1a0b991a5b395f4d26155eb975267506678bbe53d2b36b389cb4285441d6e9a2d65bc9c09aa6c001fdc198d0915db5073427cd42d0cf050a8f1b017211349f217240af8e65fc22e17684d88c7ce65f6f3bb4d2dbd0cb18905958e1e685608ab41b7bc43937354bab5ac29fe6247a0d5ee0c62e269103a9aca26c4685131f84de12faa2aec64782f8be17b1a9236614dec1c0c9622846246135322ed0d6967dd34cf30e1df61667c0026665080ed5939644f0fce70f375240b4f7dcc3fdbc27ad69f46041c922e260579376f3c0fe231cd28a466214e9593e410c47bf2c91aa6231158a35ea5c6cd9c10100c98de9aafb908426a12147801b4483a92c6194b882b725d2811728b03eb4451349434fba9c9882021765b9f5ec9fae4d469603291086ad32c6ed6b40b03f77a08d2ae784f7d0359b855018872307d760c4bfc731519f9859eef844fb61f60c67c5f6a091dbe40601f2bc8d9b029bee501c412d20e0c7dac29ca7f6942a46216cc11337245d9e5df5fe1c951f1da3c4f36a2dfa7348b8d399c6f116448e75adb1b8b98b83d08b8f03911586153ecb9c953259b244083f3625cd15381633a36c3c"; const PROOF: vector<u8> = -
x"c0010f1ea16b16f3fc46eb0b00a516092a6ab389e259a3013eee5e39e130702de85954b8aac435e724ad0bfd210ab7789fb91b54ac4352154f3251e0a87ccfd2c9a57d26468a384f527e129fc82b33c04b3ebbec3a99967798a95b39820c35ea015fdf4c81e143004b34b99e63462cf350689b2abdd6c3903adcbe55781d3a89c89dc571c312f9a80911a9d64884747319574b3a4ded25478e6d64b9cfb25d9c67366bc25d9ac99bcdba16665158da50a2ba179893292c4b7e76502ecaba1337d693c001fb3867669e0d4e45aa43d959dbe33c3d35b00e8414d1cf1bb9552726bb95bafa0a2c12a014a3b8fb0bd5ab9a40430ff59364b19d58d80665fee0bfee272a38c45413a3688832bf9bcacf7b5723436c120878f85ce084e72b13246ecfec7cd6a5d79e13296bbb51af785c10afe6c4f07f43a5bc711dc398271185d700b1695310d8e428ad3bc6b81a4faac2f5009b723460dbd260c940dfac06e34854d204dc779f94ab3f67847a7b90855dadc3962871c022e172e96b39a08648e045e14dad87c10102f976797f14be801a441f19771a4835640a74cf7c6ad216f18d9cdaf461bb56a897b804e053cd6cc68d659bd9f0ed985f094932d306c1bd76450bd349db3a81008d7591bc826a36583c3c361add7a8f245d18007d79704d79ae27eb08b52a44af17e2f23b441919049f061d69bac3a09c3e15074e4d75cf82f42dbff1c62ddc94fe6167ccb7265e7eab0def7d30d97be441ad763705dd30d4040815996e34643bf6d7a4f06c22aa5d6d5dd30253ea8aa59607724bb71f5425a5e7fee03b7e9fe8"; + x"c0014c3726e4a8b4748edf1e536168bd6f65b261a24777d41745badb3c475462a41f91b0fc1824682274ccd6b112aa29c0bdd4d314e718964d80903a89a22545a1a375be431ee0b42d4aa2948a624af88348386bb4cbc3c7a0e9e8e61bae5cb545eb0a85a85a872efb06fbfbb4241815cc157a2b98805744b3c49c9fa3b4dd465549503be15dc97ebb2fa0d50bccac77f80a8e37497481d96bdd4f93f0365a3fc832786b4c134772f11e42cf427e10bdd4cf1d8f15ed66b40e9f696dff7d39526f4cc001b67117474c19190057a21fe396239d6d57901e225e7396e743316f0104ffdfb0391de5fc2af4c5da63073cf6315e741e5ce7f5abcd1e8dfbd75e26bf477c41ddddb225d431ee7236b0e086fb521140eb708db3d6fd908140604b308a451b4967f9dc14a0a679152ee9c2d49ef5441b3d452bf44215e97a7ec6887b03db19bbb2a465216dcd6ee978a5007ea61c208c341659568b079fbecb7fd1616d90f48ff13fef190e847f5d274c7e96f989cce6d66cfc6795cdd7ab5b186504f9ad1a3867c10100a8bb98567e45419001c914099e225a8f821f69d42b3f6ba5fa0795df4a16150bce84f47da24ae32097ff6a72b0945276d879a0f91663f33e4e8a4cebb6edfa32991f6f6838ad5e048d40fccf67ab6477b5f144d8552d20427c635cbf8385d1213666cc4fa574c691c1d86cc9bbf7d4027daa480039c6840de49f9cb3f044f4c0f08bf16ef5c75b9c0ec17638b83726ff0a0dfdf471461aabf18d9d424bf17ddc4b79e5c837f7e1ed06454de8d366fd6c99402ea00c184eee44d282b622d8fd26"; const BAD_PROOF: vector<u8> = x"0101010180032cf35709e1301d02b40a0dbe3dadfe6ec1eeba8fb8060a1decd0c7a126ea3f27fadcad81435601b0e0abca5c89173ef639e5a88043aa29801e6799e430b509e479b57af981f9ddd48d3a8d5919f99258081557a08270bb441233c78030a01e03ec199b5e3eef5ccc9b1a3d4841cbe4ff529c22a8cd1b1b0075338d864e3890942df6b007d2c3e3a8ef1ce7490c6bbec5372adfcbf8704a1ffc9a69db8d9cdc54762f019036e450e457325eef74b794f3f16ff327d68079a5b9de49163d7323937374f8a785a8f9afe84d6a71b336e4de00f239ee3af1d7604a3985e610e1603bd0e1a4998e19fa0c8920ffd8d61b0a87eeee50ac7c03ff7c4708a34f3bc92fd0103758c954ee34032cee2c78ad8cdc79a35dbc810196b7bf6833e1c45c83b09c0d1b78bc6f8753e10770e7045b08d50b4aa16a75b27a096d5ec1331f1fd0a44e95a8737c20240c90307b5497d3470393c2a00da0649e86d13e820591296c644fc1eef9e7c6ca4967c5e19df3153cd7fbd598c271e11c10397349ddc8cc8452ec"; @@ -78,14 +78,14 @@ module vdf::lottery_tests { ts.next_tx(user3); game.complete(OUTPUT, PROOF, &clock); - // User 1 is the winner since the mod of the hash results in 0. - ts.next_tx(user1); - assert!(!ts::has_most_recent_for_address(user1), 1); - let winner = game.redeem(&t1, ts.ctx()); + // User 2 is the winner since the mod of the hash results in 1.
+ ts.next_tx(user2); + assert!(!ts::has_most_recent_for_address(user2), 1); + let winner = game.redeem(&t2, ts.ctx()); - // Make sure User1 now has a winner ticket for the right game id. - ts.next_tx(user1); - assert!(winner.game_id() == t1.game_id(), 1); + // Make sure User2 now has a winner ticket for the right game id. + ts.next_tx(user2); + assert!(winner.game_id() == t2.game_id(), 1); t1.delete(); t2.delete(); diff --git a/examples/trading/api/package.json b/examples/trading/api/package.json index f78493e33f2fc..666bc0522bbf7 100644 --- a/examples/trading/api/package.json +++ b/examples/trading/api/package.json @@ -28,6 +28,6 @@ "@mysten/sui": "workspace:*", "@prisma/client": "^5.16.2", "cors": "^2.8.5", - "express": "^4.19.2" + "express": "^4.20.0" } } diff --git a/external-crates/move/Cargo.lock b/external-crates/move/Cargo.lock index 74205d8ffc2de..cdc1ffcdbc7ce 100644 --- a/external-crates/move/Cargo.lock +++ b/external-crates/move/Cargo.lock @@ -1604,6 +1604,7 @@ name = "move-bytecode-utils" version = "0.1.0" dependencies = [ "anyhow", + "indexmap 2.2.6", "move-binary-format", "move-core-types", "petgraph", diff --git a/external-crates/move/Cargo.toml b/external-crates/move/Cargo.toml index e74dc56214b3e..32d76dd3b1baa 100644 --- a/external-crates/move/Cargo.toml +++ b/external-crates/move/Cargo.toml @@ -63,6 +63,7 @@ hex = "0.4.3" hex-literal = "0.3.4" hkdf = "0.10.0" im = "15.1.0" +indexmap = "2.1.0" internment = { version = "0.5.0", features = [ "arc"] } itertools = "0.10.0" json_comments = "0.2.2" diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/check_bounds/1_param_and_255_locals.exp b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/check_bounds/1_param_and_255_locals.exp index 974c19a2d0287..03714e445884b 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/check_bounds/1_param_and_255_locals.exp +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/check_bounds/1_param_and_255_locals.exp @@ -2,7 +2,7 @@ processed 1 task task 0, lines 1-264: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::M'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000006::M'. 
Got VMError: { major_status: TOO_MANY_LOCALS, sub_status: None, location: undefined, diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/check_bounds/1_param_and_255_locals.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/check_bounds/1_param_and_255_locals.mvir index 654674e5bba1a..6121141f668b3 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/check_bounds/1_param_and_255_locals.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/check_bounds/1_param_and_255_locals.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.M { +module 0x6.M { foo(p1: u8) { let x1 : u8; let x2 : u8; diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/check_bounds/256_locals.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/check_bounds/256_locals.mvir index 19a5030b3f10d..dfd8fd954ddd2 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/check_bounds/256_locals.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/check_bounds/256_locals.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.M { +module 0x6.M { foo() { let x1 : u8; let x2 : u8; diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/check_bounds/too_many_type_actuals.exp b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/check_bounds/too_many_type_actuals.exp index a4ad787848de6..3e0ab849edb9f 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/check_bounds/too_many_type_actuals.exp +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/check_bounds/too_many_type_actuals.exp @@ -2,7 +2,7 @@ processed 1 task task 0, lines 1-11: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::M'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000006::M'. 
Got VMError: { major_status: NUMBER_OF_TYPE_ARGUMENTS_MISMATCH, sub_status: None, location: undefined, diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/check_bounds/too_many_type_actuals.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/check_bounds/too_many_type_actuals.mvir index 1ffbb5deda91f..bbf22a84bb4ce 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/check_bounds/too_many_type_actuals.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/check_bounds/too_many_type_actuals.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.M { +module 0x6.M { struct S { b: bool } foo() { diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/dependencies/call_integers_valid.move b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/dependencies/call_integers_valid.move index 96a8df31082f5..95d581de785d9 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/dependencies/call_integers_valid.move +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/dependencies/call_integers_valid.move @@ -10,7 +10,7 @@ public fun plus256(a: u256): u256 { a + 1 } //# publish -module 0x42::m_test { +module 0x43::m_test { use 0x42::m; public fun test8() { m::plus8(1u8); } public fun test16() { m::plus16(1u16); } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/complex_1.exp b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/complex_1.exp index 8a8f0e085cc48..396de5edaa335 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/complex_1.exp +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/complex_1.exp @@ -2,30 +2,30 @@ processed 3 tasks task 0, lines 1-51: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::M'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000006::M'. Got VMError: { major_status: LOOP_IN_INSTANTIATION_GRAPH, sub_status: None, - location: 0x1::M, + location: 0x6::M, indices: [], offsets: [], } task 1, lines 53-70: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::M2'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000007::M2'. Got VMError: { major_status: LOOP_IN_INSTANTIATION_GRAPH, sub_status: None, - location: 0x1::M2, + location: 0x7::M2, indices: [], offsets: [], } task 2, lines 72-122: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::M3'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000008::M3'. 
Got VMError: { major_status: LOOP_IN_INSTANTIATION_GRAPH, sub_status: None, - location: 0x1::M3, + location: 0x8::M3, indices: [], offsets: [], } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/complex_1.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/complex_1.mvir index 338a67ae29374..003a8055e8ec8 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/complex_1.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/complex_1.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.M { +module 0x6.M { struct S { b: bool } a() { @@ -51,7 +51,7 @@ module 0x1.M { // loop: c#T1 --> c#T2 --> d#T --> b#T2 --S--> c#T1 //# publish -module 0x1.M2 { +module 0x7.M2 { struct S { b: bool } f() { @@ -71,7 +71,7 @@ module 0x1.M2 { //# publish -module 0x1.M3 { +module 0x8.M3 { struct S { b: bool } a() { diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/complex_1_enum.exp b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/complex_1_enum.exp index 8a8f0e085cc48..396de5edaa335 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/complex_1_enum.exp +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/complex_1_enum.exp @@ -2,30 +2,30 @@ processed 3 tasks task 0, lines 1-51: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::M'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000006::M'. Got VMError: { major_status: LOOP_IN_INSTANTIATION_GRAPH, sub_status: None, - location: 0x1::M, + location: 0x6::M, indices: [], offsets: [], } task 1, lines 53-70: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::M2'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000007::M2'. Got VMError: { major_status: LOOP_IN_INSTANTIATION_GRAPH, sub_status: None, - location: 0x1::M2, + location: 0x7::M2, indices: [], offsets: [], } task 2, lines 72-122: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::M3'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000008::M3'. 
Got VMError: { major_status: LOOP_IN_INSTANTIATION_GRAPH, sub_status: None, - location: 0x1::M3, + location: 0x8::M3, indices: [], offsets: [], } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/complex_1_enum.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/complex_1_enum.mvir index 48c02eeb5a308..0c40ecf23d370 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/complex_1_enum.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/complex_1_enum.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.M { +module 0x6.M { enum S { V{ b: bool } } a() { @@ -51,7 +51,7 @@ module 0x1.M { // loop: c#T1 --> c#T2 --> d#T --> b#T2 --S--> c#T1 //# publish -module 0x1.M2 { +module 0x7.M2 { enum S { V { b: bool } } f() { @@ -71,7 +71,7 @@ module 0x1.M2 { //# publish -module 0x1.M3 { +module 0x8.M3 { enum S { V{ b: bool } } a() { diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_just_type_params_ok.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_just_type_params_ok.mvir index e629305a028c8..6da321cf28de6 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_just_type_params_ok.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_just_type_params_ok.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.M { +module 0x6.M { f() { label b0: Self.g(); diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_non_generic_type_ok.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_non_generic_type_ok.mvir index 36b0e3d5d47ba..43fb7213dff90 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_non_generic_type_ok.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_non_generic_type_ok.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.M { +module 0x6.M { struct S { b: bool } f() { diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_non_generic_type_ok_enum.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_non_generic_type_ok_enum.mvir index 7047b775fde22..0ae491e6b69bd 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_non_generic_type_ok_enum.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_non_generic_type_ok_enum.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.M { +module 0x6.M { enum S { V{ b: bool } } f() { diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_three_args_just_type_params_shitfing_ok.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_three_args_just_type_params_shitfing_ok.mvir index 35f8a8925e89e..e7150bbfc9dfb 100644 --- 
a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_three_args_just_type_params_shitfing_ok.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_three_args_just_type_params_shitfing_ok.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.M { +module 0x6.M { f() { label b0: Self.g(); diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_three_args_type_con_non_generic_types_ok.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_three_args_type_con_non_generic_types_ok.mvir index d351f5297cddd..ed5944413dc86 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_three_args_type_con_non_generic_types_ok.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_three_args_type_con_non_generic_types_ok.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.M { +module 0x6.M { struct S { b: bool } f() { diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_three_args_type_con_shifting.exp b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_three_args_type_con_shifting.exp index adbec9b3fe0de..2a408054d932c 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_three_args_type_con_shifting.exp +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_three_args_type_con_shifting.exp @@ -2,10 +2,10 @@ processed 1 task task 0, lines 1-23: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::M'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000006::M'. 
Got VMError: { major_status: LOOP_IN_INSTANTIATION_GRAPH, sub_status: None, - location: 0x1::M, + location: 0x6::M, indices: [], offsets: [], } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_three_args_type_con_shifting.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_three_args_type_con_shifting.mvir index 8633f2a894e61..234ba01a2d084 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_three_args_type_con_shifting.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_three_args_type_con_shifting.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.M { +module 0x6.M { struct S { b: bool } f() { diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_three_args_type_con_shifting_enum.exp b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_three_args_type_con_shifting_enum.exp index adbec9b3fe0de..2a408054d932c 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_three_args_type_con_shifting_enum.exp +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_three_args_type_con_shifting_enum.exp @@ -2,10 +2,10 @@ processed 1 task task 0, lines 1-23: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::M'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000006::M'. 
Got VMError: { major_status: LOOP_IN_INSTANTIATION_GRAPH, sub_status: None, - location: 0x1::M, + location: 0x6::M, indices: [], offsets: [], } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_three_args_type_con_shifting_enum.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_three_args_type_con_shifting_enum.mvir index 94016c7ddefad..2d2bde3bce548 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_three_args_type_con_shifting_enum.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_three_args_type_con_shifting_enum.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.M { +module 0x6.M { enum S { V{ b: bool } } f() { diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_two_args_swapping_just_type_params_ok.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_two_args_swapping_just_type_params_ok.mvir index fdd12bf421984..49f338117a009 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_two_args_swapping_just_type_params_ok.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_two_args_swapping_just_type_params_ok.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.M { +module 0x6.M { f() { label b0: Self.g(); diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_two_args_swapping_type_con.exp b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_two_args_swapping_type_con.exp index 78369208ad929..ad01a23a64fac 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_two_args_swapping_type_con.exp +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_two_args_swapping_type_con.exp @@ -2,10 +2,10 @@ processed 1 task task 0, lines 1-17: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::M'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000006::M'. 
Got VMError: { major_status: LOOP_IN_INSTANTIATION_GRAPH, sub_status: None, - location: 0x1::M, + location: 0x6::M, indices: [], offsets: [], } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_two_args_swapping_type_con.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_two_args_swapping_type_con.mvir index 39bfec96ce844..89d20e5734573 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_two_args_swapping_type_con.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_two_args_swapping_type_con.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.M { +module 0x6.M { struct S { b: bool } f() { diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_two_args_swapping_type_con_enum.exp b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_two_args_swapping_type_con_enum.exp index 78369208ad929..ad01a23a64fac 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_two_args_swapping_type_con_enum.exp +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_two_args_swapping_type_con_enum.exp @@ -2,10 +2,10 @@ processed 1 task task 0, lines 1-17: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::M'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000006::M'. Got VMError: { major_status: LOOP_IN_INSTANTIATION_GRAPH, sub_status: None, - location: 0x1::M, + location: 0x6::M, indices: [], offsets: [], } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_two_args_swapping_type_con_enum.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_two_args_swapping_type_con_enum.mvir index 37bbd0a6a9a65..373a6652e1cc2 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_two_args_swapping_type_con_enum.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_two_args_swapping_type_con_enum.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.M { +module 0x6.M { enum S { V { b: bool } } f() { diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_type_con.exp b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_type_con.exp index 6d41922052c42..be62ad1f511ad 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_type_con.exp +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_type_con.exp @@ -2,10 +2,10 @@ processed 1 task task 0, lines 1-20: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::M'. 
Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000006::M'. Got VMError: { major_status: LOOP_IN_INSTANTIATION_GRAPH, sub_status: None, - location: 0x1::M, + location: 0x6::M, indices: [], offsets: [], } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_type_con.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_type_con.mvir index 17d06014e4fb1..ae7cf51e4cc23 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_type_con.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_type_con.mvir @@ -3,7 +3,7 @@ // Not good: infinitely many types/instances. // f, g>, f>, g>>, ... -module 0x1.M { +module 0x6.M { struct S { b: bool } f() { diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_type_con_enum.exp b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_type_con_enum.exp index 6d41922052c42..be62ad1f511ad 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_type_con_enum.exp +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_type_con_enum.exp @@ -2,10 +2,10 @@ processed 1 task task 0, lines 1-20: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::M'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000006::M'. Got VMError: { major_status: LOOP_IN_INSTANTIATION_GRAPH, sub_status: None, - location: 0x1::M, + location: 0x6::M, indices: [], offsets: [], } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_type_con_enum.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_type_con_enum.mvir index 5630abc40ab0a..b5a60d8aed64e 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_type_con_enum.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/mutually_recursive_type_con_enum.mvir @@ -3,7 +3,7 @@ // Not good: infinitely many types/instances. // f, g>, f>, g>>, ... -module 0x1.M { +module 0x6.M { enum S { V { b: bool } } f() { diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/nested_types_1_enum.exp b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/nested_types_1_enum.exp index 78c493676f019..214ea269d78a0 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/nested_types_1_enum.exp +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/nested_types_1_enum.exp @@ -2,10 +2,10 @@ processed 1 task task 0, lines 1-11: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::M'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000006::M'. 
Got VMError: { major_status: LOOP_IN_INSTANTIATION_GRAPH, sub_status: None, - location: 0x1::M, + location: 0x6::M, indices: [], offsets: [], } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/nested_types_1_enum.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/nested_types_1_enum.mvir index 5c2ae8ba244fe..a1da9c976696e 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/nested_types_1_enum.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/nested_types_1_enum.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.M { +module 0x6.M { enum S { V { b: bool }, R { } } foo() { diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/nested_types_2.exp b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/nested_types_2.exp index b3a74762fbfdb..ce40b165ce001 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/nested_types_2.exp +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/nested_types_2.exp @@ -2,10 +2,10 @@ processed 1 task task 0, lines 1-13: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::M'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000006::M'. Got VMError: { major_status: LOOP_IN_INSTANTIATION_GRAPH, sub_status: None, - location: 0x1::M, + location: 0x6::M, indices: [], offsets: [], } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/nested_types_2.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/nested_types_2.mvir index 8d0b0a07042a1..c6fe46d04d6e5 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/nested_types_2.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/nested_types_2.mvir @@ -1,6 +1,6 @@ //# publish -module 0x1.M { +module 0x6.M { struct S { b: bool } struct R { b: bool } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/nested_types_2_enum_struct.exp b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/nested_types_2_enum_struct.exp index b3a74762fbfdb..ce40b165ce001 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/nested_types_2_enum_struct.exp +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/nested_types_2_enum_struct.exp @@ -2,10 +2,10 @@ processed 1 task task 0, lines 1-13: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::M'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000006::M'. 
Got VMError: { major_status: LOOP_IN_INSTANTIATION_GRAPH, sub_status: None, - location: 0x1::M, + location: 0x6::M, indices: [], offsets: [], } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/nested_types_2_enum_struct.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/nested_types_2_enum_struct.mvir index 3de1969878886..13719dc6d0cff 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/nested_types_2_enum_struct.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/nested_types_2_enum_struct.mvir @@ -1,6 +1,6 @@ //# publish -module 0x1.M { +module 0x6.M { struct S { b: bool } enum R { V{ b: bool } } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_infinite_type_terminates.exp b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_infinite_type_terminates.exp index c332bce7d7352..1a6d508c2e1ef 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_infinite_type_terminates.exp +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_infinite_type_terminates.exp @@ -2,10 +2,10 @@ processed 1 task task 0, lines 1-45: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::M'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000006::M'. Got VMError: { major_status: LOOP_IN_INSTANTIATION_GRAPH, sub_status: None, - location: 0x1::M, + location: 0x6::M, indices: [], offsets: [], } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_infinite_type_terminates.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_infinite_type_terminates.mvir index a2847b03923e6..a37775e335d33 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_infinite_type_terminates.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_infinite_type_terminates.mvir @@ -1,6 +1,6 @@ //# publish -module 0x1.M { +module 0x6.M { struct Box<T> { x: T } unbox<T>(b: Self.Box<T>): T { diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_one_arg_just_type_params_ok.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_one_arg_just_type_params_ok.mvir index 2e4b63f441635..ba67668677d81 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_one_arg_just_type_params_ok.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_one_arg_just_type_params_ok.mvir @@ -2,7 +2,7 @@ // This is good as there is only one instance foo<T> for any T.
-module 0x1.M { +module 0x6.M { f<T>(x: T) { label b0: Self.f<T>(move(x)); diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_one_arg_non_generic_type_ok.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_one_arg_non_generic_type_ok.mvir index 52d442efd9fcb..bd5d7d200d7ac 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_one_arg_non_generic_type_ok.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_one_arg_non_generic_type_ok.mvir @@ -2,7 +2,7 @@ // Good: two instances foo<T> & foo<u64> (if T != u64) for any T. -module 0x1.M { +module 0x6.M { f<T>() { label b0: Self.f<u64>(); diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_one_arg_type_con.exp b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_one_arg_type_con.exp index b3a74762fbfdb..ce40b165ce001 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_one_arg_type_con.exp +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_one_arg_type_con.exp @@ -2,10 +2,10 @@ processed 1 task task 0, lines 1-13: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::M'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000006::M'. Got VMError: { major_status: LOOP_IN_INSTANTIATION_GRAPH, sub_status: None, - location: 0x1::M, + location: 0x6::M, indices: [], offsets: [], } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_one_arg_type_con.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_one_arg_type_con.mvir index 0bf3a7ecea459..fac9b6bec0d8a 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_one_arg_type_con.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_one_arg_type_con.mvir @@ -2,7 +2,7 @@ // Bad! Can have infinitely many instances: f<T>, f<S<T>>, f<S<S<T>>>, ... -module 0x1.M { +module 0x6.M { struct S<T> { b: bool } f<T>(x: T) { diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_one_arg_type_con_enum.exp b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_one_arg_type_con_enum.exp index b3a74762fbfdb..ce40b165ce001 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_one_arg_type_con_enum.exp +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_one_arg_type_con_enum.exp @@ -2,10 +2,10 @@ processed 1 task task 0, lines 1-13: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::M'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000006::M'.
Got VMError: { major_status: LOOP_IN_INSTANTIATION_GRAPH, sub_status: None, - location: 0x1::M, + location: 0x6::M, indices: [], offsets: [], } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_one_arg_type_con_enum.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_one_arg_type_con_enum.mvir index 1c8c5e8a77e44..79bb4ba7998e0 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_one_arg_type_con_enum.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_one_arg_type_con_enum.mvir @@ -2,7 +2,7 @@ // Bad! Can have infinitely many instances: f<T>, f<S<T>>, f<S<S<T>>>, ... -module 0x1.M { +module 0x6.M { enum S<T> { V{ b: bool } } f<T>(x: T) { diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_two_args_swapping_type_con_enum.exp b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_two_args_swapping_type_con_enum.exp index dd4ffa564acd7..711fe8831e56f 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_two_args_swapping_type_con_enum.exp +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_two_args_swapping_type_con_enum.exp @@ -2,10 +2,10 @@ processed 1 task task 0, lines 1-14: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::M'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000006::M'. Got VMError: { major_status: LOOP_IN_INSTANTIATION_GRAPH, sub_status: None, - location: 0x1::M, + location: 0x6::M, indices: [], offsets: [], } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_two_args_swapping_type_con_enum.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_two_args_swapping_type_con_enum.mvir index 562d14461a04e..397c7f59228cb 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_two_args_swapping_type_con_enum.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/recursive_two_args_swapping_type_con_enum.mvir @@ -3,7 +3,7 @@ // Similar to the case with one argument, but swaps the two type parameters. // f<T1, T2> => f<S<T2>, T1> => f<S<T1>, S<T2>> => f<S<S<T2>>, S<T1>> => ... -module 0x1.M { +module 0x6.M { enum S<T> { V{ x: T } } f<T1, T2>(a: T1, b: T2) { diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/two_loops.exp b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/two_loops.exp index 6d41922052c42..be62ad1f511ad 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/two_loops.exp +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/two_loops.exp @@ -2,10 +2,10 @@ processed 1 task task 0, lines 1-20: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::M'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000006::M'.
Got VMError: { major_status: LOOP_IN_INSTANTIATION_GRAPH, sub_status: None, - location: 0x1::M, + location: 0x6::M, indices: [], offsets: [], } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/two_loops.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/two_loops.mvir index 1c553a73fb03f..6b9f433d13c69 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/two_loops.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/two_loops.mvir @@ -3,7 +3,7 @@ // Two loops in the resulting graph. // One error for the loop. -module 0x1.M { +module 0x6.M { struct S { b: bool } f() { diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/two_loops_enum.exp b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/two_loops_enum.exp index 6d41922052c42..be62ad1f511ad 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/two_loops_enum.exp +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/two_loops_enum.exp @@ -2,10 +2,10 @@ processed 1 task task 0, lines 1-20: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::M'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000006::M'. Got VMError: { major_status: LOOP_IN_INSTANTIATION_GRAPH, sub_status: None, - location: 0x1::M, + location: 0x6::M, indices: [], offsets: [], } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/two_loops_enum.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/two_loops_enum.mvir index dd3e44bdb8872..5dda00b6fbcb6 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/two_loops_enum.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/instantiation_loops/two_loops_enum.mvir @@ -3,7 +3,7 @@ // Two loops in the resulting graph. // One error for the loop. 
-module 0x1.M { +module 0x6.M { enum S { V { b: bool } } f() { diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/abort_positive_stack_size.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/abort_positive_stack_size.mvir index 55b29920c75fd..4712442571478 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/abort_positive_stack_size.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/abort_positive_stack_size.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.M { +module 0x6.M { public foo(): u64 { label b0: @@ -10,7 +10,7 @@ module 0x1.M { //# run module 0x42.m { -import 0x1.M; +import 0x6.M; entry foo() { label b0: diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/cast_negative_stack.exp b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/cast_negative_stack.exp index c225167ca88a6..8813cc785079b 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/cast_negative_stack.exp +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/cast_negative_stack.exp @@ -12,50 +12,50 @@ Error: Unable to publish module '00000000000000000000000000000000000000000000000 task 1, lines 11-20: //# run -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000042::m'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000043::m'. Got VMError: { major_status: NEGATIVE_STACK_SIZE_WITHIN_BLOCK, sub_status: None, - location: 0x42::m, + location: 0x43::m, indices: [(FunctionDefinition, 0)], offsets: [(FunctionDefinitionIndex(0), 0)], } task 2, lines 21-30: //# run -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000042::m'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000044::m'. Got VMError: { major_status: NEGATIVE_STACK_SIZE_WITHIN_BLOCK, sub_status: None, - location: 0x42::m, + location: 0x44::m, indices: [(FunctionDefinition, 0)], offsets: [(FunctionDefinitionIndex(0), 0)], } task 3, lines 31-40: //# run -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000042::m'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000045::m'. Got VMError: { major_status: NEGATIVE_STACK_SIZE_WITHIN_BLOCK, sub_status: None, - location: 0x42::m, + location: 0x45::m, indices: [(FunctionDefinition, 0)], offsets: [(FunctionDefinitionIndex(0), 0)], } task 4, lines 41-50: //# run -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000042::m'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000046::m'. Got VMError: { major_status: NEGATIVE_STACK_SIZE_WITHIN_BLOCK, sub_status: None, - location: 0x42::m, + location: 0x46::m, indices: [(FunctionDefinition, 0)], offsets: [(FunctionDefinitionIndex(0), 0)], } task 5, lines 51-60: //# run -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000042::m'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000047::m'. 
Got VMError: { major_status: NEGATIVE_STACK_SIZE_WITHIN_BLOCK, sub_status: None, - location: 0x42::m, + location: 0x47::m, indices: [(FunctionDefinition, 0)], offsets: [(FunctionDefinitionIndex(0), 0)], } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/cast_negative_stack.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/cast_negative_stack.mvir index a6c68adf7c963..f2d596581a059 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/cast_negative_stack.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/cast_negative_stack.mvir @@ -9,7 +9,7 @@ label b0: } //# run -module 0x42.m { +module 0x43.m { entry foo() { label b0: @@ -19,7 +19,7 @@ label b0: } //# run -module 0x42.m { +module 0x44.m { entry foo() { label b0: @@ -29,7 +29,7 @@ label b0: } //# run -module 0x42.m { +module 0x45.m { entry foo() { label b0: @@ -39,7 +39,7 @@ label b0: } //# run -module 0x42.m { +module 0x46.m { entry foo() { label b0: @@ -49,7 +49,7 @@ label b0: } //# run -module 0x42.m { +module 0x47.m { entry foo() { label b0: diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/cast_positive_stack.exp b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/cast_positive_stack.exp index 990489897072d..045003b01f887 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/cast_positive_stack.exp +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/cast_positive_stack.exp @@ -12,50 +12,50 @@ Error: Unable to publish module '00000000000000000000000000000000000000000000000 task 1, lines 12-22: //# run -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000042::m'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000043::m'. Got VMError: { major_status: POSITIVE_STACK_SIZE_AT_BLOCK_END, sub_status: None, - location: 0x42::m, + location: 0x43::m, indices: [(FunctionDefinition, 0)], offsets: [(FunctionDefinitionIndex(0), 0)], } task 2, lines 23-33: //# run -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000042::m'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000044::m'. Got VMError: { major_status: POSITIVE_STACK_SIZE_AT_BLOCK_END, sub_status: None, - location: 0x42::m, + location: 0x44::m, indices: [(FunctionDefinition, 0)], offsets: [(FunctionDefinitionIndex(0), 0)], } task 3, lines 34-44: //# run -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000042::m'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000045::m'. Got VMError: { major_status: POSITIVE_STACK_SIZE_AT_BLOCK_END, sub_status: None, - location: 0x42::m, + location: 0x45::m, indices: [(FunctionDefinition, 0)], offsets: [(FunctionDefinitionIndex(0), 0)], } task 4, lines 45-55: //# run -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000042::m'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000046::m'. 
Got VMError: { major_status: POSITIVE_STACK_SIZE_AT_BLOCK_END, sub_status: None, - location: 0x42::m, + location: 0x46::m, indices: [(FunctionDefinition, 0)], offsets: [(FunctionDefinitionIndex(0), 0)], } task 5, lines 56-66: //# run -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000042::m'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000047::m'. Got VMError: { major_status: POSITIVE_STACK_SIZE_AT_BLOCK_END, sub_status: None, - location: 0x42::m, + location: 0x47::m, indices: [(FunctionDefinition, 0)], offsets: [(FunctionDefinitionIndex(0), 0)], } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/cast_positive_stack.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/cast_positive_stack.mvir index b1d50383d5119..9e1a2ab4d5e08 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/cast_positive_stack.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/cast_positive_stack.mvir @@ -10,7 +10,7 @@ label b0: } //# run -module 0x42.m { +module 0x43.m { entry foo() { label b0: @@ -21,7 +21,7 @@ label b0: } //# run -module 0x42.m { +module 0x44.m { entry foo() { label b0: @@ -32,7 +32,7 @@ label b0: } //# run -module 0x42.m { +module 0x45.m { entry foo() { label b0: @@ -43,7 +43,7 @@ label b0: } //# run -module 0x42.m { +module 0x46.m { entry foo() { label b0: @@ -54,7 +54,7 @@ label b0: } //# run -module 0x42.m { +module 0x47.m { entry foo() { label b0: diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/consume_stack.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/consume_stack.mvir index 6da510c3bb2f7..2c104aa6cf805 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/consume_stack.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/consume_stack.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.M { +module 0x6.M { struct R has key { data: vector<u8> } is_ok_(addr: &address, data: &vector<u8>): bool { diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/function_call_negative_stack_err_1.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/function_call_negative_stack_err_1.mvir index 2c3ee0f4617ee..f3cb1e037cd0f 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/function_call_negative_stack_err_1.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/function_call_negative_stack_err_1.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.Test { +module 0x6.Test { public baz(k: u64, l: u64, m: u64) : u64 { let z: u64; label b0: @@ -10,7 +10,7 @@ //# run module 0x42.m { -import 0x1.Test; +import 0x6.Test; entry foo() { label b0: diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/function_call_negative_stack_err_2.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/function_call_negative_stack_err_2.mvir index af8863f0c6242..0d155a47e10dd 100644 ---
a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/function_call_negative_stack_err_2.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/function_call_negative_stack_err_2.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.A { +module 0x6.A { public push_u64(): u64 { label b0: return 42; @@ -23,7 +23,7 @@ module 0x1.A { //# run module 0x42.m { -import 0x1.A; +import 0x6.A; entry foo() { let ans: u64; diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/function_composition_pos_and_neg_stack_err.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/function_composition_pos_and_neg_stack_err.mvir index 1e41dfc79bb02..cbd64175c8423 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/function_composition_pos_and_neg_stack_err.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/function_composition_pos_and_neg_stack_err.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.A { +module 0x6.A { public parity(u: u64): u64 * bool { label b0: return copy(u), (move(u) % 2 == 0); @@ -22,7 +22,7 @@ module 0x1.A { //# run module 0x42.m { -import 0x1.A; +import 0x6.A; entry foo() { let k: u64; diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/function_composition_positive_stack_err_1.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/function_composition_positive_stack_err_1.mvir index 8ee20a4f7ccad..dd121aceb9cce 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/function_composition_positive_stack_err_1.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/function_composition_positive_stack_err_1.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.Test { +module 0x6.Test { public foo(v: u64): u64 * u64 { let one_less: u64; let one_more: u64; @@ -17,7 +17,7 @@ module 0x1.Test { //# run module 0x42.m { -import 0x1.Test; +import 0x6.Test; entry foo() { let y: u64; diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/function_composition_positive_stack_err_2.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/function_composition_positive_stack_err_2.mvir index d093bfe94f0f9..e305f1b4818ad 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/function_composition_positive_stack_err_2.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/function_composition_positive_stack_err_2.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.A { +module 0x6.A { public parity(u: u64): u64 * bool { label b0: return copy(u), (move(u) % 2 == 0); @@ -17,7 +17,7 @@ module 0x1.A { //# run module 0x42.m { -import 0x1.A; +import 0x6.A; entry foo() { let k: u64; diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/integer_stack_balanced.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/integer_stack_balanced.mvir index 0f3bcf7462d36..5ae7f117976c6 100644 --- 
a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/integer_stack_balanced.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/integer_stack_balanced.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.A { +module 0x6.A { two(): u8 * u8{ label b0: return 0u8, 255u8; @@ -15,7 +15,7 @@ module 0x1.A { //# publish -module 0x1.B { +module 0x7.B { two(): u16 * u16{ label b0: return 0u16, 65535u16; @@ -30,7 +30,7 @@ module 0x1.B { } //# publish -module 0x1.C { +module 0x8.C { two(): u32 * u32{ label b0: return 0u32, 4294967295u32; @@ -45,7 +45,7 @@ module 0x1.C { } //# publish -module 0x1.D { +module 0x9.D { two(): u64 * u64{ label b0: return 0u64, 18446744073709551615u64; @@ -60,7 +60,7 @@ module 0x1.D { } //# publish -module 0x1.E { +module 0xa.E { two(): u128 * u128{ label b0: return 0u128, 340282366920938463463374607431768211455u128; @@ -75,7 +75,7 @@ module 0x1.E { } //# publish -module 0x1.F { +module 0xb.F { two(): u256 * u256{ label b0: return 0u256, 115792089237316195423570985008687907853269984665640564039457584007913129639935u256; diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/integer_stack_negative.exp b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/integer_stack_negative.exp index 699a4ac9c6149..c624c4cdf2947 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/integer_stack_negative.exp +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/integer_stack_negative.exp @@ -2,60 +2,60 @@ processed 6 tasks task 0, lines 1-14: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::A'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000006::A'. Got VMError: { major_status: NEGATIVE_STACK_SIZE_WITHIN_BLOCK, sub_status: None, - location: 0x1::A, + location: 0x6::A, indices: [(FunctionDefinition, 1)], offsets: [(FunctionDefinitionIndex(1), 0)], } task 1, lines 17-30: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::A'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000007::A'. Got VMError: { major_status: NEGATIVE_STACK_SIZE_WITHIN_BLOCK, sub_status: None, - location: 0x1::A, + location: 0x7::A, indices: [(FunctionDefinition, 1)], offsets: [(FunctionDefinitionIndex(1), 0)], } task 2, lines 32-45: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::A'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000008::A'. Got VMError: { major_status: NEGATIVE_STACK_SIZE_WITHIN_BLOCK, sub_status: None, - location: 0x1::A, + location: 0x8::A, indices: [(FunctionDefinition, 1)], offsets: [(FunctionDefinitionIndex(1), 0)], } task 3, lines 47-60: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::A'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000009::A'. 
Got VMError: { major_status: NEGATIVE_STACK_SIZE_WITHIN_BLOCK, sub_status: None, - location: 0x1::A, + location: 0x9::A, indices: [(FunctionDefinition, 1)], offsets: [(FunctionDefinitionIndex(1), 0)], } task 4, lines 62-75: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::A'. Got VMError: { +Error: Unable to publish module '000000000000000000000000000000000000000000000000000000000000000a::A'. Got VMError: { major_status: NEGATIVE_STACK_SIZE_WITHIN_BLOCK, sub_status: None, - location: 0x1::A, + location: 0xa::A, indices: [(FunctionDefinition, 1)], offsets: [(FunctionDefinitionIndex(1), 0)], } task 5, lines 77-90: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::A'. Got VMError: { +Error: Unable to publish module '000000000000000000000000000000000000000000000000000000000000000b::A'. Got VMError: { major_status: NEGATIVE_STACK_SIZE_WITHIN_BLOCK, sub_status: None, - location: 0x1::A, + location: 0xb::A, indices: [(FunctionDefinition, 1)], offsets: [(FunctionDefinitionIndex(1), 0)], } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/integer_stack_negative.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/integer_stack_negative.mvir index 3c79aa5384349..afb71b732ee0f 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/integer_stack_negative.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/integer_stack_negative.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.A { +module 0x6.A { three(): u8 * u8{ label b0: return 0u8, 255u8; @@ -15,7 +15,7 @@ module 0x1.A { //# publish -module 0x1.A { +module 0x7.A { three(): u16 * u16{ label b0: return 0u16, 65535u16; @@ -30,7 +30,7 @@ module 0x1.A { } //# publish -module 0x1.A { +module 0x8.A { three(): u32 * u32{ label b0: return 0u32, 4294967295u32; @@ -45,7 +45,7 @@ module 0x1.A { } //# publish -module 0x1.A { +module 0x9.A { three(): u64 * u64{ label b0: return 0u64, 18446744073709551615u64; @@ -60,7 +60,7 @@ module 0x1.A { } //# publish -module 0x1.A { +module 0xa.A { three(): u128 * u128{ label b0: return 0u128, 340282366920938463463374607431768211455u128; @@ -75,7 +75,7 @@ module 0x1.A { } //# publish -module 0x1.A { +module 0xb.A { three(): u256 * u256{ label b0: return 0u256, 115792089237316195423570985008687907853269984665640564039457584007913129639935u256; diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/integer_stack_positive.exp b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/integer_stack_positive.exp index 0159c527ec643..5b0d3bd5e1251 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/integer_stack_positive.exp +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/integer_stack_positive.exp @@ -2,60 +2,60 @@ processed 6 tasks task 0, lines 1-14: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::A'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000006::A'. 
Got VMError: { major_status: POSITIVE_STACK_SIZE_AT_BLOCK_END, sub_status: None, - location: 0x1::A, + location: 0x6::A, indices: [(FunctionDefinition, 1)], offsets: [(FunctionDefinitionIndex(1), 0)], } task 1, lines 17-30: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::A'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000007::A'. Got VMError: { major_status: POSITIVE_STACK_SIZE_AT_BLOCK_END, sub_status: None, - location: 0x1::A, + location: 0x7::A, indices: [(FunctionDefinition, 1)], offsets: [(FunctionDefinitionIndex(1), 0)], } task 2, lines 32-45: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::A'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000008::A'. Got VMError: { major_status: POSITIVE_STACK_SIZE_AT_BLOCK_END, sub_status: None, - location: 0x1::A, + location: 0x8::A, indices: [(FunctionDefinition, 1)], offsets: [(FunctionDefinitionIndex(1), 0)], } task 3, lines 47-60: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::A'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000009::A'. Got VMError: { major_status: POSITIVE_STACK_SIZE_AT_BLOCK_END, sub_status: None, - location: 0x1::A, + location: 0x9::A, indices: [(FunctionDefinition, 1)], offsets: [(FunctionDefinitionIndex(1), 0)], } task 4, lines 62-75: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::A'. Got VMError: { +Error: Unable to publish module '000000000000000000000000000000000000000000000000000000000000000a::A'. Got VMError: { major_status: POSITIVE_STACK_SIZE_AT_BLOCK_END, sub_status: None, - location: 0x1::A, + location: 0xa::A, indices: [(FunctionDefinition, 1)], offsets: [(FunctionDefinitionIndex(1), 0)], } task 5, lines 77-90: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::A'. Got VMError: { +Error: Unable to publish module '000000000000000000000000000000000000000000000000000000000000000b::A'. 
Got VMError: { major_status: POSITIVE_STACK_SIZE_AT_BLOCK_END, sub_status: None, - location: 0x1::A, + location: 0xb::A, indices: [(FunctionDefinition, 1)], offsets: [(FunctionDefinitionIndex(1), 0)], } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/integer_stack_positive.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/integer_stack_positive.mvir index d852de8a76038..684c4b08da760 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/integer_stack_positive.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/integer_stack_positive.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.A { +module 0x6.A { two(): u8 * u8{ label b0: return 0u8, 255u8; @@ -15,7 +15,7 @@ module 0x1.A { //# publish -module 0x1.A { +module 0x7.A { two(): u16 * u16{ label b0: return 0u16, 65535u16; @@ -30,7 +30,7 @@ module 0x1.A { } //# publish -module 0x1.A { +module 0x8.A { two(): u32 * u32{ label b0: return 0u32, 4294967295u32; @@ -45,7 +45,7 @@ module 0x1.A { } //# publish -module 0x1.A { +module 0x9.A { two(): u64 * u64{ label b0: return 0u64, 18446744073709551615u64; @@ -60,7 +60,7 @@ module 0x1.A { } //# publish -module 0x1.A { +module 0xa.A { two(): u128 * u128{ label b0: return 0u128, 340282366920938463463374607431768211455u128; @@ -75,7 +75,7 @@ module 0x1.A { } //# publish -module 0x1.A { +module 0xb.A { two(): u256 * u256{ label b0: return 0u256, 115792089237316195423570985008687907853269984665640564039457584007913129639935u256; diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/load_positive_stack.exp b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/load_positive_stack.exp index e889ba7cca133..e3584b774d583 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/load_positive_stack.exp +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/load_positive_stack.exp @@ -12,50 +12,50 @@ Error: Unable to publish module '00000000000000000000000000000000000000000000000 task 1, lines 11-20: //# run -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000042::m'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000043::m'. Got VMError: { major_status: POSITIVE_STACK_SIZE_AT_BLOCK_END, sub_status: None, - location: 0x42::m, + location: 0x43::m, indices: [(FunctionDefinition, 0)], offsets: [(FunctionDefinitionIndex(0), 0)], } task 2, lines 21-30: //# run -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000042::m'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000044::m'. Got VMError: { major_status: POSITIVE_STACK_SIZE_AT_BLOCK_END, sub_status: None, - location: 0x42::m, + location: 0x44::m, indices: [(FunctionDefinition, 0)], offsets: [(FunctionDefinitionIndex(0), 0)], } task 3, lines 31-40: //# run -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000042::m'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000045::m'. 
Got VMError: { major_status: POSITIVE_STACK_SIZE_AT_BLOCK_END, sub_status: None, - location: 0x42::m, + location: 0x45::m, indices: [(FunctionDefinition, 0)], offsets: [(FunctionDefinitionIndex(0), 0)], } task 4, lines 41-50: //# run -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000042::m'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000046::m'. Got VMError: { major_status: POSITIVE_STACK_SIZE_AT_BLOCK_END, sub_status: None, - location: 0x42::m, + location: 0x46::m, indices: [(FunctionDefinition, 0)], offsets: [(FunctionDefinitionIndex(0), 0)], } task 5, lines 51-60: //# run -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000042::m'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000047::m'. Got VMError: { major_status: POSITIVE_STACK_SIZE_AT_BLOCK_END, sub_status: None, - location: 0x42::m, + location: 0x47::m, indices: [(FunctionDefinition, 0)], offsets: [(FunctionDefinitionIndex(0), 0)], } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/load_positive_stack.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/load_positive_stack.mvir index 51a47e9e9013e..1836524fc13fc 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/load_positive_stack.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/load_positive_stack.mvir @@ -9,7 +9,7 @@ label b0: } //# run -module 0x42.m { +module 0x43.m { entry foo() { label b0: @@ -19,7 +19,7 @@ label b0: } //# run -module 0x42.m { +module 0x44.m { entry foo() { label b0: @@ -29,7 +29,7 @@ label b0: } //# run -module 0x42.m { +module 0x45.m { entry foo() { label b0: @@ -39,7 +39,7 @@ label b0: } //# run -module 0x42.m { +module 0x46.m { entry foo() { label b0: @@ -49,7 +49,7 @@ label b0: } //# run -module 0x42.m { +module 0x47.m { entry foo() { label b0: diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/multiple_return_values.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/multiple_return_values.mvir index 4f712c1437723..6f6c734ae211a 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/multiple_return_values.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/multiple_return_values.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.Test { +module 0x6.Test { struct T { b: bool } public new(): Self.T { @@ -22,7 +22,7 @@ module 0x1.Test { //# run module 0x42.m { -import 0x1.Test; +import 0x6.Test; entry foo() { let i: u64; let t: Test.T; diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/multiple_return_values_extra_binding.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/multiple_return_values_extra_binding.mvir index 7848d2d97825e..a3a59dec7f285 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/multiple_return_values_extra_binding.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/multiple_return_values_extra_binding.mvir @@ -1,5 
+1,5 @@ //# publish -module 0x1.Test { +module 0x6.Test { struct T { b: bool } public new(): Self.T { @@ -22,7 +22,7 @@ module 0x1.Test { //# run module 0x42.m { -import 0x1.Test; +import 0x6.Test; entry foo() { let i: u64; diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/multiple_return_values_extra_value.exp b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/multiple_return_values_extra_value.exp index c01500ef5563e..c6b98bc7c7947 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/multiple_return_values_extra_value.exp +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/multiple_return_values_extra_value.exp @@ -2,10 +2,10 @@ processed 1 task task 0, lines 1-22: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::Test'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000006::Test'. Got VMError: { major_status: POSITIVE_STACK_SIZE_AT_BLOCK_END, sub_status: None, - location: 0x1::Test, + location: 0x6::Test, indices: [(FunctionDefinition, 1)], offsets: [(FunctionDefinitionIndex(1), 0)], } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/multiple_return_values_extra_value.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/multiple_return_values_extra_value.mvir index c87a256ddd6dd..0c2da08ec77cb 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/multiple_return_values_extra_value.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/multiple_return_values_extra_value.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.Test { +module 0x6.Test { struct T { b: bool } public new(): Self.T { diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/multiple_return_values_missing_binding.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/multiple_return_values_missing_binding.mvir index 13b4b9222ac03..5bb415363fd48 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/multiple_return_values_missing_binding.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/multiple_return_values_missing_binding.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.Test { +module 0x6.Test { struct T { b: bool } public new(): Self.T { @@ -22,7 +22,7 @@ module 0x1.Test { //# run module 0x42.m { -import 0x1.Test; +import 0x6.Test; entry foo() { let i: u64; let t: Test.T; diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/multiple_return_values_missing_value.exp b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/multiple_return_values_missing_value.exp index 375de9b58eccb..42d23089c9fd8 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/multiple_return_values_missing_value.exp +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/multiple_return_values_missing_value.exp @@ -2,10 +2,10 @@ processed 1 task task 0, lines 
1-22: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::Test'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000006::Test'. Got VMError: { major_status: NEGATIVE_STACK_SIZE_WITHIN_BLOCK, sub_status: None, - location: 0x1::Test, + location: 0x6::Test, indices: [(FunctionDefinition, 1)], offsets: [(FunctionDefinitionIndex(1), 0)], } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/multiple_return_values_missing_value.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/multiple_return_values_missing_value.mvir index 9a361944c4232..4984ab43d2e9c 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/multiple_return_values_missing_value.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/multiple_return_values_missing_value.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.Test { +module 0x6.Test { struct T { b: bool } public new(): Self.T { diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/pack_invalid_number_arguments_enum.exp b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/pack_invalid_number_arguments_enum.exp index d4f83da77d825..94e1abe52f0dc 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/pack_invalid_number_arguments_enum.exp +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/pack_invalid_number_arguments_enum.exp @@ -2,40 +2,40 @@ processed 4 tasks task 0, lines 1-11: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::MonoTooMany'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000006::MonoTooMany'. Got VMError: { major_status: POSITIVE_STACK_SIZE_AT_BLOCK_END, sub_status: None, - location: 0x1::MonoTooMany, + location: 0x6::MonoTooMany, indices: [(FunctionDefinition, 0)], offsets: [(FunctionDefinitionIndex(0), 0)], } task 1, lines 13-23: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::MonoTooFew'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000007::MonoTooFew'. Got VMError: { major_status: NEGATIVE_STACK_SIZE_WITHIN_BLOCK, sub_status: None, - location: 0x1::MonoTooFew, + location: 0x7::MonoTooFew, indices: [(FunctionDefinition, 0)], offsets: [(FunctionDefinitionIndex(0), 0)], } task 2, lines 25-35: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::PolyTooMany'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000008::PolyTooMany'. Got VMError: { major_status: POSITIVE_STACK_SIZE_AT_BLOCK_END, sub_status: None, - location: 0x1::PolyTooMany, + location: 0x8::PolyTooMany, indices: [(FunctionDefinition, 0)], offsets: [(FunctionDefinitionIndex(0), 0)], } task 3, lines 37-47: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::PolyTooFew'. 
Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000009::PolyTooFew'. Got VMError: { major_status: NEGATIVE_STACK_SIZE_WITHIN_BLOCK, sub_status: None, - location: 0x1::PolyTooFew, + location: 0x9::PolyTooFew, indices: [(FunctionDefinition, 0)], offsets: [(FunctionDefinitionIndex(0), 0)], } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/pack_invalid_number_arguments_enum.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/pack_invalid_number_arguments_enum.mvir index 3e2d8e28706bd..a97f8e35406cd 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/pack_invalid_number_arguments_enum.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/pack_invalid_number_arguments_enum.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.MonoTooMany { +module 0x6.MonoTooMany { enum Foo{ V { x: u64 } } bar(x: u64, y: u64): Self.Foo { @@ -11,7 +11,7 @@ module 0x1.MonoTooMany { } //# publish -module 0x1.MonoTooFew { +module 0x7.MonoTooFew { enum Foo{ V { x: u64 } } bar(x: u64, y: u64): Self.Foo { @@ -23,7 +23,7 @@ module 0x1.MonoTooFew { } //# publish -module 0x1.PolyTooMany { +module 0x8.PolyTooMany { enum Foo<T>{ V { x: T } } bar(x: u64, y: u64): Self.Foo<u64> { @@ -35,7 +35,7 @@ module 0x1.PolyTooMany { } //# publish -module 0x1.PolyTooFew { +module 0x9.PolyTooFew { enum Foo<T>{ V { x: T } } bar(x: u64, y: u64): Self.Foo<u64> { diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/pop_exact.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/pop_exact.mvir index ba04f90880ea8..3c2bd7d6c42fa 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/pop_exact.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/pop_exact.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.A { +module 0x6.A { three(): u64 * u64 * u64 { label b0: return 0, 1, 2; diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/pop_negative.exp b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/pop_negative.exp index 141eed8a420c2..93e436150ca42 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/pop_negative.exp +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/pop_negative.exp @@ -2,10 +2,10 @@ processed 1 task task 0, lines 1-14: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::A'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000006::A'. 
Got VMError: { major_status: NEGATIVE_STACK_SIZE_WITHIN_BLOCK, sub_status: None, - location: 0x1::A, + location: 0x6::A, indices: [(FunctionDefinition, 1)], offsets: [(FunctionDefinitionIndex(1), 0)], } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/pop_negative.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/pop_negative.mvir index 8dcf844a2967d..e87f737091d13 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/pop_negative.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/pop_negative.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.A { +module 0x6.A { three(): u64 * u64 * u64 { label b0: return 0, 1, 2; diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/pop_positive.exp b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/pop_positive.exp index 6b6a4314f8861..dd30db4ec5d1a 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/pop_positive.exp +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/pop_positive.exp @@ -2,10 +2,10 @@ processed 1 task task 0, lines 1-14: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::A'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000006::A'. Got VMError: { major_status: POSITIVE_STACK_SIZE_AT_BLOCK_END, sub_status: None, - location: 0x1::A, + location: 0x6::A, indices: [(FunctionDefinition, 1)], offsets: [(FunctionDefinitionIndex(1), 0)], } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/pop_positive.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/pop_positive.mvir index 916930ed556c2..2c742476a877c 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/pop_positive.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/pop_positive.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.A { +module 0x6.A { three(): u64 * u64 * u64 { label b0: return 0, 1, 2; diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/unpack_extra_binding.exp b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/unpack_extra_binding.exp index fe4dbe30c9a65..1915f30073195 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/unpack_extra_binding.exp +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/unpack_extra_binding.exp @@ -2,10 +2,10 @@ processed 1 task task 0, lines 1-23: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::Test'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000006::Test'. 
Got VMError: { major_status: NEGATIVE_STACK_SIZE_WITHIN_BLOCK, sub_status: None, - location: 0x1::Test, + location: 0x6::Test, indices: [(FunctionDefinition, 1)], offsets: [(FunctionDefinitionIndex(1), 0)], } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/unpack_extra_binding.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/unpack_extra_binding.mvir index 99e1785936075..7643e2b02d2de 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/unpack_extra_binding.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/unpack_extra_binding.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.Test { +module 0x6.Test { struct X { b: bool } struct T { i: u64, x: Self.X } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/unpack_extra_binding_enum.exp b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/unpack_extra_binding_enum.exp index f9c16469dbf08..624f6f2a0ae14 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/unpack_extra_binding_enum.exp +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/unpack_extra_binding_enum.exp @@ -2,40 +2,40 @@ processed 4 tasks task 0, lines 1-23: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::Test'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000006::Test'. Got VMError: { major_status: NEGATIVE_STACK_SIZE_WITHIN_BLOCK, sub_status: None, - location: 0x1::Test, + location: 0x6::Test, indices: [(FunctionDefinition, 1)], offsets: [(FunctionDefinitionIndex(1), 0)], } task 1, lines 25-46: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::Test'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000007::Test'. Got VMError: { major_status: GENERIC_MEMBER_OPCODE_MISMATCH, sub_status: None, - location: 0x1::Test, + location: 0x7::Test, indices: [], offsets: [(FunctionDefinitionIndex(1), 1)], } task 2, lines 48-69: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::Test'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000008::Test'. Got VMError: { major_status: NEGATIVE_STACK_SIZE_WITHIN_BLOCK, sub_status: None, - location: 0x1::Test, + location: 0x8::Test, indices: [(FunctionDefinition, 1)], offsets: [(FunctionDefinitionIndex(1), 0)], } task 3, lines 71-92: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::Test'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000009::Test'. 
Got VMError: { major_status: NEGATIVE_STACK_SIZE_WITHIN_BLOCK, sub_status: None, - location: 0x1::Test, + location: 0x9::Test, indices: [(FunctionDefinition, 1)], offsets: [(FunctionDefinitionIndex(1), 0)], } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/unpack_extra_binding_enum.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/unpack_extra_binding_enum.mvir index 32ac8099c2664..809046ebc7210 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/unpack_extra_binding_enum.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/unpack_extra_binding_enum.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.Test { +module 0x6.Test { struct X { b: bool } enum T { V{ i: u64, x: Self.X } } @@ -23,7 +23,7 @@ module 0x1.Test { } //# publish -module 0x1.Test { +module 0x7.Test { struct X { b: bool } enum T<H> { V{ i: H, x: Self.X } } @@ -46,7 +46,7 @@ module 0x1.Test { } //# publish -module 0x1.Test { +module 0x8.Test { struct X { b: bool } enum T<H> { V{ i: H, x: Self.X } } @@ -69,7 +69,7 @@ module 0x1.Test { } //# publish -module 0x1.Test { +module 0x9.Test { struct X { b: bool } enum T<H> { V{ i: H, x: Self.X } } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/unpack_missing_binding.exp b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/unpack_missing_binding.exp index 76928c5efe648..c4d018ddb726b 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/unpack_missing_binding.exp +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/unpack_missing_binding.exp @@ -2,10 +2,10 @@ processed 1 task task 0, lines 1-23: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::Test'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000006::Test'. 
Got VMError: { major_status: POSITIVE_STACK_SIZE_AT_BLOCK_END, sub_status: None, - location: 0x1::Test, + location: 0x6::Test, indices: [(FunctionDefinition, 1)], offsets: [(FunctionDefinitionIndex(1), 0)], } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/unpack_missing_binding.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/unpack_missing_binding.mvir index 424c00fddcab5..c850fb96f1f0f 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/unpack_missing_binding.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/unpack_missing_binding.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.Test { +module 0x6.Test { struct X { b: bool } struct T { i: u64, x: Self.X, b: bool, y: u64 } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/unpack_missing_binding_enum.exp b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/unpack_missing_binding_enum.exp index c5fe244682fa9..775ca364e2d1e 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/unpack_missing_binding_enum.exp +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/unpack_missing_binding_enum.exp @@ -2,40 +2,40 @@ processed 4 tasks task 0, lines 1-22: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::Test'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000006::Test'. Got VMError: { major_status: POSITIVE_STACK_SIZE_AT_BLOCK_END, sub_status: None, - location: 0x1::Test, + location: 0x6::Test, indices: [(FunctionDefinition, 1)], offsets: [(FunctionDefinitionIndex(1), 0)], } task 1, lines 24-45: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::Test'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000007::Test'. Got VMError: { major_status: GENERIC_MEMBER_OPCODE_MISMATCH, sub_status: None, - location: 0x1::Test, + location: 0x7::Test, indices: [], offsets: [(FunctionDefinitionIndex(1), 1)], } task 2, lines 47-68: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::Test'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000008::Test'. Got VMError: { major_status: POSITIVE_STACK_SIZE_AT_BLOCK_END, sub_status: None, - location: 0x1::Test, + location: 0x8::Test, indices: [(FunctionDefinition, 1)], offsets: [(FunctionDefinitionIndex(1), 0)], } task 3, lines 70-91: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::Test'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000009::Test'. 
Got VMError: { major_status: POSITIVE_STACK_SIZE_AT_BLOCK_END, sub_status: None, - location: 0x1::Test, + location: 0x9::Test, indices: [(FunctionDefinition, 1)], offsets: [(FunctionDefinitionIndex(1), 0)], } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/unpack_missing_binding_enum.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/unpack_missing_binding_enum.mvir index fd83f799ce125..fea181991d0ea 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/unpack_missing_binding_enum.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/unpack_missing_binding_enum.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.Test { +module 0x6.Test { struct X { b: bool } enum T { V { i: u64, x: Self.X, b: bool, y: u64 } } @@ -22,7 +22,7 @@ module 0x1.Test { } //# publish -module 0x1.Test { +module 0x7.Test { struct X { b: bool } enum T<H> { V{ i: H, x: Self.X, b: bool, y: u64 } } @@ -45,7 +45,7 @@ module 0x1.Test { } //# publish -module 0x1.Test { +module 0x8.Test { struct X { b: bool } enum T<H> { V{ i: H, x: Self.X, b: bool, y: u64 } } @@ -68,7 +68,7 @@ module 0x1.Test { } //# publish -module 0x1.Test { +module 0x9.Test { struct X { b: bool } enum T<H> { V{ i: H, x: Self.X, b: bool, y: u64 } } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/vector_ops_pack_unpack.exp b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/vector_ops_pack_unpack.exp index db91a6561a094..9a21c209f6551 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/vector_ops_pack_unpack.exp +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/vector_ops_pack_unpack.exp @@ -12,30 +12,30 @@ Error: Unable to publish module '00000000000000000000000000000000000000000000000 task 1, lines 12-22: //# run -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000042::m'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000043::m'. Got VMError: { major_status: NEGATIVE_STACK_SIZE_WITHIN_BLOCK, sub_status: None, - location: 0x42::m, + location: 0x43::m, indices: [(FunctionDefinition, 0)], offsets: [(FunctionDefinitionIndex(0), 0)], } task 2, lines 23-35: //# run -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000042::m'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000044::m'. Got VMError: { major_status: POSITIVE_STACK_SIZE_AT_BLOCK_END, sub_status: None, - location: 0x42::m, + location: 0x44::m, indices: [(FunctionDefinition, 0)], offsets: [(FunctionDefinitionIndex(0), 0)], } task 3, lines 36-48: //# run -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000042::m'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000045::m'. 
Got VMError: { major_status: NEGATIVE_STACK_SIZE_WITHIN_BLOCK, sub_status: None, - location: 0x42::m, + location: 0x45::m, indices: [(FunctionDefinition, 0)], offsets: [(FunctionDefinitionIndex(0), 0)], } diff --git a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/vector_ops_pack_unpack.mvir b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/vector_ops_pack_unpack.mvir index 99085f29914a5..fe8c5c2782d2b 100644 --- a/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/vector_ops_pack_unpack.mvir +++ b/external-crates/move/crates/bytecode-verifier-transactional-tests/tests/stack_usage_verifier/vector_ops_pack_unpack.mvir @@ -10,7 +10,7 @@ label b0: } //# run -module 0x42.m { +module 0x43.m { entry foo() { let v: vector; @@ -21,7 +21,7 @@ label b0: } //# run -module 0x42.m { +module 0x44.m { entry foo() { let v: vector; @@ -34,7 +34,7 @@ label b0: } //# run -module 0x42.m { +module 0x45.m { entry foo() { let v: vector; diff --git a/external-crates/move/crates/language-benchmarks/tests/bench.move b/external-crates/move/crates/language-benchmarks/tests/bench.move index fc9b4d205d460..6b69121cfb495 100644 --- a/external-crates/move/crates/language-benchmarks/tests/bench.move +++ b/external-crates/move/crates/language-benchmarks/tests/bench.move @@ -2,7 +2,7 @@ // The Module must be called `Bench` and the set of public functions are callable from the bench (Rust code). // `benches/transaction.rs` contains the calling code. // The idea is that you build your scenario with a public entry point and a bunch of private functions as needed. -module 0x1::Bench { +module 0x6::Bench { use std::vector; // diff --git a/external-crates/move/crates/move-analyzer/editors/code/package.json b/external-crates/move/crates/move-analyzer/editors/code/package.json index 4a3be17e8b51c..97bb29c43284d 100644 --- a/external-crates/move/crates/move-analyzer/editors/code/package.json +++ b/external-crates/move/crates/move-analyzer/editors/code/package.json @@ -5,7 +5,7 @@ "publisher": "mysten", "icon": "images/move.png", "license": "Apache-2.0", - "version": "1.0.11", + "version": "1.0.12", "preview": true, "repository": { "url": "https://github.com/MystenLabs/sui.git", diff --git a/external-crates/move/crates/move-analyzer/src/analyzer.rs b/external-crates/move/crates/move-analyzer/src/analyzer.rs index 37f31ec5c6cfd..bb8c067e97b6c 100644 --- a/external-crates/move/crates/move-analyzer/src/analyzer.rs +++ b/external-crates/move/crates/move-analyzer/src/analyzer.rs @@ -19,7 +19,7 @@ use std::{ }; use crate::{ - completion::on_completion_request, context::Context, inlay_hints, symbols, + completions::on_completion_request, context::Context, inlay_hints, symbols, vfs::on_text_document_sync_notification, }; use url::Url; diff --git a/external-crates/move/crates/move-analyzer/src/completions/dot.rs b/external-crates/move/crates/move-analyzer/src/completions/dot.rs new file mode 100644 index 0000000000000..387f6441c2fab --- /dev/null +++ b/external-crates/move/crates/move-analyzer/src/completions/dot.rs @@ -0,0 +1,115 @@ +// Copyright (c) The Move Contributors +// SPDX-License-Identifier: Apache-2.0 + +// Auto-completion for the dot operator, e.g., `struct.` or `value.foo()`.` + +use crate::{ + completions::utils::{call_completion_item, mod_defs}, + symbols::{type_to_ide_string, DefInfo, FunType, Symbols}, + utils::lsp_position_to_loc, +}; +use lsp_types::{ + CompletionItem, CompletionItemKind, 
CompletionItemLabelDetails, InsertTextFormat, Position, +}; +use move_compiler::{ + expansion::ast::ModuleIdent_, + shared::{ide::AutocompleteMethod, Identifier}, +}; +use move_symbol_pool::Symbol; + +use std::path::Path; + +/// Handle "dot" auto-completion at a given position. +pub fn dot_completions( + symbols: &Symbols, + use_fpath: &Path, + position: &Position, +) -> (Vec<CompletionItem>, bool) { + let mut completions = vec![]; + let mut completion_finalized = false; + let Some(fhash) = symbols.file_hash(use_fpath) else { + eprintln!("no dot completions due to missing file"); + return (completions, completion_finalized); + }; + let Some(loc) = lsp_position_to_loc(&symbols.files, fhash, position) else { + eprintln!("no dot completions due to missing loc"); + return (completions, completion_finalized); + }; + let Some(info) = symbols.compiler_info.get_autocomplete_info(fhash, &loc) else { + return (completions, completion_finalized); + }; + // we found auto-completion info, so don't look for any more completions + // even if it does not contain any + completion_finalized = true; + for AutocompleteMethod { + method_name, + target_function: (mod_ident, function_name), + } in &info.methods + { + let call_completion = if let Some(DefInfo::Function( + .., + fun_type, + _, + type_args, + arg_names, + arg_types, + ret_type, + _, + )) = fun_def_info(symbols, mod_ident.value, function_name.value()) + { + call_completion_item( + &mod_ident.value, + matches!(fun_type, FunType::Macro), + Some(method_name), + &function_name.value(), + type_args, + arg_names, + arg_types, + ret_type, + /* inside_use */ false, + ) + } else { + // this shouldn't really happen as we should be able to get + // `DefInfo` for a function but if for some reason we cannot, + // let's generate a simpler autocompletion value + eprintln!("incomplete dot item"); + CompletionItem { + label: format!("{method_name}()"), + kind: Some(CompletionItemKind::METHOD), + insert_text: Some(method_name.to_string()), + insert_text_format: Some(InsertTextFormat::PLAIN_TEXT), + ..Default::default() + } + }; + completions.push(call_completion); + } + for (n, t) in &info.fields { + let label_details = Some(CompletionItemLabelDetails { + detail: None, + description: Some(type_to_ide_string(t, /* verbose */ false)), + }); + let init_completion = CompletionItem { + label: n.to_string(), + label_details, + kind: Some(CompletionItemKind::FIELD), + insert_text: Some(n.to_string()), + insert_text_format: Some(InsertTextFormat::PLAIN_TEXT), + ..Default::default() + }; + completions.push(init_completion); + } + + (completions, completion_finalized) +} + +/// Get the `DefInfo` for a function definition. 
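As an aside on the method completions built above: `call_completion_item` lives in `completions/utils.rs`, which is not shown in this hunk. The following is only a rough, self-contained sketch of the kind of item such a helper can produce, assuming nothing beyond the `lsp_types` crate; `snippet_call_completion` and its argument handling are illustrative names, not the crate's actual API.

use lsp_types::{CompletionItem, CompletionItemKind, InsertTextFormat};

// Build a method-call completion whose inserted text uses LSP snippet
// tab-stops (`${1:arg}`), so the editor can jump between arguments.
fn snippet_call_completion(method_name: &str, arg_names: &[&str]) -> CompletionItem {
    let args = arg_names
        .iter()
        .enumerate()
        .map(|(i, a)| format!("${{{}:{}}}", i + 1, a))
        .collect::<Vec<_>>()
        .join(", ");
    CompletionItem {
        label: format!("{method_name}()"),
        kind: Some(CompletionItemKind::METHOD),
        insert_text: Some(format!("{method_name}({args})")),
        insert_text_format: Some(InsertTextFormat::SNIPPET),
        ..Default::default()
    }
}

fn main() {
    let item = snippet_call_completion("transfer", &["recipient", "amount"]);
    // Prints `transfer(${1:recipient}, ${2:amount})`.
    println!("{}", item.insert_text.unwrap());
}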
+fn fun_def_info(symbols: &Symbols, mod_ident: ModuleIdent_, name: Symbol) -> Option<&DefInfo> { + let Some(mod_defs) = mod_defs(symbols, &mod_ident) else { + return None; + }; + + let Some(fdef) = mod_defs.functions.get(&name) else { + return None; + }; + symbols.def_info(&fdef.name_loc) +} diff --git a/external-crates/move/crates/move-analyzer/src/completions/mod.rs b/external-crates/move/crates/move-analyzer/src/completions/mod.rs new file mode 100644 index 0000000000000..362fca386d22b --- /dev/null +++ b/external-crates/move/crates/move-analyzer/src/completions/mod.rs @@ -0,0 +1,428 @@ +// Copyright (c) The Diem Core Contributors +// Copyright (c) The Move Contributors +// SPDX-License-Identifier: Apache-2.0 + +use crate::{ + completions::{ + dot::dot_completions, + name_chain::{name_chain_completions, use_decl_completions}, + snippets::{init_completion, object_completion}, + utils::{completion_item, PRIMITIVE_TYPE_COMPLETIONS}, + }, + context::Context, + symbols::{self, CursorContext, PrecompiledPkgDeps, SymbolicatorRunner, Symbols}, +}; +use lsp_server::Request; +use lsp_types::{CompletionItem, CompletionItemKind, CompletionParams, Position}; +use move_command_line_common::files::FileHash; +use move_compiler::{ + editions::Edition, + linters::LintLevel, + parser::{ + keywords::{BUILTINS, CONTEXTUAL_KEYWORDS, KEYWORDS, PRIMITIVE_TYPES}, + lexer::{Lexer, Tok}, + }, +}; +use move_symbol_pool::Symbol; + +use once_cell::sync::Lazy; + +use std::{ + collections::{BTreeMap, HashSet}, + path::{Path, PathBuf}, + sync::{Arc, Mutex}, +}; +use vfs::VfsPath; + +mod dot; +mod name_chain; +mod snippets; +mod utils; + +/// List of completion items corresponding to each one of Move's keywords. +/// +/// Currently, this does not filter keywords out based on whether they are valid at the completion +/// request's cursor position, but in the future it ought to. For example, this function returns +/// all specification language keywords, but in the future it should be modified to only do so +/// within a spec block. +static KEYWORD_COMPLETIONS: Lazy<Vec<CompletionItem>> = Lazy::new(|| { + let mut keywords = KEYWORDS + .iter() + .chain(CONTEXTUAL_KEYWORDS.iter()) + .chain(PRIMITIVE_TYPES.iter()) + .map(|label| { + let kind = if label == &"copy" || label == &"move" { + CompletionItemKind::OPERATOR + } else { + CompletionItemKind::KEYWORD + }; + completion_item(label, kind) + }) + .collect::<Vec<_>>(); + keywords.extend(PRIMITIVE_TYPE_COMPLETIONS.clone()); + keywords +}); + +/// List of completion items corresponding to each one of Move's builtin functions. +static BUILTIN_COMPLETIONS: Lazy<Vec<CompletionItem>> = Lazy::new(|| { + BUILTINS + .iter() + .map(|label| completion_item(label, CompletionItemKind::FUNCTION)) + .collect() +}); + +/// Sends the given connection a response to a completion request. +/// +/// The completions returned depend upon where the user's cursor is positioned. 
+pub fn on_completion_request( + context: &Context, + request: &Request, + ide_files_root: VfsPath, + pkg_dependencies: Arc<Mutex<BTreeMap<PathBuf, PrecompiledPkgDeps>>>, +) { + eprintln!("handling completion request"); + let parameters = serde_json::from_value::<CompletionParams>(request.params.clone()) + .expect("could not deserialize completion request"); + + let path = parameters + .text_document_position + .text_document + .uri + .to_file_path() + .unwrap(); + + let mut pos = parameters.text_document_position.position; + if pos.character != 0 { + // adjust column to be at the character that has just been inserted rather than right after + // it (unless we are at the very first column) + pos = Position::new(pos.line, pos.character - 1); + } + let completions = + completions(context, ide_files_root, pkg_dependencies, &path, pos).unwrap_or_default(); + let completions_len = completions.len(); + + let result = + serde_json::to_value(completions).expect("could not serialize completion response"); + eprintln!("about to send completion response with {completions_len} items"); + let response = lsp_server::Response::new_ok(request.id.clone(), result); + if let Err(err) = context + .connection + .sender + .send(lsp_server::Message::Response(response)) + { + eprintln!("could not send completion response: {:?}", err); + } +} + +/// Computes a list of auto-completions for a given position in a file, +/// given the current context. +fn completions( + context: &Context, + ide_files_root: VfsPath, + pkg_dependencies: Arc<Mutex<BTreeMap<PathBuf, PrecompiledPkgDeps>>>, + path: &Path, + pos: Position, +) -> Option<Vec<CompletionItem>> { + let Some(pkg_path) = SymbolicatorRunner::root_dir(path) else { + eprintln!("failed completion for {:?} (package root not found)", path); + return None; + }; + let symbol_map = context.symbols.lock().unwrap(); + let current_symbols = symbol_map.get(&pkg_path)?; + Some(compute_completions( + current_symbols, + ide_files_root, + pkg_dependencies, + path, + pos, + )) +} + +/// Computes a list of auto-completions for a given position in a file, +/// based on the current symbols. +pub fn compute_completions( + current_symbols: &Symbols, + ide_files_root: VfsPath, + pkg_dependencies: Arc<Mutex<BTreeMap<PathBuf, PrecompiledPkgDeps>>>, + path: &Path, + pos: Position, +) -> Vec<CompletionItem> { + compute_completions_new_symbols(ide_files_root, pkg_dependencies, path, pos) + .unwrap_or_else(|| compute_completions_with_symbols(current_symbols, path, pos)) +} + +/// Computes a list of auto-completions for a given position in a file, +/// after attempting to re-compute the symbols to get the most up-to-date +/// view of the code (returns `None` if the symbols could not be re-computed). +fn compute_completions_new_symbols( + ide_files_root: VfsPath, + pkg_dependencies: Arc<Mutex<BTreeMap<PathBuf, PrecompiledPkgDeps>>>, + path: &Path, + cursor_position: Position, +) -> Option<Vec<CompletionItem>> { + let Some(pkg_path) = SymbolicatorRunner::root_dir(path) else { + eprintln!("failed completion for {:?} (package root not found)", path); + return None; + }; + let cursor_path = path.to_path_buf(); + let cursor_info = Some((&cursor_path, cursor_position)); + let (symbols, _diags) = symbols::get_symbols( + pkg_dependencies, + ide_files_root, + &pkg_path, + LintLevel::None, + cursor_info, + ) + .ok()?; + let symbols = symbols?; + Some(compute_completions_with_symbols( + &symbols, + path, + cursor_position, + )) +} + +/// Computes a list of auto-completions for a given position in a file +/// using the symbols provided as argument. 
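The column adjustment in `on_completion_request` above is easy to get wrong, so here is a minimal illustration of the off-by-one in plain Rust; the buffer and positions are made up for the example and carry no LSP types.

fn adjusted_column(character: u32) -> u32 {
    // Step back to the character that was just typed, unless the cursor is
    // already at the first column (mirrors the `pos.character != 0` guard).
    character.saturating_sub(1)
}

fn main() {
    let line = "obj.";
    // After typing `.`, the editor reports column 4 (one past the dot);
    // the handler inspects column 3, where the trigger character lives.
    let col = adjusted_column(4) as usize;
    assert_eq!(line.chars().nth(col), Some('.'));
}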
+pub fn compute_completions_with_symbols( + symbols: &Symbols, + path: &Path, + pos: Position, +) -> Vec<CompletionItem> { + let mut completions = vec![]; + + let Some(fhash) = symbols.file_hash(path) else { + return completions; + }; + let Some(file_id) = symbols.files.file_mapping().get(&fhash) else { + return completions; + }; + let Ok(file) = symbols.files.files().get(*file_id) else { + return completions; + }; + + let file_source = file.source().clone(); + if !file_source.is_empty() { + let completion_finalized; + match &symbols.cursor_context { + Some(cursor_context) => { + eprintln!("cursor completion"); + let (cursor_completions, cursor_finalized) = + cursor_completion_items(symbols, path, &file_source, pos, cursor_context); + completion_finalized = cursor_finalized; + completions.extend(cursor_completions); + } + None => { + eprintln!("non-cursor completion"); + let (no_cursor_completions, no_cursor_finalized) = + no_cursor_completion_items(symbols, path, &file_source, pos); + completion_finalized = no_cursor_finalized; + completions.extend(no_cursor_completions); + } + } + if !completion_finalized { + eprintln!("including identifiers"); + let identifiers = identifiers(&file_source, symbols, path); + completions.extend(identifiers); + } + } else { + // no file content + completions.extend(KEYWORD_COMPLETIONS.clone()); + completions.extend(BUILTIN_COMPLETIONS.clone()); + } + completions +} + +/// Return completion items when the cursor is available, plus a flag indicating +/// if we should continue searching for more completions. +fn cursor_completion_items( + symbols: &Symbols, + path: &Path, + file_source: &str, + pos: Position, + cursor: &CursorContext, +) -> (Vec<CompletionItem>, bool) { + let cursor_leader = get_cursor_token(file_source, &pos); + match cursor_leader { + // TODO: consider using `cursor.position` for this instead + Some(Tok::Period) => dot_completions(symbols, path, &pos), + Some(Tok::ColonColon) => { + let mut completions = vec![]; + let mut completion_finalized = false; + let (name_chain_completions, name_chain_finalized) = + name_chain_completions(symbols, cursor, /* colon_colon_triggered */ true); + completions.extend(name_chain_completions); + completion_finalized |= name_chain_finalized; + if !completion_finalized { + let (use_decl_completions, use_decl_finalized) = + use_decl_completions(symbols, cursor); + completions.extend(use_decl_completions); + completion_finalized |= use_decl_finalized; + } + (completions, completion_finalized) + } + // Carve out to suggest UID for struct with key ability + Some(Tok::LBrace) => { + let mut completions = vec![]; + let mut completion_finalized = false; + let (custom_completions, custom_finalized) = lbrace_cursor_completions(symbols, cursor); + completions.extend(custom_completions); + completion_finalized |= custom_finalized; + if !completion_finalized { + let (use_decl_completions, use_decl_finalized) = + use_decl_completions(symbols, cursor); + completions.extend(use_decl_completions); + completion_finalized |= use_decl_finalized; + } + (completions, completion_finalized) + } + // TODO: should we handle auto-completion on `:`? 
If we model our support after + // rust-analyzer then it does not do this - it starts auto-completing types after the first + // character beyond `:` is typed + _ => { + eprintln!("no relevant cursor leader"); + let mut completions = vec![]; + let mut completion_finalized = false; + let (name_chain_completions, name_chain_finalized) = + name_chain_completions(symbols, cursor, /* colon_colon_triggered */ false); + completions.extend(name_chain_completions); + completion_finalized |= name_chain_finalized; + if !completion_finalized { + if matches!(cursor_leader, Some(Tok::Colon)) { + // much like rust-analyzer we do not auto-complete in the middle of `::` + completion_finalized = true; + } else { + let (use_decl_completions, use_decl_finalized) = + use_decl_completions(symbols, cursor); + completions.extend(use_decl_completions); + completion_finalized |= use_decl_finalized; + } + } + if !completion_finalized { + eprintln!("checking default items"); + let (default_completions, default_finalized) = + no_cursor_completion_items(symbols, path, file_source, pos); + completions.extend(default_completions); + completion_finalized |= default_finalized; + } + (completions, completion_finalized) + } + } +} + +/// Returns the token corresponding to the "trigger character" if it is one of `.`, `:`, '{', or +/// `::`. Otherwise, returns `None` (position points at the potential trigger character itself). +fn get_cursor_token(buffer: &str, position: &Position) -> Option<Tok> { + let line = match buffer.lines().nth(position.line as usize) { + Some(line) => line, + None => return None, // Our buffer does not contain the line, and so must be out of date. + }; + match line.chars().nth(position.character as usize) { + Some('.') => Some(Tok::Period), + Some(':') => { + if position.character > 0 + && line.chars().nth(position.character as usize - 1) == Some(':') + { + Some(Tok::ColonColon) + } else { + Some(Tok::Colon) + } + } + Some('{') => Some(Tok::LBrace), + _ => None, + } +} + +/// Handle auto-completion requests with lbrace (`{`) trigger character +/// when cursor is available. +fn lbrace_cursor_completions( + symbols: &Symbols, + cursor: &CursorContext, +) -> (Vec<CompletionItem>, bool) { + let completions = vec![]; + let (completion_item_opt, completion_finalized) = object_completion(symbols, cursor); + if let Some(completion_item) = completion_item_opt { + return (vec![completion_item], completion_finalized); + } + (completions, completion_finalized) +} + +/// Return completion items when no cursor is available, plus a flag indicating +/// if we should continue searching for more completions. +fn no_cursor_completion_items( + symbols: &Symbols, + path: &Path, + file_source: &str, + pos: Position, +) -> (Vec<CompletionItem>, bool) { + // If the user's cursor is positioned anywhere other than following a `.`, `:`, or `::`, + // offer them context-specific autocompletion items and, if needed, + // Move's keywords, and builtins. + let (mut completions, mut completion_finalized) = dot_completions(symbols, path, &pos); + if !completion_finalized { + let (init_completions, init_finalized) = init_completion(symbols, path, file_source, &pos); + completions.extend(init_completions); + completion_finalized |= init_finalized; + } + + if !completion_finalized { + completions.extend(KEYWORD_COMPLETIONS.clone()); + completions.extend(BUILTIN_COMPLETIONS.clone()); + } + (completions, true) +} + +/// Lexes the Move source file at the given path and returns a list of completion items +/// corresponding to the non-keyword identifiers therein. 
+///
+/// Currently, this does not perform semantic analysis to determine whether the identifiers
+/// returned are valid at the request's cursor position. However, this list of identifiers is akin
+/// to what editors like Visual Studio Code would provide as completion items if this language
+/// server did not initialize with a response indicating it's capable of providing completions. In
+/// the future, the server should be modified to return semantically valid completion items, not
+/// simple textual suggestions.
+fn identifiers(buffer: &str, symbols: &Symbols, path: &Path) -> Vec<CompletionItem> {
+    // TODO thread through package configs
+    let mut lexer = Lexer::new(buffer, FileHash::new(buffer), Edition::LEGACY);
+    if lexer.advance().is_err() {
+        return vec![];
+    }
+    let mut ids = HashSet::new();
+    while lexer.peek() != Tok::EOF {
+        // Some tokens, such as "phantom", are contextual keywords that are only reserved in
+        // certain contexts. Since for now this language server doesn't analyze semantic context,
+        // tokens such as "phantom" are always present in keyword suggestions. To avoid displaying
+        // these keywords to the user twice in the case that the token "phantom" is present in the
+        // source program (once as a keyword, and once as an identifier), we filter out any
+        // identifier token that has the same text as a keyword.
+        if lexer.peek() == Tok::Identifier && !KEYWORDS.contains(&lexer.content()) {
+            // The completion item kind "text" indicates the item is not based on any semantic
+            // context of the request cursor's position.
+            ids.insert(lexer.content());
+        }
+        if lexer.advance().is_err() {
+            break;
+        }
+    }
+
+    let mods_opt = symbols.file_mods.get(path);
+
+    // The completion item kind "text" indicates that the item is based on simple textual matching,
+    // not any deeper semantic analysis.
+    ids.iter()
+        .map(|label| {
+            if let Some(mods) = mods_opt {
+                if mods
+                    .iter()
+                    .any(|m| m.functions().contains_key(&Symbol::from(*label)))
+                {
+                    completion_item(label, CompletionItemKind::FUNCTION)
+                } else {
+                    completion_item(label, CompletionItemKind::TEXT)
+                }
+            } else {
+                completion_item(label, CompletionItemKind::TEXT)
+            }
+        })
+        .collect()
+}
diff --git a/external-crates/move/crates/move-analyzer/src/completion.rs b/external-crates/move/crates/move-analyzer/src/completions/name_chain.rs
similarity index 50%
rename from external-crates/move/crates/move-analyzer/src/completion.rs
rename to external-crates/move/crates/move-analyzer/src/completions/name_chain.rs
index b53a58f60779c..5fd8896b36121 100644
--- a/external-crates/move/crates/move-analyzer/src/completion.rs
+++ b/external-crates/move/crates/move-analyzer/src/completions/name_chain.rs
@@ -1,55 +1,32 @@
-// Copyright (c) The Diem Core Contributors
 // Copyright (c) The Move Contributors
 // SPDX-License-Identifier: Apache-2.0
 
+// Auto-completions for name chains, such as `mod::struct::field` or `mod::function`,
+// both in the code (e.g., types) and in `use` statements.
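The driver above combines its providers through a `finalized` flag: each provider returns items plus a flag, and fallback items (identifiers, keywords, builtins) are appended only while no provider has finalized the list. The following self-contained sketch (hypothetical `Item` type and providers, not part of this PR) shows the pattern in isolation:

// Standalone sketch of the flag-merging pattern used by `cursor_completion_items`.
// `Item` and the provider functions are illustrative stand-ins; the real code
// threads `Symbols`/`CursorContext` through analogous calls.
type Item = String;

// Each provider returns its items plus a "finalized" flag; once a provider
// finalizes, lower-priority providers are skipped.
fn chain_providers(providers: &[fn() -> (Vec<Item>, bool)]) -> (Vec<Item>, bool) {
    let mut completions = vec![];
    let mut finalized = false;
    for provider in providers {
        if finalized {
            break;
        }
        let (items, provider_finalized) = provider();
        completions.extend(items);
        finalized |= provider_finalized;
    }
    (completions, finalized)
}

fn main() {
    let providers: &[fn() -> (Vec<Item>, bool)] = &[
        || (vec!["mod_a::foo".into()], true), // name-chain provider finalizes
        || (vec!["keyword".into()], false),   // skipped in this example
    ];
    let (items, finalized) = chain_providers(providers);
    assert!(finalized);
    assert_eq!(items, vec!["mod_a::foo".to_string()]);
}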
+
 use crate::{
-    context::Context,
+    completions::utils::{
+        call_completion_item, completion_item, mod_defs, PRIMITIVE_TYPE_COMPLETIONS,
+    },
     symbols::{
-        self, expansion_mod_ident_to_map_key, mod_ident_to_ide_string, ret_type_to_ide_str,
-        type_args_to_ide_string, type_list_to_ide_string, type_to_ide_string, ChainCompletionKind,
-        ChainInfo, CursorContext, CursorDefinition, DefInfo, FunType, MemberDef, MemberDefInfo,
-        ModuleDefs, PrecompiledPkgDeps, SymbolicatorRunner, Symbols, VariantInfo,
+        expansion_mod_ident_to_map_key, ChainCompletionKind, ChainInfo, CursorContext, DefInfo,
+        FunType, MemberDef, MemberDefInfo, Symbols, VariantInfo,
     },
-    utils,
 };
 use itertools::Itertools;
-use lsp_server::Request;
-use lsp_types::{
-    CompletionItem, CompletionItemKind, CompletionItemLabelDetails, CompletionParams,
-    Documentation, InsertTextFormat, Position,
-};
-use move_command_line_common::files::FileHash;
+use lsp_types::{CompletionItem, CompletionItemKind, InsertTextFormat};
 use move_compiler::{
-    editions::Edition,
     expansion::ast::{Address, ModuleIdent, ModuleIdent_, Visibility},
-    linters::LintLevel,
-    naming::ast::{Type, Type_},
-    parser::{
-        ast::{self as P, Ability_, LeadingNameAccess, LeadingNameAccess_},
-        keywords::{BUILTINS, CONTEXTUAL_KEYWORDS, KEYWORDS, PRIMITIVE_TYPES},
-        lexer::{Lexer, Tok},
-    },
-    shared::{
-        ide::{AliasAutocompleteInfo, AutocompleteMethod},
-        Identifier, Name, NumericalAddress,
-    },
+    parser::ast as P,
+    shared::{ide::AliasAutocompleteInfo, Identifier, Name, NumericalAddress},
 };
 use move_ir_types::location::{sp, Loc};
 use move_symbol_pool::Symbol;
-
-use once_cell::sync::Lazy;
-
-use std::{
-    collections::{BTreeMap, BTreeSet, HashSet},
-    path::{Path, PathBuf},
-    sync::{Arc, Mutex},
-    vec,
-};
-use vfs::VfsPath;
+use std::collections::BTreeSet;
 
 /// Describes kind of the name access chain component.
 enum ChainComponentKind {
-    Package(LeadingNameAccess),
+    Package(P::LeadingNameAccess),
     Module(ModuleIdent),
     Member(ModuleIdent, Symbol),
 }
@@ -66,372 +43,242 @@ impl ChainComponentInfo {
     }
 }
 
-/// Constructs an `lsp_types::CompletionItem` with the given `label` and `kind`.
-fn completion_item(label: &str, kind: CompletionItemKind) -> CompletionItem {
-    CompletionItem {
-        label: label.to_owned(),
-        kind: Some(kind),
-        ..Default::default()
-    }
-}
+/// Handle name chain auto-completion at a given position. The gist of this approach is to first
+/// identify what the first component of the access chain represents (as it may be a package, module
+/// or a member) and if the chain has other components, recursively process them in turn to either
+/// - finish auto-completion if cursor is on a given component's identifier
+/// - identify what the subsequent component represents and keep going
+pub fn name_chain_completions(
+    symbols: &Symbols,
+    cursor: &CursorContext,
+    colon_colon_triggered: bool,
+) -> (Vec<CompletionItem>, bool) {
+    eprintln!("looking for name access chains");
+    let mut completions = vec![];
+    let mut completion_finalized = false;
+    let Some(ChainInfo {
+        chain,
+        kind: chain_kind,
+        inside_use,
+    }) = cursor.find_access_chain()
+    else {
+        eprintln!("no access chain");
+        return (completions, completion_finalized);
+    };
 
-/// List of completion items corresponding to each one of Move's keywords.
-///
-/// Currently, this does not filter keywords out based on whether they are valid at the completion
-/// request's cursor position, but in the future it ought to.
For example, this function returns -/// all specification language keywords, but in the future it should be modified to only do so -/// within a spec block. -static KEYWORD_COMPLETIONS: Lazy> = Lazy::new(|| { - let mut keywords = KEYWORDS - .iter() - .chain(CONTEXTUAL_KEYWORDS.iter()) - .chain(PRIMITIVE_TYPES.iter()) - .map(|label| { - let kind = if label == &"copy" || label == &"move" { - CompletionItemKind::OPERATOR - } else { - CompletionItemKind::KEYWORD - }; - completion_item(label, kind) - }) - .collect::>(); - keywords.extend(PRIMITIVE_TYPE_COMPLETIONS.clone()); - keywords -}); - -/// List of completion items of Move's primitive types. -static PRIMITIVE_TYPE_COMPLETIONS: Lazy> = Lazy::new(|| { - let mut primitive_types = PRIMITIVE_TYPES - .iter() - .map(|label| completion_item(label, CompletionItemKind::KEYWORD)) - .collect::>(); - primitive_types.push(completion_item("address", CompletionItemKind::KEYWORD)); - primitive_types -}); - -/// List of completion items corresponding to each one of Move's builtin functions. -static BUILTIN_COMPLETIONS: Lazy> = Lazy::new(|| { - BUILTINS - .iter() - .map(|label| completion_item(label, CompletionItemKind::FUNCTION)) - .collect() -}); - -/// Lexes the Move source file at the given path and returns a list of completion items -/// corresponding to the non-keyword identifiers therein. -/// -/// Currently, this does not perform semantic analysis to determine whether the identifiers -/// returned are valid at the request's cursor position. However, this list of identifiers is akin -/// to what editors like Visual Studio Code would provide as completion items if this language -/// server did not initialize with a response indicating it's capable of providing completions. In -/// the future, the server should be modified to return semantically valid completion items, not -/// simple textual suggestions. -fn identifiers(buffer: &str, symbols: &Symbols, path: &Path) -> Vec { - // TODO thread through package configs - let mut lexer = Lexer::new(buffer, FileHash::new(buffer), Edition::LEGACY); - if lexer.advance().is_err() { - return vec![]; - } - let mut ids = HashSet::new(); - while lexer.peek() != Tok::EOF { - // Some tokens, such as "phantom", are contextual keywords that are only reserved in - // certain contexts. Since for now this language server doesn't analyze semantic context, - // tokens such as "phantom" are always present in keyword suggestions. To avoid displaying - // these keywords to the user twice in the case that the token "phantom" is present in the - // source program (once as a keyword, and once as an identifier), we filter out any - // identifier token that has the same text as a keyword. - if lexer.peek() == Tok::Identifier && !KEYWORDS.contains(&lexer.content()) { - // The completion item kind "text" indicates the item is not based on any semantic - // context of the request cursor's position. 
-            ids.insert(lexer.content());
-        }
-        if lexer.advance().is_err() {
-            break;
-        }
-    }
+    let (leading_name, path_entries) = match &chain.value {
+        P::NameAccessChain_::Single(entry) => (
+            sp(entry.name.loc, P::LeadingNameAccess_::Name(entry.name)),
+            vec![],
+        ),
+        P::NameAccessChain_::Path(name_path) => (
+            name_path.root.name,
+            name_path.entries.iter().map(|e| e.name).collect::<Vec<_>>(),
+        ),
+    };
+
+    // there may be access chains for which there is no auto-completion info generated by the
+    // compiler but which still have to be handled (e.g., chains starting with numeric address)
+    let info = symbols
+        .compiler_info
+        .path_autocomplete_info
+        .get(&leading_name.loc)
+        .cloned()
+        .unwrap_or_else(AliasAutocompleteInfo::new);
 
-    let mods_opt = symbols.file_mods.get(path);
+    eprintln!("found access chain for auto-completion (addresses: {}, modules: {}, members: {}, tparams: {})",
+        info.addresses.len(), info.modules.len(), info.members.len(), info.type_params.len());
 
-    // The completion item kind "text" indicates that the item is based on simple textual matching,
-    // not any deeper semantic analysis.
-    ids.iter()
-        .map(|label| {
-            if let Some(mods) = mods_opt {
-                if mods
-                    .iter()
-                    .any(|m| m.functions().contains_key(&Symbol::from(*label)))
-                {
-                    completion_item(label, CompletionItemKind::FUNCTION)
-                } else {
-                    completion_item(label, CompletionItemKind::TEXT)
-                }
-            } else {
-                completion_item(label, CompletionItemKind::TEXT)
-            }
-        })
-        .collect()
-}
+    // if we are auto-completing for an access chain, there is no need to include default completions
+    completion_finalized = true;
 
-/// Returns the token corresponding to the "trigger character" if it is one of `.`, `:`, '{', or
-/// `::`. Otherwise, returns `None` (position points at the potential trigger character itself).
-fn get_cursor_token(buffer: &str, position: &Position) -> Option<Tok> {
-    let line = match buffer.lines().nth(position.line as usize) {
-        Some(line) => line,
-        None => return None, // Our buffer does not contain the line, and so must be out of date.
-    };
-    match line.chars().nth(position.character as usize) {
-        Some('.') => Some(Tok::Period),
-        Some(':') => {
-            if position.character > 0
-                && line.chars().nth(position.character as usize - 1) == Some(':')
-            {
-                Some(Tok::ColonColon)
-            } else {
-                Some(Tok::Colon)
+    if leading_name.loc.contains(&cursor.loc) {
+        // at first position of the chain suggest all packages that are available regardless of what
+        // the leading name represents, as a package always fits at that position, for example:
+        // 0xCAFE::...
+        // some_name::...
+ // ::some_name + // + completions.extend( + all_packages(symbols, &info) + .iter() + .map(|n| completion_item(n.as_str(), CompletionItemKind::UNIT)), + ); + + // only if leading name is actually a name, modules or module members are a correct + // auto-completion in the first position + if let P::LeadingNameAccess_::Name(_) = &leading_name.value { + completions.extend( + info.modules + .keys() + .map(|n| completion_item(n.as_str(), CompletionItemKind::MODULE)), + ); + completions.extend(all_single_name_member_completions( + symbols, + cursor, + &info.members, + chain_kind, + )); + if matches!(chain_kind, ChainCompletionKind::Type) { + completions.extend(PRIMITIVE_TYPE_COMPLETIONS.clone()); + completions.extend( + info.type_params + .iter() + .map(|t| completion_item(t.as_str(), CompletionItemKind::TYPE_PARAMETER)), + ); } } - Some('{') => Some(Tok::LBrace), - _ => None, + } else if let Some(next_kind) = first_name_chain_component_kind(symbols, &info, leading_name) { + completions_for_name_chain_entry( + symbols, + cursor, + &info, + ChainComponentInfo::new(leading_name.loc, next_kind), + chain_kind, + &path_entries, + /* path_index */ 0, + colon_colon_triggered, + inside_use, + &mut completions, + ); } -} -/// Checks if the cursor is at the opening brace of a struct definition and returns -/// auto-completion of this struct into an object if the struct has the `key` ability. -fn object_completion(symbols: &Symbols, cursor: &CursorContext) -> (Option, bool) { - let mut only_custom_items = false; - // look for a struct definition on the line that contains `{`, check its abilities, - // and do auto-completion if `key` ability is present - let Some(CursorDefinition::Struct(sname)) = &cursor.defn_name else { - return (None, only_custom_items); - }; - only_custom_items = true; - let Some(mod_ident) = cursor.module else { - return (None, only_custom_items); - }; - let Some(mod_defs) = mod_defs(symbols, &mod_ident.value) else { - return (None, only_custom_items); - }; - let Some(struct_def) = mod_defs.structs.get(&sname.value()) else { - return (None, only_custom_items); - }; - - let Some(DefInfo::Struct(_, _, _, _, abilities, ..)) = - symbols.def_info.get(&struct_def.name_loc) - else { - return (None, only_custom_items); - }; + eprintln!("found {} access chain completions", completions.len()); - if !abilities.has_ability_(Ability_::Key) { - return (None, only_custom_items); - } - let obj_snippet = "\n\tid: UID,\n\t$1\n".to_string(); - let init_completion = CompletionItem { - label: "id: UID".to_string(), - kind: Some(CompletionItemKind::SNIPPET), - documentation: Some(Documentation::String("Object snippet".to_string())), - insert_text: Some(obj_snippet), - insert_text_format: Some(InsertTextFormat::SNIPPET), - ..Default::default() - }; - (Some(init_completion), only_custom_items) + (completions, completion_finalized) } -/// Handle context-specific auto-completion requests with lbrace (`{`) trigger character. -fn context_specific_lbrace( +/// Handles auto-completions for "regular" `use` declarations (name access chains in `use fun` +/// declarations are handled as part of name chain completions). +pub fn use_decl_completions( symbols: &Symbols, cursor: &CursorContext, ) -> (Vec, bool) { - let completions = vec![]; - let (completion_item_opt, only_custom_items) = object_completion(symbols, cursor); - if let Some(completion_item) = completion_item_opt { - return (vec![completion_item], only_custom_items); - } - (completions, only_custom_items) -} - -/// Get definitions for a given module. 
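The `call_completion_item` helper removed below (and moved to the new utils.rs later in this diff) renders each call argument as a `${n:name}` tab stop, with function-typed arguments becoming lambda stops. A simplified, self-contained sketch of that assembly (hypothetical types and a simplified lambda rendering, not the analyzer's exact output):

// Sketch of call-snippet assembly: `args` pairs an argument name with a flag
// saying whether it is function-typed (rendered as a lambda stop).
fn call_snippet(fun: &str, args: &[(&str, bool)]) -> String {
    let mut idx = 0;
    let rendered = args
        .iter()
        .map(|(name, is_lambda)| {
            idx += 1;
            if *is_lambda {
                // one stop for the lambda parameter, one for its body
                let body = idx + 1;
                let s = format!("|${{{idx}}}| ${{{body}}}");
                idx = body;
                s
            } else {
                format!("${{{idx}:{name}}}")
            }
        })
        .collect::<Vec<_>>()
        .join(", ");
    format!("{fun}({rendered})")
}

fn main() {
    assert_eq!(
        call_snippet("map", &[("v", false), ("f", true)]),
        "map(${1:v}, |${2}| ${3})"
    );
}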
-fn mod_defs<'a>(symbols: &'a Symbols, mod_ident: &ModuleIdent_) -> Option<&'a ModuleDefs> { - symbols - .file_mods - .values() - .flatten() - .find(|mdef| mdef.ident == *mod_ident) -} - -fn fun_def_info(symbols: &Symbols, mod_ident: ModuleIdent_, name: Symbol) -> Option<&DefInfo> { - let Some(mod_defs) = mod_defs(symbols, &mod_ident) else { - return None; + eprintln!("looking for use declarations"); + let mut completions = vec![]; + let mut completion_finalized = false; + let Some(use_) = cursor.find_use_decl() else { + eprintln!("no use declaration"); + return (completions, completion_finalized); }; + eprintln!("use declaration {:?}", use_); - let Some(fdef) = mod_defs.functions.get(&name) else { - return None; - }; - symbols.def_info(&fdef.name_loc) -} + // if we are auto-completing for a use decl, there is no need to include default completions + completion_finalized = true; -fn lambda_snippet(sp!(_, ty): &Type, snippet_idx: &mut i32) -> Option { - if let Type_::Fun(vec, _) = ty { - let arg_snippets = vec - .iter() - .map(|_| { - *snippet_idx += 1; - format!("${{{snippet_idx}}}") - }) - .collect::>() - .join(", "); - *snippet_idx += 1; - return Some(format!("|{arg_snippets}| ${{{snippet_idx}}}")); - } - None -} + // there is no auto-completion info generated by the compiler for this but helper methods used + // here are shared with name chain completion where it may exist, so we create an "empty" one + // here + let info = AliasAutocompleteInfo::new(); -fn call_completion_item( - mod_ident: &ModuleIdent_, - is_macro: bool, - method_name_opt: Option<&Symbol>, - function_name: &Symbol, - type_args: &[Type], - arg_names: &[Name], - arg_types: &[Type], - ret_type: &Type, - inside_use: bool, -) -> CompletionItem { - let sig_string = format!( - "fun {}({}){}", - type_args_to_ide_string(type_args, /* verbose */ false), - type_list_to_ide_string(arg_types, /* verbose */ false), - ret_type_to_ide_str(ret_type, /* verbose */ false) - ); - // if it's a method call we omit the first argument which is guaranteed to be there as this is a - // method and needs a receiver - let omitted_arg_count = if method_name_opt.is_some() { 1 } else { 0 }; - let mut snippet_idx = 0; - let arg_snippet = arg_names - .iter() - .zip(arg_types) - .skip(omitted_arg_count) - .map(|(name, ty)| { - lambda_snippet(ty, &mut snippet_idx).unwrap_or_else(|| { - let mut arg_name = name.to_string(); - if arg_name.starts_with('$') { - arg_name = arg_name[1..].to_string(); + match use_ { + P::Use::ModuleUse(sp!(_, mod_ident), mod_use) => { + if mod_ident.address.loc.contains(&cursor.loc) { + // cursor on package (e.g., on `some_pkg` in `some_pkg::some_mod`) + completions.extend( + all_packages(symbols, &info) + .iter() + .map(|n| completion_item(n.as_str(), CompletionItemKind::UNIT)), + ); + } else if cursor.loc.start() > mod_ident.address.loc.end() + && cursor.loc.end() <= mod_ident.module.loc().end() + { + // cursor is either at the `::` succeeding package/address or at the identifier + // following that particular `::` + for ident in pkg_mod_identifiers(symbols, &info, &mod_ident.address) { + completions.push(completion_item( + ident.value.module.value().as_str(), + CompletionItemKind::MODULE, + )); + } + } else { + completions.extend(module_use_completions( + symbols, + cursor, + &info, + &mod_use, + &mod_ident.address, + &mod_ident.module, + )); + } + } + P::Use::NestedModuleUses(leading_name, uses) => { + if leading_name.loc.contains(&cursor.loc) { + // cursor on package + completions.extend( + all_packages(symbols, &info) + 
.iter() + .map(|n| completion_item(n.as_str(), CompletionItemKind::UNIT)), + ); + } else { + if let Some((first_name, _)) = uses.first() { + if cursor.loc.start() > leading_name.loc.end() + && cursor.loc.end() <= first_name.loc().start() + { + // cursor is after `::` succeeding address/package but before the first + // module + for ident in pkg_mod_identifiers(symbols, &info, &leading_name) { + completions.push(completion_item( + ident.value.module.value().as_str(), + CompletionItemKind::MODULE, + )); + } + // no point in falling through to the uses loop below + return (completions, completion_finalized); + } } - snippet_idx += 1; - format!("${{{}:{}}}", snippet_idx, arg_name) - }) - }) - .collect::>() - .join(", "); - let macro_suffix = if is_macro { "!" } else { "" }; - let label_details = Some(CompletionItemLabelDetails { - detail: Some(format!( - " ({}::{})", - mod_ident_to_ide_string(mod_ident), - function_name - )), - description: Some(sig_string), - }); - - let method_name = method_name_opt.unwrap_or(function_name); - let (insert_text, insert_text_format) = if inside_use { - ( - Some(format!("{method_name}")), - Some(InsertTextFormat::PLAIN_TEXT), - ) - } else { - ( - Some(format!("{method_name}{macro_suffix}({arg_snippet})")), - Some(InsertTextFormat::SNIPPET), - ) - }; - - CompletionItem { - label: format!("{method_name}{macro_suffix}()"), - label_details, - kind: Some(CompletionItemKind::METHOD), - insert_text, - insert_text_format, - ..Default::default() - } -} -/// Handle dot auto-completion at a given position. -fn dot_completions( - symbols: &Symbols, - use_fpath: &Path, - position: &Position, -) -> Vec { - let mut completions = vec![]; - let Some(fhash) = symbols.file_hash(use_fpath) else { - eprintln!("no dot completions due to missing file"); - return completions; - }; - let Some(loc) = utils::lsp_position_to_loc(&symbols.files, fhash, position) else { - eprintln!("no dot completions due to missing loc"); - return completions; - }; - let Some(info) = symbols.compiler_info.get_autocomplete_info(fhash, &loc) else { - eprintln!("no dot completions due to no compiler autocomplete info"); - return completions; - }; - for AutocompleteMethod { - method_name, - target_function: (mod_ident, function_name), - } in &info.methods - { - let call_completion = if let Some(DefInfo::Function( - .., - fun_type, - _, - type_args, - arg_names, - arg_types, - ret_type, - _, - )) = fun_def_info(symbols, mod_ident.value, function_name.value()) - { - call_completion_item( - &mod_ident.value, - matches!(fun_type, FunType::Macro), - Some(method_name), - &function_name.value(), - type_args, - arg_names, - arg_types, - ret_type, - /* inside_use */ false, - ) - } else { - // this shouldn't really happen as we should be able to get - // `DefInfo` for a function but if for some reason we cannot, - // let's generate simpler autotompletion value - eprintln!("incomplete dot item"); - CompletionItem { - label: format!("{method_name}()"), - kind: Some(CompletionItemKind::METHOD), - insert_text: Some(method_name.to_string()), - insert_text_format: Some(InsertTextFormat::PLAIN_TEXT), - ..Default::default() + for (mod_name, mod_use) in &uses { + if mod_name.loc().contains(&cursor.loc) { + for ident in pkg_mod_identifiers(symbols, &info, &leading_name) { + completions.push(completion_item( + ident.value.module.value().as_str(), + CompletionItemKind::MODULE, + )); + } + // no point checking other locations + break; + } + completions.extend(module_use_completions( + symbols, + cursor, + &info, + mod_use, + 
&leading_name, + mod_name, + )); + } } - }; - completions.push(call_completion); - } - for (n, t) in &info.fields { - let label_details = Some(CompletionItemLabelDetails { - detail: None, - description: Some(type_to_ide_string(t, /* verbose */ false)), - }); - let init_completion = CompletionItem { - label: n.to_string(), - label_details, - kind: Some(CompletionItemKind::FIELD), - insert_text: Some(n.to_string()), - insert_text_format: Some(InsertTextFormat::PLAIN_TEXT), - ..Default::default() - }; - completions.push(init_completion); + } + P::Use::Fun { .. } => (), // already handled as part of name chain completion + P::Use::Partial { + package, + colon_colon, + opening_brace: _, + } => { + if package.loc.contains(&cursor.loc) { + // cursor on package name/address + completions.extend( + all_packages(symbols, &info) + .iter() + .map(|n| completion_item(n.as_str(), CompletionItemKind::UNIT)), + ); + } + if let Some(colon_colon_loc) = colon_colon { + if cursor.loc.start() >= colon_colon_loc.start() { + // cursor is on or past `::` + for ident in pkg_mod_identifiers(symbols, &info, &package) { + completions.push(completion_item( + ident.value.module.value().as_str(), + CompletionItemKind::MODULE, + )); + } + } + } + } } - completions + + (completions, completion_finalized) } /// Handles auto-completion for structs and enums variants, including fields contained @@ -735,17 +582,18 @@ fn all_single_name_member_completions( /// Checks if a given module identifier represents a module in a package identifier by /// `leading_name`. -fn is_pkg_mod_ident(mod_ident: &ModuleIdent_, leading_name: &LeadingNameAccess) -> bool { +fn is_pkg_mod_ident(mod_ident: &ModuleIdent_, leading_name: &P::LeadingNameAccess) -> bool { match mod_ident.address { Address::NamedUnassigned(name) => matches!(leading_name.value, - LeadingNameAccess_::Name(n) | LeadingNameAccess_::GlobalAddress(n) if name == n), + P::LeadingNameAccess_::Name(n) | P::LeadingNameAccess_::GlobalAddress(n) if name == n), Address::Numerical { name, value, name_conflict: _, } => match leading_name.value { - LeadingNameAccess_::AnonymousAddress(addr) if addr == value.value => true, - LeadingNameAccess_::Name(addr_name) | LeadingNameAccess_::GlobalAddress(addr_name) + P::LeadingNameAccess_::AnonymousAddress(addr) if addr == value.value => true, + P::LeadingNameAccess_::Name(addr_name) + | P::LeadingNameAccess_::GlobalAddress(addr_name) if Some(addr_name) == name => { true @@ -759,7 +607,7 @@ fn is_pkg_mod_ident(mod_ident: &ModuleIdent_, leading_name: &LeadingNameAccess) fn pkg_mod_identifiers( symbols: &Symbols, info: &AliasAutocompleteInfo, - leading_name: &LeadingNameAccess, + leading_name: &P::LeadingNameAccess, ) -> BTreeSet { info.modules .values() @@ -1028,152 +876,47 @@ fn all_packages(symbols: &Symbols, info: &AliasAutocompleteInfo) -> BTreeSet Option { match leading_name.value { - LeadingNameAccess_::Name(n) => { + P::LeadingNameAccess_::Name(n) => { if is_package_name(symbols, info, n) { Some(ChainComponentKind::Package(leading_name)) } else if let Some(mod_ident) = info.modules.get(&n.value) { Some(ChainComponentKind::Module(*mod_ident)) } else if let Some((mod_ident, member_name)) = - info.members - .iter() - .find_map(|(alias_name, mod_ident, member_name)| { - if alias_name == &n.value { - Some((*mod_ident, member_name)) - } else { - None - } - }) - { - Some(ChainComponentKind::Member(mod_ident, member_name.value)) - } else { - None - } - } - LeadingNameAccess_::AnonymousAddress(addr) => { - if is_package_address(symbols, info, addr) { - 
Some(ChainComponentKind::Package(leading_name)) - } else { - None - } - } - LeadingNameAccess_::GlobalAddress(n) => { - // if leading name is global address then the first component can only be a - // package - if is_package_name(symbols, info, n) { - Some(ChainComponentKind::Package(leading_name)) - } else { - None - } - } - } -} - -/// Handle name chain auto-completion at a given position. The gist of this approach is to first -/// identify what the first component of the access chain represents (as it may be a package, module -/// or a member) and if the chain has other components, recursively process them in turn to either -/// - finish auto-completion if cursor is on a given component's identifier -/// - identify what the subsequent component represents and keep going -fn name_chain_completions( - symbols: &Symbols, - cursor: &CursorContext, - colon_colon_triggered: bool, -) -> (Vec, bool) { - eprintln!("looking for name access chains"); - let mut completions = vec![]; - let mut only_custom_items = false; - let Some(ChainInfo { - chain, - kind: chain_kind, - inside_use, - }) = cursor.find_access_chain() - else { - eprintln!("no access chain"); - return (completions, only_custom_items); - }; - - let (leading_name, path_entries) = match &chain.value { - P::NameAccessChain_::Single(entry) => ( - sp(entry.name.loc, LeadingNameAccess_::Name(entry.name)), - vec![], - ), - P::NameAccessChain_::Path(name_path) => ( - name_path.root.name, - name_path.entries.iter().map(|e| e.name).collect::>(), - ), - }; - - // there may be access chains for which there is not auto-completion info generated by the - // compiler but which still have to be handled (e.g., chains starting with numeric address) - let info = symbols - .compiler_info - .path_autocomplete_info - .get(&leading_name.loc) - .cloned() - .unwrap_or_else(AliasAutocompleteInfo::new); - - eprintln!("found access chain for auto-completion (adddreses: {}, modules: {}, members: {}, tparams: {}", - info.addresses.len(), info.modules.len(), info.members.len(), info.type_params.len()); - - // if we are auto-completing for an access chain, there is no need to include default completions - only_custom_items = true; - - if leading_name.loc.contains(&cursor.loc) { - // at first position of the chain suggest all packages that are available regardless of what - // the leading name represents, as a package always fits at that position, for example: - // OxCAFE::... - // some_name::... 
- // ::some_name - // - completions.extend( - all_packages(symbols, &info) - .iter() - .map(|n| completion_item(n.as_str(), CompletionItemKind::UNIT)), - ); - - // only if leading name is actually a name, modules or module members are a correct - // auto-completion in the first position - if let LeadingNameAccess_::Name(_) = &leading_name.value { - completions.extend( - info.modules - .keys() - .map(|n| completion_item(n.as_str(), CompletionItemKind::MODULE)), - ); - completions.extend(all_single_name_member_completions( - symbols, - cursor, - &info.members, - chain_kind, - )); - if matches!(chain_kind, ChainCompletionKind::Type) { - completions.extend(PRIMITIVE_TYPE_COMPLETIONS.clone()); - completions.extend( - info.type_params - .iter() - .map(|t| completion_item(t.as_str(), CompletionItemKind::TYPE_PARAMETER)), - ); + info.members + .iter() + .find_map(|(alias_name, mod_ident, member_name)| { + if alias_name == &n.value { + Some((*mod_ident, member_name)) + } else { + None + } + }) + { + Some(ChainComponentKind::Member(mod_ident, member_name.value)) + } else { + None + } + } + P::LeadingNameAccess_::AnonymousAddress(addr) => { + if is_package_address(symbols, info, addr) { + Some(ChainComponentKind::Package(leading_name)) + } else { + None + } + } + P::LeadingNameAccess_::GlobalAddress(n) => { + // if leading name is global address then the first component can only be a + // package + if is_package_name(symbols, info, n) { + Some(ChainComponentKind::Package(leading_name)) + } else { + None } } - } else if let Some(next_kind) = first_name_chain_component_kind(symbols, &info, leading_name) { - completions_for_name_chain_entry( - symbols, - cursor, - &info, - ChainComponentInfo::new(leading_name.loc, next_kind), - chain_kind, - &path_entries, - /* path_index */ 0, - colon_colon_triggered, - inside_use, - &mut completions, - ); } - - eprintln!("found {} access chain completions", completions.len()); - - (completions, only_custom_items) } /// Computes auto-completions for module uses. @@ -1182,7 +925,7 @@ fn module_use_completions( cursor: &CursorContext, info: &AliasAutocompleteInfo, mod_use: &P::ModuleUse, - package: &LeadingNameAccess, + package: &P::LeadingNameAccess, mod_name: &P::ModuleName, ) -> Vec { use P::ModuleUse as MU; @@ -1251,542 +994,3 @@ fn module_use_completions( completions } - -/// Handles auto-completions for "regular" `use` declarations (name access chains in `use fun` -/// declarations are handled as part of name chain completions). 
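Both the relocated `use_decl_completions` above and the original version removed below decide what to suggest by comparing the cursor span against the spans of the `use` path segments. A self-contained sketch of that span arithmetic (hypothetical `Span` type and byte offsets; the real code works with `Loc` values):

// Decide which segment of `use pkg::mod` a cursor offset falls in.
#[derive(Clone, Copy)]
struct Span {
    start: u32,
    end: u32,
}

impl Span {
    fn contains(&self, pos: u32) -> bool {
        self.start <= pos && pos < self.end
    }
}

enum UseSegment {
    Package,
    Module,
    Member,
}

fn segment_at(pkg: Span, module: Span, pos: u32) -> UseSegment {
    if pkg.contains(pos) {
        UseSegment::Package
    } else if pos > pkg.end && pos <= module.end {
        // either at the `::` following the package or at the module identifier
        UseSegment::Module
    } else {
        UseSegment::Member
    }
}

fn main() {
    // `use std::vector` -- `std` spans 4..7, `vector` spans 9..15
    let (pkg, module) = (Span { start: 4, end: 7 }, Span { start: 9, end: 15 });
    assert!(matches!(segment_at(pkg, module, 5), UseSegment::Package));
    assert!(matches!(segment_at(pkg, module, 8), UseSegment::Module));
    assert!(matches!(segment_at(pkg, module, 20), UseSegment::Member));
}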
-fn use_decl_completions(symbols: &Symbols, cursor: &CursorContext) -> (Vec, bool) { - eprintln!("looking for use declarations"); - let mut completions = vec![]; - let mut only_custom_items = false; - let Some(use_) = cursor.find_use_decl() else { - eprintln!("no use declaration"); - return (completions, only_custom_items); - }; - eprintln!("use declaration {:?}", use_); - - // if we are auto-completing for a use decl, there is no need to include default completions - only_custom_items = true; - - // there is no auto-completion info generated by the compiler for this but helper methods used - // here are shared with name chain completion where it may exist, so we create an "empty" one - // here - let info = AliasAutocompleteInfo::new(); - - match use_ { - P::Use::ModuleUse(sp!(_, mod_ident), mod_use) => { - if mod_ident.address.loc.contains(&cursor.loc) { - // cursor on package (e.g., on `some_pkg` in `some_pkg::some_mod`) - completions.extend( - all_packages(symbols, &info) - .iter() - .map(|n| completion_item(n.as_str(), CompletionItemKind::UNIT)), - ); - } else if cursor.loc.start() > mod_ident.address.loc.end() - && cursor.loc.end() <= mod_ident.module.loc().end() - { - // cursor is either at the `::` succeeding package/address or at the identifier - // following that particular `::` - for ident in pkg_mod_identifiers(symbols, &info, &mod_ident.address) { - completions.push(completion_item( - ident.value.module.value().as_str(), - CompletionItemKind::MODULE, - )); - } - } else { - completions.extend(module_use_completions( - symbols, - cursor, - &info, - &mod_use, - &mod_ident.address, - &mod_ident.module, - )); - } - } - P::Use::NestedModuleUses(leading_name, uses) => { - if leading_name.loc.contains(&cursor.loc) { - // cursor on package - completions.extend( - all_packages(symbols, &info) - .iter() - .map(|n| completion_item(n.as_str(), CompletionItemKind::UNIT)), - ); - } else { - if let Some((first_name, _)) = uses.first() { - if cursor.loc.start() > leading_name.loc.end() - && cursor.loc.end() <= first_name.loc().start() - { - // cursor is after `::` succeeding address/package but before the first - // module - for ident in pkg_mod_identifiers(symbols, &info, &leading_name) { - completions.push(completion_item( - ident.value.module.value().as_str(), - CompletionItemKind::MODULE, - )); - } - // no point in falling through to the uses loop below - return (completions, only_custom_items); - } - } - - for (mod_name, mod_use) in &uses { - if mod_name.loc().contains(&cursor.loc) { - for ident in pkg_mod_identifiers(symbols, &info, &leading_name) { - completions.push(completion_item( - ident.value.module.value().as_str(), - CompletionItemKind::MODULE, - )); - } - // no point checking other locations - break; - } - completions.extend(module_use_completions( - symbols, - cursor, - &info, - mod_use, - &leading_name, - mod_name, - )); - } - } - } - P::Use::Fun { .. 
} => (), // already handled as part of name chain completion - P::Use::Partial { - package, - colon_colon, - opening_brace: _, - } => { - if package.loc.contains(&cursor.loc) { - // cursor on package name/address - completions.extend( - all_packages(symbols, &info) - .iter() - .map(|n| completion_item(n.as_str(), CompletionItemKind::UNIT)), - ); - } - if let Some(colon_colon_loc) = colon_colon { - if cursor.loc.start() >= colon_colon_loc.start() { - // cursor is on or past `::` - for ident in pkg_mod_identifiers(symbols, &info, &package) { - completions.push(completion_item( - ident.value.module.value().as_str(), - CompletionItemKind::MODULE, - )); - } - } - } - } - } - - (completions, only_custom_items) -} - -/// Handle context-specific auto-completion requests with no trigger character. -fn context_specific_no_trigger( - symbols: &Symbols, - use_fpath: &Path, - buffer: &str, - position: &Position, -) -> (Vec, bool) { - eprintln!("looking for dot"); - let mut completions = dot_completions(symbols, use_fpath, position); - eprintln!("dot found: {}", !completions.is_empty()); - if !completions.is_empty() { - // found dot completions - do not look for any other - return (completions, true); - } - - let mut only_custom_items = false; - - let strings = preceding_strings(buffer, position); - - if strings.is_empty() { - return (completions, only_custom_items); - } - - // at this point only try to auto-complete init function declararation - get the last string - // and see if it represents the beginning of init function declaration - const INIT_FN_NAME: &str = "init"; - let (n, use_col) = strings.last().unwrap(); - for u in symbols.line_uses(use_fpath, position.line) { - if *use_col >= u.col_start() && *use_col <= u.col_end() { - let def_loc = u.def_loc(); - let Some(use_file_mod_definition) = symbols.file_mods.get(use_fpath) else { - break; - }; - let Some(use_file_mod_def) = use_file_mod_definition.first() else { - break; - }; - if is_definition( - symbols, - position.line, - u.col_start(), - use_file_mod_def.fhash(), - def_loc, - ) { - // since it's a definition, there is no point in trying to suggest a name - // if one is about to create a fresh identifier - only_custom_items = true; - } - let Some(def_info) = symbols.def_info(&def_loc) else { - break; - }; - let DefInfo::Function(mod_ident, v, ..) 
= def_info else { - // not a function - break; - }; - if !INIT_FN_NAME.starts_with(n) { - // starting to type "init" - break; - } - if !matches!(v, Visibility::Internal) { - // private (otherwise perhaps it's "init_something") - break; - } - - // get module info containing the init function - let Some(mdef) = symbols.mod_defs(&u.def_loc().file_hash(), *mod_ident) else { - break; - }; - - if mdef.functions().contains_key(&(INIT_FN_NAME.into())) { - // already has init function - break; - } - - let sui_ctx_arg = "ctx: &mut TxContext"; - - // decide on the list of parameters depending on whether a module containing - // the init function has a struct thats an one-time-witness candidate struct - let otw_candidate = Symbol::from(mod_ident.module.value().to_uppercase()); - let init_snippet = if mdef.structs().contains_key(&otw_candidate) { - format!("{INIT_FN_NAME}(${{1:witness}}: {otw_candidate}, {sui_ctx_arg}) {{\n\t${{2:}}\n}}\n") - } else { - format!("{INIT_FN_NAME}({sui_ctx_arg}) {{\n\t${{1:}}\n}}\n") - }; - - let init_completion = CompletionItem { - label: INIT_FN_NAME.to_string(), - kind: Some(CompletionItemKind::SNIPPET), - documentation: Some(Documentation::String( - "Module initializer snippet".to_string(), - )), - insert_text: Some(init_snippet), - insert_text_format: Some(InsertTextFormat::SNIPPET), - ..Default::default() - }; - completions.push(init_completion); - break; - } - } - (completions, only_custom_items) -} - -/// Checks if a use at a given position is also a definition. -fn is_definition( - symbols: &Symbols, - use_line: u32, - use_col: u32, - use_fhash: FileHash, - def_loc: Loc, -) -> bool { - if let Some(use_loc) = symbols - .files - .line_char_offset_to_loc_opt(use_fhash, use_line, use_col) - { - // TODO: is overlapping better? - def_loc.contains(&use_loc) - } else { - false - } -} - -/// Finds white-space separated strings on the line containing auto-completion request and their -/// locations. -fn preceding_strings(buffer: &str, position: &Position) -> Vec<(String, u32)> { - let mut strings = vec![]; - let line = match buffer.lines().nth(position.line as usize) { - Some(line) => line, - None => return strings, // Our buffer does not contain the line, and so must be out of date. - }; - - let mut chars = line.chars(); - let mut cur_col = 0; - let mut cur_str_start = 0; - let mut cur_str = "".to_string(); - while cur_col <= position.character { - let Some(c) = chars.next() else { - return strings; - }; - if c == ' ' || c == '\t' { - if !cur_str.is_empty() { - // finish an already started string - strings.push((cur_str, cur_str_start)); - cur_str = "".to_string(); - } - } else { - if cur_str.is_empty() { - // start a new string - cur_str_start = cur_col; - } - cur_str.push(c); - } - - cur_col += c.len_utf8() as u32; - } - if !cur_str.is_empty() { - // finish the last string - strings.push((cur_str, cur_str_start)); - } - strings -} - -/// Sends the given connection a response to a completion request. -/// -/// The completions returned depend upon where the user's cursor is positioned. 
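The init-function completion removed above builds LSP snippet strings in which `$1` and `${1:name}` mark tab stops; the same strings reappear in the new snippets.rs later in this diff. A small sketch of that assembly (plain string-building only; the helper names here are hypothetical):

// Assemble object-field and `init` snippets the way the analyzer does.
fn object_field_snippet() -> String {
    "\n\tid: UID,\n\t$1\n".to_string()
}

fn init_snippet(has_otw: bool, otw: &str) -> String {
    let ctx = "ctx: &mut TxContext";
    if has_otw {
        // first stop on the witness parameter, second inside the body
        format!("init(${{1:witness}}: {otw}, {ctx}) {{\n\t${{2:}}\n}}\n")
    } else {
        format!("init({ctx}) {{\n\t${{1:}}\n}}\n")
    }
}

fn main() {
    assert!(object_field_snippet().contains("$1"));
    assert!(init_snippet(true, "MY_MOD").starts_with("init(${1:witness}: MY_MOD"));
}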
-pub fn on_completion_request( - context: &Context, - request: &Request, - ide_files_root: VfsPath, - pkg_dependencies: Arc>>, -) { - eprintln!("handling completion request"); - let parameters = serde_json::from_value::(request.params.clone()) - .expect("could not deserialize completion request"); - - let path = parameters - .text_document_position - .text_document - .uri - .to_file_path() - .unwrap(); - - let mut pos = parameters.text_document_position.position; - if pos.character != 0 { - // adjust column to be at the character that has just been inserted rather than right after - // it (unless we are at the very first column) - pos = Position::new(pos.line, pos.character - 1); - } - let items = completions_with_context(context, ide_files_root, pkg_dependencies, &path, pos) - .unwrap_or_default(); - let items_len = items.len(); - - let result = serde_json::to_value(items).expect("could not serialize completion response"); - eprintln!("about to send completion response with {items_len} items"); - let response = lsp_server::Response::new_ok(request.id.clone(), result); - if let Err(err) = context - .connection - .sender - .send(lsp_server::Message::Response(response)) - { - eprintln!("could not send completion response: {:?}", err); - } -} - -pub fn completions_with_context( - context: &Context, - ide_files_root: VfsPath, - pkg_dependencies: Arc>>, - path: &Path, - pos: Position, -) -> Option> { - let Some(pkg_path) = SymbolicatorRunner::root_dir(path) else { - eprintln!("failed completion for {:?} (package root not found)", path); - return None; - }; - let symbol_map = context.symbols.lock().unwrap(); - let current_symbols = symbol_map.get(&pkg_path)?; - Some(completion_items( - current_symbols, - ide_files_root, - pkg_dependencies, - path, - pos, - )) -} - -pub fn completion_items( - current_symbols: &Symbols, - ide_files_root: VfsPath, - pkg_dependencies: Arc>>, - path: &Path, - pos: Position, -) -> Vec { - compute_cursor_completion_items(ide_files_root, pkg_dependencies, path, pos) - .unwrap_or_else(|| compute_completion_items(current_symbols, path, pos)) -} - -fn compute_cursor_completion_items( - ide_files_root: VfsPath, - pkg_dependencies: Arc>>, - path: &Path, - cursor_position: Position, -) -> Option> { - let Some(pkg_path) = SymbolicatorRunner::root_dir(path) else { - eprintln!("failed completion for {:?} (package root not found)", path); - return None; - }; - let cursor_path = path.to_path_buf(); - let cursor_info = Some((&cursor_path, cursor_position)); - let (symbols, _diags) = symbols::get_symbols( - pkg_dependencies, - ide_files_root, - &pkg_path, - LintLevel::None, - cursor_info, - ) - .ok()?; - let symbols = symbols?; - Some(compute_completion_items(&symbols, path, cursor_position)) -} - -/// Computes completion items for a given completion request. 
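The `completion_items` entry point removed below first attempts a fresh, cursor-aware symbolication and only falls back to previously cached symbols when that fails. A minimal sketch of this fallback shape (the names are illustrative stand-ins, not the analyzer's API):

// Try an expensive, up-to-date computation; fall back to a cheap cached one.
fn with_fresh_symbols(pos: u32) -> Option<Vec<String>> {
    // pretend recompilation failed for odd positions
    (pos % 2 == 0).then(|| vec![format!("fresh@{pos}")])
}

fn with_cached_symbols(pos: u32) -> Vec<String> {
    vec![format!("cached@{pos}")]
}

fn completion_items(pos: u32) -> Vec<String> {
    with_fresh_symbols(pos).unwrap_or_else(|| with_cached_symbols(pos))
}

fn main() {
    assert_eq!(completion_items(2), vec!["fresh@2".to_string()]);
    assert_eq!(completion_items(3), vec!["cached@3".to_string()]);
}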
-fn compute_completion_items(symbols: &Symbols, path: &Path, pos: Position) -> Vec { - let mut items = vec![]; - - let Some(fhash) = symbols.file_hash(path) else { - return items; - }; - let Some(file_id) = symbols.files.file_mapping().get(&fhash) else { - return items; - }; - let Ok(file) = symbols.files.files().get(*file_id) else { - return items; - }; - - let file_source = file.source().clone(); - if !file_source.is_empty() { - let only_custom_items; - match &symbols.cursor_context { - Some(cursor_context) => { - eprintln!("cursor completion"); - let (new_items, only_has_custom_items) = - cursor_completion_items(symbols, path, &file_source, pos, cursor_context); - only_custom_items = only_has_custom_items; - items.extend(new_items); - } - None => { - eprintln!("non-cursor completion"); - let (new_items, only_has_custom_items) = - default_items(symbols, path, &file_source, pos); - only_custom_items = only_has_custom_items; - items.extend(new_items); - } - } - if !only_custom_items { - eprintln!("including identifiers"); - let identifiers = identifiers(&file_source, symbols, path); - items.extend(identifiers); - } - } else { - // no file content - items.extend(KEYWORD_COMPLETIONS.clone()); - items.extend(BUILTIN_COMPLETIONS.clone()); - } - items -} - -/// Return completion items, plus a flag indicating if we should only use the custom items returned -/// (i.e., when the flag is false, default items and identifiers should also be added). -fn cursor_completion_items( - symbols: &Symbols, - path: &Path, - file_source: &str, - pos: Position, - cursor: &CursorContext, -) -> (Vec, bool) { - let cursor_leader = get_cursor_token(file_source, &pos); - match cursor_leader { - // TODO: consider using `cursor.position` for this instead - Some(Tok::Period) => { - eprintln!("found period"); - let items = dot_completions(symbols, path, &pos); - let items_is_empty = items.is_empty(); - eprintln!("found items: {}", !items_is_empty); - // whether completions have been found for the dot or not - // it makes no sense to try offering "dumb" autocompletion - // options here as they will not fit (an example would - // be dot completion of u64 variable without any methods - // with u64 receiver being visible) - (items, true) - } - Some(Tok::ColonColon) => { - let mut items = vec![]; - let mut only_custom_items = false; - let (path_items, path_custom) = - name_chain_completions(symbols, cursor, /* colon_colon_triggered */ true); - items.extend(path_items); - only_custom_items |= path_custom; - if !only_custom_items { - let (path_items, path_custom) = use_decl_completions(symbols, cursor); - items.extend(path_items); - only_custom_items |= path_custom; - } - (items, only_custom_items) - } - // Carve out to suggest UID for struct with key ability - Some(Tok::LBrace) => { - let mut items = vec![]; - let mut only_custom_items = false; - let (path_items, path_custom) = context_specific_lbrace(symbols, cursor); - items.extend(path_items); - only_custom_items |= path_custom; - if !only_custom_items { - let (path_items, path_custom) = use_decl_completions(symbols, cursor); - items.extend(path_items); - only_custom_items |= path_custom; - } - (items, only_custom_items) - } - // TODO: should we handle auto-completion on `:`? 
If we model our support after - // rust-analyzer then it does not do this - it starts auto-completing types after the first - // character beyond `:` is typed - _ => { - eprintln!("no relevant cursor leader"); - let mut items = vec![]; - let mut only_custom_items = false; - let (path_items, path_custom) = - name_chain_completions(symbols, cursor, /* colon_colon_triggered */ false); - items.extend(path_items); - only_custom_items |= path_custom; - if !only_custom_items { - if matches!(cursor_leader, Some(Tok::Colon)) { - // much like rust-analyzer we do not auto-complete in the middle of `::` - only_custom_items = true; - } else { - let (path_items, path_custom) = use_decl_completions(symbols, cursor); - items.extend(path_items); - only_custom_items |= path_custom; - } - } - if !only_custom_items { - eprintln!("checking default items"); - let (default_items, default_custom) = - default_items(symbols, path, file_source, pos); - items.extend(default_items); - only_custom_items |= default_custom; - } - (items, only_custom_items) - } - } -} - -fn default_items( - symbols: &Symbols, - path: &Path, - file_source: &str, - pos: Position, -) -> (Vec, bool) { - // If the user's cursor is positioned anywhere other than following a `.`, `:`, or `::`, - // offer them context-specific autocompletion items as well as - // Move's keywords, operators, and builtins. - let (custom_items, only_custom_items) = - context_specific_no_trigger(symbols, path, file_source, &pos); - let mut items = custom_items; - if !only_custom_items { - items.extend(KEYWORD_COMPLETIONS.clone()); - items.extend(BUILTIN_COMPLETIONS.clone()); - } - (items, only_custom_items) -} diff --git a/external-crates/move/crates/move-analyzer/src/completions/snippets.rs b/external-crates/move/crates/move-analyzer/src/completions/snippets.rs new file mode 100644 index 0000000000000..d1c2c969cc8bc --- /dev/null +++ b/external-crates/move/crates/move-analyzer/src/completions/snippets.rs @@ -0,0 +1,215 @@ +// Copyright (c) The Move Contributors +// SPDX-License-Identifier: Apache-2.0 + +// Snippets auto-completion for various language elements, such as `init` function +// or structs representing objects. + +use crate::{ + completions::utils::mod_defs, + symbols::{CursorContext, CursorDefinition, DefInfo, Symbols}, +}; +use lsp_types::{CompletionItem, CompletionItemKind, Documentation, InsertTextFormat, Position}; +use move_command_line_common::files::FileHash; +use move_compiler::{expansion::ast::Visibility, parser::ast::Ability_, shared::Identifier}; +use move_ir_types::location::Loc; +use move_symbol_pool::Symbol; + +use std::path::Path; + +/// Checks if the cursor is at the opening brace of a struct definition and returns +/// auto-completion of this struct into an object if the struct has the `key` ability. 
+pub fn object_completion(
+    symbols: &Symbols,
+    cursor: &CursorContext,
+) -> (Option<CompletionItem>, bool) {
+    let mut completion_finalized = false;
+    // look for a struct definition on the line that contains `{`, check its abilities,
+    // and do auto-completion if `key` ability is present
+    let Some(CursorDefinition::Struct(sname)) = &cursor.defn_name else {
+        return (None, completion_finalized);
+    };
+    completion_finalized = true;
+    let Some(mod_ident) = cursor.module else {
+        return (None, completion_finalized);
+    };
+    let Some(mod_defs) = mod_defs(symbols, &mod_ident.value) else {
+        return (None, completion_finalized);
+    };
+    let Some(struct_def) = mod_defs.structs.get(&sname.value()) else {
+        return (None, completion_finalized);
+    };
+
+    let Some(DefInfo::Struct(_, _, _, _, abilities, ..)) =
+        symbols.def_info.get(&struct_def.name_loc)
+    else {
+        return (None, completion_finalized);
+    };
+
+    if !abilities.has_ability_(Ability_::Key) {
+        return (None, completion_finalized);
+    }
+    let obj_snippet = "\n\tid: UID,\n\t$1\n".to_string();
+    let init_completion = CompletionItem {
+        label: "id: UID".to_string(),
+        kind: Some(CompletionItemKind::SNIPPET),
+        documentation: Some(Documentation::String("Object snippet".to_string())),
+        insert_text: Some(obj_snippet),
+        insert_text_format: Some(InsertTextFormat::SNIPPET),
+        ..Default::default()
+    };
+    (Some(init_completion), completion_finalized)
+}
+
+/// Auto-completion for `init` function snippet.
+pub fn init_completion(
+    symbols: &Symbols,
+    use_fpath: &Path,
+    buffer: &str,
+    position: &Position,
+) -> (Vec<CompletionItem>, bool) {
+    let mut completions = vec![];
+    let mut completion_finalized = false;
+
+    let strings = preceding_strings(buffer, position);
+
+    if strings.is_empty() {
+        return (completions, completion_finalized);
+    }
+
+    // try to auto-complete init function declaration - get the last string
+    // and see if it represents the beginning of init function declaration
+    const INIT_FN_NAME: &str = "init";
+    let (n, use_col) = strings.last().unwrap();
+    for u in symbols.line_uses(use_fpath, position.line) {
+        if *use_col >= u.col_start() && *use_col <= u.col_end() {
+            let def_loc = u.def_loc();
+            let Some(use_file_mod_definition) = symbols.file_mods.get(use_fpath) else {
+                break;
+            };
+            let Some(use_file_mod_def) = use_file_mod_definition.first() else {
+                break;
+            };
+            if is_definition(
+                symbols,
+                position.line,
+                u.col_start(),
+                use_file_mod_def.fhash(),
+                def_loc,
+            ) {
+                // since it's a definition, there is no point in trying to suggest a name
+                // if one is about to create a fresh identifier
+                completion_finalized = true;
+            }
+            let Some(def_info) = symbols.def_info(&def_loc) else {
+                break;
+            };
+            let DefInfo::Function(mod_ident, v, ..) = def_info else {
+                // not a function
                break;
+            };
+            if !INIT_FN_NAME.starts_with(n) {
+                // not a prefix of "init"
+                break;
+            }
+            if !matches!(v, Visibility::Internal) {
+                // private (otherwise perhaps it's "init_something")
+                break;
+            }
+
+            // get module info containing the init function
+            let Some(mdef) = symbols.mod_defs(&u.def_loc().file_hash(), *mod_ident) else {
+                break;
+            };
+
+            if mdef.functions().contains_key(&(INIT_FN_NAME.into())) {
+                // already has init function
+                break;
+            }
+
+            let sui_ctx_arg = "ctx: &mut TxContext";
+
+            // decide on the list of parameters depending on whether a module containing
+            // the init function has a struct that is a one-time-witness candidate
+            let otw_candidate = Symbol::from(mod_ident.module.value().to_uppercase());
+            let init_snippet = if mdef.structs().contains_key(&otw_candidate) {
+                format!("{INIT_FN_NAME}(${{1:witness}}: {otw_candidate}, {sui_ctx_arg}) {{\n\t${{2:}}\n}}\n")
+            } else {
+                format!("{INIT_FN_NAME}({sui_ctx_arg}) {{\n\t${{1:}}\n}}\n")
+            };
+
+            let init_completion = CompletionItem {
+                label: INIT_FN_NAME.to_string(),
+                kind: Some(CompletionItemKind::SNIPPET),
+                documentation: Some(Documentation::String(
+                    "Module initializer snippet".to_string(),
+                )),
+                insert_text: Some(init_snippet),
+                insert_text_format: Some(InsertTextFormat::SNIPPET),
+                ..Default::default()
+            };
+            completions.push(init_completion);
+            break;
+        }
+    }
+
+    (completions, completion_finalized)
+}
+
+/// Finds white-space separated strings on the line containing auto-completion request and their
+/// locations.
+fn preceding_strings(buffer: &str, position: &Position) -> Vec<(String, u32)> {
+    let mut strings = vec![];
+    let line = match buffer.lines().nth(position.line as usize) {
+        Some(line) => line,
+        None => return strings, // Our buffer does not contain the line, and so must be out of date.
+    };
+
+    let mut chars = line.chars();
+    let mut cur_col = 0;
+    let mut cur_str_start = 0;
+    let mut cur_str = "".to_string();
+    while cur_col <= position.character {
+        let Some(c) = chars.next() else {
+            return strings;
+        };
+        if c == ' ' || c == '\t' {
+            if !cur_str.is_empty() {
+                // finish an already started string
+                strings.push((cur_str, cur_str_start));
+                cur_str = "".to_string();
+            }
+        } else {
+            if cur_str.is_empty() {
+                // start a new string
+                cur_str_start = cur_col;
+            }
+            cur_str.push(c);
+        }
+
+        cur_col += c.len_utf8() as u32;
+    }
+    if !cur_str.is_empty() {
+        // finish the last string
+        strings.push((cur_str, cur_str_start));
+    }
+    strings
+}
+
+/// Checks if a use at a given position is also a definition.
+fn is_definition(
+    symbols: &Symbols,
+    use_line: u32,
+    use_col: u32,
+    use_fhash: FileHash,
+    def_loc: Loc,
+) -> bool {
+    if let Some(use_loc) = symbols
+        .files
+        .line_char_offset_to_loc_opt(use_fhash, use_line, use_col)
+    {
+        // TODO: is overlapping better?
+        def_loc.contains(&use_loc)
+    } else {
+        false
+    }
+}
diff --git a/external-crates/move/crates/move-analyzer/src/completions/utils.rs b/external-crates/move/crates/move-analyzer/src/completions/utils.rs
new file mode 100644
index 0000000000000..93ac2d7f08686
--- /dev/null
+++ b/external-crates/move/crates/move-analyzer/src/completions/utils.rs
@@ -0,0 +1,130 @@
+// Copyright (c) The Move Contributors
+// SPDX-License-Identifier: Apache-2.0
+
+use crate::symbols::{
+    mod_ident_to_ide_string, ret_type_to_ide_str, type_args_to_ide_string, type_list_to_ide_string,
+    ModuleDefs, Symbols,
+};
+use lsp_types::{CompletionItem, CompletionItemKind, CompletionItemLabelDetails, InsertTextFormat};
+use move_compiler::{
+    expansion::ast::ModuleIdent_,
+    naming::ast::{Type, Type_},
+    parser::keywords::PRIMITIVE_TYPES,
+    shared::Name,
+};
+use move_symbol_pool::Symbol;
+use once_cell::sync::Lazy;
+
+/// List of completion items of Move's primitive types.
+pub static PRIMITIVE_TYPE_COMPLETIONS: Lazy<Vec<CompletionItem>> = Lazy::new(|| {
+    let mut primitive_types = PRIMITIVE_TYPES
+        .iter()
+        .map(|label| completion_item(label, CompletionItemKind::KEYWORD))
+        .collect::<Vec<_>>();
+    primitive_types.push(completion_item("address", CompletionItemKind::KEYWORD));
+    primitive_types
+});
+
+/// Get definitions for a given module.
+pub fn mod_defs<'a>(symbols: &'a Symbols, mod_ident: &ModuleIdent_) -> Option<&'a ModuleDefs> {
+    symbols
+        .file_mods
+        .values()
+        .flatten()
+        .find(|mdef| mdef.ident == *mod_ident)
+}
+
+/// Constructs an `lsp_types::CompletionItem` with the given `label` and `kind`.
+pub fn completion_item(label: &str, kind: CompletionItemKind) -> CompletionItem {
+    CompletionItem {
+        label: label.to_owned(),
+        kind: Some(kind),
+        ..Default::default()
+    }
+}
+
+pub fn call_completion_item(
+    mod_ident: &ModuleIdent_,
+    is_macro: bool,
+    method_name_opt: Option<&Symbol>,
+    function_name: &Symbol,
+    type_args: &[Type],
+    arg_names: &[Name],
+    arg_types: &[Type],
+    ret_type: &Type,
+    inside_use: bool,
+) -> CompletionItem {
+    let sig_string = format!(
+        "fun {}({}){}",
+        type_args_to_ide_string(type_args, /* verbose */ false),
+        type_list_to_ide_string(arg_types, /* verbose */ false),
+        ret_type_to_ide_str(ret_type, /* verbose */ false)
+    );
+    // if it's a method call we omit the first argument which is guaranteed to be there as this is a
+    // method and needs a receiver
+    let omitted_arg_count = if method_name_opt.is_some() { 1 } else { 0 };
+    let mut snippet_idx = 0;
+    let arg_snippet = arg_names
+        .iter()
+        .zip(arg_types)
+        .skip(omitted_arg_count)
+        .map(|(name, ty)| {
+            lambda_snippet(ty, &mut snippet_idx).unwrap_or_else(|| {
+                let mut arg_name = name.to_string();
+                if arg_name.starts_with('$') {
+                    arg_name = arg_name[1..].to_string();
+                }
+                snippet_idx += 1;
+                format!("${{{}:{}}}", snippet_idx, arg_name)
+            })
+        })
+        .collect::<Vec<_>>()
+        .join(", ");
+    let macro_suffix = if is_macro { "!"
} else { "" }; + let label_details = Some(CompletionItemLabelDetails { + detail: Some(format!( + " ({}::{})", + mod_ident_to_ide_string(mod_ident), + function_name + )), + description: Some(sig_string), + }); + + let method_name = method_name_opt.unwrap_or(function_name); + let (insert_text, insert_text_format) = if inside_use { + ( + Some(format!("{method_name}")), + Some(InsertTextFormat::PLAIN_TEXT), + ) + } else { + ( + Some(format!("{method_name}{macro_suffix}({arg_snippet})")), + Some(InsertTextFormat::SNIPPET), + ) + }; + + CompletionItem { + label: format!("{method_name}{macro_suffix}()"), + label_details, + kind: Some(CompletionItemKind::METHOD), + insert_text, + insert_text_format, + ..Default::default() + } +} + +fn lambda_snippet(sp!(_, ty): &Type, snippet_idx: &mut i32) -> Option { + if let Type_::Fun(vec, _) = ty { + let arg_snippets = vec + .iter() + .map(|_| { + *snippet_idx += 1; + format!("${{{snippet_idx}}}") + }) + .collect::>() + .join(", "); + *snippet_idx += 1; + return Some(format!("|{arg_snippets}| ${{{snippet_idx}}}")); + } + None +} diff --git a/external-crates/move/crates/move-analyzer/src/lib.rs b/external-crates/move/crates/move-analyzer/src/lib.rs index dc51721087232..0cb8aeb3e2820 100644 --- a/external-crates/move/crates/move-analyzer/src/lib.rs +++ b/external-crates/move/crates/move-analyzer/src/lib.rs @@ -8,7 +8,7 @@ extern crate move_ir_types; pub mod analysis; pub mod analyzer; pub mod compiler_info; -pub mod completion; +pub mod completions; pub mod context; pub mod diagnostics; pub mod inlay_hints; diff --git a/external-crates/move/crates/move-analyzer/src/symbols.rs b/external-crates/move/crates/move-analyzer/src/symbols.rs index d5a1561ca0434..bcb2eb2df7d5c 100644 --- a/external-crates/move/crates/move-analyzer/src/symbols.rs +++ b/external-crates/move/crates/move-analyzer/src/symbols.rs @@ -100,7 +100,10 @@ use move_compiler::{ Identifier, Name, NamedAddressMap, NamedAddressMaps, }, typing::{ - ast::{Exp, ExpListItem, ModuleDefinition, SequenceItem, SequenceItem_, UnannotatedExp_}, + ast::{ + self as T, Exp, ExpListItem, ModuleDefinition, SequenceItem, SequenceItem_, + UnannotatedExp_, + }, visitor::TypingVisitorContext, }, unit_test::filter_test_members::UNIT_TEST_POISON_FUN_NAME, @@ -116,6 +119,44 @@ use move_symbol_pool::Symbol; const MANIFEST_FILE_NAME: &str = "Move.toml"; +type SourceFiles = BTreeMap; + +/// Information about the compiled package and data structures +/// computed during compilation +#[derive(Clone)] +pub struct CompiledPkgInfo { + parsed_program: P::Program, + typed_program: T::Program, + libs: Option>, + source_files: SourceFiles, + mapped_files: MappedFiles, + edition: Option, + compiler_info: Option, +} + +/// Data used during symbols computation +#[derive(Clone)] +pub struct SymbolsComputationData { + mod_outer_defs: BTreeMap, + mod_use_defs: BTreeMap, + references: BTreeMap>, + def_info: BTreeMap, + mod_to_alias_lengths: BTreeMap>, +} + +impl SymbolsComputationData { + pub fn new() -> Self { + Self { + mod_outer_defs: BTreeMap::new(), + mod_use_defs: BTreeMap::new(), + references: BTreeMap::new(), + def_info: BTreeMap::new(), + mod_to_alias_lengths: BTreeMap::new(), + } + } +} + +/// Information about precompiled package dependencies #[derive(Clone)] pub struct PrecompiledPkgDeps { /// Hash of the manifest file for a given package @@ -1545,17 +1586,14 @@ fn has_precompiled_deps( pkg_deps.contains_key(pkg_path) } -/// Main driver to get symbols for the whole package. 
diff --git a/external-crates/move/crates/move-analyzer/src/symbols.rs b/external-crates/move/crates/move-analyzer/src/symbols.rs
index d5a1561ca0434..bcb2eb2df7d5c 100644
--- a/external-crates/move/crates/move-analyzer/src/symbols.rs
+++ b/external-crates/move/crates/move-analyzer/src/symbols.rs
@@ -100,7 +100,10 @@ use move_compiler::{
         Identifier, Name, NamedAddressMap, NamedAddressMaps,
     },
     typing::{
-        ast::{Exp, ExpListItem, ModuleDefinition, SequenceItem, SequenceItem_, UnannotatedExp_},
+        ast::{
+            self as T, Exp, ExpListItem, ModuleDefinition, SequenceItem, SequenceItem_,
+            UnannotatedExp_,
+        },
         visitor::TypingVisitorContext,
     },
     unit_test::filter_test_members::UNIT_TEST_POISON_FUN_NAME,
@@ -116,6 +119,44 @@ use move_symbol_pool::Symbol;
 
 const MANIFEST_FILE_NAME: &str = "Move.toml";
 
+type SourceFiles = BTreeMap<FileHash, (FileName, String, bool)>;
+
+/// Information about the compiled package and data structures
+/// computed during compilation
+#[derive(Clone)]
+pub struct CompiledPkgInfo {
+    parsed_program: P::Program,
+    typed_program: T::Program,
+    libs: Option<Arc<FullyCompiledProgram>>,
+    source_files: SourceFiles,
+    mapped_files: MappedFiles,
+    edition: Option<Edition>,
+    compiler_info: Option<CompilerInfo>,
+}
+
+/// Data used during symbols computation
+#[derive(Clone)]
+pub struct SymbolsComputationData {
+    mod_outer_defs: BTreeMap<String, ModuleDefs>,
+    mod_use_defs: BTreeMap<String, UseDefMap>,
+    references: BTreeMap<Loc, BTreeSet<UseLoc>>,
+    def_info: BTreeMap<Loc, DefInfo>,
+    mod_to_alias_lengths: BTreeMap<String, BTreeMap<Position, usize>>,
+}
+
+impl SymbolsComputationData {
+    pub fn new() -> Self {
+        Self {
+            mod_outer_defs: BTreeMap::new(),
+            mod_use_defs: BTreeMap::new(),
+            references: BTreeMap::new(),
+            def_info: BTreeMap::new(),
+            mod_to_alias_lengths: BTreeMap::new(),
+        }
+    }
+}
+
+/// Information about precompiled package dependencies
 #[derive(Clone)]
 pub struct PrecompiledPkgDeps {
     /// Hash of the manifest file for a given package
@@ -1545,17 +1586,14 @@ fn has_precompiled_deps(
     pkg_deps.contains_key(pkg_path)
 }
 
-/// Main driver to get symbols for the whole package. Returned symbols is an option as only the
-/// correctly computed symbols should be a replacement for the old set - if symbols are not
-/// actually (re)computed and the diagnostics are returned, the old symbolic information should
-/// be retained even if it's getting out-of-date.
-pub fn get_symbols(
+/// Builds a package at a given path and, if successful, returns parsed AST
+/// and typed AST as well as (regardless of success) diagnostics.
+pub fn get_compiled_pkg(
     pkg_dependencies: Arc<Mutex<BTreeMap<PathBuf, PrecompiledPkgDeps>>>,
     ide_files_root: VfsPath,
     pkg_path: &Path,
     lint: LintLevel,
-    cursor_info: Option<(&PathBuf, Position)>,
-) -> Result<(Option<Symbols>, BTreeMap<PathBuf, Vec<Diagnostic>>)> {
+) -> Result<(Option<CompiledPkgInfo>, BTreeMap<PathBuf, Vec<Diagnostic>>)> {
     let build_config = move_package::BuildConfig {
         test_mode: true,
         install_dir: Some(tempdir().unwrap().path().to_path_buf()),
@@ -1752,16 +1790,28 @@ pub fn get_symbols(
     // uwrap's are safe - this function returns earlier (during diagnostics processing)
     // when failing to produce the ASTs
     let parsed_program = parsed_ast.unwrap();
-    let mut typed_program = typed_ast.clone().unwrap();
-
-    let mut mod_outer_defs = BTreeMap::new();
-    let mut mod_use_defs = BTreeMap::new();
-    let mut references = BTreeMap::new();
-    let mut def_info = BTreeMap::new();
+    let typed_program = typed_ast.clone().unwrap();
+    let compiled_pkg_info = CompiledPkgInfo {
+        parsed_program,
+        typed_program,
+        libs: compiled_libs,
+        source_files,
+        mapped_files,
+        edition,
+        compiler_info,
+    };
+    Ok((Some(compiled_pkg_info), ide_diagnostics))
+}
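`get_compiled_pkg` follows the convention of returning diagnostics unconditionally and a payload only on success, so callers can keep their previous (possibly stale) state when compilation fails. A sketch of that convention with stand-in types; none of these are the real move-analyzer types:

```rust
use std::collections::BTreeMap;

// Stand-in for the per-file diagnostics map.
type Diags = BTreeMap<String, Vec<String>>;

// Returns Some(artifact) only on success; diagnostics are always returned.
fn compile(src_ok: bool) -> (Option<String>, Diags) {
    let mut diags = Diags::new();
    if src_ok {
        (Some("new artifact".to_owned()), diags)
    } else {
        diags
            .entry("M1.move".to_owned())
            .or_default()
            .push("parse error".to_owned());
        (None, diags)
    }
}

fn main() {
    let mut current = "old artifact".to_owned();
    let (result, diags) = compile(false);
    // On failure: keep the old artifact, surface the diagnostics.
    if let Some(new) = result {
        current = new;
    }
    assert_eq!(current, "old artifact");
    assert_eq!(diags.len(), 1);
}
```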
+
+/// Preprocess parsed and typed programs prior to actual symbols computation.
+pub fn compute_symbols_pre_process(
+    computation_data: &mut SymbolsComputationData,
+    compiled_pkg_info: &CompiledPkgInfo,
+    cursor_info: Option<(&PathBuf, Position)>,
+) -> Option<CursorContext> {
     let mut file_id_to_lines = HashMap::new();
-    for file_id in mapped_files.file_mapping().values() {
-        let Ok(file) = mapped_files.files().get(*file_id) else {
+    for file_id in compiled_pkg_info.mapped_files.file_mapping().values() {
+        let Ok(file) = compiled_pkg_info.mapped_files.files().get(*file_id) else {
             eprintln!("file id without source code");
             continue;
         };
@@ -1772,48 +1822,51 @@ pub fn get_symbols(
 
     let mut fields_order_info = FieldOrderInfo::new();
 
-    pre_process_parsed_program(&parsed_program, &mut fields_order_info);
+    pre_process_parsed_program(&compiled_pkg_info.parsed_program, &mut fields_order_info);
 
-    let mut cursor_context = compute_cursor_context(&mapped_files, cursor_info);
+    let mut cursor_context = compute_cursor_context(&compiled_pkg_info.mapped_files, cursor_info);
 
     pre_process_typed_modules(
-        &typed_program.modules,
+        &compiled_pkg_info.typed_program.modules,
         &fields_order_info,
-        &mapped_files,
+        &compiled_pkg_info.mapped_files,
         &file_id_to_lines,
-        &mut mod_outer_defs,
-        &mut mod_use_defs,
-        &mut references,
-        &mut def_info,
-        &edition,
+        &mut computation_data.mod_outer_defs,
+        &mut computation_data.mod_use_defs,
+        &mut computation_data.references,
+        &mut computation_data.def_info,
+        &compiled_pkg_info.edition,
         cursor_context.as_mut(),
     );
 
-    if let Some(libs) = compiled_libs.clone() {
+    if let Some(libs) = compiled_pkg_info.libs.clone() {
         pre_process_typed_modules(
             &libs.typing.modules,
             &fields_order_info,
-            &mapped_files,
+            &compiled_pkg_info.mapped_files,
             &file_id_to_lines,
-            &mut mod_outer_defs,
-            &mut mod_use_defs,
-            &mut references,
-            &mut def_info,
-            &edition,
+            &mut computation_data.mod_outer_defs,
+            &mut computation_data.mod_use_defs,
+            &mut computation_data.references,
+            &mut computation_data.def_info,
+            &compiled_pkg_info.edition,
             None, // Cursor can never be in a compiled library(?)
         );
     }
+    cursor_context
+}
 
-    eprintln!("get_symbols loaded");
-
-    let mut file_use_defs = BTreeMap::new();
-    let mut mod_to_alias_lengths = BTreeMap::new();
-
+/// Process parsed program for symbols computation.
+pub fn compute_symbols_parsed_program(
+    computation_data: &mut SymbolsComputationData,
+    compiled_pkg_info: &CompiledPkgInfo,
+    mut cursor_context: Option<CursorContext>,
+) -> Option<CursorContext> {
     let mut parsing_symbolicator = parsing_analysis::ParsingAnalysisContext {
-        mod_outer_defs: &mut mod_outer_defs,
-        files: &mapped_files,
-        references: &mut references,
-        def_info: &mut def_info,
+        mod_outer_defs: &mut computation_data.mod_outer_defs,
+        files: &compiled_pkg_info.mapped_files,
+        references: &mut computation_data.references,
+        def_info: &mut computation_data.def_info,
         use_defs: UseDefMap::new(),
         current_mod_ident_str: None,
         alias_lengths: BTreeMap::new(),
@@ -1822,25 +1875,34 @@ pub fn get_symbols(
     };
 
     parsing_symbolicator.prog_symbols(
-        &parsed_program,
-        &mut mod_use_defs,
-        &mut mod_to_alias_lengths,
+        &compiled_pkg_info.parsed_program,
+        &mut computation_data.mod_use_defs,
+        &mut computation_data.mod_to_alias_lengths,
     );
-    if let Some(libs) = compiled_libs.clone() {
+    if let Some(libs) = compiled_pkg_info.libs.clone() {
         parsing_symbolicator.cursor = None;
         parsing_symbolicator.prog_symbols(
             &libs.parser,
-            &mut mod_use_defs,
-            &mut mod_to_alias_lengths,
+            &mut computation_data.mod_use_defs,
+            &mut computation_data.mod_to_alias_lengths,
         );
     }
+    cursor_context
+}
 
-    let mut compiler_info = compiler_info.unwrap();
+/// Process typed program for symbols computation.
+pub fn compute_symbols_typed_program(
+    mut computation_data: SymbolsComputationData,
+    mut compiled_pkg_info: CompiledPkgInfo,
+    cursor_context: Option<CursorContext>,
+) -> Symbols {
+    let mut file_use_defs = BTreeMap::new();
+    let mut compiler_info = compiled_pkg_info.compiler_info.unwrap();
     let mut typing_symbolicator = typing_analysis::TypingAnalysisContext {
-        mod_outer_defs: &mut mod_outer_defs,
-        files: &mapped_files,
-        references: &mut references,
-        def_info: &mut def_info,
+        mod_outer_defs: &mut computation_data.mod_outer_defs,
+        files: &compiled_pkg_info.mapped_files,
+        references: &mut computation_data.references,
+        def_info: &mut computation_data.def_info,
         use_defs: UseDefMap::new(),
         current_mod_ident_str: None,
         alias_lengths: &BTreeMap::new(),
@@ -1851,40 +1913,80 @@ pub fn get_symbols(
     };
 
     process_typed_modules(
-        &mut typed_program.modules,
-        &source_files,
-        &mod_to_alias_lengths,
+        &mut compiled_pkg_info.typed_program.modules,
+        &compiled_pkg_info.source_files,
+        &computation_data.mod_to_alias_lengths,
         &mut typing_symbolicator,
         &mut file_use_defs,
-        &mut mod_use_defs,
+        &mut computation_data.mod_use_defs,
     );
-    if let Some(libs) = compiled_libs {
+    if let Some(libs) = compiled_pkg_info.libs {
         process_typed_modules(
             &mut libs.typing.modules.clone(),
-            &source_files,
-            &mod_to_alias_lengths,
+            &compiled_pkg_info.source_files,
+            &computation_data.mod_to_alias_lengths,
             &mut typing_symbolicator,
             &mut file_use_defs,
-            &mut mod_use_defs,
+            &mut computation_data.mod_use_defs,
         );
     }
 
     let mut file_mods: FileModules = BTreeMap::new();
-    for d in mod_outer_defs.into_values() {
-        let path = mapped_files.file_path(&d.fhash.clone());
+    for d in computation_data.mod_outer_defs.into_values() {
+        let path = compiled_pkg_info.mapped_files.file_path(&d.fhash.clone());
         file_mods.entry(path.to_path_buf()).or_default().insert(d);
     }
 
-    let symbols = Symbols {
-        references,
+    Symbols {
+        references: computation_data.references,
         file_use_defs,
         file_mods,
-        def_info,
-        files: mapped_files,
+        def_info: computation_data.def_info,
+        files: compiled_pkg_info.mapped_files,
         compiler_info,
         cursor_context,
+    }
+}
+
+/// Compute symbols for a given package from the parsed and typed ASTs,
+/// as well as other auxiliary data provided in `compiled_pkg_info`.
+pub fn compute_symbols(
+    compiled_pkg_info: CompiledPkgInfo,
+    cursor_info: Option<(&PathBuf, Position)>,
+) -> Symbols {
+    let mut symbols_computation_data = SymbolsComputationData::new();
+    let cursor_context = compute_symbols_pre_process(
+        &mut symbols_computation_data,
+        &compiled_pkg_info,
+        cursor_info,
+    );
+    let cursor_context = compute_symbols_parsed_program(
+        &mut symbols_computation_data,
+        &compiled_pkg_info,
+        cursor_context,
+    );
+
+    compute_symbols_typed_program(symbols_computation_data, compiled_pkg_info, cursor_context)
+}
+
+/// Main driver to get symbols for the whole package. Returned symbols is an option as only the
+/// correctly computed symbols should be a replacement for the old set - if symbols are not
+/// actually (re)computed and the diagnostics are returned, the old symbolic information should
+/// be retained even if it's getting out-of-date.
+pub fn get_symbols(
+    pkg_dependencies: Arc<Mutex<BTreeMap<PathBuf, PrecompiledPkgDeps>>>,
+    ide_files_root: VfsPath,
+    pkg_path: &Path,
+    lint: LintLevel,
+    cursor_info: Option<(&PathBuf, Position)>,
+) -> Result<(Option<Symbols>, BTreeMap<PathBuf, Vec<Diagnostic>>)> {
+    let (compiled_pkg_info_opt, ide_diagnostics) =
+        get_compiled_pkg(pkg_dependencies, ide_files_root, pkg_path, lint)?;
+    let Some(compiled_pkg_info) = compiled_pkg_info_opt else {
+        return Ok((None, ide_diagnostics));
     };
+    let symbols = compute_symbols(compiled_pkg_info, cursor_info);
 
     eprintln!("get_symbols load complete");
 
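The split into `get_compiled_pkg` plus `compute_symbols` is what lets the IDE tests further below compile a package once and recompute cursor-dependent symbols many times. A purely illustrative sketch with stub types; these are not the real `CompiledPkgInfo` and `Symbols`:

```rust
// Stub standing in for the expensive compilation result.
#[derive(Clone)]
struct CompiledPkgInfo;

struct Symbols {
    cursor_line: Option<u32>,
}

// Cheap phase: reuses the compiled artifact, varies only the cursor.
fn compute_symbols(_pkg: &CompiledPkgInfo, cursor_line: Option<u32>) -> Symbols {
    Symbols { cursor_line }
}

fn main() {
    let compiled = CompiledPkgInfo; // expensive compilation, done once
    for line in [3, 7, 42] {
        // cheap recomputation per cursor position, as the tests below do
        let symbols = compute_symbols(&compiled, Some(line));
        assert_eq!(symbols.cursor_line, Some(line));
    }
}
```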
@@ -2004,7 +2106,7 @@ fn pre_process_typed_modules(
 
 fn process_typed_modules<'a>(
     typed_modules: &mut UniqueMap<ModuleIdent, ModuleDefinition>,
-    source_files: &BTreeMap<FileHash, (FileName, String, bool)>,
+    source_files: &SourceFiles,
     mod_to_alias_lengths: &'a BTreeMap<String, BTreeMap<Position, usize>>,
     typing_symbolicator: &mut typing_analysis::TypingAnalysisContext<'a>,
     file_use_defs: &mut FileUseDefs,
@@ -2032,10 +2134,7 @@ fn process_typed_modules<'a>(
     }
 }
 
-fn file_sources(
-    resolved_graph: &ResolvedGraph,
-    overlay_fs: VfsPath,
-) -> BTreeMap<FileHash, (FileName, String, bool)> {
+fn file_sources(resolved_graph: &ResolvedGraph, overlay_fs: VfsPath) -> SourceFiles {
     resolved_graph
         .package_table
         .iter()
@@ -2074,7 +2173,17 @@ fn file_sources(
 pub fn expansion_mod_ident_to_map_key(mod_ident: &E::ModuleIdent_) -> String {
     use E::Address as A;
     match mod_ident.address {
-        A::Numerical { value, .. } => format!("{value}::{}", mod_ident.module).to_string(),
+        A::Numerical {
+            name,
+            value,
+            name_conflict: _,
+        } => {
+            if let Some(n) = name {
+                format!("({n}={value})::{}", mod_ident.module).to_string()
+            } else {
+                format!("{value}::{}", mod_ident.module).to_string()
+            }
+        }
         A::NamedUnassigned(n) => format!("{n}::{}", mod_ident.module).to_string(),
     }
 }
@@ -2090,7 +2199,14 @@ pub fn parsed_address(ln: P::LeadingNameAccess, pkg_addresses: &NamedAddressMap)
         P::LeadingNameAccess_::AnonymousAddress(bytes) => E::Address::anonymous(loc, bytes),
         P::LeadingNameAccess_::GlobalAddress(name) => E::Address::NamedUnassigned(name),
         P::LeadingNameAccess_::Name(name) => match pkg_addresses.get(&name.value).copied() {
-            Some(addr) => E::Address::anonymous(loc, addr),
+            // set `name_conflict` to `true` to force displaying (pkg_name=addr) so that the string
+            // representing the map key is consistent with what's generated for expansion ModuleIdent in
+            // `expansion_mod_ident_to_map_key`
+            Some(addr) => E::Address::Numerical {
+                name: Some(name),
+                value: sp(loc, addr),
+                name_conflict: true,
+            },
             None => E::Address::NamedUnassigned(name),
         },
     }
 }
@@ -2103,7 +2219,8 @@ pub fn parsing_leading_and_mod_names_to_map_key(
     ln: P::LeadingNameAccess,
     name: P::ModuleName,
 ) -> String {
-    format!("{}::{}", parsed_address(ln, pkg_addresses), name).to_string()
+    let parsed_addr = parsed_address(ln, pkg_addresses);
+    format!("{}::{}", parsed_addr, name).to_string()
 }
 
 /// Produces module ident string of the form pkg::module to be used as a map key.
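The hunks above standardize the map-key format so that parsing-time and expansion-time keys agree: `(name=addr)::module` when a named address is known, plain `addr::module` otherwise. A small sketch of the format, using stand-in strings rather than the compiler's address types:

```rust
// Map-key format used for module lookups: include the package name
// alongside the numerical address whenever the name is known.
fn map_key(name: Option<&str>, value: &str, module: &str) -> String {
    match name {
        Some(n) => format!("({n}={value})::{module}"),
        None => format!("{value}::{module}"),
    }
}

fn main() {
    assert_eq!(map_key(Some("Pkg"), "0x0", "M1"), "(Pkg=0x0)::M1");
    assert_eq!(map_key(None, "0x1", "vector"), "0x1::vector");
}
```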
diff --git a/external-crates/move/crates/move-analyzer/tests/ide_testsuite.rs b/external-crates/move/crates/move-analyzer/tests/ide_testsuite.rs
index 97d0d6d39b7a6..bfb9b0ae2c9c1 100644
--- a/external-crates/move/crates/move-analyzer/tests/ide_testsuite.rs
+++ b/external-crates/move/crates/move-analyzer/tests/ide_testsuite.rs
@@ -12,11 +12,12 @@ use std::{
 use json_comments::StripComments;
 use lsp_types::{InlayHintKind, InlayHintLabel, InlayHintTooltip, Position};
 use move_analyzer::{
-    completion::completion_items,
+    completions::compute_completions_with_symbols,
     inlay_hints::inlay_hints_internal,
     symbols::{
-        def_info_doc_string, get_symbols, maybe_convert_for_guard, PrecompiledPkgDeps, Symbols,
-        UseDefMap,
+        compute_symbols, compute_symbols_parsed_program, compute_symbols_pre_process,
+        def_info_doc_string, get_compiled_pkg, maybe_convert_for_guard, CompiledPkgInfo, Symbols,
+        SymbolsComputationData, UseDefMap,
     },
 };
 use move_command_line_common::testing::{
@@ -177,9 +178,8 @@ impl CompletionTest {
     fn test(
         &self,
         test_idx: usize,
-        ide_files_root: VfsPath,
-        pkg_dependencies: Arc<Mutex<BTreeMap<PathBuf, PrecompiledPkgDeps>>>,
-        symbols: &Symbols,
+        compiled_pkg_info: CompiledPkgInfo,
+        symbols: &mut Symbols,
         output: &mut dyn std::io::Write,
         use_file_path: &Path,
     ) -> anyhow::Result<()> {
@@ -189,13 +189,27 @@ impl CompletionTest {
             line: lsp_use_line,
             character: lsp_use_col,
         };
-        let items = completion_items(
-            symbols,
-            ide_files_root,
-            pkg_dependencies,
-            use_file_path,
-            use_pos,
+
+        // symbols do not change for each test, so we can reuse the same symbols
+        // but we need to recompute the cursor each time
+        let cursor_path = use_file_path.to_path_buf();
+        let cursor_info = Some((&cursor_path, use_pos));
+        let mut symbols_computation_data = SymbolsComputationData::new();
+        // we only compute cursor context and tag it on the existing symbols to avoid spending time
+        // recomputing all symbols (saves quite a bit of time when running the test suite)
+        let mut cursor_context = compute_symbols_pre_process(
+            &mut symbols_computation_data,
+            &compiled_pkg_info,
+            cursor_info,
         );
+        cursor_context = compute_symbols_parsed_program(
+            &mut symbols_computation_data,
+            &compiled_pkg_info,
+            cursor_context,
+        );
+        symbols.cursor_context = cursor_context;
+
+        let items = compute_completions_with_symbols(symbols, &cursor_path, use_pos);
         writeln!(output, "-- test {test_idx} -------------------")?;
         writeln!(
             output,
@@ -225,8 +239,8 @@ impl CursorTest {
     fn test(
         &self,
         test_ndx: usize,
-        ide_files_root: VfsPath,
-        pkg_dependencies: Arc<Mutex<BTreeMap<PathBuf, PrecompiledPkgDeps>>>,
+        compiled_pkg_info: CompiledPkgInfo,
+        symbols: &mut Symbols,
         output: &mut dyn std::io::Write,
         path: &Path,
     ) -> anyhow::Result<()> {
@@ -238,21 +252,29 @@ impl CursorTest {
         let line = line - 1; // 0th-based
         let character = character - 1; // 0th-based
 
-        let (symbols_opt, _) = get_symbols(
-            pkg_dependencies,
-            ide_files_root,
-            path,
-            LintLevel::None,
-            Some((&path.to_path_buf(), Position { line, character })),
-        )?;
-        let symbols = symbols_opt.unwrap();
+        // symbols do not change for each test, so we can reuse the same symbols
+        // but we need to recompute the cursor each time
+        let cursor_path = path.to_path_buf();
+        let cursor_info = Some((&cursor_path, Position { line, character }));
+        let mut symbols_computation_data = SymbolsComputationData::new();
+        let mut cursor_context = compute_symbols_pre_process(
+            &mut symbols_computation_data,
+            &compiled_pkg_info,
+            cursor_info,
+        );
+        cursor_context = compute_symbols_parsed_program(
+            &mut symbols_computation_data,
+            &compiled_pkg_info,
+            cursor_context,
+        );
+        symbols.cursor_context = cursor_context.clone();
 
         writeln!(
             output,
             "-- test {test_ndx} @ {line}:{character} ------------"
         )?;
         writeln!(output, "expected: {description}")?;
-        writeln!(output, "{}", symbols.cursor_context.unwrap())?;
+        writeln!(output, "{}", cursor_context.unwrap())?;
         Ok(())
     }
 }
@@ -341,34 +363,32 @@ fn check_expected(expected_path: &Path, result: &str) -> anyhow::Result<()> {
 
 fn initial_symbols(
     project: String,
-) -> datatest_stable::Result<(
-    PathBuf,
-    VfsPath,
-    Arc<Mutex<BTreeMap<PathBuf, PrecompiledPkgDeps>>>,
-    Symbols,
-)> {
+) -> datatest_stable::Result<(PathBuf, CompiledPkgInfo, Symbols)> {
     let base_path = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
     let mut project_path = base_path.clone();
     project_path.push(project);
 
     let ide_files_root: VfsPath = MemoryFS::new().into();
     let pkg_deps = Arc::new(Mutex::new(BTreeMap::new()));
-    let (symbols_opt, _) = get_symbols(
+
+    let (compiled_pkg_info_opt, _) = get_compiled_pkg(
         pkg_deps.clone(),
         ide_files_root.clone(),
         project_path.as_path(),
         LintLevel::None,
-        None,
     )?;
-    let symbols = symbols_opt.ok_or("DID NOT FIND SYMBOLS")?;
-    Ok((project_path, ide_files_root, pkg_deps, symbols))
+
+    let compiled_pkg_info = compiled_pkg_info_opt.ok_or("PACKAGE COMPILATION FAILED")?;
+    let symbols = compute_symbols(compiled_pkg_info.clone(), None);
+
+    Ok((project_path, compiled_pkg_info, symbols))
 }
 
 fn use_def_test_suite(
     project: String,
     file_tests: BTreeMap<String, Vec<UseDefTest>>,
 ) -> datatest_stable::Result<()> {
-    let (project_path, _, _, symbols) = initial_symbols(project)?;
+    let (project_path, _, symbols) = initial_symbols(project)?;
     let mut output: BufWriter<_> = BufWriter::new(Vec::new());
     let writer: &mut dyn io::Write = output.get_mut();
 
@@ -402,7 +422,7 @@ fn completion_test_suite(
     project: String,
     file_tests: BTreeMap<String, Vec<CompletionTest>>,
 ) -> datatest_stable::Result<()> {
-    let (project_path, ide_files_root, pkg_deps, symbols) = initial_symbols(project)?;
+    let (project_path, compiled_pkg_info, mut symbols) = initial_symbols(project)?;
     let mut output: BufWriter<_> = BufWriter::new(Vec::new());
     let writer: &mut dyn io::Write = output.get_mut();
 
@@ -419,14 +439,7 @@ fn completion_test_suite(
         let cpath = dunce::canonicalize(&fpath).unwrap();
 
         for (idx, test) in tests.iter().enumerate() {
-            test.test(
-                idx,
-                ide_files_root.clone(),
-                pkg_deps.clone(),
-                &symbols,
-                writer,
-                &cpath,
-            )?;
+            test.test(idx, compiled_pkg_info.clone(), &mut symbols, writer, &cpath)?;
         }
     }
 
@@ -438,7 +451,7 @@ fn cursor_test_suite(
     project: String,
     file_tests: BTreeMap<String, Vec<CursorTest>>,
 ) -> datatest_stable::Result<()> {
-    let (project_path, ide_files_root, pkg_deps, _) = initial_symbols(project)?;
+    let (project_path, compiled_pkg_info, mut symbols) = initial_symbols(project)?;
     let mut output: BufWriter<_> = BufWriter::new(Vec::new());
     let writer: &mut dyn io::Write = output.get_mut();
 
@@ -454,13 +467,7 @@ fn cursor_test_suite(
         fpath.push(format!("sources/{file}"));
         let cpath = dunce::canonicalize(&fpath).unwrap();
         for (idx, test) in tests.iter().enumerate() {
-            test.test(
-                idx,
-                ide_files_root.clone(),
-                pkg_deps.clone(),
-                writer,
-                &cpath,
-            )?;
+            test.test(idx, compiled_pkg_info.clone(), &mut symbols, writer, &cpath)?;
         }
     }
 
@@ -472,7 +479,7 @@ fn hint_test_suite(
     project: String,
     file_tests: BTreeMap<String, Vec<HintTest>>,
 ) -> datatest_stable::Result<()> {
-    let (project_path, _, _, symbols) = initial_symbols(project)?;
+    let (project_path, _, symbols) = initial_symbols(project)?;
     let mut output: BufWriter<_> = BufWriter::new(Vec::new());
     let writer: &mut dyn io::Write = output.get_mut();
diff --git a/external-crates/move/crates/move-analyzer/tests/same-pkg-name-error/dep-pkg/Move.toml b/external-crates/move/crates/move-analyzer/tests/same-pkg-name-error/dep-pkg/Move.toml
new file mode 100644
index 0000000000000..ece1e0a62fea8
--- /dev/null
+++ b/external-crates/move/crates/move-analyzer/tests/same-pkg-name-error/dep-pkg/Move.toml
@@ -0,0 +1,9 @@
+[package]
+name = "DepPkg"
+edition = "2024.alpha"
+
+[dependencies]
+MoveStdlib = { local = "../../../../move-stdlib/", addr_subst = { "std" = "0x1" } }
+
+[addresses]
+DepPkg = "0x0"
diff --git a/external-crates/move/crates/move-analyzer/tests/same-pkg-name-error/dep-pkg/sources/M1.move b/external-crates/move/crates/move-analyzer/tests/same-pkg-name-error/dep-pkg/sources/M1.move
new file mode 100644
index 0000000000000..59051e45fe5e9
--- /dev/null
+++ b/external-crates/move/crates/move-analyzer/tests/same-pkg-name-error/dep-pkg/sources/M1.move
@@ -0,0 +1,4 @@
+#[test_only]
+module DepPkg::M1 {
+    public fun foo() {}
+}
diff --git a/external-crates/move/crates/move-analyzer/tests/same-pkg-name-error/pkg/Move.toml b/external-crates/move/crates/move-analyzer/tests/same-pkg-name-error/pkg/Move.toml
new file mode 100644
index 0000000000000..0411f2618e1e0
--- /dev/null
+++ b/external-crates/move/crates/move-analyzer/tests/same-pkg-name-error/pkg/Move.toml
@@ -0,0 +1,10 @@
+[package]
+name = "Pkg"
+edition = "2024.alpha"
+
+[dependencies]
+MoveStdlib = { local = "../../../../move-stdlib/", addr_subst = { "std" = "0x1" } }
+DepPkg = { local = "../dep-pkg" }
+
+[addresses]
+Pkg = "0x0"
diff --git a/external-crates/move/crates/move-analyzer/tests/same-pkg-name-error/pkg/sources/M1.move b/external-crates/move/crates/move-analyzer/tests/same-pkg-name-error/pkg/sources/M1.move
new file mode 100644
index 0000000000000..2a11cefad8b61
--- /dev/null
+++ b/external-crates/move/crates/move-analyzer/tests/same-pkg-name-error/pkg/sources/M1.move
@@ -0,0 +1,15 @@
+#[test_only]
+module Pkg::M1 {
+    public fun foo() {}
+}
+
+
+module Pkg::M2 {
+
+    #[test]
+    public fun bar() {
+        Pkg::M1::foo();
+        DepPkg::M1::foo()
+    }
+
+}
diff --git a/external-crates/move/crates/move-analyzer/tests/same_pkg_name_error.exp b/external-crates/move/crates/move-analyzer/tests/same_pkg_name_error.exp
new file mode 100644
index 0000000000000..58cee3e93a404
--- /dev/null
+++ b/external-crates/move/crates/move-analyzer/tests/same_pkg_name_error.exp
@@ -0,0 +1 @@
+== M1.move ========================================================
diff --git a/external-crates/move/crates/move-analyzer/tests/same_pkg_name_error.ide b/external-crates/move/crates/move-analyzer/tests/same_pkg_name_error.ide
new file mode 100644
index 0000000000000..8b1f96392bbb6
--- /dev/null
+++ b/external-crates/move/crates/move-analyzer/tests/same_pkg_name_error.ide
@@ -0,0 +1,8 @@
+{
+    "UseDef": {
+        "project": "tests/same-pkg-name-error/pkg",
+        "file_tests": {
+            "M1.move": []
+        }
+    }
+}
\ No newline at end of file
diff --git a/external-crates/move/crates/move-analyzer/trace-adapter/src/adapter.ts b/external-crates/move/crates/move-analyzer/trace-adapter/src/adapter.ts
index 109b07dea465e..dce30618424e8 100644
--- a/external-crates/move/crates/move-analyzer/trace-adapter/src/adapter.ts
+++ b/external-crates/move/crates/move-analyzer/trace-adapter/src/adapter.ts
@@ -11,7 +11,7 @@ import {
 } from '@vscode/debugadapter';
 import { DebugProtocol } from '@vscode/debugprotocol';
 import * as path from 'path';
-import { Runtime, RuntimeEvents } from './runtime';
+import { Runtime, RuntimeEvents, IRuntimeStack } from './runtime';
 
 const enum LogLevel {
     Log = 'log',
@@ -19,6 +19,17 @@ const enum LogLevel {
     None = 'none'
 }
 
+/**
+ * Customized stack trace response that includes additional data.
+ */
+interface CustomizedStackTraceResponse extends DebugProtocol.StackTraceResponse {
+    body: {
+        stackFrames: StackFrame[];
+        totalFrames?: number;
+        optimized_lines: number[];
+    };
+}
+
 /**
  * Converts a log level string to a Logger.LogLevel.
  *
@@ -137,7 +148,10 @@ export class MoveDebugSession extends LoggingDebugSession {
         this.sendEvent(new InitializedEvent());
     }
 
-    protected async launchRequest(response: DebugProtocol.LaunchResponse, args: ILaunchRequestArguments): Promise<void> {
+    protected async launchRequest(
+        response: DebugProtocol.LaunchResponse,
+        args: ILaunchRequestArguments
+    ): Promise<void> {
        logger.setup(convertLoggerLogLevel(args.logLevel ??
LogLevel.None), false); logger.log("Launching trace viewer for file: " + args.source + " and trace: " + args.traceInfo); try { @@ -150,7 +164,10 @@ export class MoveDebugSession extends LoggingDebugSession { this.sendEvent(new StoppedEvent("entry", MoveDebugSession.THREAD_ID)); } - protected configurationDoneRequest(response: DebugProtocol.ConfigurationDoneResponse, args: DebugProtocol.ConfigurationDoneArguments): void { + protected configurationDoneRequest( + response: DebugProtocol.ConfigurationDoneResponse, + _args: DebugProtocol.ConfigurationDoneArguments + ): void { this.sendResponse(response); } @@ -163,15 +180,22 @@ export class MoveDebugSession extends LoggingDebugSession { this.sendResponse(response); } - protected stackTraceRequest(response: DebugProtocol.StackTraceResponse, args: DebugProtocol.StackTraceArguments): void { + protected stackTraceRequest( + response: CustomizedStackTraceResponse, + args: DebugProtocol.StackTraceArguments + ): void { try { const runtimeStack = this.runtime.stack(); + const stack_height = runtimeStack.frames.length; response.body = { stackFrames: runtimeStack.frames.map(frame => { const fileName = path.basename(frame.file); return new StackFrame(frame.id, frame.name, new Source(fileName, frame.file), frame.line); }).reverse(), - totalFrames: runtimeStack.frames.length + totalFrames: stack_height, + optimized_lines: stack_height > 0 + ? runtimeStack.frames[stack_height - 1].sourceMap.optimized_lines + : [] }; } catch (err) { response.success = false; @@ -180,9 +204,16 @@ export class MoveDebugSession extends LoggingDebugSession { this.sendResponse(response); } - protected nextRequest(response: DebugProtocol.NextResponse, args: DebugProtocol.NextArguments): void { + protected nextRequest( + response: DebugProtocol.NextResponse, + args: DebugProtocol.NextArguments + ): void { try { - this.runtime.step(true); + this.runtime.step( + /* next */ true, + /* stopAtCloseFrame */ false, + /* nextLineSkip */ true + ); } catch (err) { response.success = false; response.message = err instanceof Error ? err.message : String(err); @@ -190,10 +221,17 @@ export class MoveDebugSession extends LoggingDebugSession { this.sendResponse(response); } - protected stepInRequest(response: DebugProtocol.StepInResponse, args: DebugProtocol.StepInArguments): void { + protected stepInRequest( + response: DebugProtocol.StepInResponse, + args: DebugProtocol.StepInArguments + ): void { let terminate = false; try { - terminate = this.runtime.step(false); + terminate = this.runtime.step( + /* next */ false, + /* stopAtCloseFrame */ false, + /* nextLineSkip */ true + ); } catch (err) { response.success = false; response.message = err instanceof Error ? err.message : String(err); @@ -204,17 +242,27 @@ export class MoveDebugSession extends LoggingDebugSession { this.sendResponse(response); } - protected stepOutRequest(response: DebugProtocol.StepOutResponse, args: DebugProtocol.StepOutArguments): void { + protected stepOutRequest( + response: DebugProtocol.StepOutResponse, + args: DebugProtocol.StepOutArguments + ): void { + let terminate = false; try { - this.runtime.stepOut(); + terminate = this.runtime.stepOut(); } catch (err) { response.success = false; response.message = err instanceof Error ? 
 err.message : String(err);
         }
+        if (terminate) {
+            this.sendEvent(new TerminatedEvent());
+        }
         this.sendResponse(response);
     }
 
-    protected stepBackRequest(response: DebugProtocol.StepBackResponse, args: DebugProtocol.StepBackArguments): void {
+    protected stepBackRequest(
+        response: DebugProtocol.StepBackResponse,
+        args: DebugProtocol.StepBackArguments
+    ): void {
         try {
             this.runtime.stepBack();
         } catch (err) {
@@ -224,8 +272,44 @@ export class MoveDebugSession extends LoggingDebugSession {
         this.sendResponse(response);
     }
 
+    protected continueRequest(
+        response: DebugProtocol.ContinueResponse,
+        args: DebugProtocol.ContinueArguments
+    ): void {
+        let terminate = false;
+        try {
+            terminate = this.runtime.continue(/* reverse */ false);
+        } catch (err) {
+            response.success = false;
+            response.message = err instanceof Error ? err.message : String(err);
+        }
+        if (terminate) {
+            this.sendEvent(new TerminatedEvent());
+        }
+        this.sendResponse(response);
+    }
+
+    protected reverseContinueRequest(
+        response: DebugProtocol.ReverseContinueResponse,
+        args: DebugProtocol.ReverseContinueArguments
+    ): void {
+        let terminate = false;
+        try {
+            terminate = this.runtime.continue(/* reverse */ true);
+        } catch (err) {
+            response.success = false;
+            response.message = err instanceof Error ? err.message : String(err);
+        }
+        if (terminate) {
+            this.sendEvent(new TerminatedEvent());
+        }
+        this.sendResponse(response);
+    }
 
-    protected disconnectRequest(response: DebugProtocol.DisconnectResponse, args: DebugProtocol.DisconnectArguments): void {
+    protected disconnectRequest(
+        response: DebugProtocol.DisconnectResponse,
+        args: DebugProtocol.DisconnectArguments
+    ): void {
         // Cleanup and terminate the debug session
         this.sendEvent(new TerminatedEvent());
         this.sendResponse(response);
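The runtime changes in the next file make `step` stoppable at a `CloseFrame` event so that `stepOut` can drive it in a loop. A language-agnostic sketch (in Rust, with a hypothetical, simplified event shape rather than the real trace format) of the frame-matching walk `stepOut` performs:

```rust
#[derive(Clone, Copy, PartialEq, Debug)]
enum Event {
    Instruction,
    OpenFrame(u32),
    CloseFrame(u32),
}

// Consume events until the CloseFrame matching the current (topmost) frame,
// popping frames as they close; returns the index of that CloseFrame event.
fn step_out(events: &[Event], mut idx: usize, frames: &mut Vec<u32>) -> Option<usize> {
    let current = *frames.last()?;
    while idx + 1 < events.len() {
        idx += 1;
        match events[idx] {
            Event::OpenFrame(id) => frames.push(id),
            Event::CloseFrame(id) => {
                frames.pop();
                if id == current {
                    return Some(idx); // caller resumes at the next event
                }
            }
            Event::Instruction => {}
        }
    }
    None // ran past the end of the trace without finding the CloseFrame
}

fn main() {
    use Event::*;
    let events = [OpenFrame(0), Instruction, OpenFrame(1), Instruction, CloseFrame(1), Instruction];
    let mut frames = vec![0, 1];
    assert_eq!(step_out(&events, 3, &mut frames), Some(4));
    assert_eq!(frames, vec![0]);
}
```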
diff --git a/external-crates/move/crates/move-analyzer/trace-adapter/src/runtime.ts b/external-crates/move/crates/move-analyzer/trace-adapter/src/runtime.ts
index 670fbc2fcd13e..e9037438c1b81 100644
--- a/external-crates/move/crates/move-analyzer/trace-adapter/src/runtime.ts
+++ b/external-crates/move/crates/move-analyzer/trace-adapter/src/runtime.ts
@@ -32,7 +32,7 @@ interface IRuntimeStackFrame {
  * Describes the runtime stack during trace viewing session
  * (oldest frame is at the bottom of the stack at index 0).
  */
-interface IRuntimeStack {
+export interface IRuntimeStack {
     frames: IRuntimeStackFrame[];
 }
 
@@ -115,7 +115,7 @@ export class Runtime extends EventEmitter {
         this.frameStack = {
            frames: [newFrame]
        };
-        this.step(false);
+        this.step(/* next */ false, /* stopAtCloseFrame */ false, /* nextLineSkip */ true);
     }
 
     /**
@@ -131,10 +131,16 @@ export class Runtime extends EventEmitter {
      * Handles step/next adapter action.
      *
      * @param next determines if it's `next` (or otherwise `step`) action.
+     * @param stopAtCloseFrame determines if the action should stop at `CloseFrame` event
+     * (rather than proceeding to the following instruction).
      * @returns `true` if the trace viewing session is finished, `false` otherwise.
      * @throws Error with a descriptive error message if the step event cannot be handled.
      */
-    public step(next: boolean): boolean {
+    public step(
+        next: boolean,
+        stopAtCloseFrame: boolean,
+        nextLineSkip: boolean
+    ): boolean {
         this.eventIndex++;
         if (this.eventIndex >= this.trace.events.length) {
             this.sendEvent(RuntimeEvents.stopOnStep);
@@ -143,40 +149,35 @@ export class Runtime extends EventEmitter {
         let currentEvent = this.trace.events[this.eventIndex];
         if (currentEvent.type === 'Instruction') {
             let sameLine = this.instruction(currentEvent);
-            if (sameLine) {
-                return this.step(next);
+            if (sameLine && nextLineSkip) {
+                return this.step(next, stopAtCloseFrame, nextLineSkip);
             }
             this.sendEvent(RuntimeEvents.stopOnStep);
             return false;
         } else if (currentEvent.type === 'OpenFrame') {
-            if (next) {
-                // skip all events until the corresponding CloseFrame event
-                // rather than creating a new frame
-                const openFrameID = currentEvent.id;
-                while (true) {
-                    this.eventIndex++;
-                    if (this.eventIndex >= this.trace.events.length) {
-                        return true;
-                    }
-                    currentEvent = this.trace.events[this.eventIndex];
-                    if (currentEvent.type === 'CloseFrame' && currentEvent.id === openFrameID) {
-                        break;
-                    }
-                }
-                return this.step(next);
-            }
             // create a new frame and push it onto the stack
             const newFrame = this.newStackFrame(currentEvent.id, currentEvent.name, currentEvent.modInfo);
             this.frameStack.frames.push(newFrame);
-            return this.step(next);
+            if (next) {
+                // step out of the frame right away
+                return this.stepOut();
+            } else {
+                return this.step(next, stopAtCloseFrame, nextLineSkip);
+            }
         } else if (currentEvent.type === 'CloseFrame') {
-            // pop the top frame from the stack
-            this.frameStack.frames.pop();
-            return this.step(next);
+            if (stopAtCloseFrame) {
+                // don't do anything as the caller needs to inspect
+                // the event before proceeding
+                return false;
+            } else {
+                // pop the top frame from the stack
+                this.frameStack.frames.pop();
+                return this.step(next, stopAtCloseFrame, nextLineSkip);
+            }
         } else {
             // ignore other events
-            return this.step(next);
+            return this.step(next, stopAtCloseFrame, nextLineSkip);
         }
     }
 
@@ -190,6 +191,8 @@ export class Runtime extends EventEmitter {
         const stackHeight = this.frameStack.frames.length;
         if (stackHeight <= 1) {
             // do nothing as there is no frame to step out to
+            logger.log("At the outermost function, cannot step out");
+            this.sendEvent(RuntimeEvents.stopOnStep);
             return false;
         }
         // newest frame is at the top of the stack
@@ -198,45 +201,63 @@ export class Runtime extends EventEmitter {
         // skip all events until the corresponding CloseFrame event,
         // pop the top frame from the stack, and proceed to the next event
         while (true) {
-            this.eventIndex++;
-            if (this.eventIndex >= this.trace.events.length) {
+            if (this.step(/* next */ false, /* stopAtCloseFrame */ true, /* nextLineSkip */ true)) {
+                // trace viewing session finished
                 throw new Error("Cannot find corresponding CloseFrame event for function: " + currentFrame.name);
             }
             currentEvent = this.trace.events[this.eventIndex];
-            if (currentEvent.type === 'CloseFrame' && currentEvent.id === currentFrame.id) {
-                break;
+            if (currentEvent.type === 'CloseFrame') {
+                const currentFrameID = currentFrame.id;
+                // `step` call finished at the CloseFrame event
+                // but did not process it so we need to pop the frame here
+                this.frameStack.frames.pop();
+                if (currentEvent.id === currentFrameID) {
+                    break;
+                }
             }
         }
-        this.frameStack.frames.pop();
-        return this.step(false);
+
+        // Do not skip to same line when stepping out as this may lead
+        // to unusual behavior if multiple bytecode instructions are on the same line.
+ // For example, consider the following code: + // ``` + // assert(foo() == bar()); + // ``` + // In the code above if we enter `foo` and then step out of it, + // we want to end up on the same line (where the next instruction is) + // but we don't want to call `bar` in the same debugging step. + return this.step(/* next */ false, /* stopAtCloseFrame */ false, /* nextLineSkip */ false); } /** * Handles "step back" adapter action. + * @returns `true` if was able to step back, `false` otherwise. * @throws Error with a descriptive error message if the step back event cannot be handled. */ - public stepBack() { - if (this.eventIndex === 1) { + public stepBack(): boolean { + if (this.eventIndex <= 1) { // no where to step back to (event 0 is the `OpenFrame` event for the first frame) // and is processed in runtime.start() which is executed only once + logger.log("At the beginning of the trace, cannot step back"); this.sendEvent(RuntimeEvents.stopOnStep); - return; + return false; } let currentEvent = this.trace.events[this.eventIndex - 1]; if (currentEvent.type === 'CloseFrame') { // cannot step back into or over function calls + logger.log("After a function call, cannot step back"); this.sendEvent(RuntimeEvents.stopOnStep); - return; + return false; } else { this.eventIndex--; if (currentEvent.type === 'Instruction') { let sameLine = this.instruction(currentEvent); if (sameLine) { this.stepBack(); - return; + return true; } this.sendEvent(RuntimeEvents.stopOnStep); - return; + return true; } else if (currentEvent.type === 'OpenFrame') { const stackHeight = this.frameStack.frames.length; if (stackHeight <= 0) { @@ -261,10 +282,11 @@ export class Runtime extends EventEmitter { // cannot simply call stepBack as we are stepping back to the same line // that is now in the current frame, which would result in unintentionally // recursing to previous events - if (this.eventIndex === 0) { + if (this.eventIndex <= 1) { // no where to step back to + logger.log("At the beginning of the trace, cannot step back"); this.sendEvent(RuntimeEvents.stopOnStep); - return; + return true; // we actually stepped back just can't step back further } this.eventIndex--; let prevCurrentEvent = this.trace.events[this.eventIndex]; @@ -283,19 +305,43 @@ export class Runtime extends EventEmitter { + this.frameStack.frames[stackHeight - 1].name + " as there would be no frame on the stack afterwards" ); - - throw new Error("Wrong line to step back to from a function call"); } this.sendEvent(RuntimeEvents.stopOnStep); - return; + return true; } else { // ignore other events this.stepBack(); - return; + return true; } } } + /** + * Handles "continue" adapter action. + * @returns `true` if the trace viewing session is finished, `false` otherwise. + * @throws Error with a descriptive error message if the continue event cannot be handled. + */ + public continue(reverse: boolean): boolean { + if (reverse) { + while (true) { + if (!this.stepBack()) { + return false; + } + } + } else { + while (true) { + if (this.step( + /* next */ false, + /* stopAtCloseFrame */ false, + /* nextLineSkip */ true) + ) { + return true; + } + } + } + + } + /** * Handles `Instruction` trace event which represents instruction in the current stack frame. * @@ -435,7 +481,7 @@ function getPkgNameFromManifest(pkgRoot: string): string | undefined { * @param directory path to the directory containing Move source files. * @param filesMap map to update with file information. 
 */
-function hashToFileMap(directory: string, filesMap: Map<string, IFileInfo>) {
+function hashToFileMap(directory: string, filesMap: Map<string, IFileInfo>): void {
     const processDirectory = (dir: string) => {
         const files = fs.readdirSync(dir);
         for (const f of files) {
@@ -447,14 +493,13 @@ function hashToFileMap(directory: string, filesMap: Map<string, IFileInfo>) {
                 const content = fs.readFileSync(filePath, 'utf8');
                 const hash = fileHash(content);
                 const lines = content.split('\n');
-                filesMap.set(Buffer.from(hash).toString('base64'), { path: filePath, content, lines });
+                const fileInfo = { path: filePath, content, lines };
+                filesMap.set(Buffer.from(hash).toString('base64'), fileInfo);
             }
         }
     };
 
     processDirectory(directory);
-
-    return filesMap;
 }
 
 /**
diff --git a/external-crates/move/crates/move-analyzer/trace-adapter/src/source_map_utils.ts b/external-crates/move/crates/move-analyzer/trace-adapter/src/source_map_utils.ts
index 842caf5511558..5398d963de225 100644
--- a/external-crates/move/crates/move-analyzer/trace-adapter/src/source_map_utils.ts
+++ b/external-crates/move/crates/move-analyzer/trace-adapter/src/source_map_utils.ts
@@ -13,10 +13,22 @@ interface ISrcDefinitionLocation {
     end: number;
 }
 
+interface ISrcStructSourceMapEntry {
+    definition_location: ISrcDefinitionLocation;
+    type_parameters: [string, ISrcDefinitionLocation][];
+    fields: ISrcDefinitionLocation[];
+}
+
+interface ISrcEnumSourceMapEntry {
+    definition_location: ISrcDefinitionLocation;
+    type_parameters: [string, ISrcDefinitionLocation][];
+    variants: [[string, ISrcDefinitionLocation], ISrcDefinitionLocation[]][];
+}
+
 interface ISrcFunctionMapEntry {
     definition_location: ISrcDefinitionLocation;
-    type_parameters: any[];
-    parameters: any[];
+    type_parameters: [string, ISrcDefinitionLocation][];
+    parameters: [string, ISrcDefinitionLocation][];
     locals: [string, ISrcDefinitionLocation][];
     nops: Record<string, number>;
     code_map: Record<string, ISrcDefinitionLocation>;
@@ -26,10 +38,10 @@ interface ISrcFunctionMapEntry {
 interface ISrcRootObject {
     definition_location: ISrcDefinitionLocation;
     module_name: string[];
-    struct_map: Record<string, any>;
-    enum_map: Record<string, any>;
+    struct_map: Record<string, ISrcStructSourceMapEntry>;
+    enum_map: Record<string, ISrcEnumSourceMapEntry>;
     function_map: Record<string, ISrcFunctionMapEntry>;
-    constant_map: Record<string, any>;
+    constant_map: Record<string, string>;
 }
 
 // Runtime data types.
@@ -47,7 +59,7 @@ interface ILoc {
  */
 interface ISourceMapFunction {
     // Locations indexed with PC values.
-    pcLocs: ILoc[]
+    pcLocs: ILoc[],
 }
 
 /**
@@ -68,10 +80,15 @@ export interface IFileInfo {
 export interface ISourceMap {
     fileHash: string
     modInfo: ModuleInfo,
-    functions: Map<string, ISourceMapFunction>
+    functions: Map<string, ISourceMapFunction>,
+    // Lines that are not present in the source map.
+    optimized_lines: number[]
 }
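The `optimized_lines` field added to `ISourceMap` above is computed by set difference, with the wrinkle that the source map's line numbers are 1-based while the reported lines must be 0-based. A self-contained sketch of that computation:

```rust
use std::collections::BTreeSet;

// Collect every 1-based line the source map mentions, then report the
// 0-based indices of the lines it never mentions (i.e., optimized away).
fn optimized_lines(total_lines: usize, mapped_lines_1based: &BTreeSet<usize>) -> Vec<usize> {
    (0..total_lines)
        .filter(|i| !mapped_lines_1based.contains(&(i + 1)))
        .collect()
}

fn main() {
    let mapped: BTreeSet<usize> = [1, 2, 4].into_iter().collect();
    // In a 5-line file, lines 3 and 5 (1-based) never appear in the
    // source map, so the 0-based result is [2, 4].
    assert_eq!(optimized_lines(5, &mapped), vec![2, 4]);
}
```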
 
-export function readAllSourceMaps(directory: string, filesMap: Map<string, IFileInfo>): Map<string, ISourceMap> {
+export function readAllSourceMaps(
+    directory: string,
+    filesMap: Map<string, IFileInfo>
+): Map<string, ISourceMap> {
     const sourceMapsMap = new Map();
 
     const processDirectory = (dir: string) => {
@@ -116,7 +133,10 @@ function readSourceMap(sourceMapPath: string, filesMap: Map<string, IFileInfo>):
             + " when processing source map at: " + sourceMapPath);
     }
 
-    for (const funEntry of Object.values(sourceMapJSON.function_map)) {
+    const allSourceMapLines = new Set<number>();
+    prePopulateSourceMapLines(sourceMapJSON, fileInfo, allSourceMapLines);
+    const functionMap = sourceMapJSON.function_map;
+    for (const funEntry of Object.values(functionMap)) {
         let nameStart = funEntry.definition_location.start;
         let nameEnd = funEntry.definition_location.end;
         const funName = fileInfo.content.slice(nameStart, nameEnd);
@@ -131,6 +151,9 @@ function readSourceMap(sourceMapPath: string, filesMap: Map<string, IFileInfo>):
         for (const [pc, defLocation] of Object.entries(funEntry.code_map)) {
             const currentPC = parseInt(pc);
             const currentLoc = byteOffsetToLineColumn(fileInfo, defLocation.start);
+            allSourceMapLines.add(currentLoc.line);
+            // add the end line to the set as well even if we don't need it for pcLocs
+            allSourceMapLines.add(byteOffsetToLineColumn(fileInfo, defLocation.end).line);
             for (let i = prevPC + 1; i < currentPC; i++) {
                 pcLocs.push(prevLoc);
             }
@@ -140,9 +163,91 @@ function readSourceMap(sourceMapPath: string, filesMap: Map<string, IFileInfo>):
         }
         functions.set(funName, { pcLocs });
     }
-    return { fileHash, modInfo, functions };
+    let optimized_lines: number[] = [];
+    for (let i = 0; i < fileInfo.lines.length; i++) {
+        if (!allSourceMapLines.has(i + 1)) { // allSourceMapLines is 1-based
+            optimized_lines.push(i); // result must be 0-based
+        }
+    }
+
+    return { fileHash, modInfo, functions, optimized_lines };
+}
+
+/**
+ * Pre-populates the set of source file lines that are present in the source map
+ * with lines corresponding to the definitions of module, structs, enums, and functions
+ * (excluding location of instructions in the function body which are handled elsewhere).
+ * Constants do not have location information in the source map and must be handled separately.
+ *
+ * @param sourceMapJSON
+ * @param fileInfo
+ * @param sourceMapLines
+ */
+function prePopulateSourceMapLines(
+    sourceMapJSON: ISrcRootObject,
+    fileInfo: IFileInfo,
+    sourceMapLines: Set<number>
+): void {
+    addLinesForLocation(sourceMapJSON.definition_location, fileInfo, sourceMapLines);
+
+    const structMap = sourceMapJSON.struct_map;
+    for (const structEntry of Object.values(structMap)) {
+        addLinesForLocation(structEntry.definition_location, fileInfo, sourceMapLines);
+        for (const typeParam of structEntry.type_parameters) {
+            addLinesForLocation(typeParam[1], fileInfo, sourceMapLines);
+        }
+        for (const fieldDef of structEntry.fields) {
+            addLinesForLocation(fieldDef, fileInfo, sourceMapLines);
+        }
+    }
+
+    const enumMap = sourceMapJSON.enum_map;
+    for (const enumEntry of Object.values(enumMap)) {
+        addLinesForLocation(enumEntry.definition_location, fileInfo, sourceMapLines);
+        for (const typeParam of enumEntry.type_parameters) {
+            addLinesForLocation(typeParam[1], fileInfo, sourceMapLines);
+        }
+        for (const variant of enumEntry.variants) {
+            addLinesForLocation(variant[0][1], fileInfo, sourceMapLines);
+            for (const fieldDef of variant[1]) {
+                addLinesForLocation(fieldDef, fileInfo, sourceMapLines);
+            }
+        }
+    }
+
+    const functionMap = sourceMapJSON.function_map;
+    for (const funEntry of Object.values(functionMap)) {
+        addLinesForLocation(funEntry.definition_location, fileInfo, sourceMapLines);
+        for (const typeParam of funEntry.type_parameters) {
+            addLinesForLocation(typeParam[1], fileInfo, sourceMapLines);
+        }
+        for (const param of funEntry.parameters) {
+            addLinesForLocation(param[1], fileInfo, sourceMapLines);
+        }
+        for (const local of funEntry.locals) {
+            addLinesForLocation(local[1], fileInfo, sourceMapLines);
+        }
+    }
+}
+
+/**
+ * Adds source file lines for the given location to the set.
+ *
+ * @param loc location in the source file.
+ * @param fileInfo source file information.
+ * @param sourceMapLines set of source file lines.
+ */
+function addLinesForLocation(
+    loc: ISrcDefinitionLocation,
+    fileInfo: IFileInfo,
+    sourceMapLines: Set<number>
+): void {
+    const startLine = byteOffsetToLineColumn(fileInfo, loc.start).line;
+    sourceMapLines.add(startLine);
+    const endLine = byteOffsetToLineColumn(fileInfo, loc.end).line;
+    sourceMapLines.add(endLine);
+}
+
 
 /**
  * Computes source file location (line/colum) from the byte offset
  * (assumes that lines and columns are 1-based).
diff --git a/external-crates/move/crates/move-analyzer/trace-debug/src/extension.ts b/external-crates/move/crates/move-analyzer/trace-debug/src/extension.ts
index a096d01bb37dc..2b94f50f38b35 100644
--- a/external-crates/move/crates/move-analyzer/trace-debug/src/extension.ts
+++ b/external-crates/move/crates/move-analyzer/trace-debug/src/extension.ts
@@ -4,7 +4,8 @@ import * as fs from 'fs';
 import * as vscode from 'vscode';
 import * as path from 'path';
-import { WorkspaceFolder, DebugConfiguration, ProviderResult, CancellationToken, DebugSession } from 'vscode';
+import { StackFrame } from '@vscode/debugadapter';
+import { WorkspaceFolder, DebugConfiguration, CancellationToken } from 'vscode';
 
 /**
  * Log level for the debug adapter.
@@ -35,6 +36,66 @@ export function activate(context: vscode.ExtensionContext) { }) ); + let previousSourcePath: string | undefined; + const decorationType = vscode.window.createTextEditorDecorationType({ + color: 'grey', + backgroundColor: 'rgba(220, 220, 220, 0.5)' // grey with 50% opacity + }); + context.subscriptions.push( + vscode.debug.onDidChangeActiveStackItem(async stackItem => { + if (stackItem instanceof vscode.DebugStackFrame) { + const session = vscode.debug.activeDebugSession; + if (session) { + // Request the stack frame details from the debug adapter + const stackTraceResponse = await session.customRequest('stackTrace', { + threadId: stackItem.threadId, + startFrame: stackItem.frameId, + levels: 1 + }); + + const stackFrame: StackFrame = stackTraceResponse.stackFrames[0]; + if (stackFrame && stackFrame.source && stackFrame.source.path !== previousSourcePath) { + previousSourcePath = stackFrame.source.path; + const source = stackFrame.source; + const line = stackFrame.line; + console.log(`Frame details: ${source?.name} at line ${line}`); + + const editor = vscode.window.activeTextEditor; + if (editor) { + const optimized_lines = stackTraceResponse.optimized_lines; + const document = editor.document; + let decorationsArray: vscode.DecorationOptions[] = []; + + optimized_lines.forEach((lineNumber: number) => { + const line = document.lineAt(lineNumber); + const lineLength = line.text.length; + const lineText = line.text.trim(); + if (lineText.length !== 0 // ignore empty lines + && !lineText.startsWith("const") // ignore constant declarations (not in the source map) + && !lineText.startsWith("}")) { // ignore closing braces with nothing else on the same line + const decoration = { + range: new vscode.Range(lineNumber, 0, lineNumber, lineLength), + }; + decorationsArray.push(decoration); + } + }); + + editor.setDecorations(decorationType, decorationsArray); + } + + } + } + } + }), + vscode.debug.onDidTerminateDebugSession(() => { + // reset all decorations when the debug session is terminated + // to avoid showing lines for code that was optimized away + const editor = vscode.window.activeTextEditor; + if (editor) { + editor.setDecorations(decorationType, []); + } + }) + ); } /** diff --git a/external-crates/move/crates/move-bytecode-utils/Cargo.toml b/external-crates/move/crates/move-bytecode-utils/Cargo.toml index bcfe9488133a1..824ba9f953d7d 100644 --- a/external-crates/move/crates/move-bytecode-utils/Cargo.toml +++ b/external-crates/move/crates/move-bytecode-utils/Cargo.toml @@ -15,3 +15,4 @@ move-binary-format.workspace = true move-core-types.workspace = true petgraph.workspace = true serde-reflection.workspace = true +indexmap.workspace = true diff --git a/external-crates/move/crates/move-bytecode-utils/src/dependency_graph.rs b/external-crates/move/crates/move-bytecode-utils/src/dependency_graph.rs deleted file mode 100644 index fbfd1d5065125..0000000000000 --- a/external-crates/move/crates/move-bytecode-utils/src/dependency_graph.rs +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright (c) The Diem Core Contributors -// Copyright (c) The Move Contributors -// SPDX-License-Identifier: Apache-2.0 - -use move_binary_format::file_format::CompiledModule; -use petgraph::graphmap::DiGraphMap; - -use anyhow::{bail, Result}; -use std::collections::BTreeMap; - -/// Directed graph capturing dependencies between modules -pub struct DependencyGraph<'a> { - /// Set of modules guaranteed to be closed under dependencies - modules: Vec<&'a CompiledModule>, - graph: DiGraphMap, -} - 
-#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq, PartialOrd, Ord)] -struct ModuleIndex(usize); - -impl<'a> DependencyGraph<'a> { - /// Construct a dependency graph from a set of `modules`. - /// Panics if `modules` contains duplicates or is not closed under the depedency relation - pub fn new(module_iter: impl IntoIterator) -> Self { - let mut modules = vec![]; - let mut reverse_modules = BTreeMap::new(); - for (i, m) in module_iter.into_iter().enumerate() { - modules.push(m); - assert!( - reverse_modules - .insert(m.self_id(), ModuleIndex(i)) - .is_none(), - "Duplicate module found" - ); - } - let mut graph = DiGraphMap::new(); - for module in &modules { - let module_idx: ModuleIndex = *reverse_modules.get(&module.self_id()).unwrap(); - let deps = module.immediate_dependencies(); - if deps.is_empty() { - graph.add_node(module_idx); - } else { - for dep in deps { - let dep_idx = *reverse_modules - .get(&dep) - .unwrap_or_else(|| panic!("Missing dependency {}", dep)); - graph.add_edge(dep_idx, module_idx, ()); - } - } - } - DependencyGraph { modules, graph } - } - - /// Return an iterator over the modules in `self` in topological order--modules with least deps first. - /// Fails with an error if `self` contains circular dependencies - pub fn compute_topological_order(&self) -> Result> { - match petgraph::algo::toposort(&self.graph, None) { - Err(_) => bail!("Circular dependency detected"), - Ok(ordered_idxs) => Ok(ordered_idxs.into_iter().map(move |idx| self.modules[idx.0])), - } - } -} diff --git a/external-crates/move/crates/move-bytecode-utils/src/layout.rs b/external-crates/move/crates/move-bytecode-utils/src/layout.rs index dde1d9245335f..a4fe222b28d63 100644 --- a/external-crates/move/crates/move-bytecode-utils/src/layout.rs +++ b/external-crates/move/crates/move-bytecode-utils/src/layout.rs @@ -226,8 +226,8 @@ impl<'a, T: GetModule> SerdeLayoutBuilder<'a, T> { (None, Some(enum_def)) => { Container::Enum(Enum::new(declaring_module.borrow(), enum_def).1) } - (Some(_), Some(_)) => panic!("Found both struct and enum with name {}", name), - (None, None) => panic!( + (Some(_), Some(_)) => bail!("Found both struct and enum with name {}", name), + (None, None) => bail!( "Could not find datatype named {} in module {}", name, declaring_module.borrow().name() @@ -287,7 +287,7 @@ impl<'a, T: GetModule> SerdeLayoutBuilder<'a, T> { if old_datatype.clone() != self.generate_serde_container(def, type_arguments, depth)? 
{ - panic!( + bail!( "Name conflict: multiple structs with name {}, but different addresses", type_key ) @@ -587,7 +587,10 @@ impl DatatypeLayoutBuilder { .zip(layouts) .map(|(name, layout)| A::MoveFieldLayout::new(name, layout)) .collect(); - Ok(A::MoveStructLayout { type_, fields }) + Ok(A::MoveStructLayout { + type_, + fields: Box::new(fields), + }) } } } @@ -608,7 +611,7 @@ impl DatatypeLayoutBuilder { module.borrow().find_struct_def_by_name(name), module.borrow().find_enum_def_by_name(name), ) { - (Some(struct_def), None) => Ok(A::MoveDatatypeLayout::Struct( + (Some(struct_def), None) => Ok(A::MoveDatatypeLayout::Struct(Box::new( Self::build_from_struct_definition( module.borrow(), struct_def, @@ -616,8 +619,8 @@ impl DatatypeLayoutBuilder { resolver, depth, )?, - )), - (None, Some(enum_def)) => Ok(A::MoveDatatypeLayout::Enum( + ))), + (None, Some(enum_def)) => Ok(A::MoveDatatypeLayout::Enum(Box::new( Self::build_from_enum_definition( module.borrow(), enum_def, @@ -625,9 +628,9 @@ impl DatatypeLayoutBuilder { resolver, depth, )?, - )), - (Some(_), Some(_)) => panic!("Found both struct and enum with name {}", name), - (None, None) => panic!( + ))), + (Some(_), Some(_)) => bail!("Found both struct and enum with name {}", name), + (None, None) => bail!( "Could not find struct/enum named {} in module {}", name, module.borrow().name() @@ -645,13 +648,13 @@ impl DatatypeLayoutBuilder { check_depth!(depth); if let Some(def) = m.find_struct_def(s) { // declared internally - Ok(A::MoveDatatypeLayout::Struct( + Ok(A::MoveDatatypeLayout::Struct(Box::new( Self::build_from_struct_definition(m, def, type_arguments, resolver, depth)?, - )) + ))) } else if let Some(def) = m.find_enum_def(s) { - Ok(A::MoveDatatypeLayout::Enum( + Ok(A::MoveDatatypeLayout::Enum(Box::new( Self::build_from_enum_definition(m, def, type_arguments, resolver, depth)?, - )) + ))) } else { let handle = m.datatype_handle_at(s); let name = m.identifier_at(handle.name); diff --git a/external-crates/move/crates/move-bytecode-utils/src/lib.rs b/external-crates/move/crates/move-bytecode-utils/src/lib.rs index a641b4123420a..c2af29e185dd6 100644 --- a/external-crates/move/crates/move-bytecode-utils/src/lib.rs +++ b/external-crates/move/crates/move-bytecode-utils/src/lib.rs @@ -2,17 +2,17 @@ // Copyright (c) The Move Contributors // SPDX-License-Identifier: Apache-2.0 -pub mod dependency_graph; pub mod layout; pub mod module_cache; -use crate::dependency_graph::DependencyGraph; use move_binary_format::file_format::{CompiledModule, DatatypeHandleIndex, SignatureToken}; use move_core_types::{ account_address::AccountAddress, identifier::IdentStr, language_storage::ModuleId, }; use anyhow::{anyhow, Result}; +use indexmap::IndexMap; +use petgraph::graphmap::DiGraphMap; use std::collections::BTreeMap; /// Set of Move modules indexed by module Id @@ -43,9 +43,37 @@ impl<'a> Modules<'a> { self.iter_modules().into_iter().cloned().collect() } - /// Compute a dependency graph for `self` - pub fn compute_dependency_graph(&self) -> DependencyGraph { - DependencyGraph::new(self.0.values().copied()) + /// Return an iterator over the modules in `self` in topological order--modules with least deps first. + /// Fails with an error if `self` contains circular dependencies. + /// Tolerates missing dependencies. 
+    pub fn compute_topological_order(&self) -> Result<impl Iterator<Item = &CompiledModule>> {
+        let mut module_map = IndexMap::new();
+        for m in self.iter_modules() {
+            if module_map.insert(m.self_id(), m).is_some() {
+                panic!("Duplicate module found")
+            }
+        }
+
+        let mut graph: DiGraphMap<usize, usize> = DiGraphMap::new();
+        for i in 0..module_map.len() {
+            graph.add_node(i);
+        }
+
+        for (i, (_, m)) in module_map.iter().enumerate() {
+            for dep in m.immediate_dependencies() {
+                if let Some(j) = module_map.get_index_of(&dep) {
+                    graph.add_edge(i, j, 0);
+                }
+            }
+        }
+
+        match petgraph::algo::toposort(&graph, None) {
+            Err(_) => panic!("Circular dependency detected"),
+            Ok(ordered_idxs) => Ok(ordered_idxs
+                .into_iter()
+                .map(move |idx| *module_map.get_index(idx).unwrap().1)
+                .rev()),
+        }
     }
 
     /// Return the backing map of `self`
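The new `compute_topological_order` above inlines the deleted dependency graph using `petgraph` over an `IndexMap`, tolerating missing dependencies. A runnable sketch of the same approach, with hypothetical module names and only `petgraph` and `indexmap` as dependencies:

```rust
use indexmap::IndexMap;
use petgraph::graphmap::DiGraphMap;

// Topologically order `names` given (module, dependency) edges;
// unknown dependencies are tolerated by skipping them.
fn topo(names: &[&str], deps: &[(&str, &str)]) -> Vec<String> {
    let mut map: IndexMap<String, ()> = IndexMap::new();
    for n in names {
        map.insert((*n).to_owned(), ());
    }
    let mut graph: DiGraphMap<usize, usize> = DiGraphMap::new();
    for i in 0..map.len() {
        graph.add_node(i);
    }
    for (m, d) in deps {
        let i = map.get_index_of(*m).unwrap();
        if let Some(j) = map.get_index_of(*d) {
            graph.add_edge(i, j, 0); // edge: module -> its dependency
        } // missing dependency: tolerated, as in the change above
    }
    let order = petgraph::algo::toposort(&graph, None).expect("cycle");
    // reverse so that dependencies come first
    order
        .into_iter()
        .rev()
        .map(|i| map.get_index(i).unwrap().0.clone())
        .collect()
}

fn main() {
    // "X" is a missing dependency and is simply ignored.
    let order = topo(&["A", "B", "C"], &[("A", "B"), ("B", "C"), ("A", "X")]);
    assert_eq!(order, ["C", "B", "A"]);
}
```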
diff --git a/external-crates/move/crates/move-cli/src/base/test.rs b/external-crates/move/crates/move-cli/src/base/test.rs
index 8f91b7b2e790a..68e8ace285981 100644
--- a/external-crates/move/crates/move-cli/src/base/test.rs
+++ b/external-crates/move/crates/move-cli/src/base/test.rs
@@ -5,6 +5,7 @@ use super::reroot_path;
 use crate::NativeFunctionRecord;
 use anyhow::Result;
 use clap::*;
+use move_binary_format::CompiledModule;
 use move_command_line_common::files::MOVE_COVERAGE_MAP_EXTENSION;
 use move_compiler::{
     diagnostics::{self, Diagnostics},
@@ -159,8 +160,27 @@ pub fn run_move_unit_tests(
         })
         .collect();
 
+    // Collect all the bytecode modules that are dependencies of the package. We need to do this
+    // because they're not returned by the compilation result, but we need to add them in the
+    // VM storage.
+    let mut bytecode_deps_modules = vec![];
+    for pkg in resolution_graph.package_table.values() {
+        let source_available = !pkg
+            .get_sources(&resolution_graph.build_options)
+            .unwrap()
+            .is_empty();
+        if source_available {
+            continue;
+        }
+        for bytes in pkg.get_bytecodes_bytes()? {
+            let module = CompiledModule::deserialize_with_defaults(&bytes)?;
+            bytecode_deps_modules.push(module);
+        }
+    }
+
     let root_package = resolution_graph.root_package();
     let build_plan = BuildPlan::create(resolution_graph)?;
+
     // Compile the package. We need to intercede in the compilation, process being performed by the
     // Move package system, to first grab the compilation env, construct the test plan from it, and
     // then save it, before resuming the rest of the compilation and returning the results and
@@ -192,7 +212,7 @@ pub fn run_move_unit_tests(
     let (test_plan, mapped_files, units) = test_plan.unwrap();
     let test_plan = test_plan.unwrap();
     let no_tests = test_plan.is_empty();
-    let test_plan = TestPlan::new(test_plan, mapped_files, units);
+    let test_plan = TestPlan::new(test_plan, mapped_files, units, bytecode_deps_modules);
 
     let trace_path = pkg_path.join(".trace");
     let coverage_map_path = pkg_path
diff --git a/external-crates/move/crates/move-cli/tests/build_tests/circular_dependencies/bar/sources/Bar.move b/external-crates/move/crates/move-cli/tests/build_tests/circular_dependencies/bar/sources/Bar.move
index 7905cb3917008..c228f2fc35231 100644
--- a/external-crates/move/crates/move-cli/tests/build_tests/circular_dependencies/bar/sources/Bar.move
+++ b/external-crates/move/crates/move-cli/tests/build_tests/circular_dependencies/bar/sources/Bar.move
@@ -1,5 +1,5 @@
-module 0x1::Bar {
-    use 0x1::Foo;
+module 0x6::Bar {
+    use 0x6::Foo;
 
     public bar() {
         Foo::foo();
diff --git a/external-crates/move/crates/move-cli/tests/build_tests/circular_dependencies/sources/Foo.move b/external-crates/move/crates/move-cli/tests/build_tests/circular_dependencies/sources/Foo.move
index c094de36af50f..e83504b570603 100644
--- a/external-crates/move/crates/move-cli/tests/build_tests/circular_dependencies/sources/Foo.move
+++ b/external-crates/move/crates/move-cli/tests/build_tests/circular_dependencies/sources/Foo.move
@@ -1,5 +1,5 @@
-module 0x1::Foo {
-    use 0x1::Bar;
+module 0x6::Foo {
+    use 0x6::Bar;
 
     public foo() {
         Bar::bar();
diff --git a/external-crates/move/crates/move-cli/tests/build_tests/empty_module_no_deps/sources/A.move b/external-crates/move/crates/move-cli/tests/build_tests/empty_module_no_deps/sources/A.move
index edb25f3d7377a..c98e8a41e7d62 100644
--- a/external-crates/move/crates/move-cli/tests/build_tests/empty_module_no_deps/sources/A.move
+++ b/external-crates/move/crates/move-cli/tests/build_tests/empty_module_no_deps/sources/A.move
@@ -1 +1 @@
-module 0x1::M {}
+module 0x6::M {}
diff --git a/external-crates/move/crates/move-cli/tests/build_tests/include_exclude_stdlib/sources/UseSigner.move b/external-crates/move/crates/move-cli/tests/build_tests/include_exclude_stdlib/sources/UseSigner.move
index 471e9ac62e941..d6a4b951a632a 100644
--- a/external-crates/move/crates/move-cli/tests/build_tests/include_exclude_stdlib/sources/UseSigner.move
+++ b/external-crates/move/crates/move-cli/tests/build_tests/include_exclude_stdlib/sources/UseSigner.move
@@ -1,4 +1,4 @@
-module 0x1::Example {
+module 0x6::Example {
     use std::address;
 
     public fun f(): u64 {
diff --git a/external-crates/move/crates/move-cli/tests/build_tests/rebuild_after_adding_new_source/sources/Foo.move b/external-crates/move/crates/move-cli/tests/build_tests/rebuild_after_adding_new_source/sources/Foo.move
index f358fe72bead9..20084ca68a2ae 100644
--- a/external-crates/move/crates/move-cli/tests/build_tests/rebuild_after_adding_new_source/sources/Foo.move
+++ b/external-crates/move/crates/move-cli/tests/build_tests/rebuild_after_adding_new_source/sources/Foo.move
@@ -1 +1 @@
-module 0x1::Foo {}
+module 0x6::Foo {}
diff --git a/external-crates/move/crates/move-cli/tests/build_tests/rebuild_after_deleting_output_artifact/sources/Foo.move
b/external-crates/move/crates/move-cli/tests/build_tests/rebuild_after_deleting_output_artifact/sources/Foo.move index f358fe72bead9..20084ca68a2ae 100644 --- a/external-crates/move/crates/move-cli/tests/build_tests/rebuild_after_deleting_output_artifact/sources/Foo.move +++ b/external-crates/move/crates/move-cli/tests/build_tests/rebuild_after_deleting_output_artifact/sources/Foo.move @@ -1 +1 @@ -module 0x1::Foo {} +module 0x6::Foo {} diff --git a/external-crates/move/crates/move-cli/tests/build_tests/rebuild_after_touching_manifest/sources/Foo.move b/external-crates/move/crates/move-cli/tests/build_tests/rebuild_after_touching_manifest/sources/Foo.move index f358fe72bead9..20084ca68a2ae 100644 --- a/external-crates/move/crates/move-cli/tests/build_tests/rebuild_after_touching_manifest/sources/Foo.move +++ b/external-crates/move/crates/move-cli/tests/build_tests/rebuild_after_touching_manifest/sources/Foo.move @@ -1 +1 @@ -module 0x1::Foo {} +module 0x6::Foo {} diff --git a/external-crates/move/crates/move-cli/tests/build_tests/rebuild_after_touching_source/sources/Foo.move b/external-crates/move/crates/move-cli/tests/build_tests/rebuild_after_touching_source/sources/Foo.move index f358fe72bead9..20084ca68a2ae 100644 --- a/external-crates/move/crates/move-cli/tests/build_tests/rebuild_after_touching_source/sources/Foo.move +++ b/external-crates/move/crates/move-cli/tests/build_tests/rebuild_after_touching_source/sources/Foo.move @@ -1 +1 @@ -module 0x1::Foo {} +module 0x6::Foo {} diff --git a/external-crates/move/crates/move-cli/tests/build_tests/rebuild_no_modification/sources/Foo.move b/external-crates/move/crates/move-cli/tests/build_tests/rebuild_no_modification/sources/Foo.move index f358fe72bead9..20084ca68a2ae 100644 --- a/external-crates/move/crates/move-cli/tests/build_tests/rebuild_no_modification/sources/Foo.move +++ b/external-crates/move/crates/move-cli/tests/build_tests/rebuild_no_modification/sources/Foo.move @@ -1 +1 @@ -module 0x1::Foo {} +module 0x6::Foo {} diff --git a/external-crates/move/crates/move-cli/tests/build_tests/simple_build_with_docs/sources/Foo.move b/external-crates/move/crates/move-cli/tests/build_tests/simple_build_with_docs/sources/Foo.move index 6af3654843431..534279a5f0573 100644 --- a/external-crates/move/crates/move-cli/tests/build_tests/simple_build_with_docs/sources/Foo.move +++ b/external-crates/move/crates/move-cli/tests/build_tests/simple_build_with_docs/sources/Foo.move @@ -1,4 +1,4 @@ -module 0x1::Foo { +module 0x6::Foo { /// Test documentation comment public fun foo() { } diff --git a/external-crates/move/crates/move-cli/tests/build_tests/unbound_dependency/sources/A.move b/external-crates/move/crates/move-cli/tests/build_tests/unbound_dependency/sources/A.move index 4fe8b348357bf..aec3bb0d6ca73 100644 --- a/external-crates/move/crates/move-cli/tests/build_tests/unbound_dependency/sources/A.move +++ b/external-crates/move/crates/move-cli/tests/build_tests/unbound_dependency/sources/A.move @@ -1 +1 @@ -module 0x1::A {} +module 0x6::A {} diff --git a/external-crates/move/crates/move-cli/tests/move_unit_tests/bytecode_deps/B/Move.toml b/external-crates/move/crates/move-cli/tests/move_unit_tests/bytecode_deps/B/Move.toml new file mode 100644 index 0000000000000..a6ad126c76313 --- /dev/null +++ b/external-crates/move/crates/move-cli/tests/move_unit_tests/bytecode_deps/B/Move.toml @@ -0,0 +1,8 @@ +[package] +name = "Bar" + +[addresses] +B = "0x2" + +[dependencies] +Foo = { local = "../C" } diff --git 
a/external-crates/move/crates/move-cli/tests/move_unit_tests/bytecode_deps/B/sources/Bar.move b/external-crates/move/crates/move-cli/tests/move_unit_tests/bytecode_deps/B/sources/Bar.move new file mode 100644 index 0000000000000..5074f2e1b441a --- /dev/null +++ b/external-crates/move/crates/move-cli/tests/move_unit_tests/bytecode_deps/B/sources/Bar.move @@ -0,0 +1,8 @@ +module B::Bar { + use C::Foo; + + public fun foo(): u64 { + Foo::bar() + } + +} diff --git a/external-crates/move/crates/move-cli/tests/move_unit_tests/bytecode_deps/C/Move.toml b/external-crates/move/crates/move-cli/tests/move_unit_tests/bytecode_deps/C/Move.toml new file mode 100644 index 0000000000000..db95e4a84a73b --- /dev/null +++ b/external-crates/move/crates/move-cli/tests/move_unit_tests/bytecode_deps/C/Move.toml @@ -0,0 +1,5 @@ +[package] +name = "Foo" + +[addresses] +C = "0x3" diff --git a/external-crates/move/crates/move-cli/tests/move_unit_tests/bytecode_deps/C/sources/Foo.move b/external-crates/move/crates/move-cli/tests/move_unit_tests/bytecode_deps/C/sources/Foo.move new file mode 100644 index 0000000000000..4313b3ed6a196 --- /dev/null +++ b/external-crates/move/crates/move-cli/tests/move_unit_tests/bytecode_deps/C/sources/Foo.move @@ -0,0 +1,5 @@ +module C::Foo { + public fun bar(): u64 { + 1 + } +} diff --git a/external-crates/move/crates/move-cli/tests/move_unit_tests/bytecode_deps/Move.toml b/external-crates/move/crates/move-cli/tests/move_unit_tests/bytecode_deps/Move.toml new file mode 100644 index 0000000000000..156d5498096cf --- /dev/null +++ b/external-crates/move/crates/move-cli/tests/move_unit_tests/bytecode_deps/Move.toml @@ -0,0 +1,15 @@ +[package] +name = "A" + +[addresses] +A = "0x2" + +[dependencies] +Bar = { local = "./B" } +Foo = { local = "./C" } + +[dev-addresses] +std = "0x1" + +[dev-dependencies] +MoveStdlib = { local = "../../../../move-stdlib" } diff --git a/external-crates/move/crates/move-cli/tests/move_unit_tests/bytecode_deps/args.exp b/external-crates/move/crates/move-cli/tests/move_unit_tests/bytecode_deps/args.exp new file mode 100644 index 0000000000000..612ce6d4f6608 --- /dev/null +++ b/external-crates/move/crates/move-cli/tests/move_unit_tests/bytecode_deps/args.exp @@ -0,0 +1,17 @@ +Command `build -v -p ./C`: +BUILDING Foo +External Command `mv ./C/sources/Foo.move ./C/sources/Foo.move_old`: +External Command `rm -rf ./C/build/Foo/sources`: +Command `build -v -p ./B`: +INCLUDING DEPENDENCY Foo +BUILDING Bar +External Command `mv ./B/sources/Bar.move ./B/sources/Bar.move_old`: +External Command `rm -rf ./B/build/Bar/sources`: +Command `test`: +INCLUDING DEPENDENCY Bar +INCLUDING DEPENDENCY Foo +INCLUDING DEPENDENCY MoveStdlib +BUILDING A +Running Move unit tests +[ PASS ] 0x2::A::foo_test +Test result: OK. 
Total tests: 1; passed: 1; failed: 0 diff --git a/external-crates/move/crates/move-cli/tests/move_unit_tests/bytecode_deps/args.txt b/external-crates/move/crates/move-cli/tests/move_unit_tests/bytecode_deps/args.txt new file mode 100644 index 0000000000000..09998befbcfa8 --- /dev/null +++ b/external-crates/move/crates/move-cli/tests/move_unit_tests/bytecode_deps/args.txt @@ -0,0 +1,7 @@ +build -v -p ./C +> mv ./C/sources/Foo.move ./C/sources/Foo.move_old +> rm -rf ./C/build/Foo/sources +build -v -p ./B +> mv ./B/sources/Bar.move ./B/sources/Bar.move_old +> rm -rf ./B/build/Bar/sources +test diff --git a/external-crates/move/crates/move-cli/tests/move_unit_tests/bytecode_deps/sources/A.move b/external-crates/move/crates/move-cli/tests/move_unit_tests/bytecode_deps/sources/A.move new file mode 100644 index 0000000000000..63e2dec3673e2 --- /dev/null +++ b/external-crates/move/crates/move-cli/tests/move_unit_tests/bytecode_deps/sources/A.move @@ -0,0 +1,13 @@ +module A::A { + use B::Bar; + use C::Foo; + + public fun foo(): u64 { + Bar::foo() + Foo::bar() + } + + #[test] + fun foo_test() { + Bar::foo() + Foo::bar(); + } +} diff --git a/external-crates/move/crates/move-cli/tests/sandbox_tests/generate_struct_layout/args.exp b/external-crates/move/crates/move-cli/tests/sandbox_tests/generate_struct_layout/args.exp index 7c4520eea8311..a2733f9fabda0 100644 --- a/external-crates/move/crates/move-cli/tests/sandbox_tests/generate_struct_layout/args.exp +++ b/external-crates/move/crates/move-cli/tests/sandbox_tests/generate_struct_layout/args.exp @@ -1,7 +1,7 @@ Command `sandbox publish`: -Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/M2.mv --struct C --type-args u64`: +Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/M2.mv --struct C --type-args u64`: --- -"0000000000000000000000000000000000000000000000000000000000000001::M2::C": +"0000000000000000000000000000000000000000000000000000000000000006::M2::C": STRUCT: - t: U64 - b: BOOL @@ -16,9 +16,9 @@ Signer: CONTENT: U8 SIZE: 32 -Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/M2.mv --struct C --type-args address`: +Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/M2.mv --struct C --type-args address`: --- -"0000000000000000000000000000000000000000000000000000000000000001::M2::C": +"0000000000000000000000000000000000000000000000000000000000000006::M2::C": STRUCT: - t: TYPENAME: AccountAddress @@ -34,9 +34,9 @@ Signer: CONTENT: U8 SIZE: 32 -Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/M2.mv --struct C --type-args vector`: +Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/M2.mv --struct C --type-args vector`: --- -"0000000000000000000000000000000000000000000000000000000000000001::M2::C>": +"0000000000000000000000000000000000000000000000000000000000000006::M2::C>": STRUCT: - t: BYTES - b: BOOL @@ -51,16 +51,16 @@ Signer: CONTENT: U8 SIZE: 32 -Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/M1.mv --struct B --type-args bool`: +Command `sandbox generate struct-layouts 
--module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/M1.mv --struct B --type-args bool`: --- -"0000000000000000000000000000000000000000000000000000000000000001::M1::B": +"0000000000000000000000000000000000000000000000000000000000000006::M1::B": STRUCT: - a: TYPENAME: AccountAddress - c: - TYPENAME: "0000000000000000000000000000000000000000000000000000000000000001::M2::C" + TYPENAME: "0000000000000000000000000000000000000000000000000000000000000006::M2::C" - t: BOOL -"0000000000000000000000000000000000000000000000000000000000000001::M2::C": +"0000000000000000000000000000000000000000000000000000000000000006::M2::C": STRUCT: - t: BOOL - b: BOOL @@ -75,7 +75,7 @@ Signer: CONTENT: U8 SIZE: 32 -Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/M1.mv --struct B --type-args bool --shallow`: +Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/M1.mv --struct B --type-args bool --shallow`: --- AccountAddress: NEWTYPESTRUCT: @@ -88,29 +88,29 @@ Signer: CONTENT: U8 SIZE: 32 -Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/M1.mv --struct A --type-args 0x1::M1::S`: +Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/M1.mv --struct A --type-args 0x6::M1::S`: --- -"0000000000000000000000000000000000000000000000000000000000000001::M1::A<0000000000000000000000000000000000000000000000000000000000000001::M1::S>": +"0000000000000000000000000000000000000000000000000000000000000006::M1::A<0000000000000000000000000000000000000000000000000000000000000006::M1::S>": STRUCT: - f: U64 - v: BYTES - b: - TYPENAME: "0000000000000000000000000000000000000000000000000000000000000001::M1::B<0000000000000000000000000000000000000000000000000000000000000001::M1::S>" -"0000000000000000000000000000000000000000000000000000000000000001::M1::B<0000000000000000000000000000000000000000000000000000000000000001::M1::S>": + TYPENAME: "0000000000000000000000000000000000000000000000000000000000000006::M1::B<0000000000000000000000000000000000000000000000000000000000000006::M1::S>" +"0000000000000000000000000000000000000000000000000000000000000006::M1::B<0000000000000000000000000000000000000000000000000000000000000006::M1::S>": STRUCT: - a: TYPENAME: AccountAddress - c: - TYPENAME: "0000000000000000000000000000000000000000000000000000000000000001::M2::C<0000000000000000000000000000000000000000000000000000000000000001::M1::S>" + TYPENAME: "0000000000000000000000000000000000000000000000000000000000000006::M2::C<0000000000000000000000000000000000000000000000000000000000000006::M1::S>" - t: - TYPENAME: "0000000000000000000000000000000000000000000000000000000000000001::M1::S" -"0000000000000000000000000000000000000000000000000000000000000001::M1::S": + TYPENAME: "0000000000000000000000000000000000000000000000000000000000000006::M1::S" +"0000000000000000000000000000000000000000000000000000000000000006::M1::S": STRUCT: - t: U64 -"0000000000000000000000000000000000000000000000000000000000000001::M2::C<0000000000000000000000000000000000000000000000000000000000000001::M1::S>": +"0000000000000000000000000000000000000000000000000000000000000006::M2::C<0000000000000000000000000000000000000000000000000000000000000006::M1::S>": STRUCT: - t: - TYPENAME: 
"0000000000000000000000000000000000000000000000000000000000000001::M1::S" + TYPENAME: "0000000000000000000000000000000000000000000000000000000000000006::M1::S" - b: BOOL AccountAddress: NEWTYPESTRUCT: @@ -123,31 +123,31 @@ Signer: CONTENT: U8 SIZE: 32 -Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/M1.mv --struct A --type-args vector<0x1::M1::S>`: +Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/M1.mv --struct A --type-args vector<0x6::M1::S>`: --- -"0000000000000000000000000000000000000000000000000000000000000001::M1::A>>": +"0000000000000000000000000000000000000000000000000000000000000006::M1::A>>": STRUCT: - f: U64 - v: BYTES - b: - TYPENAME: "0000000000000000000000000000000000000000000000000000000000000001::M1::B>>" -"0000000000000000000000000000000000000000000000000000000000000001::M1::B>>": + TYPENAME: "0000000000000000000000000000000000000000000000000000000000000006::M1::B>>" +"0000000000000000000000000000000000000000000000000000000000000006::M1::B>>": STRUCT: - a: TYPENAME: AccountAddress - c: - TYPENAME: "0000000000000000000000000000000000000000000000000000000000000001::M2::C>>" + TYPENAME: "0000000000000000000000000000000000000000000000000000000000000006::M2::C>>" - t: SEQ: - TYPENAME: "0000000000000000000000000000000000000000000000000000000000000001::M1::S" -"0000000000000000000000000000000000000000000000000000000000000001::M1::S": + TYPENAME: "0000000000000000000000000000000000000000000000000000000000000006::M1::S" +"0000000000000000000000000000000000000000000000000000000000000006::M1::S": STRUCT: - t: U64 -"0000000000000000000000000000000000000000000000000000000000000001::M2::C>>": +"0000000000000000000000000000000000000000000000000000000000000006::M2::C>>": STRUCT: - t: SEQ: - TYPENAME: "0000000000000000000000000000000000000000000000000000000000000001::M1::S" + TYPENAME: "0000000000000000000000000000000000000000000000000000000000000006::M1::S" - b: BOOL AccountAddress: NEWTYPESTRUCT: @@ -160,7 +160,7 @@ Signer: CONTENT: U8 SIZE: 32 -Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/M1.mv --struct A --type-args 0x1::M1::S --shallow`: +Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/M1.mv --struct A --type-args 0x6::M1::S --shallow`: --- AccountAddress: NEWTYPESTRUCT: @@ -173,7 +173,7 @@ Signer: CONTENT: U8 SIZE: 32 -Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/M1.mv --struct A --type-args vector<0x1::M1::S> --shallow`: +Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/M1.mv --struct A --type-args vector<0x6::M1::S> --shallow`: --- AccountAddress: NEWTYPESTRUCT: @@ -186,14 +186,14 @@ Signer: CONTENT: U8 SIZE: 32 -Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/M2.mv --struct C --type-args 0x1::M2::C`: +Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/M2.mv --struct C --type-args 0x6::M2::C`: --- 
-"0000000000000000000000000000000000000000000000000000000000000001::M2::C<0000000000000000000000000000000000000000000000000000000000000001::M2::C>": +"0000000000000000000000000000000000000000000000000000000000000006::M2::C<0000000000000000000000000000000000000000000000000000000000000006::M2::C>": STRUCT: - t: - TYPENAME: "0000000000000000000000000000000000000000000000000000000000000001::M2::C" + TYPENAME: "0000000000000000000000000000000000000000000000000000000000000006::M2::C" - b: BOOL -"0000000000000000000000000000000000000000000000000000000000000001::M2::C": +"0000000000000000000000000000000000000000000000000000000000000006::M2::C": STRUCT: - t: U64 - b: BOOL @@ -208,14 +208,14 @@ Signer: CONTENT: U8 SIZE: 32 -Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/M1.mv --struct G`: +Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/M1.mv --struct G`: --- -"0000000000000000000000000000000000000000000000000000000000000001::M1::G": +"0000000000000000000000000000000000000000000000000000000000000006::M1::G": STRUCT: - x: U64 - s: - TYPENAME: "0000000000000000000000000000000000000000000000000000000000000001::M1::S" -"0000000000000000000000000000000000000000000000000000000000000001::M1::S": + TYPENAME: "0000000000000000000000000000000000000000000000000000000000000006::M1::S" +"0000000000000000000000000000000000000000000000000000000000000006::M1::S": STRUCT: - t: BOOL AccountAddress: @@ -229,9 +229,9 @@ Signer: CONTENT: U8 SIZE: 32 -Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/phantoms.mv --struct A --type-args bool --ignore-phantom-types`: +Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/phantoms.mv --struct A --type-args bool --ignore-phantom-types`: --- -"0000000000000000000000000000000000000000000000000000000000000001::phantoms::A": +"0000000000000000000000000000000000000000000000000000000000000006::phantoms::A": STRUCT: - dummy_field: BOOL AccountAddress: @@ -245,9 +245,9 @@ Signer: CONTENT: U8 SIZE: 32 -Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/phantoms.mv --struct A --type-args u64 --ignore-phantom-types`: +Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/phantoms.mv --struct A --type-args u64 --ignore-phantom-types`: --- -"0000000000000000000000000000000000000000000000000000000000000001::phantoms::A": +"0000000000000000000000000000000000000000000000000000000000000006::phantoms::A": STRUCT: - dummy_field: BOOL AccountAddress: @@ -261,9 +261,9 @@ Signer: CONTENT: U8 SIZE: 32 -Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/phantoms.mv --struct B --type-args bool u8 --ignore-phantom-types`: +Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/phantoms.mv --struct B --type-args bool u8 --ignore-phantom-types`: --- -"0000000000000000000000000000000000000000000000000000000000000001::phantoms::B": +"0000000000000000000000000000000000000000000000000000000000000006::phantoms::B": STRUCT: - dummy_field: BOOL AccountAddress: @@ 
-277,20 +277,20 @@ Signer: CONTENT: U8 SIZE: 32 -Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/phantoms.mv --struct C --type-args u8 u64 u128 --ignore-phantom-types`: +Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/phantoms.mv --struct C --type-args u8 u64 u128 --ignore-phantom-types`: --- -"0000000000000000000000000000000000000000000000000000000000000001::phantoms::A": +"0000000000000000000000000000000000000000000000000000000000000006::phantoms::A": STRUCT: - dummy_field: BOOL -"0000000000000000000000000000000000000000000000000000000000000001::phantoms::B": +"0000000000000000000000000000000000000000000000000000000000000006::phantoms::B": STRUCT: - dummy_field: BOOL -"0000000000000000000000000000000000000000000000000000000000000001::phantoms::C": +"0000000000000000000000000000000000000000000000000000000000000006::phantoms::C": STRUCT: - a: - TYPENAME: "0000000000000000000000000000000000000000000000000000000000000001::phantoms::A" + TYPENAME: "0000000000000000000000000000000000000000000000000000000000000006::phantoms::A" - b: - TYPENAME: "0000000000000000000000000000000000000000000000000000000000000001::phantoms::B" + TYPENAME: "0000000000000000000000000000000000000000000000000000000000000006::phantoms::B" AccountAddress: NEWTYPESTRUCT: TUPLEARRAY: @@ -302,9 +302,9 @@ Signer: CONTENT: U8 SIZE: 32 -Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/phantoms.mv --struct D --type-args bool u64 --ignore-phantom-types`: +Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/phantoms.mv --struct D --type-args bool u64 --ignore-phantom-types`: --- -"0000000000000000000000000000000000000000000000000000000000000001::phantoms::D": +"0000000000000000000000000000000000000000000000000000000000000006::phantoms::D": STRUCT: - v: SEQ: U64 @@ -319,9 +319,9 @@ Signer: CONTENT: U8 SIZE: 32 -Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/phantoms.mv --struct E --type-args bool u64 u128 --ignore-phantom-types`: +Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/phantoms.mv --struct E --type-args bool u64 u128 --ignore-phantom-types`: --- -"0000000000000000000000000000000000000000000000000000000000000001::phantoms::E": +"0000000000000000000000000000000000000000000000000000000000000006::phantoms::E": STRUCT: - v1: SEQ: BOOL @@ -338,7 +338,7 @@ Signer: CONTENT: U8 SIZE: 32 -Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/phantoms.mv --struct E --type-args bool u64 u128 --omit-addresses`: +Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/phantoms.mv --struct E --type-args bool u64 u128 --omit-addresses`: --- AccountAddress: NEWTYPESTRUCT: @@ -357,9 +357,9 @@ Signer: - v2: SEQ: U128 -Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/phantoms.mv --struct E --type-args bool u64 u128 --separator __`: +Command `sandbox generate struct-layouts --module 
storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/phantoms.mv --struct E --type-args bool u64 u128 --separator __`: --- -0000000000000000000000000000000000000000000000000000000000000001__phantoms__E__bool__u64__u128__: +0000000000000000000000000000000000000000000000000000000000000006__phantoms__E__bool__u64__u128__: STRUCT: - v1: SEQ: BOOL @@ -376,7 +376,7 @@ Signer: CONTENT: U8 SIZE: 32 -Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/phantoms.mv --struct E --type-args bool u64 u128 --omit-addresses --separator __`: +Command `sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/phantoms.mv --struct E --type-args bool u64 u128 --omit-addresses --separator __`: --- AccountAddress: NEWTYPESTRUCT: diff --git a/external-crates/move/crates/move-cli/tests/sandbox_tests/generate_struct_layout/args.txt b/external-crates/move/crates/move-cli/tests/sandbox_tests/generate_struct_layout/args.txt index 454f5ad6cc902..d66ce0bf87ba4 100644 --- a/external-crates/move/crates/move-cli/tests/sandbox_tests/generate_struct_layout/args.txt +++ b/external-crates/move/crates/move-cli/tests/sandbox_tests/generate_struct_layout/args.txt @@ -1,39 +1,39 @@ sandbox publish # basics -sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/M2.mv --struct C --type-args u64 -sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/M2.mv --struct C --type-args address -sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/M2.mv --struct C --type-args vector +sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/M2.mv --struct C --type-args u64 +sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/M2.mv --struct C --type-args address +sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/M2.mv --struct C --type-args vector # across modules deep -sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/M1.mv --struct B --type-args bool +sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/M1.mv --struct B --type-args bool # across modules shallow -sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/M1.mv --struct B --type-args bool --shallow +sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/M1.mv --struct B --type-args bool --shallow # with fancy type parameters -sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/M1.mv --struct A --type-args 0x1::M1::S -sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/M1.mv --struct A --type-args vector<0x1::M1::S> +sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/M1.mv --struct A --type-args 
0x6::M1::S +sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/M1.mv --struct A --type-args vector<0x6::M1::S> # same thing, but shallow -sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/M1.mv --struct A --type-args 0x1::M1::S --shallow -sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/M1.mv --struct A --type-args vector<0x1::M1::S> --shallow +sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/M1.mv --struct A --type-args 0x6::M1::S --shallow +sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/M1.mv --struct A --type-args vector<0x6::M1::S> --shallow # recursive type definition -sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/M2.mv --struct C --type-args 0x1::M2::C +sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/M2.mv --struct C --type-args 0x6::M2::C # without --type-args -sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/M1.mv --struct G +sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/M1.mv --struct G # phantom type arguments should not appear in struct keys when --ignore-phantom-types is set -sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/phantoms.mv --struct A --type-args bool --ignore-phantom-types -sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/phantoms.mv --struct A --type-args u64 --ignore-phantom-types -sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/phantoms.mv --struct B --type-args bool u8 --ignore-phantom-types -sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/phantoms.mv --struct C --type-args u8 u64 u128 --ignore-phantom-types -sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/phantoms.mv --struct D --type-args bool u64 --ignore-phantom-types -sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/phantoms.mv --struct E --type-args bool u64 u128 --ignore-phantom-types +sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/phantoms.mv --struct A --type-args bool --ignore-phantom-types +sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/phantoms.mv --struct A --type-args u64 --ignore-phantom-types +sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/phantoms.mv --struct B --type-args bool u8 --ignore-phantom-types +sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/phantoms.mv --struct C 
--type-args u8 u64 u128 --ignore-phantom-types +sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/phantoms.mv --struct D --type-args bool u64 --ignore-phantom-types +sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/phantoms.mv --struct E --type-args bool u64 u128 --ignore-phantom-types # test config options: address omission + separators -sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/phantoms.mv --struct E --type-args bool u64 u128 --omit-addresses +sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/phantoms.mv --struct E --type-args bool u64 u128 --omit-addresses -sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/phantoms.mv --struct E --type-args bool u64 u128 --separator __ +sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/phantoms.mv --struct E --type-args bool u64 u128 --separator __ -sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000001/modules/phantoms.mv --struct E --type-args bool u64 u128 --omit-addresses --separator __ +sandbox generate struct-layouts --module storage/0x0000000000000000000000000000000000000000000000000000000000000006/modules/phantoms.mv --struct E --type-args bool u64 u128 --omit-addresses --separator __ diff --git a/external-crates/move/crates/move-cli/tests/sandbox_tests/generate_struct_layout/sources/M1.move b/external-crates/move/crates/move-cli/tests/sandbox_tests/generate_struct_layout/sources/M1.move index c171a80fb3ccf..85d0f874856f1 100644 --- a/external-crates/move/crates/move-cli/tests/sandbox_tests/generate_struct_layout/sources/M1.move +++ b/external-crates/move/crates/move-cli/tests/sandbox_tests/generate_struct_layout/sources/M1.move @@ -1,6 +1,6 @@ #[allow(unused_field)] -module 0x1::M1 { - use 0x1::M2::C; +module 0x6::M1 { + use 0x6::M2::C; public struct A { f: u64, v: vector, b: B } diff --git a/external-crates/move/crates/move-cli/tests/sandbox_tests/generate_struct_layout/sources/M2.move b/external-crates/move/crates/move-cli/tests/sandbox_tests/generate_struct_layout/sources/M2.move index f3cc297417d58..ad9a4b73a1f93 100644 --- a/external-crates/move/crates/move-cli/tests/sandbox_tests/generate_struct_layout/sources/M2.move +++ b/external-crates/move/crates/move-cli/tests/sandbox_tests/generate_struct_layout/sources/M2.move @@ -1,4 +1,4 @@ -module 0x1::M2 { +module 0x6::M2 { #[allow(unused_field)] public struct C { t: T, b: bool } diff --git a/external-crates/move/crates/move-cli/tests/sandbox_tests/generate_struct_layout/sources/phantoms.move b/external-crates/move/crates/move-cli/tests/sandbox_tests/generate_struct_layout/sources/phantoms.move index 6c177ab6c6454..84ddd1b45ca84 100644 --- a/external-crates/move/crates/move-cli/tests/sandbox_tests/generate_struct_layout/sources/phantoms.move +++ b/external-crates/move/crates/move-cli/tests/sandbox_tests/generate_struct_layout/sources/phantoms.move @@ -1,5 +1,5 @@ #[allow(unused_field)] -module 0x1::phantoms { +module 0x6::phantoms { public struct A {} public struct B {} diff --git a/external-crates/move/crates/move-cli/tests/sandbox_tests/multi_module_publish/args.exp 
b/external-crates/move/crates/move-cli/tests/sandbox_tests/multi_module_publish/args.exp index becab0ef70318..f40154b1961e3 100644 --- a/external-crates/move/crates/move-cli/tests/sandbox_tests/multi_module_publish/args.exp +++ b/external-crates/move/crates/move-cli/tests/sandbox_tests/multi_module_publish/args.exp @@ -1,17 +1,17 @@ Command `sandbox publish --bundle --override-ordering B -v`: Found 2 modules -Invalid multi-module publishing: VMError with status LINKER_ERROR at location UNDEFINED and message Cannot find ModuleId { address: 0000000000000000000000000000000000000000000000000000000000000002, name: Identifier("A") } in data cache +Invalid multi-module publishing: VMError with status LINKER_ERROR at location UNDEFINED and message Cannot find ModuleId { address: 0000000000000000000000000000000000000000000000000000000000000007, name: Identifier("A") } in data cache Command `sandbox publish --bundle --override-ordering B --override-ordering A -v`: Found 2 modules -Invalid multi-module publishing: VMError with status LINKER_ERROR at location UNDEFINED and message Cannot find ModuleId { address: 0000000000000000000000000000000000000000000000000000000000000002, name: Identifier("A") } in data cache +Invalid multi-module publishing: VMError with status LINKER_ERROR at location UNDEFINED and message Cannot find ModuleId { address: 0000000000000000000000000000000000000000000000000000000000000007, name: Identifier("A") } in data cache Command `sandbox publish --bundle --override-ordering A --override-ordering B -v`: Found 2 modules -Publishing a new module 0000000000000000000000000000000000000000000000000000000000000002::A (wrote 121 bytes) -Publishing a new module 0000000000000000000000000000000000000000000000000000000000000002::B (wrote 129 bytes) +Publishing a new module 0000000000000000000000000000000000000000000000000000000000000007::A (wrote 121 bytes) +Publishing a new module 0000000000000000000000000000000000000000000000000000000000000007::B (wrote 129 bytes) Wrote 250 bytes of module ID's and code -Command `sandbox view storage/0x0000000000000000000000000000000000000000000000000000000000000002/modules/A.mv`: +Command `sandbox view storage/0x0000000000000000000000000000000000000000000000000000000000000007/modules/A.mv`: // Move bytecode v6 -module 2.A { +module 7.A { @@ -22,10 +22,10 @@ B0: } } -Command `sandbox view storage/0x0000000000000000000000000000000000000000000000000000000000000002/modules/B.mv`: +Command `sandbox view storage/0x0000000000000000000000000000000000000000000000000000000000000007/modules/B.mv`: // Move bytecode v6 -module 2.B { -use 0000000000000000000000000000000000000000000000000000000000000002::A; +module 7.B { +use 0000000000000000000000000000000000000000000000000000000000000007::A; diff --git a/external-crates/move/crates/move-cli/tests/sandbox_tests/multi_module_publish/args.txt b/external-crates/move/crates/move-cli/tests/sandbox_tests/multi_module_publish/args.txt index d4efa790725fa..4f3882e513eb6 100644 --- a/external-crates/move/crates/move-cli/tests/sandbox_tests/multi_module_publish/args.txt +++ b/external-crates/move/crates/move-cli/tests/sandbox_tests/multi_module_publish/args.txt @@ -7,5 +7,5 @@ sandbox publish --bundle --override-ordering B --override-ordering A -v # expect success: this is the correct order of publishing A and B # with friend relationship sandbox publish --bundle --override-ordering A --override-ordering B -v -sandbox view storage/0x0000000000000000000000000000000000000000000000000000000000000002/modules/A.mv -sandbox view 
storage/0x0000000000000000000000000000000000000000000000000000000000000002/modules/B.mv +sandbox view storage/0x0000000000000000000000000000000000000000000000000000000000000007/modules/A.mv +sandbox view storage/0x0000000000000000000000000000000000000000000000000000000000000007/modules/B.mv diff --git a/external-crates/move/crates/move-cli/tests/sandbox_tests/multi_module_publish/sources/GoodFriends.move b/external-crates/move/crates/move-cli/tests/sandbox_tests/multi_module_publish/sources/GoodFriends.move index 74c3ff22d4e9b..d85292a141d9b 100644 --- a/external-crates/move/crates/move-cli/tests/sandbox_tests/multi_module_publish/sources/GoodFriends.move +++ b/external-crates/move/crates/move-cli/tests/sandbox_tests/multi_module_publish/sources/GoodFriends.move @@ -1,8 +1,8 @@ -module 0x2::A { +module 0x7::A { public(package) fun foo() {} } -module 0x2::B { +module 0x7::B { #[allow(unused_function)] - fun bar() { 0x2::A::foo() } + fun bar() { 0x7::A::foo() } } diff --git a/external-crates/move/crates/move-cli/tests/sandbox_tests/print_stack_trace/args.exp b/external-crates/move/crates/move-cli/tests/sandbox_tests/print_stack_trace/args.exp index a298edc54f52e..2c5f4a883d841 100644 --- a/external-crates/move/crates/move-cli/tests/sandbox_tests/print_stack_trace/args.exp +++ b/external-crates/move/crates/move-cli/tests/sandbox_tests/print_stack_trace/args.exp @@ -18,7 +18,7 @@ Call Stack: [2] - - [1] 0000000000000000000000000000000000000000000000000000000000000002::N::foo + [1] 0000000000000000000000000000000000000000000000000000000000000007::N::foo Code: [2] MutBorrowLoc(0) @@ -35,7 +35,7 @@ Call Stack: [2] - - [2] 0000000000000000000000000000000000000000000000000000000000000002::M::sum + [2] 0000000000000000000000000000000000000000000000000000000000000007::M::sum Code: [9] MoveLoc(0) @@ -51,7 +51,7 @@ Call Stack: [1] - - [3] 0000000000000000000000000000000000000000000000000000000000000002::M::sum + [3] 0000000000000000000000000000000000000000000000000000000000000007::M::sum Code: [9] MoveLoc(0) @@ -67,7 +67,7 @@ Call Stack: [1] - - [4] 0000000000000000000000000000000000000000000000000000000000000002::M::sum + [4] 0000000000000000000000000000000000000000000000000000000000000007::M::sum Code: [9] MoveLoc(0) @@ -106,7 +106,7 @@ Call Stack: [2] 10 - [1] 0000000000000000000000000000000000000000000000000000000000000002::N::foo + [1] 0000000000000000000000000000000000000000000000000000000000000007::N::foo Code: [2] MutBorrowLoc(0) @@ -123,7 +123,7 @@ Call Stack: [2] - - [2] 0000000000000000000000000000000000000000000000000000000000000002::M::sum + [2] 0000000000000000000000000000000000000000000000000000000000000007::M::sum Code: [9] MoveLoc(0) @@ -139,7 +139,7 @@ Call Stack: [1] - - [3] 0000000000000000000000000000000000000000000000000000000000000002::M::sum + [3] 0000000000000000000000000000000000000000000000000000000000000007::M::sum Code: [9] MoveLoc(0) @@ -155,7 +155,7 @@ Call Stack: [1] - - [4] 0000000000000000000000000000000000000000000000000000000000000002::M::sum + [4] 0000000000000000000000000000000000000000000000000000000000000007::M::sum Code: [9] MoveLoc(0) diff --git a/external-crates/move/crates/move-cli/tests/sandbox_tests/print_stack_trace/sources/M.move b/external-crates/move/crates/move-cli/tests/sandbox_tests/print_stack_trace/sources/M.move index 978efd65b36a7..9978b965a73e2 100644 --- a/external-crates/move/crates/move-cli/tests/sandbox_tests/print_stack_trace/sources/M.move +++ b/external-crates/move/crates/move-cli/tests/sandbox_tests/print_stack_trace/sources/M.move @@ 
-1,4 +1,4 @@ -module 0x2::M { +module 0x7::M { use std::debug; public fun sum(n: u64): u64 { diff --git a/external-crates/move/crates/move-cli/tests/sandbox_tests/print_stack_trace/sources/N.move b/external-crates/move/crates/move-cli/tests/sandbox_tests/print_stack_trace/sources/N.move index 08e2467602b16..ca19dbab4223a 100644 --- a/external-crates/move/crates/move-cli/tests/sandbox_tests/print_stack_trace/sources/N.move +++ b/external-crates/move/crates/move-cli/tests/sandbox_tests/print_stack_trace/sources/N.move @@ -1,6 +1,6 @@ #[allow(unused_type_parameter, unused_mut_ref)] -module 0x2::N { - use 0x2::M; +module 0x7::N { + use 0x7::M; public fun foo(): u64 { let mut x = 3; diff --git a/external-crates/move/crates/move-cli/tests/sandbox_tests/print_stack_trace/sources/print_stack_trace.move b/external-crates/move/crates/move-cli/tests/sandbox_tests/print_stack_trace/sources/print_stack_trace.move index cda19d634c1e7..435891160af7f 100644 --- a/external-crates/move/crates/move-cli/tests/sandbox_tests/print_stack_trace/sources/print_stack_trace.move +++ b/external-crates/move/crates/move-cli/tests/sandbox_tests/print_stack_trace/sources/print_stack_trace.move @@ -1,6 +1,6 @@ module 0x42::print_stack_trace { use std::debug; - use 0x2::N; + use 0x7::N; #[allow(unused_mut_ref)] entry fun print_stack_trace() { diff --git a/external-crates/move/crates/move-cli/tests/sandbox_tests/random_test_flag_correctness/sources/m.move b/external-crates/move/crates/move-cli/tests/sandbox_tests/random_test_flag_correctness/sources/m.move index 56aa8549c352b..8e3b42fcd799e 100644 --- a/external-crates/move/crates/move-cli/tests/sandbox_tests/random_test_flag_correctness/sources/m.move +++ b/external-crates/move/crates/move-cli/tests/sandbox_tests/random_test_flag_correctness/sources/m.move @@ -1 +1 @@ -module 0x2::m { } +module 0x7::m { } diff --git a/external-crates/move/crates/move-cli/tests/upload_tests/no_git_remote_package/sources/Dummy.move b/external-crates/move/crates/move-cli/tests/upload_tests/no_git_remote_package/sources/Dummy.move index 45646f1b13bc1..c2eff2a22726a 100644 --- a/external-crates/move/crates/move-cli/tests/upload_tests/no_git_remote_package/sources/Dummy.move +++ b/external-crates/move/crates/move-cli/tests/upload_tests/no_git_remote_package/sources/Dummy.move @@ -1 +1 @@ -module 0x1::Dummy {} +module 0x6::Dummy {} diff --git a/external-crates/move/crates/move-cli/tests/upload_tests/valid_package1/sources/Dummy.move b/external-crates/move/crates/move-cli/tests/upload_tests/valid_package1/sources/Dummy.move index 45646f1b13bc1..c2eff2a22726a 100644 --- a/external-crates/move/crates/move-cli/tests/upload_tests/valid_package1/sources/Dummy.move +++ b/external-crates/move/crates/move-cli/tests/upload_tests/valid_package1/sources/Dummy.move @@ -1 +1 @@ -module 0x1::Dummy {} +module 0x6::Dummy {} diff --git a/external-crates/move/crates/move-cli/tests/upload_tests/valid_package2/sources/Dummy.move b/external-crates/move/crates/move-cli/tests/upload_tests/valid_package2/sources/Dummy.move index 45646f1b13bc1..c2eff2a22726a 100644 --- a/external-crates/move/crates/move-cli/tests/upload_tests/valid_package2/sources/Dummy.move +++ b/external-crates/move/crates/move-cli/tests/upload_tests/valid_package2/sources/Dummy.move @@ -1 +1 @@ -module 0x1::Dummy {} +module 0x6::Dummy {} diff --git a/external-crates/move/crates/move-cli/tests/upload_tests/valid_package3/sources/Dummy.move b/external-crates/move/crates/move-cli/tests/upload_tests/valid_package3/sources/Dummy.move index 
45646f1b13bc1..c2eff2a22726a 100644 --- a/external-crates/move/crates/move-cli/tests/upload_tests/valid_package3/sources/Dummy.move +++ b/external-crates/move/crates/move-cli/tests/upload_tests/valid_package3/sources/Dummy.move @@ -1 +1 @@ -module 0x1::Dummy {} +module 0x6::Dummy {} diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/control_flow/assert_in_while.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/control_flow/assert_in_while.move index 3d2b6cd2edd06..542d831647957 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/control_flow/assert_in_while.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/control_flow/assert_in_while.move @@ -19,7 +19,7 @@ module 0x42::Test { } //# run -module 0x42::m { +module 0x43::m { use 0x42::Test; fun main() { let x = Test::new(); diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/control_flow/basic_named_blocks.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/control_flow/basic_named_blocks.move index 8fd2a8bcd9bd0..ca65c01f38be3 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/control_flow/basic_named_blocks.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/control_flow/basic_named_blocks.move @@ -67,7 +67,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { use 0x42::m; fun main() { assert!(m::t00() == 10, 0); diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/control_flow/if_assignment.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/control_flow/if_assignment.move index 8b2caf9af2eeb..34432dd7b8ce3 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/control_flow/if_assignment.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/control_flow/if_assignment.move @@ -11,7 +11,7 @@ module ReassignCond { } //# run -module 0x42::m { +module 0x43::m { use 0x42::ReassignCond::reassign_cond; fun main() { assert!(reassign_cond(@0x1, false) == @0x1, 42); diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/control_flow/nested_blocks.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/control_flow/nested_blocks.move index c7f9e4505b612..2f058dd56ac41 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/control_flow/nested_blocks.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/control_flow/nested_blocks.move @@ -106,7 +106,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { use 0x42::m; fun main() { assert!(m::t00() == 10, 0); diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/control_flow/nested_loops.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/control_flow/nested_loops.move index 8c2da22652b79..43bf8dd6b1035 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/control_flow/nested_loops.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/control_flow/nested_loops.move @@ -11,7 +11,7 @@ module 0x42::M { } //# run -module 0x42::m { +module 0x43::m { use 0x42::M; fun main() { diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/control_flow/return_in_if_branch_taken_no_else.move 
b/external-crates/move/crates/move-compiler-transactional-tests/tests/control_flow/return_in_if_branch_taken_no_else.move index 17bfb185b6617..982096b42a98a 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/control_flow/return_in_if_branch_taken_no_else.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/control_flow/return_in_if_branch_taken_no_else.move @@ -7,7 +7,7 @@ module 0x42::Test { } //# run -module 0x42::m { +module 0x43::m { use 0x42::Test; fun main() { diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/control_flow/unused_named_blocks.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/control_flow/unused_named_blocks.move index e275a4700a639..b7c83354ec58e 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/control_flow/unused_named_blocks.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/control_flow/unused_named_blocks.move @@ -86,7 +86,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { use 0x42::m; fun main() { assert!(m::t0() == 10, 0); diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/dependencies/dependency_order.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/dependencies/dependency_order.move index 213896bd5a456..93c5ec0c9bc1c 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/dependencies/dependency_order.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/dependencies/dependency_order.move @@ -8,14 +8,14 @@ module 0x42::C { //# publish // names used to try to force an ordering of dependencies -module 0x42::B { +module 0x43::B { public fun foo(): 0x42::C::T { 0x42::C::foo() } } //# publish -module 0x42::A { +module 0x44::A { struct T { t_b: 0x42::C::T, t_c: 0x42::C::T, @@ -23,7 +23,7 @@ module 0x42::A { public fun foo(): T { T { t_c: 0x42::C::foo(), - t_b: 0x42::B::foo() + t_b: 0x43::B::foo() } } } diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/dependencies/transitive_deps.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/dependencies/transitive_deps.move index 7c54abb76332d..244430d35ef4b 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/dependencies/transitive_deps.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/dependencies/transitive_deps.move @@ -7,7 +7,7 @@ module 0x42::X { } //# publish -module 0x42::Y { +module 0x43::Y { use 0x42::X; public fun foo(): X::T { X::new() @@ -16,8 +16,8 @@ //# run -module 0x42::m { -use 0x42::Y; +module 0x44::m { +use 0x43::Y; fun main() { Y::foo(); diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/evaluation_order/binop_aborts.exp b/external-crates/move/crates/move-compiler-transactional-tests/tests/evaluation_order/binop_aborts.exp index 6974f42fec473..f7b3b56aaafad 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/evaluation_order/binop_aborts.exp +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/evaluation_order/binop_aborts.exp @@ -5,7 +5,7 @@ task 1, lines 14-20: Error: Function execution failed with VMError: { major_status: ABORTED, sub_status: Some(2), - location: 0x42::test0, + location: 0x43::test0, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } @@ -25,7 +25,7 @@ task 3, lines 31-37: Error:
Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x42::test2, + location: 0x45::test2, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -35,7 +35,7 @@ task 4, lines 39-45: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x42::test3, + location: 0x46::test3, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -45,7 +45,7 @@ task 5, lines 47-53: Error: Function execution failed with VMError: { major_status: ABORTED, sub_status: Some(0), - location: 0x42::test4, + location: 0x47::test4, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } @@ -55,7 +55,7 @@ task 6, lines 55-61: Error: Function execution failed with VMError: { major_status: ABORTED, sub_status: Some(0), - location: 0x42::test5, + location: 0x48::test5, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } @@ -65,7 +65,7 @@ task 7, lines 63-69: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x42::test37, + location: 0x49::test37, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/evaluation_order/binop_aborts.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/evaluation_order/binop_aborts.move index bd43a4707e20c..c8c856eb5b503 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/evaluation_order/binop_aborts.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/evaluation_order/binop_aborts.move @@ -12,7 +12,7 @@ module 0x42::m { // All of these should abort //# run -module 0x42::test0 { +module 0x43::test0 { // abort with 2 public fun main(): u64 { if (false) {0x42::m::aborter(1)} else {abort 2} + {abort 3; 0} @@ -20,7 +20,7 @@ module 0x42::test0 { } //# run -module 0x42::test1 { +module 0x44::test1 { // abort with 1 public fun main(): u64 { let x = 1; @@ -29,7 +29,7 @@ module 0x42::test1 { } //# run -module 0x42::test2 { +module 0x45::test2 { // aborts with bad math public fun main(): u8 { abort (1u64 - 10u64) @@ -37,7 +37,7 @@ module 0x42::test2 { } //# run -module 0x42::test3 { +module 0x46::test3 { // aborts with bad math public fun main(): u8 { {250u8 + 50u8} + {abort 55; 5u8} @@ -45,7 +45,7 @@ module 0x42::test3 { } //# run -module 0x42::test4 { +module 0x47::test4 { // aborts with 0 public fun test(): u64 { 0x42::m::add3(abort 0, {abort 14; 0}, 0) @@ -53,7 +53,7 @@ module 0x42::test4 { } //# run -module 0x42::test5 { +module 0x48::test5 { //abort with 0 public fun test(): u64 { (abort 0) + {(abort 14); 0} + 0 @@ -61,7 +61,7 @@ module 0x42::test5 { } //# run -module 0x42::test37 { +module 0x49::test37 { // aborts with bad math public fun main(): u8 { {250u8 + 50u8} + {return 55; 5u8} diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/evaluation_order/binop_call_struct.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/evaluation_order/binop_call_struct.move index 40376a2853391..849ab7c8829ab 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/evaluation_order/binop_call_struct.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/evaluation_order/binop_call_struct.move @@ -71,24 +71,24 @@ module 0x42::m { } //# run -module 0x042::test1 { public fun main() { assert!(0x42::m::test1() == 6, 1); } } +module 0x043::test1 { public fun main() { 
assert!(0x42::m::test1() == 6, 1); } } //# run -module 0x042::test2 { public fun main() { assert!(0x42::m::test2() == 20, 2); } } +module 0x044::test2 { public fun main() { assert!(0x42::m::test2() == 20, 2); } } //# run -module 0x042::test3 { public fun main() { assert!(0x42::m::test3() == 6, 3); } } +module 0x045::test3 { public fun main() { assert!(0x42::m::test3() == 6, 3); } } //# run -module 0x042::test4 { public fun main() { assert!(0x42::m::test4() == 6, 4); } } +module 0x046::test4 { public fun main() { assert!(0x42::m::test4() == 6, 4); } } //# run -module 0x042::test5 { public fun main() { assert!(0x42::m::test5() == 6, 5); } } +module 0x047::test5 { public fun main() { assert!(0x42::m::test5() == 6, 5); } } //# run -module 0x042::test6 { public fun main() { assert!(0x42::m::test6() == 28, 6); } } +module 0x048::test6 { public fun main() { assert!(0x42::m::test6() == 28, 6); } } //# run -module 0x042::test7 { public fun main() { assert!(0x42::m::test7() == 6, 7); } } +module 0x049::test7 { public fun main() { assert!(0x42::m::test7() == 6, 7); } } diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/evaluation_order/binop_calls.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/evaluation_order/binop_calls.move index 4199dba601bc6..b5e8541c65368 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/evaluation_order/binop_calls.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/evaluation_order/binop_calls.move @@ -234,119 +234,119 @@ module 0x42::m { // Run the tests separately so that we get all the results //# run -module 0x42::test00 { public fun main() { assert!(0x42::m::test00() == 3, 0); } } +module 0x43::test00 { public fun main() { assert!(0x42::m::test00() == 3, 0); } } //# run -module 0x42::test01 { public fun main() { assert!(0x42::m::test01() == true, 1); } } +module 0x44::test01 { public fun main() { assert!(0x42::m::test01() == true, 1); } } //# run -module 0x42::test02 { public fun main() { assert!(0x42::m::test02() == 31, 2); } } +module 0x45::test02 { public fun main() { assert!(0x42::m::test02() == 31, 2); } } //# run -module 0x42::test03 { public fun main() { assert!(0x42::m::test03() == 27, 3); } } +module 0x46::test03 { public fun main() { assert!(0x42::m::test03() == 27, 3); } } //# run -module 0x42::test04 { public fun main() { assert!(0x42::m::test04() == 27, 4); } } +module 0x47::test04 { public fun main() { assert!(0x42::m::test04() == 27, 4); } } //# run -module 0x42::test05 { public fun main() { assert!(0x42::m::test05() == 14, 5); } } +module 0x48::test05 { public fun main() { assert!(0x42::m::test05() == 14, 5); } } //# run -module 0x42::test06 { public fun main() { assert!(0x42::m::test06() == 37, 6); } } +module 0x49::test06 { public fun main() { assert!(0x42::m::test06() == 37, 6); } } //# run -module 0x42::test07 { public fun main() { assert!(0x42::m::test07() == 20, 7); } } +module 0x4a::test07 { public fun main() { assert!(0x42::m::test07() == 20, 7); } } //# run -module 0x42::test08 { public fun main() { assert!(0x42::m::test08() == 962, 8); } } +module 0x4b::test08 { public fun main() { assert!(0x42::m::test08() == 962, 8); } } //# run -module 0x42::test09 { public fun main() { assert!(0x42::m::test09() == 890, 9); } } +module 0x4c::test09 { public fun main() { assert!(0x42::m::test09() == 890, 9); } } //# run -module 0x42::test10 { public fun main() { assert!(0x42::m::test10() == 746, 10); } } +module 0x4d::test10 { public fun main() { 
assert!(0x42::m::test10() == 746, 10); } } //# run -module 0x42::test11 { public fun main() { assert!(0x42::m::test11() == 1, 11); } } +module 0x4e::test11 { public fun main() { assert!(0x42::m::test11() == 1, 11); } } //# run -module 0x42::test12 { public fun main() { assert!(0x42::m::test12() == 4, 12); } } +module 0x4f::test12 { public fun main() { assert!(0x42::m::test12() == 4, 12); } } //# run -module 0x42::test13 { public fun main() { assert!(0x42::m::test13() == 29, 13); } } +module 0x50::test13 { public fun main() { assert!(0x42::m::test13() == 29, 13); } } //# run -module 0x42::test14 { public fun main() { assert!(0x42::m::test14() == 15, 14); } } +module 0x51::test14 { public fun main() { assert!(0x42::m::test14() == 15, 14); } } //# run -module 0x42::test15 { public fun main() { assert!(0x42::m::test15() == 19, 15); } } +module 0x52::test15 { public fun main() { assert!(0x42::m::test15() == 19, 15); } } //# run -module 0x42::test16 { public fun main() { assert!(0x42::m::test16() == 20, 16); } } +module 0x53::test16 { public fun main() { assert!(0x42::m::test16() == 20, 16); } } //# run -module 0x42::test17 { public fun main() { assert!(0x42::m::test17(54) == 168, 17); } } +module 0x54::test17 { public fun main() { assert!(0x42::m::test17(54) == 168, 17); } } //# run -module 0x42::test18 { public fun main() { assert!(0x42::m::test18(true) == 6, 18); } } +module 0x55::test18 { public fun main() { assert!(0x42::m::test18(true) == 6, 18); } } //# run -module 0x42::test19 { public fun main() { assert!(0x42::m::test18(false) == 6, 18); } } +module 0x56::test19 { public fun main() { assert!(0x42::m::test18(false) == 6, 18); } } //# run -module 0x42::test20 { public fun main() { assert!(0x42::m::test19() == 28, 18); } } +module 0x57::test20 { public fun main() { assert!(0x42::m::test19() == 28, 18); } } //# run -module 0x42::test21 { public fun main() { assert!(0x42::m::test20(true) == true, 20); } } +module 0x58::test21 { public fun main() { assert!(0x42::m::test20(true) == true, 20); } } //# run -module 0x42::test22 { public fun main() { assert!(0x42::m::test20(false) == false, 20); } } +module 0x59::test22 { public fun main() { assert!(0x42::m::test20(false) == false, 20); } } //# run -module 0x42::test23 { public fun main() { assert!(0x42::m::test21() == true, 21); } } +module 0x5a::test23 { public fun main() { assert!(0x42::m::test21() == true, 21); } } //# run -module 0x42::test24 { public fun main() { assert!(0x42::m::test22(3) == vector[3,4,5], 22); } } +module 0x5b::test24 { public fun main() { assert!(0x42::m::test22(3) == vector[3,4,5], 22); } } //# run -module 0x42::test25 { public fun main() { assert!(0x42::m::test23() == 6, 23); } } +module 0x5c::test25 { public fun main() { assert!(0x42::m::test23() == 6, 23); } } //# run -module 0x42::test26 { public fun main() { assert!(0x42::m::test24() == 39, 24); } } +module 0x5d::test26 { public fun main() { assert!(0x42::m::test24() == 39, 24); } } //# run -module 0x42::test27 { public fun main() { assert!(0x42::m::test25() == 99, 25); } } +module 0x5e::test27 { public fun main() { assert!(0x42::m::test25() == 99, 25); } } //# run -module 0x42::test28 { public fun main() { assert!(0x42::m::test26() == 9, 26); } } +module 0x5f::test28 { public fun main() { assert!(0x42::m::test26() == 9, 26); } } //# run -module 0x42::test29 { public fun main() { assert!(0x42::m::test27() == 6, 27); } } +module 0x60::test29 { public fun main() { assert!(0x42::m::test27() == 6, 27); } } //# run -module 0x42::test30 { public fun main() { 
assert!(0x42::m::test28() == 28, 28); } } +module 0x61::test30 { public fun main() { assert!(0x42::m::test28() == 28, 28); } } //# run -module 0x42::test31 { public fun main() { assert!(0x42::m::test29() == 11, 29); } } +module 0x62::test31 { public fun main() { assert!(0x42::m::test29() == 11, 29); } } //# run -module 0x42::test32 { public fun main() { assert!(0x42::m::test30() == 8, 30); } } +module 0x63::test32 { public fun main() { assert!(0x42::m::test30() == 8, 30); } } //# run -module 0x42::test33 { public fun main() { assert!(0x42::m::test31() == 6, 31); } } +module 0x64::test33 { public fun main() { assert!(0x42::m::test31() == 6, 31); } } //# run -module 0x42::test34 { public fun main() { assert!(0x42::m::test32() == 7, 32); } } +module 0x65::test34 { public fun main() { assert!(0x42::m::test32() == 7, 32); } } //# run -module 0x42::test35 { public fun main() { assert!(0x42::m::test33() == 6, 33); } } +module 0x66::test35 { public fun main() { assert!(0x42::m::test33() == 6, 33); } } //# run -module 0x42::test36 { public fun main() { assert!(0x42::m::test34() == 1, 34); } } +module 0x67::test36 { public fun main() { assert!(0x42::m::test34() == 1, 34); } } //# run -module 0x42::test37 { public fun main() { assert!(0x42::m::test35() == 6, 35); } } +module 0x68::test37 { public fun main() { assert!(0x42::m::test35() == 6, 35); } } //# run -module 0x42::test38 { public fun main() { assert!(0x42::m::test36(1) == 4, 36); } } +module 0x69::test38 { public fun main() { assert!(0x42::m::test36(1) == 4, 36); } } diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/evaluation_order/macro_calls.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/evaluation_order/macro_calls.move index c502215d644b4..31fc4f33bc716 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/evaluation_order/macro_calls.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/evaluation_order/macro_calls.move @@ -61,7 +61,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { use 0x42::m; public fun main() { diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/evaluation_order/short_circuiting.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/evaluation_order/short_circuiting.move index 2be7d17b17911..48c71c87abf78 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/evaluation_order/short_circuiting.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/evaluation_order/short_circuiting.move @@ -6,7 +6,7 @@ module 0x42::X { } //# run -module 0x42::m { +module 0x43::m { use 0x42::X; fun main() { let vtrue = true; diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/evaluation_order/short_circuiting_invalid.exp b/external-crates/move/crates/move-compiler-transactional-tests/tests/evaluation_order/short_circuiting_invalid.exp index e8d63c87cbba4..7084ceb74b297 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/evaluation_order/short_circuiting_invalid.exp +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/evaluation_order/short_circuiting_invalid.exp @@ -45,7 +45,7 @@ task 5, lines 43-48: Error: Function execution failed with VMError: { major_status: ABORTED, sub_status: Some(0), - location: 0x6::m, + location: 0xb::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } diff --git 
a/external-crates/move/crates/move-compiler-transactional-tests/tests/evaluation_order/short_circuiting_invalid.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/evaluation_order/short_circuiting_invalid.move index b916d0197c126..87314184e8dda 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/evaluation_order/short_circuiting_invalid.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/evaluation_order/short_circuiting_invalid.move @@ -8,7 +8,7 @@ module 0x42::X { // all should abort //# run -module 2::m { +module 7::m { use 0x42::X; fun main() { false || X::error(); @@ -17,7 +17,7 @@ fun main() { //# run -module 3::m { +module 8::m { use 0x42::X; fun main() { true && X::error(); @@ -25,7 +25,7 @@ fun main() { } //# run -module 4::m { +module 9::m { use 0x42::X; fun main() { X::error() && false; @@ -33,7 +33,7 @@ fun main() { } //# run -module 5::m { +module 0xa::m { use 0x42::X; fun main() { X::error() || true; @@ -41,7 +41,7 @@ fun main() { } //# run -module 6::m { +module 0xb::m { fun main() { false || { abort 0 }; } diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/evaluation_order/struct_arguments.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/evaluation_order/struct_arguments.move index c4aa1bb37e383..75c320034d127 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/evaluation_order/struct_arguments.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/evaluation_order/struct_arguments.move @@ -38,7 +38,7 @@ module 0x42::M { } //# run -module 1::m { +module 6::m { use 0x42::M; fun main() { // arithmetic error @@ -47,7 +47,7 @@ fun main() { } //# run -module 2::m { +module 7::m { use 0x42::M; fun main() { // arithmetic error @@ -56,7 +56,7 @@ fun main() { } //# run -module 3::m { +module 8::m { use 0x42::M; fun main() { // arithmetic error @@ -65,7 +65,7 @@ fun main() { } //# run -module 4::m { +module 9::m { use 0x42::M; fun main() { // arithmetic error @@ -74,7 +74,7 @@ fun main() { } //# run -module 5::m { +module 0xa::m { use 0x42::M; fun main() { // arithmetic error diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/functions/large_enum.exp b/external-crates/move/crates/move-compiler-transactional-tests/tests/functions/large_enum.exp index f32b667741b9f..27d53946a009e 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/functions/large_enum.exp +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/functions/large_enum.exp @@ -1,11 +1,15 @@ -processed 4 tasks +processed 5 tasks -task 2, line 34: +task 2, line 43: //# run 0x42::m::x1 return values: |0|{ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 } -task 3, line 36: +task 3, line 45: //# run 0x42::m::x3 +return values: |1|{ 0 } + +task 4, line 47: +//# run 0x42::m::x4 Error: Function execution failed with VMError: { major_status: VERIFICATION_ERROR, sub_status: None, diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/functions/large_enum.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/functions/large_enum.move index 97813d424a567..121896afebc9d 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/functions/large_enum.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/functions/large_enum.move @@ -21,6 +21,11 @@ public enum X3 { U64(u64), } +public 
enum X4 { + X2(X3, X3, X3), + U64(u64), +} + entry fun x1(): X1 { X1::Big(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0) } @@ -29,8 +34,14 @@ entry fun x3(): X3 { X3::U64(0) } +entry fun x4(): X4 { + X4::U64(0) +} + } //# run 0x42::m::x1 //# run 0x42::m::x3 + +//# run 0x42::m::x4 diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/abc_match.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/abc_match.move index f2fef14b65254..d43af7f068006 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/abc_match.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/abc_match.move @@ -21,7 +21,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { fun main() { use 0x42::m::t0; diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/abc_match_binder.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/abc_match_binder.move index 43f15561691ab..470d83c800bb3 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/abc_match_binder.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/abc_match_binder.move @@ -53,7 +53,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { use 0x42::m::{a, b, c}; fun main() { assert!(a().t0().is_a(), 0); diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/abc_match_mut_ref.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/abc_match_mut_ref.move index def0ff8c56753..e38a388b085cf 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/abc_match_mut_ref.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/abc_match_mut_ref.move @@ -53,7 +53,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { use 0x42::m::{a, b, c}; fun main() { let mut x = 43; diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/abc_match_no_drop.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/abc_match_no_drop.move index 485d9ae7e0f2c..65d5d5b52ff2f 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/abc_match_no_drop.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/abc_match_no_drop.move @@ -19,7 +19,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { fun main() { use 0x42::m::t0; diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/abc_match_ref.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/abc_match_ref.move index dab2b9e3fc77b..21bcaa15c32d5 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/abc_match_ref.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/abc_match_ref.move @@ -31,7 +31,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { use 0x42::m::{a, b, c}; fun main() { let x = 43; diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/abc_match_twice.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/abc_match_twice.move index 5522c27072def..0ea05706e205c 100644 --- 
a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/abc_match_twice.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/abc_match_twice.move @@ -73,7 +73,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { use 0x42::m::{a, b, c}; fun main() { let a = a().t0(); diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/abc_match_value_wildcard.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/abc_match_value_wildcard.move index 8dc2bf151ff94..88bdac7923369 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/abc_match_value_wildcard.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/abc_match_value_wildcard.move @@ -19,7 +19,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { use 0x42::m::t0; fun main() { assert!(t0() == 0, 0); diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/at_pattern_lit.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/at_pattern_lit.move index 500981296f79e..0e3bbfe5df855 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/at_pattern_lit.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/at_pattern_lit.move @@ -13,7 +13,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { use 0x42::m; fun main() { assert!(m::t() == 10); diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/awful_naming_0.exp b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/awful_naming_0.exp new file mode 100644 index 0000000000000..fc5a4436b29d4 --- /dev/null +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/awful_naming_0.exp @@ -0,0 +1 @@ +processed 3 tasks diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/awful_naming_0.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/awful_naming_0.move new file mode 100644 index 0000000000000..fc8525f8dfec3 --- /dev/null +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/awful_naming_0.move @@ -0,0 +1,27 @@ +//# init --edition 2024.beta + +//# publish +module 0x42::m { + + public enum E { + One { a: u64, b: u64, c: u64 }, + Two { d: u64, e: u64, f: u64 }, + } + + fun test() { + let e_0 = E::One { a: 0, b: 1, c: 2 }; + let x = match (e_0) { + E::One { c: a, b: b, a: c } => a + c * b, + E::Two { .. } => abort 0, + }; + assert!(x == 2); + let e_1 = E::Two { d: 0, e: 1, f: 2 }; + let x = match (e_1) { + E::Two { e: d, f: e, d: f } => d * e + f, + E::One { .. 
} => abort 0, + }; + assert!(x == 2); + } +} + +//# run 0x42::m::test diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/awful_naming_1.exp b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/awful_naming_1.exp new file mode 100644 index 0000000000000..fc5a4436b29d4 --- /dev/null +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/awful_naming_1.exp @@ -0,0 +1 @@ +processed 3 tasks diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/awful_naming_1.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/awful_naming_1.move new file mode 100644 index 0000000000000..301729babc167 --- /dev/null +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/awful_naming_1.move @@ -0,0 +1,26 @@ +//# init --edition 2024.beta + +//# publish +module 0x42::m { + + public enum E { + One { a: u64, b: u64, c: u64 }, + Two(u64, u64, u64) + } + + fun do(e: E): u64 { + match (e) { + E::One { c: a, b: c, a: b } | E::Two(c, b, a) => a * (b + c) + } + } + + + fun test() { + let e_0 = E::One { a: 0, b: 1, c: 2 }; + assert!(do(e_0) == 2); + let e_1 = E::Two(0, 1, 2); + assert!(do(e_1) == 2); + } +} + +//# run 0x42::m::test diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/bad_guard_1.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/bad_guard_1.move index 4b17f2574a216..f7749cdbd0d15 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/bad_guard_1.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/bad_guard_1.move @@ -23,7 +23,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { use 0x42::m; fun main() { assert!(m::t0() == 2); diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/bad_guard_2.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/bad_guard_2.move index e9b41b4a71ae7..2034890d461f8 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/bad_guard_2.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/bad_guard_2.move @@ -32,7 +32,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { use 0x42::m; fun main() { assert!(m::t0() == 2); diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/bind_subject.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/bind_subject.move index 1cb8dc09caaa6..e096feec0f80e 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/bind_subject.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/bind_subject.move @@ -22,7 +22,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { use 0x42::m; fun main() { assert!(m::t0() == 3); diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/const_at.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/const_at.move index 12a32f09b92b8..accc670a96450 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/const_at.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/const_at.move @@ -18,7 +18,7 @@ module 0x42::m { } //# run -module 0x42::main { 
+module 0x43::main { fun main() { assert!(0x42::m::test() == 1, 1); diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/const_in_guard.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/const_in_guard.move index 554ee7fcf271c..a6af3009f2381 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/const_in_guard.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/const_in_guard.move @@ -18,7 +18,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { fun main() { assert!(0x42::m::test() == 0, 1); diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/const_matching.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/const_matching.move index d490d2de1ec25..83483820e80d2 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/const_matching.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/const_matching.move @@ -68,7 +68,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { use 0x42::m::{t00, t01, t02, t03, t04, t05}; fun main() { diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/const_mut_guard.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/const_mut_guard.move index 4057beb96619e..cca960b9e7ec8 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/const_mut_guard.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/const_mut_guard.move @@ -19,7 +19,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { fun main() { assert!(0x42::m::test() == 0, 1); diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/const_pair.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/const_pair.move index 46aa732480ab0..173ed79d06ca4 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/const_pair.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/const_pair.move @@ -21,7 +21,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { use 0x42::m; diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/const_result.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/const_result.move index 469cd42f96a0b..c6d7dd57c6fb1 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/const_result.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/const_result.move @@ -63,7 +63,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { use 0x42::m; fun main() { diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/const_self.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/const_self.move index 76111a7d41e87..7ac6bd13e80db 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/const_self.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/const_self.move @@ -62,7 +62,7 @@ module 0x42::m { } //#run -module 0x42::main { +module 0x43::main { use 0x42::m; fun main() { diff --git 
a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/drop_ref.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/drop_ref.move index d1c1a697253a5..62fe202904865 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/drop_ref.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/drop_ref.move @@ -32,7 +32,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { use 0x42::m; fun main() { let f = m::f(); diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/enum_no_tyargs_match.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/enum_no_tyargs_match.move index 44e40db02583c..e40275f38cb48 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/enum_no_tyargs_match.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/enum_no_tyargs_match.move @@ -20,7 +20,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { use 0x42::m; fun main() { assert!(m::t0() == 0); diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/enum_poly_match.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/enum_poly_match.move index d47d86cdf3450..f98e4e949dd70 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/enum_poly_match.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/enum_poly_match.move @@ -22,7 +22,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { use 0x42::m; fun main() { assert!(m::t0() == 0); diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/exhaustive_struct.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/exhaustive_struct.move index 96a0e62ad73da..6c0d69e514890 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/exhaustive_struct.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/exhaustive_struct.move @@ -47,7 +47,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { use 0x42::m; fun main() { assert!(m::t00() == 0); diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/fib.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/fib.move index b7aacde4bc4eb..ef565c7cef2c1 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/fib.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/fib.move @@ -14,7 +14,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { fun main() { use 0x42::m::fib; diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/go_repro.exp b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/go_repro.exp new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/guard.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/guard.move index ea5349c80f7d0..6b2d2ee80b4c3 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/guard.move +++ 
b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/guard.move @@ -21,7 +21,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { use 0x42::m; fun main() { assert!(m::t0() == 3); diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/guard_backtrack.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/guard_backtrack.move index 07f0d18bee2ee..4737f7ffd8a7e 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/guard_backtrack.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/guard_backtrack.move @@ -24,7 +24,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { use 0x42::m; fun main() { assert!(m::t2() == 1); diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/guard_backtrack_2.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/guard_backtrack_2.move index 90448c248a8b6..302911bbe20e0 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/guard_backtrack_2.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/guard_backtrack_2.move @@ -24,7 +24,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { use 0x42::m; fun main() { assert!(m::t0() == 1); diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/guard_backtrack_3.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/guard_backtrack_3.move index 9cd461c9974a7..b6f5b0dac97ae 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/guard_backtrack_3.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/guard_backtrack_3.move @@ -31,7 +31,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { use 0x42::m; fun main() { assert!(m::t0() == 1); diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/guard_backtrack_4.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/guard_backtrack_4.move index c296a00a77e7d..863d70f143ca7 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/guard_backtrack_4.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/guard_backtrack_4.move @@ -40,7 +40,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { use 0x42::m; fun main() { assert!(m::t0() == 1); diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/guard_backtrack_5.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/guard_backtrack_5.move index 638584f64a292..89e33137e7d44 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/guard_backtrack_5.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/guard_backtrack_5.move @@ -26,7 +26,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { use 0x42::m; fun main() { assert!(m::t0() == 6); diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/index_syntax_0.exp b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/index_syntax_0.exp new file mode 100644 index 0000000000000..fc5a4436b29d4 --- /dev/null +++ 
b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/index_syntax_0.exp @@ -0,0 +1 @@ +processed 3 tasks diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/index_syntax_0.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/index_syntax_0.move new file mode 100644 index 0000000000000..c93dc36859e70 --- /dev/null +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/index_syntax_0.move @@ -0,0 +1,35 @@ +//# init --edition 2024.beta + +//# publish +module 0x42::m { + + public enum E has drop { + A { a: vector<u64> }, + B { b: vector<u64> } + } + + #[syntax(index)] + fun e_index(e: &E, ndx: u64): &u64 { + match (e) { + E::A { a } => &a[ndx], + E::B { b } => &b[ndx], + } + } + + fun test() { + let e = E::A { a: vector[0,1,2,3,4] }; + assert!(e[0] == 0); + assert!(e[1] == 1); + assert!(e[2] == 2); + assert!(e[3] == 3); + assert!(e[4] == 4); + let e = E::B { b: vector[0,1,2,3,4] }; + assert!(e[0] == 0); + assert!(e[1] == 1); + assert!(e[2] == 2); + assert!(e[3] == 3); + assert!(e[4] == 4); + } +} + +//# run 0x42::m::test diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/index_syntax_1.exp b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/index_syntax_1.exp new file mode 100644 index 0000000000000..fc5a4436b29d4 --- /dev/null +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/index_syntax_1.exp @@ -0,0 +1 @@ +processed 3 tasks diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/index_syntax_1.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/index_syntax_1.move new file mode 100644 index 0000000000000..3943c13738eeb --- /dev/null +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/index_syntax_1.move @@ -0,0 +1,34 @@ +//# init --edition 2024.beta + +//# publish +module 0x42::m { + + public enum E has drop { + A { a: vector<u64> }, + B { b: vector<u64> } + } + + #[syntax(index)] + fun e_index(e: &E, ndx: u64): &u64 { + match (e) { + E::A { a } | E::B { b: a } => &a[ndx], + } + } + + fun test() { + let e = E::A { a: vector[0,1,2,3,4] }; + assert!(e[0] == 0); + assert!(e[1] == 1); + assert!(e[2] == 2); + assert!(e[3] == 3); + assert!(e[4] == 4); + let e = E::B { b: vector[0,1,2,3,4] }; + assert!(e[0] == 0); + assert!(e[1] == 1); + assert!(e[2] == 2); + assert!(e[3] == 3); + assert!(e[4] == 4); + } +} + +//# run 0x42::m::test diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/index_syntax_2.exp b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/index_syntax_2.exp new file mode 100644 index 0000000000000..fc5a4436b29d4 --- /dev/null +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/index_syntax_2.exp @@ -0,0 +1 @@ +processed 3 tasks diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/index_syntax_2.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/index_syntax_2.move new file mode 100644 index 0000000000000..965a2a1627f76 --- /dev/null +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/index_syntax_2.move @@ -0,0 +1,39 @@ +//# init --edition 2024.beta + +//# publish +module 0x42::m { + + public enum E has drop { + A { a: vector<u64> }, + B { b: vector<u64> } + } + + fun get_vec(e: &E): &vector<u64> { + match (e) 
{ + E::A { a } => a, + E::B { b } => b, + } + } + + #[syntax(index)] + fun e_index(e: &E, ndx: u64): &u64 { + &e.get_vec()[ndx] + } + + fun test() { + let e = E::A { a: vector[0,1,2,3,4] }; + assert!(e[0] == 0); + assert!(e[1] == 1); + assert!(e[2] == 2); + assert!(e[3] == 3); + assert!(e[4] == 4); + let e = E::B { b: vector[0,1,2,3,4] }; + assert!(e[0] == 0); + assert!(e[1] == 1); + assert!(e[2] == 2); + assert!(e[3] == 3); + assert!(e[4] == 4); + } +} + +//# run 0x42::m::test diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/lit_match.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/lit_match.move index 010bc3ddba90d..2b7dd140c8189 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/lit_match.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/lit_match.move @@ -13,7 +13,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { use 0x42::m; fun main() { assert!(m::fib(5) == 8); diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/lit_mut_borrow_match.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/lit_mut_borrow_match.move index 8d19030758263..0008dd9762b74 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/lit_mut_borrow_match.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/lit_mut_borrow_match.move @@ -13,7 +13,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { use 0x42::m; fun main() { let mut n = 5; diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/macro_match.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/macro_match.move index 85cb1c9dd6a94..aa90f9b91f0e4 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/macro_match.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/macro_match.move @@ -24,7 +24,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { use 0x42::m; fun main() { m::maybe_macro_call_2(); diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/match_lit_mut_bind.exp b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/match_lit_mut_bind.exp new file mode 100644 index 0000000000000..fc5a4436b29d4 --- /dev/null +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/match_lit_mut_bind.exp @@ -0,0 +1 @@ +processed 3 tasks diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/match_lit_mut_bind.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/match_lit_mut_bind.move new file mode 100644 index 0000000000000..340b1798822e1 --- /dev/null +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/match_lit_mut_bind.move @@ -0,0 +1,26 @@ +//# init --edition 2024.beta + +//# publish +module 0x42::m { + public fun test() { + let x = 10; + let y = match (x) { + 0 => 10, + mut x => { + x = x + 10; + x + } + }; + assert!(y == 20); + let y = match (x) { + 0 => 10, + mut y => { + y = y + 10; + y + } + }; + assert!(y == 20); + } +} + +//# run 0x42::m::test diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/match_mut_lit.move 
b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/match_mut_lit.move index de1606604a6e4..1fac5f2ead3e5 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/match_mut_lit.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/match_mut_lit.move @@ -13,7 +13,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { use 0x42::m; fun main() { let mut n = 10; diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/match_type.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/match_type.move index 4cb21aca404ed..8f7169f5c8a46 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/match_type.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/match_type.move @@ -24,7 +24,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { use 0x42::m; fun main() { assert!(m::t0() == 0); diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/multi_or.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/multi_or.move index 95014baafc814..3db53129d4142 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/multi_or.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/multi_or.move @@ -31,7 +31,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { use 0x42::m; fun main() { assert!(m::t0() == 0); diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/mut_field_alias.exp b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/mut_field_alias.exp new file mode 100644 index 0000000000000..fc5a4436b29d4 --- /dev/null +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/mut_field_alias.exp @@ -0,0 +1 @@ +processed 3 tasks diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/mut_field_alias.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/mut_field_alias.move new file mode 100644 index 0000000000000..c4b8f503cacff --- /dev/null +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/mut_field_alias.move @@ -0,0 +1,22 @@ +//# init --edition 2024.beta + +//# publish +module 0x42::m { + public enum E { + X { x: u64 }, + Y { y: u64 } + } + + public fun test() { + let e = E::Y { y: 1 }; + let value = match (e) { + E::X { mut x } | E::Y { y: mut x } => { + x = x + 1; + x + } + }; + assert!(value == 2); + } +} + +//# run 0x42::m::test diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/nested_at.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/nested_at.move index dd83a34345491..f34efb5ecefef 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/nested_at.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/nested_at.move @@ -12,7 +12,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { use 0x42::m; fun main() { assert!(m::t() == 20); diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/rhs_shadow_loop_label_0.exp b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/rhs_shadow_loop_label_0.exp new 
file mode 100644 index 0000000000000..fc5a4436b29d4 --- /dev/null +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/rhs_shadow_loop_label_0.exp @@ -0,0 +1 @@ +processed 3 tasks diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/rhs_shadow_loop_label_0.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/rhs_shadow_loop_label_0.move new file mode 100644 index 0000000000000..2b7935cf74791 --- /dev/null +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/rhs_shadow_loop_label_0.move @@ -0,0 +1,56 @@ +//# init --edition 2024.beta + +//# publish +module 0x42::m { + + public enum Action has drop { + Stop, + MoveTo { x: u64, y: u64 }, + ChangeSpeed(u64), + } + + public fun test() { + // Define a list of actions + let actions: vector<Action> = vector[ + Action::MoveTo { x: 10, y: 20 }, + Action::ChangeSpeed(40), + Action::MoveTo { x: 10, y: 20 }, + Action::Stop, + Action::ChangeSpeed(40), + ]; + + let mut total_moves = 0; + + 'loop_label: loop { + let mut i = 0; + while (i < actions.length()) { + let action = &actions[i]; + + match (action) { + Action::MoveTo { x, y } => { + 'loop_label: loop { + total_moves = total_moves + *x + *y; + break 'loop_label + }; + }, + Action::ChangeSpeed(speed) => { + 'loop_label: loop { + total_moves = total_moves + *speed; + break 'loop_label + }; + }, + Action::Stop => { + break 'loop_label + }, + }; + i = i + 1; + }; + }; + + actions.destroy!(|_| {}); + + assert!(total_moves == 100); + } +} + +//# run 0x42::m::test diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/rhs_shadow_loop_label_1.exp b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/rhs_shadow_loop_label_1.exp new file mode 100644 index 0000000000000..fc5a4436b29d4 --- /dev/null +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/rhs_shadow_loop_label_1.exp @@ -0,0 +1 @@ +processed 3 tasks diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/rhs_shadow_loop_label_1.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/rhs_shadow_loop_label_1.move new file mode 100644 index 0000000000000..76874f413e91c --- /dev/null +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/rhs_shadow_loop_label_1.move @@ -0,0 +1,50 @@ +//# init --edition 2024.beta + +//# publish +module 0x42::m { + + public enum Action has drop { + Stop, + MoveTo { x: u64, y: u64 }, + ChangeSpeed(u64), + } + + public fun test() { + // Define a list of actions + let actions: vector<Action> = vector[ + Action::MoveTo { x: 10, y: 20 }, + Action::ChangeSpeed(20), + Action::MoveTo { x: 10, y: 20 }, + Action::Stop, + Action::ChangeSpeed(40), + ]; + + let mut total_moves = 0; + + 'loop_label: loop { + let mut i = 0; + while (i < actions.length()) { + let action = &actions[i]; + + match (action) { + Action::MoveTo { x: speed, y: _ } | Action::ChangeSpeed(speed) => { + 'loop_label: loop { + total_moves = total_moves + *speed; + break 'loop_label + }; + }, + Action::Stop => { + break 'loop_label + }, + }; + i = i + 1; + }; + }; + + actions.destroy!(|_| {}); + + assert!(total_moves == 40); + } +} + +//# run 0x42::m::test diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/rhs_shadow_loop_label_2.exp b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/rhs_shadow_loop_label_2.exp new 
file mode 100644 index 0000000000000..fc5a4436b29d4 --- /dev/null +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/rhs_shadow_loop_label_2.exp @@ -0,0 +1 @@ +processed 3 tasks diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/rhs_shadow_loop_label_2.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/rhs_shadow_loop_label_2.move new file mode 100644 index 0000000000000..8088c6de28c60 --- /dev/null +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/rhs_shadow_loop_label_2.move @@ -0,0 +1,58 @@ +//# init --edition 2024.beta + +//# publish +module 0x42::m { + + public enum Action has copy, drop { + Stop, + MoveTo { x: u64, y: u64 }, + ChangeSpeed(u64), + } + + public fun speed(action: &Action): u64 { + match (action) { + Action::MoveTo { x: speed, y: _ } => *speed, + Action::ChangeSpeed(speed) => *speed, + Action::Stop => abort 0, + } + } + + public fun test() { + // Define a list of actions + let actions: vector<Action> = vector[ + Action::MoveTo { x: 10, y: 20 }, + Action::ChangeSpeed(20), + Action::MoveTo { x: 10, y: 20 }, + Action::Stop, + Action::ChangeSpeed(40), + ]; + + let mut total_moves = 0; + + 'loop_label: loop { + let mut i = 0; + while (i < actions.length()) { + let action = actions[i]; + + match (action) { + action @ Action::MoveTo { .. } | action @ Action::ChangeSpeed(_) => { + 'loop_label: loop { + total_moves = total_moves + action.speed(); + break 'loop_label + }; + }, + Action::Stop => { + break 'loop_label + }, + }; + i = i + 1; + }; + }; + + actions.destroy!(|_| {}); + + assert!(total_moves == 40); + } +} + +//# run 0x42::m::test diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/rhs_shadow_loop_label_3.exp b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/rhs_shadow_loop_label_3.exp new file mode 100644 index 0000000000000..fc5a4436b29d4 --- /dev/null +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/rhs_shadow_loop_label_3.exp @@ -0,0 +1 @@ +processed 3 tasks diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/rhs_shadow_loop_label_3.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/rhs_shadow_loop_label_3.move new file mode 100644 index 0000000000000..95e1eaef76511 --- /dev/null +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/rhs_shadow_loop_label_3.move @@ -0,0 +1,58 @@ +//# init --edition 2024.beta + +//# publish +module 0x42::m { + + public enum Action has copy, drop { + Stop, + MoveTo { x: u64, y: u64 }, + ChangeSpeed(u64), + } + + public fun speed(action: &Action): u64 { + match (action) { + Action::MoveTo { x: speed, y: _ } => *speed, + Action::ChangeSpeed(speed) => *speed, + Action::Stop => abort 0, + } + } + + public fun test() { + // Define a list of actions + let actions: vector<Action> = vector[ + Action::MoveTo { x: 10, y: 20 }, + Action::ChangeSpeed(20), + Action::MoveTo { x: 10, y: 20 }, + Action::Stop, + Action::ChangeSpeed(40), + ]; + + let mut total_moves = 0; + + 'loop_label: loop { + let mut i = 0; + while (i < actions.length()) { + let action = actions[i]; + + match (action) { + action @ (Action::MoveTo { x: _, y: _ } | Action::ChangeSpeed(..)) => { + 'loop_label: loop { + total_moves = total_moves + action.speed(); + break 'loop_label + }; + }, + Action::Stop => { + break 'loop_label + }, + }; + i = i + 1; + }; + }; + + 
actions.destroy!(|_| {}); + + assert!(total_moves == 40); + } +} + +//# run 0x42::m::test diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/struct_match.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/struct_match.move index 6c21f625acc69..d972ca775df86 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/struct_match.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/struct_match.move @@ -291,7 +291,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { fun main() { use 0x42::m::{make_nbase, make_pbase, make_npoly, make_ppoly, make_nempty, make_pempty}; diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/struct_match_mut.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/struct_match_mut.move index 8a350506dd518..43ee6ad0d24d1 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/struct_match_mut.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/matching/struct_match_mut.move @@ -102,7 +102,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { fun main() { use 0x42::m::{make_nbase, make_pbase, make_npoly, make_ppoly}; diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/operators/arithmetic_operators_u32.exp b/external-crates/move/crates/move-compiler-transactional-tests/tests/operators/arithmetic_operators_u32.exp index 30105f51054fa..e4b8e8b7a361e 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/operators/arithmetic_operators_u32.exp +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/operators/arithmetic_operators_u32.exp @@ -5,7 +5,7 @@ task 1, lines 17-23: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x2::m, + location: 0x7::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -15,7 +15,7 @@ task 2, lines 25-31: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x4::m, + location: 0x9::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -25,7 +25,7 @@ task 4, lines 48-54: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x6::m, + location: 0x11::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -35,7 +35,7 @@ task 5, lines 56-62: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x7::m, + location: 0x12::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -45,7 +45,7 @@ task 7, lines 79-85: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x9::m, + location: 0x14::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -55,7 +55,7 @@ task 10, lines 112-119: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0xc::m, + location: 0x17::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -65,7 +65,7 @@ task 11, lines 121-126: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0xd::m, + location: 0x18::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -85,7 +85,7 @@ task 14, 
lines 152-158: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0xf::m, + location: 0x20::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -95,7 +95,7 @@ task 15, lines 160-166: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x10::m, + location: 0x21::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -105,7 +105,7 @@ task 16, lines 168-174: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x11::m, + location: 0x22::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/operators/arithmetic_operators_u32.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/operators/arithmetic_operators_u32.move index 8bff88c4aea32..be36ed036aa38 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/operators/arithmetic_operators_u32.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/operators/arithmetic_operators_u32.move @@ -1,5 +1,5 @@ //# run -module 1::m { +module 6::m { fun main() { assert!(0u32 + 0u32 == 0u32, 1000); assert!(0u32 + 1u32 == 1u32, 1001); @@ -15,7 +15,7 @@ fun main() { } //# run -module 2::m { +module 7::m { fun main() { // should fail 1u32 + 4294967295u32; @@ -23,7 +23,7 @@ fun main() { } //# run -module 4::m { +module 9::m { fun main() { // should fail 4294967295u32 + 4294967295u32; @@ -31,7 +31,7 @@ fun main() { } //# run -module 5::m { +module 10::m { fun main() { assert!(0u32 - 0u32 == 0u32, 2000); assert!(1u32 - 0u32 == 1u32, 2001); @@ -46,7 +46,7 @@ fun main() { } //# run -module 6::m { +module 0x11::m { fun main() { // should fail 0u32 - 1u32; @@ -54,7 +54,7 @@ fun main() { } //# run -module 7::m { +module 0x12::m { fun main() { // should fail 54u32 - 100u32; @@ -63,7 +63,7 @@ fun main() { //# run -module 8::m { +module 0x13::m { fun main() { assert!(0u32 * 0u32 == 0u32, 3000); assert!(1u32 * 0u32 == 0u32, 3001); @@ -77,7 +77,7 @@ fun main() { } //# run -module 9::m { +module 0x14::m { fun main() { // should fail 1147483647u32 * 2147483647u32; @@ -85,7 +85,7 @@ fun main() { } //# run -module 10::m { +module 0x15::m { fun main() { // should fail 1147483647u32 * 2u32; @@ -95,7 +95,7 @@ fun main() { //# run -module 11::m { +module 0x16::m { fun main() { assert!(0u32 / 1u32 == 0u32, 4000); assert!(1u32 / 1u32 == 1u32, 4001); @@ -110,7 +110,7 @@ fun main() { } //# run -module 12::m { +module 0x17::m { fun main() { // should fail 0u32 / 0u32; @@ -119,7 +119,7 @@ fun main() { // check: ARITHMETIC_ERROR //# run -module 13::m { +module 0x18::m { fun main() { 1u32 / 0u32; } @@ -135,7 +135,7 @@ fun main() { //# run -module 14::m { +module 0x19::m { fun main() { assert!(0u32 % 1u32 == 0u32, 5000); assert!(1u32 % 1u32 == 0u32, 5001); @@ -150,7 +150,7 @@ fun main() { } //# run -module 15::m { +module 0x20::m { fun main() { // should fail 0u32 % 0u32; @@ -158,7 +158,7 @@ fun main() { } //# run -module 16::m { +module 0x21::m { fun main() { // should fail 1u32 % 0u32; @@ -166,7 +166,7 @@ fun main() { } //# run -module 17::m { +module 0x22::m { fun main() { // should fail 4294967294u32 % 0u32; diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/operators/bitwise_operators.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/operators/bitwise_operators.move index 
7382f201b1daa..683d364e30813 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/operators/bitwise_operators.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/operators/bitwise_operators.move @@ -1,5 +1,5 @@ //# run -module 1::m { +module 6::m { fun main() { assert!(0u8 & 0u8 == 0u8, 1000); assert!(0u64 & 0u64 == 0u64, 1001); @@ -100,7 +100,7 @@ fun main() { //# run -module 2::m { +module 7::m { fun main() { assert!(0u8 ^ 0u8 == 0u8, 3000); assert!(0u64 ^ 0u64 == 0u64, 3001); diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/operators/eq_refs.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/operators/eq_refs.move index 2636e3aa71043..e8b967861f3f0 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/operators/eq_refs.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/operators/eq_refs.move @@ -172,7 +172,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { fun main() { let s_val = 0x42::m::make_s(42); diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/operators/shift_operators.exp b/external-crates/move/crates/move-compiler-transactional-tests/tests/operators/shift_operators.exp index 9142a02ea9bb7..7bc92a9d98f5f 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/operators/shift_operators.exp +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/operators/shift_operators.exp @@ -5,7 +5,7 @@ task 0, lines 1-8: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x1::m, + location: 0x6::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -15,7 +15,7 @@ task 1, lines 10-16: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x2::m, + location: 0x7::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -25,7 +25,7 @@ task 2, lines 18-24: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x3::m, + location: 0x8::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -35,7 +35,7 @@ task 3, lines 26-32: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x4::m, + location: 0x9::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -45,7 +45,7 @@ task 4, lines 34-40: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x5::m, + location: 0xa::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -55,7 +55,7 @@ task 5, lines 42-48: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x6::m, + location: 0xb::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -65,7 +65,7 @@ task 6, lines 50-56: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x7::m, + location: 0xc::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -75,7 +75,7 @@ task 7, lines 58-64: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x8::m, + location: 0xd::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -85,7 +85,7 @@ task 8, lines 66-72: Error: Function execution failed with VMError: { 
major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x9::m, + location: 0xe::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -95,7 +95,7 @@ task 9, lines 74-83: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0xa::m, + location: 0xf::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/operators/shift_operators.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/operators/shift_operators.move index cc77e580ae2ff..669fbb1b4ff19 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/operators/shift_operators.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/operators/shift_operators.move @@ -1,6 +1,6 @@ //# run // Number of bits shifted >= total number of bits in the number. -module 1::m { +module 6::m { fun main() { // should fail 0u8 << 8u8; @@ -8,7 +8,7 @@ fun main() { } //# run -module 2::m { +module 7::m { fun main() { // should fail 0u64 << 64u8; @@ -16,7 +16,7 @@ fun main() { } //# run -module 3::m { +module 8::m { fun main() { // should fail 0u128 << 128u8; @@ -24,7 +24,7 @@ fun main() { } //# run -module 4::m { +module 9::m { fun main() { // should fail 0u8 >> 8u8; @@ -32,7 +32,7 @@ fun main() { } //# run -module 5::m { +module 0xa::m { fun main() { // should fail 0u64 >> 64u8; @@ -40,7 +40,7 @@ fun main() { } //# run -module 6::m { +module 0xb::m { fun main() { // should fail 0u128 >> 128u8; @@ -48,7 +48,7 @@ fun main() { } //# run -module 7::m { +module 0xc::m { fun main() { // should fail 0u16 << 16u8; @@ -56,7 +56,7 @@ fun main() { } //# run -module 8::m { +module 0xd::m { fun main() { // should fail 0u32 << 32u8; @@ -64,7 +64,7 @@ fun main() { } //# run -module 9::m { +module 0xe::m { fun main() { // should fail 0u16 >> 16u8; @@ -72,7 +72,7 @@ fun main() { } //# run -module 10::m { +module 0xf::m { fun main() { // should fail 0u32 >> 32u8; @@ -82,7 +82,7 @@ fun main() { // Shifting 0 results in 0. //# run -module 11::m { +module 0x10::m { fun main() { assert!(0u8 << 4u8 == 0u8, 1000); assert!(0u64 << 1u8 == 0u64, 1001); @@ -103,7 +103,7 @@ fun main() { // Shifting by 0 bits results in the same number. //# run -module 12::m { +module 0x11::m { fun main() { assert!(100u8 << 0u8 == 100u8, 2000); assert!(43u64 << 0u8 == 43u64, 2001); @@ -146,7 +146,7 @@ fun main() { // Underflowing results in 0. //# run -module 13::m { +module 0x13::m { fun main() { assert!(1234u64 >> 63u8 == 0u64, 4000); assert!(3u8 >> 5u8 == 0u8, 4001); @@ -161,7 +161,7 @@ fun main() { // Overflowing results are truncated. //# run -module 14::m { +module 0x14::m { fun main() { assert!(7u8 << 7u8 == 128u8, 5000); assert!(7u64 << 62u8 == 13835058055282163712u64, 5001); @@ -176,7 +176,7 @@ fun main() { // Some random tests. 
//# run -module 15::m { +module 0x15::m { fun main() { assert!(54u8 << 3u8 == 176u8, 6000); assert!(5u8 << 2u8 == 20u8, 6001); diff --git a/external-crates/move/crates/move-compiler-transactional-tests/tests/syntax/index_syntax.move b/external-crates/move/crates/move-compiler-transactional-tests/tests/syntax/index_syntax.move index 74fe6cd5c3207..e114bd460ce85 100644 --- a/external-crates/move/crates/move-compiler-transactional-tests/tests/syntax/index_syntax.move +++ b/external-crates/move/crates/move-compiler-transactional-tests/tests/syntax/index_syntax.move @@ -38,7 +38,7 @@ module 0x42::m { } //# run -module 0x42::main { +module 0x43::main { use 0x42::m; fun main() { diff --git a/external-crates/move/crates/move-compiler/src/cfgir/visitor.rs b/external-crates/move/crates/move-compiler/src/cfgir/visitor.rs index 8e698014173dc..0f48ad41568b1 100644 --- a/external-crates/move/crates/move-compiler/src/cfgir/visitor.rs +++ b/external-crates/move/crates/move-compiler/src/cfgir/visitor.rs @@ -51,7 +51,7 @@ pub trait AbstractInterpreterVisitor { } //************************************************************************************************** -// simple ast visitor +// CFGIR visitor //************************************************************************************************** pub trait CFGIRVisitorConstructor { @@ -768,3 +768,76 @@ impl AbstractInterpreterVisitor for V { SimpleAbsIntConstructor::verify(self, env, context, cfg) } } + +//************************************************************************************************** +// utils +//************************************************************************************************** + +pub fn calls_special_function(special: &[(&str, &str, &str)], cfg: &ImmForwardCFG) -> bool { + cfg.blocks().values().any(|block| { + block + .iter() + .any(|cmd| calls_special_function_command(special, cmd)) + }) +} + +pub fn calls_special_function_command( + special: &[(&str, &str, &str)], + sp!(_, cmd_): &Command, +) -> bool { + use H::Command_ as C; + match cmd_ { + C::Assign(_, _, e) + | C::Abort(_, e) + | C::Return { exp: e, .. } + | C::IgnoreAndPop { exp: e, .. } + | C::JumpIf { cond: e, .. } + | C::VariantSwitch { subject: e, .. } => calls_special_function_exp(special, e), + C::Mutate(el, er) => { + calls_special_function_exp(special, el) || calls_special_function_exp(special, er) + } + C::Jump { .. } => false, + C::Break(_) | C::Continue(_) => panic!("ICE break/continue not translated to jumps"), + } +} + +#[growing_stack] +pub fn calls_special_function_exp(special: &[(&str, &str, &str)], e: &Exp) -> bool { + use H::UnannotatedExp_ as E; + match &e.exp.value { + E::Unit { .. } + | E::Move { .. } + | E::Copy { .. } + | E::Constant(_) + | E::ErrorConstant { .. 
} + | E::BorrowLocal(_, _) + | E::Unreachable + | E::UnresolvedError + | E::Value(_) => false, + + E::Freeze(e) + | E::Dereference(e) + | E::UnaryExp(_, e) + | E::Borrow(_, e, _, _) + | E::Cast(e, _) => calls_special_function_exp(special, e), + + E::BinopExp(el, _, er) => { + calls_special_function_exp(special, el) || calls_special_function_exp(special, er) + } + + E::ModuleCall(call) => { + special.iter().any(|(a, m, f)| call.is(*a, *m, *f)) + || call + .arguments + .iter() + .any(|arg| calls_special_function_exp(special, arg)) + } + E::Vector(_, _, _, es) | E::Multiple(es) => { + es.iter().any(|e| calls_special_function_exp(special, e)) + } + + E::Pack(_, _, es) | E::PackVariant(_, _, _, es) => es + .iter() + .any(|(_, _, e)| calls_special_function_exp(special, e)), + } +} diff --git a/external-crates/move/crates/move-compiler/src/expansion/ast.rs b/external-crates/move/crates/move-compiler/src/expansion/ast.rs index 4b01284de103a..2b804dab8a395 100644 --- a/external-crates/move/crates/move-compiler/src/expansion/ast.rs +++ b/external-crates/move/crates/move-compiler/src/expansion/ast.rs @@ -937,32 +937,6 @@ impl fmt::Display for Visibility { } } -impl fmt::Display for Type_ { - fn fmt(&self, f: &mut fmt::Formatter) -> std::fmt::Result { - use Type_::*; - match self { - UnresolvedError => write!(f, "_"), - Apply(n, tys) => { - write!(f, "{}", n)?; - if !tys.is_empty() { - write!(f, "<")?; - write!(f, "{}", format_comma(tys))?; - write!(f, ">")?; - } - Ok(()) - } - Ref(mut_, ty) => write!(f, "&{}{}", if *mut_ { "mut " } else { "" }, ty), - Fun(args, result) => write!(f, "({}):{}", format_comma(args), result), - Unit => write!(f, "()"), - Multiple(tys) => { - write!(f, "(")?; - write!(f, "{}", format_comma(tys))?; - write!(f, ")") - } - } - } -} - impl std::fmt::Display for Value_ { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { use Value_ as V; @@ -976,7 +950,17 @@ impl std::fmt::Display for Value_ { V::U128(u) => write!(f, "{}", u), V::U256(u) => write!(f, "{}", u), V::Bool(b) => write!(f, "{}", b), - V::Bytearray(v) => write!(f, "{:?}", v), // Good enough? 
+ // TODO preserve the user's original string + V::Bytearray(v) => { + write!(f, "vector[")?; + for (idx, byte) in v.iter().enumerate() { + if idx != 0 { + write!(f, ", ")?; + } + write!(f, "{}", byte)?; + } + write!(f, "]") + } } } } diff --git a/external-crates/move/crates/move-compiler/src/hlir/translate.rs b/external-crates/move/crates/move-compiler/src/hlir/translate.rs index e06d45b0b26ce..34cec72077020 100644 --- a/external-crates/move/crates/move-compiler/src/hlir/translate.rs +++ b/external-crates/move/crates/move-compiler/src/hlir/translate.rs @@ -203,17 +203,15 @@ impl<'env> Context<'env> { } } - pub fn record_named_block_binders( + pub fn enter_named_block( &mut self, block_name: H::BlockLabel, binders: Vec<H::LValue>, + ty: H::Type, ) { self.named_block_binders .add(block_name, binders) .expect("ICE reused block name"); - } - - pub fn record_named_block_type(&mut self, block_name: H::BlockLabel, ty: H::Type) { self.named_block_types .add(block_name, ty) .expect("ICE reused named block name"); @@ -226,6 +224,15 @@ impl<'env> Context<'env> { .clone() } + pub fn exit_named_block(&mut self, block_name: H::BlockLabel) { + self.named_block_binders + .remove(&block_name) + .expect("Tried to leave an unknown block"); + self.named_block_types + .remove(&block_name) + .expect("Tried to leave an unknown block"); + } + pub fn lookup_named_block_type(&mut self, block_name: &H::BlockLabel) -> Option<H::Type> { self.named_block_types.get(block_name).cloned() } @@ -904,8 +911,7 @@ fn tail( } else { maybe_freeze(context, block, expected_type.cloned(), bound_exp) }; - context.record_named_block_binders(name, binders); - context.record_named_block_type(name, out_type.clone()); + context.enter_named_block(name, binders, out_type.clone()); let (loop_body, has_break) = process_loop_body(context, &name, *body); block.push_back(sp( eloc, @@ -915,6 +921,7 @@ fn tail( block: loop_body, }, )); + context.exit_named_block(name); if has_break { Some(result) } else { @@ -936,11 +943,10 @@ fn tail( } else { maybe_freeze(context, block, expected_type.cloned(), bound_exp) }; - context.record_named_block_binders(name, binders.clone()); - context.record_named_block_type(name, out_type.clone()); + context.enter_named_block(name, binders.clone(), out_type.clone()); let mut body_block = make_block!(); let final_exp = tail_block(context, &mut body_block, Some(&out_type), seq); - final_exp.map(|exp| { + let result = final_exp.map(|exp| { bind_value_in_block(context, binders, Some(out_type), &mut body_block, exp); block.push_back(sp( eloc, @@ -950,7 +956,9 @@ fn tail( }, )); result - }) + }); + context.exit_named_block(name); + result } E::Block((_, seq)) => tail_block(context, block, expected_type, seq), @@ -1234,8 +1242,7 @@ fn value( } => { let name = translate_block_label(name); let (binders, bound_exp) = make_binders(context, eloc, out_type.clone()); - context.record_named_block_binders(name, binders); - context.record_named_block_type(name, out_type.clone()); + context.enter_named_block(name, binders, out_type.clone()); let (loop_body, has_break) = process_loop_body(context, &name, *body); block.push_back(sp( eloc, @@ -1245,11 +1252,13 @@ fn value( block: loop_body, }, )); - if has_break { + let result = if has_break { bound_exp } else { make_exp(HE::Unreachable) - } + }; + context.exit_named_block(name); + result } e_ @ E::Loop { ..
} => { statement(context, block, T::exp(in_type.clone(), sp(eloc, e_))); @@ -1258,8 +1267,7 @@ fn value( E::NamedBlock(name, (_, seq)) => { let name = translate_block_label(name); let (binders, bound_exp) = make_binders(context, eloc, out_type.clone()); - context.record_named_block_binders(name, binders.clone()); - context.record_named_block_type(name, out_type.clone()); + context.enter_named_block(name, binders.clone(), out_type.clone()); let mut body_block = make_block!(); let final_exp = value_block(context, &mut body_block, Some(&out_type), eloc, seq); bind_value_in_block(context, binders, Some(out_type), &mut body_block, final_exp); @@ -1270,6 +1278,7 @@ fn value( block: body_block, }, )); + context.exit_named_block(name); bound_exp } E::Block((_, seq)) => value_block(context, block, Some(&out_type), eloc, seq), @@ -1851,8 +1860,7 @@ fn statement(context: &mut Context, block: &mut Block, e: T::Exp) { let cond = (cond_block, Box::new(cond_exp)); let name = translate_block_label(name); // While loops can still use break and continue so we build them dummy binders. - context.record_named_block_binders(name, vec![]); - context.record_named_block_type(name, tunit(eloc)); + context.enter_named_block(name, vec![], tunit(eloc)); let mut body_block = make_block!(); statement(context, &mut body_block, *body); block.push_back(sp( @@ -1863,13 +1871,13 @@ fn statement(context: &mut Context, block: &mut Block, e: T::Exp) { block: body_block, }, )); + context.exit_named_block(name); } E::Loop { name, body, .. } => { let name = translate_block_label(name); let out_type = type_(context, ty.clone()); let (binders, bound_exp) = make_binders(context, eloc, out_type.clone()); - context.record_named_block_binders(name, binders); - context.record_named_block_type(name, out_type); + context.enter_named_block(name, binders, out_type); let (loop_body, has_break) = process_loop_body(context, &name, *body); block.push_back(sp( eloc, @@ -1882,6 +1890,7 @@ fn statement(context: &mut Context, block: &mut Block, e: T::Exp) { if has_break { make_ignore_and_pop(block, bound_exp); } + context.exit_named_block(name); } E::Block((_, seq)) => statement_block(context, block, seq), E::Return(rhs) => { diff --git a/external-crates/move/crates/move-compiler/src/linters/loop_without_exit.rs b/external-crates/move/crates/move-compiler/src/linters/loop_without_exit.rs new file mode 100644 index 0000000000000..a3466328323ed --- /dev/null +++ b/external-crates/move/crates/move-compiler/src/linters/loop_without_exit.rs @@ -0,0 +1,67 @@ +//! Detects empty loop expressions, including `while(true) {}` and `loop {}` without exit mechanisms, highlighting potential infinite loops. +//! Aims to identify and warn against loops that may lead to hangs or excessive resource consumption due to lack of content. +//! Encourages adding meaningful logic within loops or ensuring proper exit conditions to improve code reliability and maintainability. 
+use super::StyleCodes; +use crate::{ + diag, + diagnostics::WarningFilters, + shared::CompilationEnv, + typing::{ + ast::{self as T, UnannotatedExp_}, + visitor::{has_special_exp, TypingVisitorConstructor, TypingVisitorContext}, + }, +}; + +pub struct LoopWithoutExit; + +pub struct Context<'a> { + env: &'a mut CompilationEnv, +} + +impl TypingVisitorConstructor for LoopWithoutExit { + type Context<'a> = Context<'a>; + + fn context<'a>(env: &'a mut CompilationEnv, _program: &T::Program) -> Self::Context<'a> { + Context { env } + } +} + +impl TypingVisitorContext for Context<'_> { + fn add_warning_filter_scope(&mut self, filter: WarningFilters) { + self.env.add_warning_filter_scope(filter) + } + fn pop_warning_filter_scope(&mut self) { + self.env.pop_warning_filter_scope() + } + + fn visit_exp_custom(&mut self, exp: &T::Exp) -> bool { + // we do not care about `while` since there is another lint that handles reporting + // that `while (true)` should be `loop` + let UnannotatedExp_::Loop { + name: _, + has_break: false, + body, + } = &exp.exp.value + else { + return false; + }; + // TODO maybe move this to Loop? Bit of an n^2 problem here in the worst case + if has_return(body) { + return false; + } + let diag = diag!( + StyleCodes::LoopWithoutExit.diag_info(), + ( + exp.exp.loc, + "'loop' without 'break' or 'return'. \ + This code will loop until it errors, e.g. reaching an 'abort' or running out of gas" + ) + ); + self.env.add_diag(diag); + false + } +} + +fn has_return(e: &T::Exp) -> bool { + has_special_exp(e, |e| matches!(e.exp.value, UnannotatedExp_::Return(_))) +} diff --git a/external-crates/move/crates/move-compiler/src/linters/mod.rs b/external-crates/move/crates/move-compiler/src/linters/mod.rs index b5673095ddfa5..67e97f7c0d5bb 100644 --- a/external-crates/move/crates/move-compiler/src/linters/mod.rs +++ b/external-crates/move/crates/move-compiler/src/linters/mod.rs @@ -13,7 +13,9 @@ use crate::{ pub mod abort_constant; pub mod constant_naming; +pub mod loop_without_exit; pub mod meaningless_math_operation; +pub mod unnecessary_conditional; pub mod unnecessary_while_loop; pub mod unneeded_return; @@ -103,7 +105,7 @@ lints!( ), ( WhileTrueToLoop, - LinterDiagnosticCategory::Complexity, + LinterDiagnosticCategory::Style, "while_true", "unnecessary 'while (true)', replace with 'loop'" ), @@ -125,6 +127,18 @@ lints!( "abort_without_constant", "'abort' or 'assert' without named constant" ), + ( + LoopWithoutExit, + LinterDiagnosticCategory::Suspicious, + "loop_without_exit", + "'loop' without 'break' or 'return'" + ), + ( + UnnecessaryConditional, + LinterDiagnosticCategory::Complexity, + "unnecessary_conditional", + "'if' expression can be removed" + ) ); pub const ALLOW_ATTR_CATEGORY: &str = "lint"; @@ -157,6 +171,8 @@ pub fn linter_visitors(level: LintLevel) -> Vec<Visitor> { meaningless_math_operation::MeaninglessMathOperation.visitor(), unneeded_return::UnneededReturnVisitor.visitor(), abort_constant::AssertAbortNamedConstants.visitor(), + loop_without_exit::LoopWithoutExit.visitor(), + unnecessary_conditional::UnnecessaryConditional.visitor(), ] } } diff --git a/external-crates/move/crates/move-compiler/src/linters/unnecessary_conditional.rs b/external-crates/move/crates/move-compiler/src/linters/unnecessary_conditional.rs new file mode 100644 index 0000000000000..490b8eb57f33c --- /dev/null +++ b/external-crates/move/crates/move-compiler/src/linters/unnecessary_conditional.rs @@ -0,0 +1,118 @@ +// Copyright (c) The Move Contributors +// SPDX-License-Identifier: Apache-2.0 + +//! 
Detects `if (c) e1 else e2` expressions where the conditional can be removed, suggesting the condition or value be used directly +use move_proc_macros::growing_stack; + +use crate::expansion::ast::Value; +use crate::linters::StyleCodes; +use crate::{ + diag, + diagnostics::WarningFilters, + expansion::ast::Value_, + shared::CompilationEnv, + typing::{ + ast::{self as T, SequenceItem_, UnannotatedExp_}, + visitor::{TypingVisitorConstructor, TypingVisitorContext}, + }, +}; + +pub struct UnnecessaryConditional; + +pub struct Context<'a> { + env: &'a mut CompilationEnv, +} + +impl TypingVisitorConstructor for UnnecessaryConditional { + type Context<'a> = Context<'a>; + + fn context<'a>(env: &'a mut CompilationEnv, _program: &T::Program) -> Self::Context<'a> { + Context { env } + } +} + +impl TypingVisitorContext for Context<'_> { + fn add_warning_filter_scope(&mut self, filter: WarningFilters) { + self.env.add_warning_filter_scope(filter) + } + fn pop_warning_filter_scope(&mut self) { + self.env.pop_warning_filter_scope() + } + + fn visit_exp_custom(&mut self, exp: &T::Exp) -> bool { + let UnannotatedExp_::IfElse(_, etrue, efalse) = &exp.exp.value else { + return false; + }; + let Some(vtrue) = extract_value(etrue) else { + return false; + }; + let Some(vfalse) = extract_value(efalse) else { + return false; + }; + + match (&vtrue.value, &vfalse.value) { + (Value_::Bool(v1 @ true), Value_::Bool(false)) + | (Value_::Bool(v1 @ false), Value_::Bool(true)) => { + let negation = if *v1 { "" } else { "!" }; + let msg = format!( + "Detected an unnecessary conditional expression 'if (cond)'. Consider using \ + the condition directly, i.e. '{negation}cond'", + ); + self.env.add_diag(diag!( + StyleCodes::UnnecessaryConditional.diag_info(), + (exp.exp.loc, msg) + )); + } + (v1, v2) if v1 == v2 => { + let msg = + "Detected a redundant conditional expression 'if (..) v else v', where each \ + branch results in the same value 'v'. Consider using the value directly"; + self.env.add_diag(diag!( + StyleCodes::UnnecessaryConditional.diag_info(), + (exp.exp.loc, msg), + (vtrue.loc, "This value"), + (vfalse.loc, "is the same as this value"), + )); + } + _ => (), + } + + // if let (Some(if_bool), Some(else_bool)) = ( + // extract_bool_literal_from_block(if_block), + // extract_bool_literal_from_block(else_block), + // ) { + // if if_bool != else_bool { + // let msg = format!( + // "Detected a redundant conditional expression `if (...) {} else {}`. 
Consider using the condition directly.", + // if_bool, else_bool + // ); + // let diag = diag!( + // StyleCodes::UnnecessaryConditional.diag_info(), + // (exp.exp.loc, msg) + // ); + + // self.env.add_diag(diag); + // } + // } + // } + false + } +} + +#[growing_stack] +fn extract_value(block: &T::Exp) -> Option<&Value> { + match &block.exp.value { + UnannotatedExp_::Block((_, seq)) if seq.len() == 1 => extract_value_seq_item(&seq[0]), + UnannotatedExp_::Value(v) => Some(v), + UnannotatedExp_::Annotate(e, _) => extract_value(e), + _ => None, + } +} + +#[growing_stack] +fn extract_value_seq_item(sp!(_, item_): &T::SequenceItem) -> Option<&Value> { + match &item_ { + SequenceItem_::Declare(_) | SequenceItem_::Bind(_, _, _) => None, + SequenceItem_::Seq(e) => extract_value(e), + } +} diff --git a/external-crates/move/crates/move-compiler/src/shared/program_info.rs b/external-crates/move/crates/move-compiler/src/shared/program_info.rs index 2ceadbb461cce..1c2f2ce588fc7 100644 --- a/external-crates/move/crates/move-compiler/src/shared/program_info.rs +++ b/external-crates/move/crates/move-compiler/src/shared/program_info.rs @@ -3,9 +3,7 @@ use std::{collections::BTreeMap, fmt::Display, sync::Arc}; -use move_ir_types::location::Loc; -use move_symbol_pool::Symbol; - +use self::known_attributes::AttributePosition; use crate::{ expansion::ast::{AbilitySet, Attributes, ModuleIdent, TargetKind, Visibility}, naming::ast::{ @@ -15,11 +13,12 @@ use crate::{ parser::ast::{ConstantName, DatatypeName, Field, FunctionName, VariantName}, shared::unique_map::UniqueMap, shared::*, + sui_mode::info::SuiInfo, typing::ast::{self as T}, FullyCompiledProgram, }; - -use self::known_attributes::AttributePosition; +use move_ir_types::location::Loc; +use move_symbol_pool::Symbol; #[derive(Debug, Clone)] pub struct FunctionInfo { @@ -52,6 +51,14 @@ pub struct ModuleInfo { pub constants: UniqueMap<ConstantName, ConstantInfo>, } +#[derive(Debug, Clone)] +pub struct ProgramInfo<const AFTER_TYPING: bool> { + pub modules: UniqueMap<ModuleIdent, ModuleInfo>, + pub sui_flavor_info: Option<SuiInfo>, +} +pub type NamingProgramInfo = ProgramInfo<false>; +pub type TypingProgramInfo = ProgramInfo<true>; + #[derive(Debug, Clone, PartialEq, Eq)] pub enum DatatypeKind { Struct, @@ -66,13 +73,6 @@ pub enum NamedMemberKind { Constant, } -#[derive(Debug, Clone)] -pub struct ProgramInfo<const AFTER_TYPING: bool> { - pub modules: UniqueMap<ModuleIdent, ModuleInfo>, -} -pub type NamingProgramInfo = ProgramInfo<false>; -pub type TypingProgramInfo = ProgramInfo<true>; - macro_rules! program_info { ($pre_compiled_lib:ident, $prog:ident, $pass:ident, $module_use_funs:ident) => {{ let all_modules = $prog.modules.key_cloned_iter(); @@ -118,12 +118,16 @@ macro_rules! 
program_info { } } } - ProgramInfo { modules } + ProgramInfo { + modules, + sui_flavor_info: None, + } }}; } impl TypingProgramInfo { pub fn new( + env: &CompilationEnv, pre_compiled_lib: Option<Arc<FullyCompiledProgram>>, modules: &UniqueMap<ModuleIdent, T::ModuleDefinition>, mut module_use_funs: BTreeMap<ModuleIdent, N::UseFuns>, @@ -133,7 +137,18 @@ impl TypingProgramInfo { } let mut module_use_funs = Some(&mut module_use_funs); let prog = Prog { modules }; - program_info!(pre_compiled_lib, prog, typing, module_use_funs) + let pcl = pre_compiled_lib.clone(); + let mut info = program_info!(pcl, prog, typing, module_use_funs); + // TODO we should really have an idea of root package flavor here + // but this feels roughly equivalent + if env + .package_configs() + .any(|(_, config)| config.flavor == Flavor::Sui) + { + let sui_flavor_info = SuiInfo::new(pre_compiled_lib, modules, &info); + info.sui_flavor_info = Some(sui_flavor_info); + }; + info } } diff --git a/external-crates/move/crates/move-compiler/src/sui_mode/info.rs b/external-crates/move/crates/move-compiler/src/sui_mode/info.rs new file mode 100644 index 0000000000000..2bfaedeafef51 --- /dev/null +++ b/external-crates/move/crates/move-compiler/src/sui_mode/info.rs @@ -0,0 +1,308 @@ +// Copyright (c) The Move Contributors +// SPDX-License-Identifier: Apache-2.0 + +//! ProgramInfo extension for Sui Flavor +//! Contains information that may be expensive to compute and is needed only for Sui + +use std::{ + collections::{BTreeMap, BTreeSet}, + sync::Arc, +}; + +use crate::{ + expansion::ast::{Fields, ModuleIdent}, + naming::ast as N, + parser::ast::{Ability_, DatatypeName, Field}, + shared::{ + program_info::{DatatypeKind, TypingProgramInfo}, + unique_map::UniqueMap, + }, + sui_mode::{ + OBJECT_MODULE_NAME, SUI_ADDR_NAME, TRANSFER_FUNCTION_NAME, TRANSFER_MODULE_NAME, + UID_TYPE_NAME, + }, + typing::{ast as T, visitor::TypingVisitorContext}, + FullyCompiledProgram, +}; +use move_ir_types::location::Loc; +use move_proc_macros::growing_stack; + +#[derive(Debug, Clone, Copy)] +pub enum UIDHolder { + /// is `sui::object::UID` + IsUID, + /// holds UID directly as one of the fields + Direct { field: Field, ty: Loc }, + /// holds a type which in turn `Direct`ly or `Indirect`ly holds UID + Indirect { field: Field, ty: Loc, uid: Loc }, +} + +#[derive(Debug, Clone, Copy)] +pub enum TransferKind { + /// The object has store + PublicTransfer(Loc), + /// transferred within the module to an address via `sui::transfer::transfer` + PrivateTransfer(Loc), +} + +#[derive(Debug, Clone)] +pub struct SuiInfo { + /// All types that contain a UID, directly or indirectly + /// This requires a DFS traversal of type declarations + pub uid_holders: BTreeMap<(ModuleIdent, DatatypeName), UIDHolder>, + /// All types that either have store or are transferred privately + pub transferred: BTreeMap<(ModuleIdent, DatatypeName), TransferKind>, +} + +impl SuiInfo { + pub fn new( + pre_compiled_lib: Option<Arc<FullyCompiledProgram>>, + modules: &UniqueMap<ModuleIdent, T::ModuleDefinition>, + info: &TypingProgramInfo, + ) -> Self { + assert!(info.sui_flavor_info.is_none()); + let uid_holders = all_uid_holders(info); + let transferred = all_transferred(pre_compiled_lib, modules, info); + Self { + uid_holders, + transferred, + } + } +} + +/// DFS traversal to find all UID holders +fn all_uid_holders(info: &TypingProgramInfo) -> BTreeMap<(ModuleIdent, DatatypeName), UIDHolder> { + fn merge_uid_holder(u1: UIDHolder, u2: UIDHolder) -> UIDHolder { + match (u1, u2) { + (u @ UIDHolder::IsUID, _) | (_, u @ UIDHolder::IsUID) => u, + (d @ UIDHolder::Direct { .. }, _) | (_, d @ UIDHolder::Direct { .. 
}) => d, + (u1, _) => u1, + } + } + + fn merge_uid_holder_opt( + u1_opt: Option<UIDHolder>, + u2_opt: Option<UIDHolder>, + ) -> Option<UIDHolder> { + match (u1_opt, u2_opt) { + (Some(u1), Some(u2)) => Some(merge_uid_holder(u1, u2)), + (o1, o2) => o1.or(o2), + } + } + + // returns, for each type parameter position, whether that position is phantom + fn phantom_positions( + info: &TypingProgramInfo, + sp!(_, tn_): &N::TypeName, + ) -> Vec<bool> { + match tn_ { + N::TypeName_::Multiple(n) => vec![false; *n], + N::TypeName_::Builtin(sp!(_, b_)) => b_ + .tparam_constraints(Loc::invalid()) + .into_iter() + .map(|_| false) + .collect(), + N::TypeName_::ModuleType(m, n) => { + let ty_params = match info.datatype_kind(m, n) { + DatatypeKind::Struct => &info.struct_definition(m, n).type_parameters, + DatatypeKind::Enum => &info.enum_definition(m, n).type_parameters, + }; + ty_params.iter().map(|tp| tp.is_phantom).collect() + } + } + } + + #[growing_stack] + fn visit_ty( + info: &TypingProgramInfo, + visited: &mut BTreeSet<(ModuleIdent, DatatypeName)>, + uid_holders: &mut BTreeMap<(ModuleIdent, DatatypeName), UIDHolder>, + sp!(_, ty_): &N::Type, + ) -> Option<UIDHolder> { + match ty_ { + N::Type_::Unit + | N::Type_::Param(_) + | N::Type_::Var(_) + | N::Type_::Fun(_, _) + | N::Type_::Anything + | N::Type_::UnresolvedError => None, + + N::Type_::Ref(_, inner) => visit_ty(info, visited, uid_holders, inner), + + N::Type_::Apply(_, sp!(_, tn_), _) + if tn_.is(SUI_ADDR_NAME, OBJECT_MODULE_NAME, UID_TYPE_NAME) => + { + Some(UIDHolder::IsUID) + } + + N::Type_::Apply(_, tn, tys) => { + let phantom_positions = phantom_positions(info, tn); + let ty_args_holder = tys + .iter() + .zip(phantom_positions) + .filter(|(_t, is_phantom)| !*is_phantom) + .map(|(t, _is_phantom)| visit_ty(info, visited, uid_holders, t)) + .fold(None, merge_uid_holder_opt); + let tn_holder = if let N::TypeName_::ModuleType(m, n) = tn.value { + visit_decl(info, visited, uid_holders, m, n); + uid_holders.get(&(m, n)).copied() + } else { + None + }; + merge_uid_holder_opt(ty_args_holder, tn_holder) + } + } + } + + #[growing_stack] + fn visit_fields( + info: &TypingProgramInfo, + visited: &mut BTreeSet<(ModuleIdent, DatatypeName)>, + uid_holders: &mut BTreeMap<(ModuleIdent, DatatypeName), UIDHolder>, + fields: &Fields<N::Type>, + ) -> Option<UIDHolder> { + fields + .key_cloned_iter() + .map(|(field, (_, ty))| { + Some(match visit_ty(info, visited, uid_holders, ty)? 
{ UIDHolder::IsUID => UIDHolder::Direct { field, ty: ty.loc }, + UIDHolder::Direct { field, ty: uid } + | UIDHolder::Indirect { field, uid, ty: _ } => UIDHolder::Indirect { + field, + ty: ty.loc, + uid, + }, + }) + }) + .fold(None, merge_uid_holder_opt) + } + + #[growing_stack] + fn visit_decl( + info: &TypingProgramInfo, + visited: &mut BTreeSet<(ModuleIdent, DatatypeName)>, + uid_holders: &mut BTreeMap<(ModuleIdent, DatatypeName), UIDHolder>, + mident: ModuleIdent, + tn: DatatypeName, + ) { + if visited.contains(&(mident, tn)) { + return; + } + visited.insert((mident, tn)); + + let uid_holder_opt = match info.datatype_kind(&mident, &tn) { + DatatypeKind::Struct => match &info.struct_definition(&mident, &tn).fields { + N::StructFields::Defined(_, fields) => { + visit_fields(info, visited, uid_holders, fields) + } + N::StructFields::Native(_) => None, + }, + DatatypeKind::Enum => info + .enum_definition(&mident, &tn) + .variants + .iter() + .filter_map(|(_, _, v)| match &v.fields { + N::VariantFields::Defined(_, fields) => Some(fields), + N::VariantFields::Empty => None, + }) + .map(|fields| visit_fields(info, visited, uid_holders, fields)) + .fold(None, merge_uid_holder_opt), + }; + if let Some(uid_holder) = uid_holder_opt { + uid_holders.insert((mident, tn), uid_holder); + } + } + + // iterate over all struct/enum declarations + let visited = &mut BTreeSet::new(); + let mut uid_holders = BTreeMap::new(); + for (mident, mdef) in info.modules.key_cloned_iter() { + let datatypes = mdef + .structs + .key_cloned_iter() + .map(|(n, _)| n) + .chain(mdef.enums.key_cloned_iter().map(|(n, _)| n)); + for tn in datatypes { + visit_decl(info, visited, &mut uid_holders, mident, tn) + } + } + uid_holders +} + +fn all_transferred( + pre_compiled_lib: Option<Arc<FullyCompiledProgram>>, + modules: &UniqueMap<ModuleIdent, T::ModuleDefinition>, + info: &TypingProgramInfo, +) -> BTreeMap<(ModuleIdent, DatatypeName), TransferKind> { + let mut transferred = BTreeMap::new(); + for (mident, minfo) in info.modules.key_cloned_iter() { + for (s, sdef) in minfo.structs.key_cloned_iter() { + if !sdef.abilities.has_ability_(Ability_::Key) { + continue; + } + let Some(store_loc) = sdef.abilities.ability_loc_(Ability_::Store) else { + continue; + }; + transferred.insert((mident, s), TransferKind::PublicTransfer(store_loc)); + } + + let mdef = match modules.get(&mident) { + Some(mdef) => mdef, + None => pre_compiled_lib + .as_ref() + .unwrap() + .typing + .modules + .get(&mident) + .unwrap(), + }; + for (_, _, fdef) in &mdef.functions { + add_private_transfers(&mut transferred, fdef); + } + } + transferred +} + +fn add_private_transfers( + transferred: &mut BTreeMap<(ModuleIdent, DatatypeName), TransferKind>, + fdef: &T::Function, +) { + struct TransferVisitor<'a> { + transferred: &'a mut BTreeMap<(ModuleIdent, DatatypeName), TransferKind>, + } + impl<'a> TypingVisitorContext for TransferVisitor<'a> { + fn add_warning_filter_scope(&mut self, _: crate::diagnostics::WarningFilters) { + unreachable!("no warning filters in function bodies") + } + + fn pop_warning_filter_scope(&mut self) { + unreachable!("no warning filters in function bodies") + } + + fn visit_exp_custom(&mut self, e: &T::Exp) -> bool { + use T::UnannotatedExp_ as E; + let E::ModuleCall(call) = &e.exp.value else { + return false; + }; + if !call.is(SUI_ADDR_NAME, TRANSFER_MODULE_NAME, TRANSFER_FUNCTION_NAME) { + return false; + } + let [sp!(_, ty)] = call.type_arguments.as_slice() else { + return false; + }; + let Some(n) = ty.type_name().and_then(|t| t.value.datatype_name()) else { + return false; + }; + 
self.transferred + .entry(n) + .or_insert_with(|| TransferKind::PrivateTransfer(e.exp.loc)); + false + } + } + + let mut visitor = TransferVisitor { transferred }; + match &fdef.body.value { + T::FunctionBody_::Native | T::FunctionBody_::Macro => (), + T::FunctionBody_::Defined(seq) => visitor.visit_seq(seq), + } +} diff --git a/external-crates/move/crates/move-compiler/src/sui_mode/linters/share_owned.rs b/external-crates/move/crates/move-compiler/src/sui_mode/linters/share_owned.rs index 77fdc12f8a213..a3e44f87a3a2d 100644 --- a/external-crates/move/crates/move-compiler/src/sui_mode/linters/share_owned.rs +++ b/external-crates/move/crates/move-compiler/src/sui_mode/linters/share_owned.rs @@ -6,13 +6,15 @@ //! fresh object and share it within the same function use move_ir_types::location::*; +use move_proc_macros::growing_stack; use crate::{ cfgir::{ absint::JoinResult, cfg::ImmForwardCFG, visitor::{ - LocalState, SimpleAbsInt, SimpleAbsIntConstructor, SimpleDomain, SimpleExecutionContext, + calls_special_function, LocalState, SimpleAbsInt, SimpleAbsIntConstructor, + SimpleDomain, SimpleExecutionContext, }, CFGContext, }, @@ -21,11 +23,21 @@ use crate::{ codes::{custom, DiagnosticInfo, Severity}, Diagnostic, Diagnostics, }, + expansion::ast::ModuleIdent, hlir::ast::{ - Exp, LValue, LValue_, Label, ModuleCall, SingleType, Type, Type_, UnannotatedExp_, Var, + BaseType, BaseType_, Exp, LValue, LValue_, Label, ModuleCall, SingleType, SingleType_, + Type, TypeName_, Type_, UnannotatedExp_, Var, + }, + naming::ast::BuiltinTypeName_, + parser::ast::{Ability_, DatatypeName}, + shared::{ + program_info::{DatatypeKind, TypingProgramInfo}, + CompilationEnv, Identifier, + }, + sui_mode::{ + info::{SuiInfo, TransferKind}, + SUI_ADDR_NAME, TX_CONTEXT_MODULE_NAME, TX_CONTEXT_TYPE_NAME, }, - parser::ast::Ability_, - shared::{CompilationEnv, Identifier}, }; use std::collections::BTreeMap; @@ -52,7 +64,10 @@ const SHARE_OWNED_DIAG: DiagnosticInfo = custom( //************************************************************************************************** pub struct ShareOwnedVerifier; + +pub struct ShareOwnedVerifierAI<'a> { + info: &'a TypingProgramInfo, +} #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Default)] pub enum Value { @@ -78,12 +93,12 @@ pub struct State { //************************************************************************************************** impl SimpleAbsIntConstructor for ShareOwnedVerifier { - type AI<'a> = ShareOwnedVerifierAI; + type AI<'a> = ShareOwnedVerifierAI<'a>; fn new<'a>( _env: &CompilationEnv, context: &'a CFGContext<'a>, - _cfg: &ImmForwardCFG, + cfg: &ImmForwardCFG, _init_state: &mut <Self::AI<'a> as SimpleAbsInt>::State, ) -> Option<Self::AI<'a>> { if context.attributes.is_test_or_test_only() @@ -95,11 +110,14 @@ impl SimpleAbsIntConstructor for ShareOwnedVerifier { { return None; } - Some(ShareOwnedVerifierAI) + if !calls_special_function(SHARE_FUNCTIONS, cfg) { + return None; + } + Some(ShareOwnedVerifierAI { info: context.info }) } } -impl SimpleAbsInt for ShareOwnedVerifierAI { +impl<'a> SimpleAbsInt for ShareOwnedVerifierAI<'a> { type State = State; type ExecutionContext = ExecutionContext; @@ -148,37 +166,28 @@ impl SimpleAbsInt for ShareOwnedVerifierAI { if SHARE_FUNCTIONS .iter() .any(|(addr, module, fun)| f.is(addr, module, fun)) - && args[0] != Value::FreshObj + && args.first().is_some_and(|v| v != &Value::FreshObj) { - let msg = "Potential abort from a (potentially) owned object created by a different transaction."; - let uid_msg 
= "Creating a fresh object and sharing it within the same function will ensure this does not abort."; - let mut d = diag!( - SHARE_OWNED_DIAG, - (*loc, msg), - (f.arguments[0].exp.loc, uid_msg) - ); - if let Value::NotFreshObj(l) = args[0] { - d.add_secondary_label((l, "A potentially owned object coming from here")) - } - context.add_diag(d) + self.maybe_warn_share_owned(context, loc, f, args) } + let all_args_pure = !f.arguments.iter().any(|a| self.can_hold_obj(&a.ty)); Some(match &return_ty.value { Type_::Unit => vec![], Type_::Single(t) => { - let v = if is_obj_type(t) { - Value::NotFreshObj(t.loc) - } else { + let v = if all_args_pure || !is_obj_type(t) { Value::Other + } else { + Value::NotFreshObj(t.loc) }; vec![v] } Type_::Multiple(types) => types .iter() .map(|t| { - if is_obj_type(t) { - Value::NotFreshObj(t.loc) - } else { + if all_args_pure || !is_obj_type(t) { Value::Other + } else { + Value::NotFreshObj(t.loc) } }) .collect(), @@ -210,6 +219,108 @@ impl SimpleAbsInt for ShareOwnedVerifierAI { } } +impl<'a> ShareOwnedVerifierAI<'a> { + fn can_hold_obj(&self, sp!(_, ty_): &Type) -> bool { + match ty_ { + Type_::Unit => false, + Type_::Single(st) => self.can_hold_obj_single(st), + Type_::Multiple(sts) => sts.iter().any(|st| self.can_hold_obj_single(st)), + } + } + + fn can_hold_obj_single(&self, sp!(_, st_): &SingleType) -> bool { + match st_ { + SingleType_::Base(bt) | SingleType_::Ref(_, bt) => self.can_hold_obj_base(bt), + } + } + + #[growing_stack] + fn can_hold_obj_base(&self, sp!(_, bt_): &BaseType) -> bool { + match bt_ { + // special case TxContext as not holding an object + BaseType_::Apply(_, sp!(_, tn), _) + if tn.is(SUI_ADDR_NAME, TX_CONTEXT_MODULE_NAME, TX_CONTEXT_TYPE_NAME) => + { + false + } + // vector in value might have an object + BaseType_::Apply( + _, + sp!(_, TypeName_::Builtin(sp!(_, BuiltinTypeName_::Vector))), + bs, + ) => bs.iter().any(|b| self.can_hold_obj_base(b)), + // builtins cannot hold objects + BaseType_::Apply(_, sp!(_, TypeName_::Builtin(_)), _) => false, + + BaseType_::Apply(_, sp!(_, TypeName_::ModuleType(m, n)), targs) => { + let m = *m; + let n = *n; + if self.sui_info().uid_holders.contains_key(&(m, n)) { + return true; + } + let phantom_positions = phantom_positions(self.info, &m, &n); + phantom_positions + .into_iter() + .zip(targs) + .filter(|(is_phantom, _)| !*is_phantom) + .any(|(_, t)| self.can_hold_obj_base(t)) + } + // any user defined type or type parameter is pessimistically assumed to hold an object + BaseType_::Param(_) => true, + BaseType_::Unreachable | BaseType_::UnresolvedError => false, + } + } + + fn maybe_warn_share_owned( + &self, + context: &mut ExecutionContext, + loc: &Loc, + f: &ModuleCall, + args: Vec, + ) { + let Value::NotFreshObj(not_fresh_loc) = &args[0] else { + return; + }; + let Some(tn) = f + .type_arguments + .first() + .and_then(|t| t.value.type_name()) + .and_then(|n| n.value.datatype_name()) + else { + return; + }; + let Some(transferred_kind) = self.sui_info().transferred.get(&tn) else { + return; + }; + + let msg = + "Potential abort from a (potentially) owned object created by a different transaction."; + let uid_msg = "Creating a fresh object and sharing it within the same function will \ + ensure this does not abort."; + let not_fresh_msg = "A potentially owned object coming from here"; + let (tloc, tmsg) = match transferred_kind { + TransferKind::PublicTransfer(store_loc) => ( + store_loc, + "Potentially an owned object because 'store' grants access to public transfers", + ), + 
TransferKind::PrivateTransfer(loc) => (loc, "Transferred as an owned object here"), + }; + let d = diag!( + SHARE_OWNED_DIAG, + (*loc, msg), + (f.arguments[0].exp.loc, uid_msg), + (*not_fresh_loc, not_fresh_msg), + (*tloc, tmsg), + ); + + context.add_diag(d) + } + + fn sui_info(&self) -> &'a SuiInfo { + self.info.sui_flavor_info.as_ref().unwrap() + } +} + fn is_obj(sp!(_, l_): &LValue) -> bool { if let LValue_::Var { ty: st, .. } = l_ { return is_obj_type(st); } @@ -224,6 +335,18 @@ fn is_obj_type(st_: &SingleType) -> bool { abilities.has_ability_(Ability_::Key) } +fn phantom_positions( + info: &TypingProgramInfo, + m: &ModuleIdent, + n: &DatatypeName, +) -> Vec<bool> { + let ty_params = match info.datatype_kind(m, n) { + DatatypeKind::Struct => &info.struct_definition(m, n).type_parameters, + DatatypeKind::Enum => &info.enum_definition(m, n).type_parameters, + }; + ty_params.iter().map(|tp| tp.is_phantom).collect() +} + impl SimpleDomain for State { type Value = Value; diff --git a/external-crates/move/crates/move-compiler/src/sui_mode/mod.rs b/external-crates/move/crates/move-compiler/src/sui_mode/mod.rs index f0a3c3587f461..d803ec83b83dd 100644 --- a/external-crates/move/crates/move-compiler/src/sui_mode/mod.rs +++ b/external-crates/move/crates/move-compiler/src/sui_mode/mod.rs @@ -6,6 +6,7 @@ use move_symbol_pool::Symbol; use crate::diagnostics::codes::{custom, DiagnosticInfo, Severity}; pub mod id_leak; +pub mod info; pub mod linters; pub mod typing; diff --git a/external-crates/move/crates/move-compiler/src/typing/translate.rs b/external-crates/move/crates/move-compiler/src/typing/translate.rs index dd39bddd9d8bb..8f26d2948ae19 100644 --- a/external-crates/move/crates/move-compiler/src/typing/translate.rs +++ b/external-crates/move/crates/move-compiler/src/typing/translate.rs @@ -10,7 +10,7 @@ use crate::{ AbilitySet, Attribute, AttributeValue_, Attribute_, DottedUsage, Fields, Friend, ModuleAccess_, ModuleIdent, ModuleIdent_, Mutability, TargetKind, Value_, Visibility, }, - ice, + ice, ice_assert, naming::ast::{ self as N, BlockLabel, DatatypeTypeParameter, IndexSyntaxMethods, TParam, TParamID, Type, TypeName, TypeName_, Type_, @@ -81,7 +81,8 @@ pub fn program( .into_iter() .map(|(mident, minfo)| (mident, minfo.use_funs)) .collect(); - let module_info = TypingProgramInfo::new(pre_compiled_lib, &modules, module_use_funs); + let module_info = + TypingProgramInfo::new(compilation_env, pre_compiled_lib, &modules, module_use_funs); let prog = T::Program { modules, info: Arc::new(module_info), @@ -2916,14 +2917,12 @@ fn add_variant_field_types( N::VariantFields::Defined(_, m) => m, N::VariantFields::Empty => { if !fields.is_empty() { - let msg = format!( - "Invalid usage for empty variant '{}::{}::{}'. 
Empty variants do not take \ - any arguments.", - m, n, v + ice_assert!( + context.env, + context.env.has_errors(), + loc, + "Empty variant with fields but no error from naming" ); - context - .env - .add_diag(diag!(TypeSafety::TooManyArguments, (loc, msg),)); return fields.map(|f, (idx, x)| (idx, (context.error_type(f.loc()), x))); } else { return Fields::new(); diff --git a/external-crates/move/crates/move-compiler/src/typing/visitor.rs b/external-crates/move/crates/move-compiler/src/typing/visitor.rs index ff9e9066d1ce9..b1c9e59801bda 100644 --- a/external-crates/move/crates/move-compiler/src/typing/visitor.rs +++ b/external-crates/move/crates/move-compiler/src/typing/visitor.rs @@ -1096,3 +1096,107 @@ impl TypingMutVisitor for V { self.visit(env, program) } } + +//************************************************************************************************** +// util +//************************************************************************************************** + +pub fn has_special_exp<F>(e: &T::Exp, mut p: F) -> bool +where + F: FnMut(&T::Exp) -> bool, +{ + has_special_exp_(e, &mut p) +} + +pub fn has_special_seq<F>(seq: &T::Sequence, mut p: F) -> bool +where + F: FnMut(&T::Exp) -> bool, +{ + has_special_seq_(seq, &mut p) +} + +pub fn has_special_exp_list<F>(list: &[T::ExpListItem], mut p: F) -> bool +where + F: FnMut(&T::Exp) -> bool, +{ + has_special_exp_list_(list, &mut p) +} + +#[growing_stack] +fn has_special_exp_<F>(e: &T::Exp, p: &mut F) -> bool +where + F: FnMut(&T::Exp) -> bool, +{ + use T::UnannotatedExp_ as E; + if p(e) { + return true; + } + match &e.exp.value { + E::Unit { .. } + | E::Value(_) + | E::Move { .. } + | E::Copy { .. } + | E::Use(_) + | E::Constant(..) + | E::Continue(_) + | E::BorrowLocal(..) + | E::ErrorConstant { .. } + | E::UnresolvedError => false, + E::Builtin(_, e) + | E::Vector(_, _, _, e) + | E::Loop { body: e, .. 
} + | E::Assign(_, _, e) + | E::Return(e) + | E::Abort(e) + | E::Give(_, e) + | E::Dereference(e) + | E::UnaryExp(_, e) + | E::Borrow(_, e, _) + | E::TempBorrow(_, e) + | E::Cast(e, _) + | E::Annotate(e, _) => has_special_exp_(e, p), + E::While(_, e1, e2) | E::Mutate(e1, e2) | E::BinopExp(e1, _, _, e2) => { + has_special_exp_(e1, p) || has_special_exp_(e2, p) + } + E::IfElse(e1, e2, e3) => { + has_special_exp_(e1, p) || has_special_exp_(e2, p) || has_special_exp_(e3, p) + } + E::ModuleCall(c) => has_special_exp_(&c.arguments, p), + E::Match(esubject, arms) => { + has_special_exp_(esubject, p) + || arms + .value + .iter() + .any(|sp!(_, arm)| has_special_exp_(&arm.rhs, p)) + } + E::VariantMatch(esubject, _, arms) => { + has_special_exp_(esubject, p) || arms.iter().any(|(_, arm)| has_special_exp_(arm, p)) + } + + E::NamedBlock(_, seq) | E::Block(seq) => has_special_seq_(seq, p), + + E::Pack(_, _, _, fields) | E::PackVariant(_, _, _, _, fields) => fields + .iter() + .any(|(_, _, (_, (_, e)))| has_special_exp_(e, p)), + E::ExpList(list) => has_special_exp_list_(list, p), + } +} + +fn has_special_seq_<F>(seq: &T::Sequence, p: &mut F) -> bool +where + F: FnMut(&T::Exp) -> bool, +{ + seq.1.iter().any(|item| match &item.value { + T::SequenceItem_::Declare(_) => false, + T::SequenceItem_::Seq(e) | T::SequenceItem_::Bind(_, _, e) => has_special_exp_(e, p), + }) +} + +fn has_special_exp_list_<F>(list: &[T::ExpListItem], p: &mut F) -> bool +where + F: FnMut(&T::Exp) -> bool, +{ + list.iter().any(|item| match item { + T::ExpListItem::Single(e, _) | T::ExpListItem::Splat(_, e, _) => has_special_exp_(e, p), + }) +} diff --git a/external-crates/move/crates/move-compiler/src/unit_test/mod.rs b/external-crates/move/crates/move-compiler/src/unit_test/mod.rs index 798e07c62a4f5..e121298ad8c7b 100644 --- a/external-crates/move/crates/move-compiler/src/unit_test/mod.rs +++ b/external-crates/move/crates/move-compiler/src/unit_test/mod.rs @@ -5,6 +5,7 @@ use crate::{ compiled_unit::NamedCompiledModule, shared::files::MappedFiles, shared::NumericalAddress, }; +use move_binary_format::CompiledModule; use move_core_types::{ account_address::AccountAddress, identifier::Identifier, @@ -23,6 +24,7 @@ pub struct TestPlan { pub mapped_files: MappedFiles, pub module_tests: BTreeMap<ModuleId, ModuleTestPlan>, pub module_info: BTreeMap<ModuleId, NamedCompiledModule>, + pub bytecode_deps_modules: Vec<CompiledModule>, } #[derive(Debug, Clone)] @@ -91,6 +93,7 @@ impl TestPlan { tests: Vec<ModuleTestPlan>, mapped_files: MappedFiles, units: Vec<AnnotatedCompiledUnit>, + bytecode_deps_modules: Vec<CompiledModule>, ) -> Self { let module_tests: BTreeMap<_, _> = tests .into_iter() @@ -106,6 +109,7 @@ impl TestPlan { mapped_files, module_tests, module_info, + bytecode_deps_modules, } } } diff --git a/external-crates/move/crates/move-compiler/tests/linter/false_negative_loop_without_exit.move b/external-crates/move/crates/move-compiler/tests/linter/false_negative_loop_without_exit.move new file mode 100644 index 0000000000000..2ad2db4c8264b --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/linter/false_negative_loop_without_exit.move @@ -0,0 +1,15 @@ +// tests for false negatives in the loop without exit lint + +module a::m { + public fun t1() { + loop { + if (false) break + } + } + + public fun t2() { + loop { + if (false) return + } + } +} diff --git a/external-crates/move/crates/move-compiler/tests/linter/false_negative_unnecessary_conditional.move b/external-crates/move/crates/move-compiler/tests/linter/false_negative_unnecessary_conditional.move new file mode 100644 index 0000000000000..4e676fa7ca518 --- /dev/null +++ 
b/external-crates/move/crates/move-compiler/tests/linter/false_negative_unnecessary_conditional.move @@ -0,0 +1,12 @@ +module a::m { + // These very simply could be rewritten but we are overly conservative when it comes to blocks + public fun t0(condition: bool) { + if (condition) { (); true } else false; + if (condition) b"" else { (); (); vector[] }; + } + + // we don't do this check after constant folding + public fun t1(condition: bool) { + if (condition) 1 + 1 else 2; + } +} diff --git a/external-crates/move/crates/move-compiler/tests/linter/suppress_loop_without_exit.move b/external-crates/move/crates/move-compiler/tests/linter/suppress_loop_without_exit.move new file mode 100644 index 0000000000000..431c75e475769 --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/linter/suppress_loop_without_exit.move @@ -0,0 +1,7 @@ +module a::m { + // Suppress Case + #[allow(lint(loop_without_exit))] + public fun suppressed_empty_loop() { + loop {} + } +} diff --git a/external-crates/move/crates/move-compiler/tests/linter/suppress_unnecessary_conditional.move b/external-crates/move/crates/move-compiler/tests/linter/suppress_unnecessary_conditional.move new file mode 100644 index 0000000000000..c91bbf14bc5ea --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/linter/suppress_unnecessary_conditional.move @@ -0,0 +1,17 @@ +module a::m { + + #[allow(lint(unnecessary_conditional))] + public fun t0(condition: bool) { + if (condition) true else false; + } + + #[allow(lint(unnecessary_conditional))] + public fun t1(condition: bool) { + if (condition) vector[] else vector[]; + } + + #[allow(lint(unnecessary_conditional))] + public fun t2(condition: bool) { + if (condition) @0 else @0; + } +} diff --git a/external-crates/move/crates/move-compiler/tests/linter/suppress_unnecessary_while_loop.move b/external-crates/move/crates/move-compiler/tests/linter/suppress_unnecessary_while_loop.move index 1477d966a74c3..ed9337b491510 100644 --- a/external-crates/move/crates/move-compiler/tests/linter/suppress_unnecessary_while_loop.move +++ b/external-crates/move/crates/move-compiler/tests/linter/suppress_unnecessary_while_loop.move @@ -1,7 +1,7 @@ -module 0x42::loop_test { +module a::loop_test { #[allow(lint(while_true))] public fun suppressed_while_true() { - while (true) {}; + while(true) {} } } diff --git a/external-crates/move/crates/move-compiler/tests/linter/true_negative_loop_without_exit.move b/external-crates/move/crates/move-compiler/tests/linter/true_negative_loop_without_exit.move new file mode 100644 index 0000000000000..965ae9f9b7dfa --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/linter/true_negative_loop_without_exit.move @@ -0,0 +1,19 @@ +// tests for negatives in the loop without exit lint + +module a::m { + public fun t1() { + let i = 0; + loop { + if (i >= 10) break; + i = i + 1; + } + } + + public fun t2() { + let i = 0; + loop { + if (i >= 10) return; + i = i + 1; + } + } +} diff --git a/external-crates/move/crates/move-compiler/tests/linter/true_negative_unnecessary_conditional.move b/external-crates/move/crates/move-compiler/tests/linter/true_negative_unnecessary_conditional.move new file mode 100644 index 0000000000000..fc94f50066498 --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/linter/true_negative_unnecessary_conditional.move @@ -0,0 +1,17 @@ +// true negative cases for redundant conditional +module a::m { + public fun t0(condition: bool) { + if (condition) 1 else { 0 }; + if (condition) vector[] else vector[1]; + } + 
+ public fun t1(x: u64, y: u64) { + let _ = if (x > y) { x } else y; + } + + // has side effects, too complex to analyze + public fun t2(condition: &mut bool) { + let _ = if (*condition) { *condition = false; true } else { *condition = true; false }; + } + +} diff --git a/external-crates/move/crates/move-compiler/tests/linter/true_positive_loop_without_exit.exp b/external-crates/move/crates/move-compiler/tests/linter/true_positive_loop_without_exit.exp new file mode 100644 index 0000000000000..ed14df04816b0 --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/linter/true_positive_loop_without_exit.exp @@ -0,0 +1,27 @@ +warning[Lint W02006]: 'loop' without 'break' or 'return' + ┌─ tests/linter/true_positive_loop_without_exit.move:5:9 + │ +5 │ loop {} + │ ^^^^^^^ 'loop' without 'break' or 'return'. This code will loop until it errors, e.g. reaching an 'abort' or running out of gas + │ + = This warning can be suppressed with '#[allow(lint(loop_without_exit))]' applied to the 'module' or module member ('const', 'fun', or 'struct') + +warning[Lint W02006]: 'loop' without 'break' or 'return' + ┌─ tests/linter/true_positive_loop_without_exit.move:10:9 + │ +10 │ loop { abort ZERO } + │ ^^^^^^^^^^^^^^^^^^^ 'loop' without 'break' or 'return'. This code will loop until it errors, e.g. reaching an 'abort' or running out of gas + │ + = This warning can be suppressed with '#[allow(lint(loop_without_exit))]' applied to the 'module' or module member ('const', 'fun', or 'struct') + +warning[Lint W02006]: 'loop' without 'break' or 'return' + ┌─ tests/linter/true_positive_loop_without_exit.move:14:9 + │ +14 │ ╭ loop { +15 │ │ t2(); +16 │ │ t2(); +17 │ │ } + │ ╰─────────^ 'loop' without 'break' or 'return'. This code will loop until it errors, e.g. reaching an 'abort' or running out of gas + │ + = This warning can be suppressed with '#[allow(lint(loop_without_exit))]' applied to the 'module' or module member ('const', 'fun', or 'struct') + diff --git a/external-crates/move/crates/move-compiler/tests/linter/true_positive_loop_without_exit.move b/external-crates/move/crates/move-compiler/tests/linter/true_positive_loop_without_exit.move new file mode 100644 index 0000000000000..48876fe8c57e7 --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/linter/true_positive_loop_without_exit.move @@ -0,0 +1,19 @@ +// tests for positives in the loop without exit lint + +module a::m { + public fun t1() { + loop {} + } + + const ZERO: u64 = 0; + public fun t2() { + loop { abort ZERO } + } + + public fun t3() { + loop { + t2(); + t2(); + } + } +} diff --git a/external-crates/move/crates/move-compiler/tests/linter/true_positive_unnecessary_conitional.exp b/external-crates/move/crates/move-compiler/tests/linter/true_positive_unnecessary_conitional.exp new file mode 100644 index 0000000000000..747a6aa889c13 --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/linter/true_positive_unnecessary_conitional.exp @@ -0,0 +1,52 @@ +warning[Lint W01007]: 'if' expression can be removed + ┌─ tests/linter/true_positive_unnecessary_conitional.move:4:9 + │ +4 │ if (!condition) true else false; + │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Detected an unnecessary conditional expression 'if (cond)'. Consider using the condition directly, i.e. 
'cond' + │ + = This warning can be suppressed with '#[allow(lint(unnecessary_conditional))]' applied to the 'module' or module member ('const', 'fun', or 'struct') + +warning[Lint W01007]: 'if' expression can be removed + ┌─ tests/linter/true_positive_unnecessary_conitional.move:5:9 + │ +5 │ if (condition) { { false } } else { (true: bool) }; + │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Detected an unnecessary conditional expression 'if (cond)'. Consider using the condition directly, i.e. '!cond' + │ + = This warning can be suppressed with '#[allow(lint(unnecessary_conditional))]' applied to the 'module' or module member ('const', 'fun', or 'struct') + +warning[Lint W01007]: 'if' expression can be removed + ┌─ tests/linter/true_positive_unnecessary_conitional.move:9:9 + │ +9 │ if (true) true else true; + │ ^^^^^^^^^^^^^^^^^^^^^^^^ + │ │ │ │ + │ │ │ is the same as this value + │ │ This value + │ Detected a redundant conditional expression 'if (..) v else v', where each branch results in the same value 'v'. Consider using the value directly + │ + = This warning can be suppressed with '#[allow(lint(unnecessary_conditional))]' applied to the 'module' or module member ('const', 'fun', or 'struct') + +warning[Lint W01007]: 'if' expression can be removed + ┌─ tests/linter/true_positive_unnecessary_conitional.move:10:9 + │ +10 │ if (foo()) 0 else 0; + │ ^^^^^^^^^^^^^^^^^^^ + │ │ │ │ + │ │ │ is the same as this value + │ │ This value + │ Detected a redundant conditional expression 'if (..) v else v', where each branch results in the same value 'v'. Consider using the value directly + │ + = This warning can be suppressed with '#[allow(lint(unnecessary_conditional))]' applied to the 'module' or module member ('const', 'fun', or 'struct') + +warning[Lint W01007]: 'if' expression can be removed + ┌─ tests/linter/true_positive_unnecessary_conitional.move:11:9 + │ +11 │ if (!foo()) b"" else x""; + │ ^^^^^^^^^^^^^^^^^^^^^^^^ + │ │ │ │ + │ │ │ is the same as this value + │ │ This value + │ Detected a redundant conditional expression 'if (..) v else v', where each branch results in the same value 'v'. 
Consider using the value directly + │ + = This warning can be suppressed with '#[allow(lint(unnecessary_conditional))]' applied to the 'module' or module member ('const', 'fun', or 'struct') + diff --git a/external-crates/move/crates/move-compiler/tests/linter/true_positive_unnecessary_conitional.move b/external-crates/move/crates/move-compiler/tests/linter/true_positive_unnecessary_conitional.move new file mode 100644 index 0000000000000..407718b2f054b --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/linter/true_positive_unnecessary_conitional.move @@ -0,0 +1,15 @@ +// tests cases where a lint is reported for a redundant conditional expression +module a::m { + public fun t0(condition: bool) { + if (!condition) true else false; + if (condition) { { false } } else { (true: bool) }; + } + + public fun t1() { + if (true) true else true; + if (foo()) 0 else 0; + if (!foo()) b"" else x""; + } + + fun foo(): bool { true } +} diff --git a/external-crates/move/crates/move-compiler/tests/linter/true_positive_unnecessary_while_loop.exp b/external-crates/move/crates/move-compiler/tests/linter/true_positive_unnecessary_while_loop.exp index f3f78d6cca224..d1ccfb5973007 100644 --- a/external-crates/move/crates/move-compiler/tests/linter/true_positive_unnecessary_while_loop.exp +++ b/external-crates/move/crates/move-compiler/tests/linter/true_positive_unnecessary_while_loop.exp @@ -1,4 +1,4 @@ -warning[Lint W01002]: unnecessary 'while (true)', replace with 'loop' +warning[Lint W04002]: unnecessary 'while (true)', replace with 'loop' ┌─ tests/linter/true_positive_unnecessary_while_loop.move:3:9 │ 3 │ while (true) {}; @@ -7,7 +7,7 @@ warning[Lint W01002]: unnecessary 'while (true)', replace with 'loop' = A 'loop' is more useful in these cases. Unlike 'while', 'loop' can have a 'break' with a value, e.g. 'let x = loop { break 42 };' = This warning can be suppressed with '#[allow(lint(while_true))]' applied to the 'module' or module member ('const', 'fun', or 'struct') -warning[Lint W01002]: unnecessary 'while (true)', replace with 'loop' +warning[Lint W04002]: unnecessary 'while (true)', replace with 'loop' ┌─ tests/linter/true_positive_unnecessary_while_loop.move:4:9 │ 4 │ while (true) { break } diff --git a/external-crates/move/crates/move-compiler/tests/move_2024/matching/abc_match_no_drop_exhaustive_invalid.exp b/external-crates/move/crates/move-compiler/tests/move_2024/matching/abc_match_no_drop_exhaustive_invalid.exp new file mode 100644 index 0000000000000..8757b3e87a6ea --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/move_2024/matching/abc_match_no_drop_exhaustive_invalid.exp @@ -0,0 +1,12 @@ +error[E05001]: ability constraint not satisfied + ┌─ tests/move_2024/matching/abc_match_no_drop_exhaustive_invalid.move:14:13 + │ + 3 │ public enum ABC<T> { + │ --- To satisfy the constraint, the 'drop' ability would need to be added here + · +10 │ match (ABC::C(0)) { + │ --------- The type '0x42::m::ABC<u64>' does not have the ability 'drop' + · +14 │ _ => 1, + │ ^ Cannot ignore values without the 'drop' ability. 
diff --git a/external-crates/move/crates/move-compiler/tests/move_2024/matching/abc_match_no_drop_exhaustive_invalid.move b/external-crates/move/crates/move-compiler/tests/move_2024/matching/abc_match_no_drop_exhaustive_invalid.move new file mode 100644 index 0000000000000..aa3084d69575a --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/move_2024/matching/abc_match_no_drop_exhaustive_invalid.move @@ -0,0 +1,18 @@ +module 0x42::m { + + public enum ABC<T> { + A(T), + B, + C(T) + } + + fun t0(): u64 { + match (ABC::C(0)) { + ABC::C(x) => x, + ABC::A(x) => x, + ABC::B => 0, + _ => 1, + } + } + +} diff --git a/external-crates/move/crates/move-compiler/tests/move_2024/matching/go_match.move b/external-crates/move/crates/move-compiler/tests/move_2024/matching/go_match.move new file mode 100644 index 0000000000000..3b03be9233c6c --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/move_2024/matching/go_match.move @@ -0,0 +1,15 @@ +module 0x42::go; + +public enum Color has copy, store, drop { + Empty, + Black, + White, +} + +public fun from_index(color: Color): u64 { + match (color) { + Color::White => 0, + Color::Black => 1, + _ => abort 0, + } +} diff --git a/external-crates/move/crates/move-compiler/tests/move_2024/matching/go_repro.move b/external-crates/move/crates/move-compiler/tests/move_2024/matching/go_repro.move new file mode 100644 index 0000000000000..3cc3ea8db1395 --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/move_2024/matching/go_repro.move @@ -0,0 +1,16 @@ +module 0x42::go; + +public enum Colour has copy, store, drop { + Empty, + Black, + White, +} + +public fun from_index(index: u64): Colour { + match (index) { + 0 => Colour::Empty, + 1 => Colour::Black, + 2 => Colour::White, + _ => abort 0, + } +} diff --git a/external-crates/move/crates/move-compiler/tests/move_2024/matching/if_compilation.move b/external-crates/move/crates/move-compiler/tests/move_2024/matching/if_compilation.move new file mode 100644 index 0000000000000..1b326d6e43211 --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/move_2024/matching/if_compilation.move @@ -0,0 +1,9 @@ +module 0x42::m; + +fun test(): u64 { + if (true) { + 5 + } else { + abort 0 + } +} diff --git a/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_binding_3.exp b/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_binding_3.exp new file mode 100644 index 0000000000000..ae3ad49d77348 --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_binding_3.exp @@ -0,0 +1,12 @@ +error[E04036]: non-exhaustive pattern + ┌─ tests/move_2024/matching/invalid_or_binding_3.move:9:12 + │ +9 │ match (e) { + │ ^ Pattern 'E::Y { y: _ }' not covered + +error[E03010]: unbound field + ┌─ tests/move_2024/matching/invalid_or_binding_3.move:10:22 + │ +10 │ E::X { x } | E::Y { y: _, x } => *x + │ ^^^^^^^^^^^^^^^^ Unbound field 'x' in '0x42::m::E::Y' + diff --git a/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_binding_3.move b/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_binding_3.move new file mode 100644 index 0000000000000..aeccf3f59ccc3 --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_binding_3.move @@ -0,0 +1,12 @@ +module 0x42::m; + +public enum E { + X { x: u64 }, + Y { y: u64 } +} + +public fun test(e: &E): u64 { + match (e) { + E::X { x } | E::Y { y: _,
x } => *x + } +} diff --git a/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_binding_4.exp b/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_binding_4.exp new file mode 100644 index 0000000000000..d1ec4619e9081 --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_binding_4.exp @@ -0,0 +1,28 @@ +error[E03019]: invalid pattern + ┌─ tests/move_2024/matching/invalid_or_binding_4.move:11:19 + │ +11 │ E::X { x: y } | E::Y(x) | E::Z { z: x } => *x + │ ^ ----------------------- right or-pattern does not + │ │ + │ left or-pattern binds variable y + │ + = Both sides of an or-pattern must bind the same variables. + +warning[W09002]: unused variable + ┌─ tests/move_2024/matching/invalid_or_binding_4.move:11:19 + │ +11 │ E::X { x: y } | E::Y(x) | E::Z { z: x } => *x + │ ^ Unused local variable 'y'. Consider removing or prefixing with an underscore: '_y' + │ + = This warning can be suppressed with '#[allow(unused_variable)]' applied to the 'module' or module member ('const', 'fun', or 'struct') + +error[E03019]: invalid pattern + ┌─ tests/move_2024/matching/invalid_or_binding_4.move:11:30 + │ +11 │ E::X { x: y } | E::Y(x) | E::Z { z: x } => *x + │ ------------- ^ right or-pattern binds variable x + │ │ + │ left or-pattern does not + │ + = Both sides of an or-pattern must bind the same variables. + diff --git a/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_binding_4.move b/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_binding_4.move new file mode 100644 index 0000000000000..5598703049ceb --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_binding_4.move @@ -0,0 +1,13 @@ +module 0x42::m; + +public enum E { + X { x: u64 }, + Y(u64), + Z { z: u64 } +} + +public fun test(e: &E): u64 { + match (e) { + E::X { x: y } | E::Y(x) | E::Z { z: x } => *x + } +} diff --git a/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_pattern.exp b/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_pattern.exp new file mode 100644 index 0000000000000..1842f8ab05e89 --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_pattern.exp @@ -0,0 +1,17 @@ +error[E04036]: non-exhaustive pattern + ┌─ tests/move_2024/matching/invalid_or_pattern.move:9:12 + │ +9 │ match (e) { + │ ^ Pattern 'E::Y' not covered + +error[E03013]: positional call mismatch + ┌─ tests/move_2024/matching/invalid_or_pattern.move:10:22 + │ + 5 │ Y + │ - 'Y' is declared here + · +10 │ E::X { x } | E::Y { y: _, x } => *x + │ ^^^^^^^^^^^^^^^^ Invalid variant pattern. 
Empty variant declarations require empty patterns + │ + = Remove '{ }' arguments from this pattern + diff --git a/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_pattern.move b/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_pattern.move new file mode 100644 index 0000000000000..4ccd310b2ef61 --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_pattern.move @@ -0,0 +1,12 @@ +module 0x42::m; + +public enum E { + X { x: u64 }, + Y +} + +public fun test(e: &E): u64 { + match (e) { + E::X { x } | E::Y { y: _, x } => *x + } +} diff --git a/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_types.exp b/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_types.exp new file mode 100644 index 0000000000000..2dba4ccee21e1 --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_types.exp @@ -0,0 +1,11 @@ +error[E04007]: incompatible types + ┌─ tests/move_2024/matching/invalid_or_types.move:10:24 + │ + 4 │ X(u64), + │ --- Given: 'u64' + 5 │ Y(u32) + │ --- Expected: 'u32' + · +10 │ E::X(x) | E::Y(x) => *x + │ ^ Invalid pattern field type + diff --git a/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_types.move b/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_types.move new file mode 100644 index 0000000000000..19aa0990a5458 --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_types.move @@ -0,0 +1,12 @@ +module 0x42::m; + +public enum E { + X(u64), + Y(u32) +} + +public fun test(e: &E): u64 { + match (e) { + E::X(x) | E::Y(x) => *x + } +} diff --git a/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_types_2.exp b/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_types_2.exp new file mode 100644 index 0000000000000..07790fd3e54ec --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_types_2.exp @@ -0,0 +1,11 @@ +error[E04007]: incompatible types + ┌─ tests/move_2024/matching/invalid_or_types_2.move:10:35 + │ + 4 │ X { x: u64 }, + │ --- Given: 'u64' + 5 │ Y { y: u64, x: u32 } + │ --- Expected: 'u32' + · +10 │ E::X { x } | E::Y { y: _, x } => *x + │ ^ Invalid pattern field type + diff --git a/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_types_2.move b/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_types_2.move new file mode 100644 index 0000000000000..3a30f1051ad60 --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_types_2.move @@ -0,0 +1,12 @@ +module 0x42::m; + +public enum E { + X { x: u64 }, + Y { y: u64, x: u32 } +} + +public fun test(e: &E): u64 { + match (e) { + E::X { x } | E::Y { y: _, x } => *x + } +} diff --git a/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_types_3.exp b/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_types_3.exp new file mode 100644 index 0000000000000..d19b356a5b4cb --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_types_3.exp @@ -0,0 +1,11 @@ +error[E04007]: incompatible types + ┌─ tests/move_2024/matching/invalid_or_types_3.move:10:30 + │ + 4 │ X { x: u64 }, + │ --- Given: 'u64' + 5 │ Y(u32) + │ --- Expected: 'u32' + · +10 │ E::X { x: y } | E::Y(y) => *y + 
│ ^ Invalid pattern field type + diff --git a/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_types_3.move b/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_types_3.move new file mode 100644 index 0000000000000..c8e2ecfe5c66e --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_types_3.move @@ -0,0 +1,12 @@ +module 0x42::m; + +public enum E { + X { x: u64 }, + Y(u32) +} + +public fun test(e: &E): u64 { + match (e) { + E::X { x: y } | E::Y(y) => *y + } +} diff --git a/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_types_4.exp b/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_types_4.exp new file mode 100644 index 0000000000000..055b1ca7d7fb3 --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_types_4.exp @@ -0,0 +1,11 @@ +error[E04007]: incompatible types + ┌─ tests/move_2024/matching/invalid_or_types_4.move:11:30 + │ + 4 │ X { x: u64 }, + │ --- Given: 'u64' + 5 │ Y(u32), + │ --- Expected: 'u32' + · +11 │ E::X { x: y } | E::Y(y) | E::Z { z: y } => *y + │ ^ Invalid pattern field type + diff --git a/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_types_4.move b/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_types_4.move new file mode 100644 index 0000000000000..f4a44a3d3e9c7 --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_or_types_4.move @@ -0,0 +1,13 @@ +module 0x42::m; + +public enum E { + X { x: u64 }, + Y(u32), + Z { z: u64 } +} + +public fun test(e: &E): u64 { + match (e) { + E::X { x: y } | E::Y(y) | E::Z { z: y } => *y + } +} diff --git a/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_raw_variant_name.exp b/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_raw_variant_name.exp new file mode 100644 index 0000000000000..3d913a1831e86 --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_raw_variant_name.exp @@ -0,0 +1,33 @@ +error[E04036]: non-exhaustive pattern + ┌─ tests/move_2024/matching/invalid_raw_variant_name.move:10:12 + │ +10 │ match (e) { + │ ^ Pattern 'E::Z { z: _ }' not covered + +error[E03019]: invalid pattern + ┌─ tests/move_2024/matching/invalid_raw_variant_name.move:11:30 + │ +11 │ E::X { x: y } | E::Y(y) | Z { z: y } => *y + │ ^ ---------- right or-pattern does not + │ │ + │ left or-pattern binds variable y + │ + = Both sides of an or-pattern must bind the same variables. + +error[E04007]: incompatible types + ┌─ tests/move_2024/matching/invalid_raw_variant_name.move:11:30 + │ + 4 │ X { x: u64 }, + │ --- Given: 'u64' + 5 │ Y(u32), + │ --- Expected: 'u32' + · +11 │ E::X { x: y } | E::Y(y) | Z { z: y } => *y + │ ^ Invalid pattern field type + +error[E01002]: unexpected token + ┌─ tests/move_2024/matching/invalid_raw_variant_name.move:11:35 + │ +11 │ E::X { x: y } | E::Y(y) | Z { z: y } => *y + │ ^ Unexpected name access. Expected a valid 'enum' variant, 'struct', or 'const'. 
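Read together, these baselines document the or-pattern rules the compiler enforces: every alternative must bind exactly the same set of variables, each shared variable must have the same type in every alternative, and empty variants must be matched with empty patterns. For contrast, a well-formed or-pattern looks like the following (an illustrative module, not part of the diff):

module 0x42::valid_or {
    public enum E {
        X { x: u64 },
        Y(u64)
    }

    public fun test(e: &E): u64 {
        // Both alternatives bind 'x' at type 'u64', and together they cover
        // every variant, so the or-pattern is accepted and exhaustive.
        match (e) {
            E::X { x } | E::Y(x) => *x
        }
    }
}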
+ diff --git a/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_raw_variant_name.move b/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_raw_variant_name.move new file mode 100644 index 0000000000000..ccd9f43aedfb9 --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/move_2024/matching/invalid_raw_variant_name.move @@ -0,0 +1,13 @@ +module 0x42::m; + +public enum E { + X { x: u64 }, + Y(u32), + Z { z: u64 } +} + +public fun test(e: &E): u64 { + match (e) { + E::X { x: y } | E::Y(y) | Z { z: y } => *y + } +} diff --git a/external-crates/move/crates/move-compiler/tests/move_2024/matching/mut_field_alias.move b/external-crates/move/crates/move-compiler/tests/move_2024/matching/mut_field_alias.move new file mode 100644 index 0000000000000..73068022ec209 --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/move_2024/matching/mut_field_alias.move @@ -0,0 +1,17 @@ +module 0x42::m; + +public enum E { + X { x: u64 }, + Y { y: u64 } +} + +public fun test() { + let e = E::Y { y: 1 }; + let value = match (e) { + E::X { mut x } | E::Y { y: mut x } => { + x = x + 1; + x + } + }; + assert!(value == 2); +} diff --git a/external-crates/move/crates/move-compiler/tests/move_2024/matching/mut_field_alias_2.move b/external-crates/move/crates/move-compiler/tests/move_2024/matching/mut_field_alias_2.move new file mode 100644 index 0000000000000..73068022ec209 --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/move_2024/matching/mut_field_alias_2.move @@ -0,0 +1,17 @@ +module 0x42::m; + +public enum E { + X { x: u64 }, + Y { y: u64 } +} + +public fun test() { + let e = E::Y { y: 1 }; + let value = match (e) { + E::X { mut x } | E::Y { y: mut x } => { + x = x + 1; + x + } + }; + assert!(value == 2); +} diff --git a/external-crates/move/crates/move-compiler/tests/move_2024/matching/rhs_shadow_loop_label.move b/external-crates/move/crates/move-compiler/tests/move_2024/matching/rhs_shadow_loop_label.move new file mode 100644 index 0000000000000..9b63bd09c74c2 --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/move_2024/matching/rhs_shadow_loop_label.move @@ -0,0 +1,49 @@ +module 0x42::TestLoopLabelShadowing; + +public enum Action has drop { + Stop, + MoveTo { x: u64, y: u64 }, + ChangeSpeed(u64), +} + +public fun test() { + // Define a list of actions + let actions: vector<Action> = vector[ + Action::MoveTo { x: 10, y: 20 }, + Action::ChangeSpeed(40), + Action::MoveTo { x: 10, y: 20 }, + Action::Stop + ]; + + let mut total_moves = 0; + + 'loop_label: loop { + let mut i = 0; + while (i < actions.length()) { + let action = &actions[i]; + + match (action) { + Action::MoveTo { x, y } => { + 'loop_label: loop { + total_moves = total_moves + *x + *y; + break 'loop_label + }; + }, + Action::ChangeSpeed(_) => { + 'loop_label: loop { + break 'loop_label + }; + }, + Action::Stop => { + break 'loop_label + }, + _ => {}, + }; + i = i + 1; + }; + }; + + actions.destroy_empty(); + + assert!(total_moves == 60); +} diff --git a/external-crates/move/crates/move-compiler/tests/move_2024/matching/rhs_shadow_loop_with_or.move b/external-crates/move/crates/move-compiler/tests/move_2024/matching/rhs_shadow_loop_with_or.move new file mode 100644 index 0000000000000..1fd0a00daa5fb --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/move_2024/matching/rhs_shadow_loop_with_or.move @@ -0,0 +1,44 @@ +module 0x42::TestLoopLabelShadowing; + +public enum Action has drop { + Stop, + MoveTo { x: u64, y: u64 }, +
ChangeSpeed(u64), +} + +public fun test() { + // Define a list of actions + let actions: vector<Action> = vector[ + Action::MoveTo { x: 10, y: 20 }, + Action::ChangeSpeed(20), + Action::MoveTo { x: 10, y: 20 }, + Action::Stop + ]; + + let mut total_moves = 0; + + 'loop_label: loop { + let mut i = 0; + while (i < actions.length()) { + let action = &actions[i]; + + match (action) { + Action::MoveTo { x: speed, y: _ } | Action::ChangeSpeed(speed) => { + 'loop_label: loop { + total_moves = total_moves + *speed; + break 'loop_label + }; + }, + Action::Stop => { + break 'loop_label + }, + _ => {}, + }; + i = i + 1; + }; + }; + + actions.destroy_empty(); + + assert!(total_moves == 40); +} diff --git a/external-crates/move/crates/move-compiler/tests/move_2024/naming/pattern_ellipsis_invalid.exp b/external-crates/move/crates/move-compiler/tests/move_2024/naming/pattern_ellipsis_invalid.exp index f63799601e2fd..4aaa9fe55afcf 100644 --- a/external-crates/move/crates/move-compiler/tests/move_2024/naming/pattern_ellipsis_invalid.exp +++ b/external-crates/move/crates/move-compiler/tests/move_2024/naming/pattern_ellipsis_invalid.exp @@ -31,12 +31,6 @@ error[E03013]: positional call mismatch │ = Remove '()' arguments from this pattern -error[E04017]: too many arguments - ┌─ tests/move_2024/naming/pattern_ellipsis_invalid.move:18:13 - │ -18 │ Y::D(x, ..) => 0, - │ ^^^^^^^^^^^ Invalid usage for empty variant '0x42::m::Y::D'. Empty variants do not take any arguments. - warning[W09002]: unused variable ┌─ tests/move_2024/naming/pattern_ellipsis_invalid.move:18:18 │ @@ -56,12 +50,6 @@ error[E03013]: positional call mismatch │ = Remove '{ }' arguments from this pattern -error[E04017]: too many arguments - ┌─ tests/move_2024/naming/pattern_ellipsis_invalid.move:19:13 - │ -19 │ Y::D{x, ..} => 0, - │ ^^^^^^^^^^^ Invalid usage for empty variant '0x42::m::Y::D'. Empty variants do not take any arguments. - warning[W09002]: unused variable ┌─ tests/move_2024/naming/pattern_ellipsis_invalid.move:19:18 │ diff --git a/external-crates/move/crates/move-compiler/tests/sui_mode/linter/false_positive_share_owned.exp b/external-crates/move/crates/move-compiler/tests/sui_mode/linter/false_positive_share_owned.exp new file mode 100644 index 0000000000000..f91df3b166d3d --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/sui_mode/linter/false_positive_share_owned.exp @@ -0,0 +1,34 @@ +warning[Lint W99000]: possible owned object share + ┌─ tests/sui_mode/linter/false_positive_share_owned.move:12:9 + │ + 6 │ struct Obj has key, store { + │ ----- Potentially an owned object because 'store' grants access to public transfers + · +10 │ public fun arg_object(o: Obj) { + │ - A potentially owned object coming from here +11 │ let arg = o; +12 │ transfer::public_share_object(arg); + │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + │ │ │ + │ │ Creating a fresh object and sharing it within the same function will ensure this does not abort. + │ Potential abort from a (potentially) owned object created by a different transaction.
+ │ + = This warning can be suppressed with '#[allow(lint(share_owned))]' applied to the 'module' or module member ('const', 'fun', or 'struct') + +warning[Lint W99000]: possible owned object share + ┌─ tests/sui_mode/linter/false_positive_share_owned.move:42:9 + │ +30 │ fun make_obj(o: Obj2, ctx: &mut sui::tx_context::TxContext): Obj { + │ --- A potentially owned object coming from here + · +37 │ transfer::transfer(o, sui::tx_context::sender(ctx)); + │ --------------------------------------------------- Transferred as an owned object here + · +42 │ transfer::share_object(o); + │ ^^^^^^^^^^^^^^^^^^^^^^^^^ + │ │ │ + │ │ Creating a fresh object and sharing it within the same function will ensure this does not abort. + │ Potential abort from a (potentially) owned object created by a different transaction. + │ + = This warning can be suppressed with '#[allow(lint(share_owned))]' applied to the 'module' or module member ('const', 'fun', or 'struct') + diff --git a/external-crates/move/crates/move-compiler/tests/sui_mode/linter/false_positive_share_owned.move b/external-crates/move/crates/move-compiler/tests/sui_mode/linter/false_positive_share_owned.move new file mode 100644 index 0000000000000..76484a8fbb616 --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/sui_mode/linter/false_positive_share_owned.move @@ -0,0 +1,77 @@ +// object has store but is never created externally +module a::has_store { + use sui::transfer; + use sui::object::UID; + + struct Obj has key, store { + id: UID + } + + public fun arg_object(o: Obj) { + let arg = o; + transfer::public_share_object(arg); + } +} + +// object is created locally, but the analysis cannot determine that currently +module a::cannot_determine_to_be_new { + use sui::transfer; + use sui::object::UID; + + struct Obj has key { + id: UID + } + + struct Obj2 has key { + id: UID + } + + // we do not do interprocedural analysis here + fun make_obj(o: Obj2, ctx: &mut sui::tx_context::TxContext): Obj { + transfer::transfer(o, @0); + Obj { id: sui::object::new(ctx) } + } + + public fun transfer(o2: Obj2, ctx: &mut sui::tx_context::TxContext) { + let o = make_obj(o2, ctx); + transfer::transfer(o, sui::tx_context::sender(ctx)); + } + + public fun share(o2: Obj2, ctx: &mut sui::tx_context::TxContext) { + let o = make_obj(o2, ctx); // cannot determine this is local because of the call to `make_obj` + transfer::share_object(o); + } +} + +module sui::tx_context { + struct TxContext has drop {} + public fun sender(_: &TxContext): address { + @0 + } +} + +module sui::object { + const ZERO: u64 = 0; + struct UID has store { + id: address, + } + public fun delete(_: UID) { + abort ZERO + } + public fun new(_: &mut sui::tx_context::TxContext): UID { + abort ZERO + } +} + +module sui::transfer { + const ZERO: u64 = 0; + public fun transfer<T: key>(_: T, _: address) { + abort ZERO + } + public fun share_object<T: key>(_: T) { + abort ZERO + } + public fun public_share_object<T: key + store>(_: T) { + abort ZERO + } +} diff --git a/external-crates/move/crates/move-compiler/tests/sui_mode/linter/share_owned.exp b/external-crates/move/crates/move-compiler/tests/sui_mode/linter/share_owned.exp deleted file mode 100644 index b5d849a103828..0000000000000 --- a/external-crates/move/crates/move-compiler/tests/sui_mode/linter/share_owned.exp +++ /dev/null @@ -1,27 +0,0 @@ -warning[Lint W99000]: possible owned object share - ┌─ tests/sui_mode/linter/share_owned.move:14:9 - │ -12 │ public entry fun arg_object(o: Obj) { - │ - A potentially owned object coming from here -13 │ let arg = o; -14 │
transfer::public_share_object(arg); - │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - │ │ │ - │ │ Creating a fresh object and sharing it within the same function will ensure this does not abort. - │ Potential abort from a (potentially) owned object created by a different transaction. - │ - = This warning can be suppressed with '#[allow(lint(share_owned))]' applied to the 'module' or module member ('const', 'fun', or 'struct') - -warning[Lint W99000]: possible owned object share - ┌─ tests/sui_mode/linter/share_owned.move:34:9 - │ -33 │ let Wrapper { id, i: _, o } = w; - │ - A potentially owned object coming from here -34 │ transfer::public_share_object(o); - │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - │ │ │ - │ │ Creating a fresh object and sharing it within the same function will ensure this does not abort. - │ Potential abort from a (potentially) owned object created by a different transaction. - │ - = This warning can be suppressed with '#[allow(lint(share_owned))]' applied to the 'module' or module member ('const', 'fun', or 'struct') - diff --git a/external-crates/move/crates/move-compiler/tests/sui_mode/linter/share_owned.move b/external-crates/move/crates/move-compiler/tests/sui_mode/linter/share_owned.move deleted file mode 100644 index 04a7d9a3169f6..0000000000000 --- a/external-crates/move/crates/move-compiler/tests/sui_mode/linter/share_owned.move +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright (c) Mysten Labs, Inc. -// SPDX-License-Identifier: Apache-2.0 - -module a::test1 { - use sui::transfer; - use sui::object::UID; - - struct Obj has key, store { - id: UID - } - - public entry fun arg_object(o: Obj) { - let arg = o; - transfer::public_share_object(arg); - } -} - -module a::test2 { - use sui::transfer; - use sui::object::{Self, UID}; - - struct Obj has key, store { - id: UID - } - - struct Wrapper has key, store { - id: UID, - i: u32, - o: Obj, - } - - public entry fun unpack_obj(w: Wrapper) { - let Wrapper { id, i: _, o } = w; - transfer::public_share_object(o); - object::delete(id); - } - - #[allow(lint(share_owned))] - public entry fun unpack_obj_suppressed(w: Wrapper) { - let Wrapper { id, i: _, o } = w; - transfer::public_share_object(o); - object::delete(id); - } -} - -module sui::object { - const ZERO: u64 = 0; - struct UID has store { - id: address, - } - public fun delete(_: UID) { - abort ZERO - } -} - -module sui::transfer { - const ZERO: u64 = 0; - public fun public_share_object<T: key + store>(_: T) { - abort ZERO - } -} diff --git a/external-crates/move/crates/move-compiler/tests/sui_mode/linter/true_negative_share_owned.move b/external-crates/move/crates/move-compiler/tests/sui_mode/linter/true_negative_share_owned.move new file mode 100644 index 0000000000000..99f4d04ed30cb --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/sui_mode/linter/true_negative_share_owned.move @@ -0,0 +1,108 @@ +// object is re-shared, but it is never transferred and doesn't have public transfer +module a::is_not_transferred { + use sui::transfer; + use sui::tx_context::TxContext; + use sui::object::UID; + + struct Obj has key { + id: UID + } + + public fun make_obj(ctx: &mut TxContext): Obj { + Obj { id: sui::object::new(ctx) } + } + + public fun create(ctx: &mut TxContext) { + transfer::share_object(make_obj(ctx)); + } + + public fun share(o: Obj) { + transfer::share_object(o); + } +} + +// object is created locally, even though it is transferred somewhere else and has public share +module a::can_determine_to_be_new { + use sui::transfer; + use sui::object::UID; + + struct Obj has key, store { + id:
UID + } + + fun make_obj(_: u64, _: vector<vector<u64>>, ctx: &mut sui::tx_context::TxContext): Obj { + Obj { id: sui::object::new(ctx) } + } + + public fun transfer(ctx: &mut sui::tx_context::TxContext) { + let o = make_obj(0, vector[], ctx); + transfer::transfer(o, @0); + } + + public fun share(ctx: &mut sui::tx_context::TxContext) { + let o = make_obj(0, vector[], ctx); + transfer::share_object(o); + } +} + + +// object is created locally, and the analysis can determine that, even with a struct argument +module b::can_determine_to_be_new_with_struct { + use sui::transfer; + use sui::object::UID; + + struct Obj has key { + id: UID + } + + struct X has drop {} + + fun make_obj(_: X, ctx: &mut sui::tx_context::TxContext): Obj { + Obj { id: sui::object::new(ctx) } + } + + public fun transfer(ctx: &mut sui::tx_context::TxContext) { + let o = make_obj(X {}, ctx); + transfer::transfer(o, sui::tx_context::sender(ctx)); + } + + public fun share(ctx: &mut sui::tx_context::TxContext) { + let o = make_obj(X {}, ctx); + transfer::share_object(o); + } +} + + + +module sui::tx_context { + struct TxContext has drop {} + public fun sender(_: &TxContext): address { + @0 + } +} + +module sui::object { + const ZERO: u64 = 0; + struct UID has store { + id: address, + } + public fun delete(_: UID) { + abort ZERO + } + public fun new(_: &mut sui::tx_context::TxContext): UID { + abort ZERO + } +} + +module sui::transfer { + const ZERO: u64 = 0; + public fun transfer<T: key>(_: T, _: address) { + abort ZERO + } + public fun share_object<T: key>(_: T) { + abort ZERO + } + public fun public_share_object<T: key + store>(_: T) { + abort ZERO + } +} diff --git a/external-crates/move/crates/move-compiler/tests/sui_mode/linter/true_positive_share_owned.exp b/external-crates/move/crates/move-compiler/tests/sui_mode/linter/true_positive_share_owned.exp new file mode 100644 index 0000000000000..8ee1b12fc86a6 --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/sui_mode/linter/true_positive_share_owned.exp @@ -0,0 +1,34 @@ +warning[Lint W99000]: possible owned object share + ┌─ tests/sui_mode/linter/true_positive_share_owned.move:17:9 + │ + 7 │ struct Obj has key, store { + │ ----- Potentially an owned object because 'store' grants access to public transfers + · +15 │ public fun share(o: Obj) { + │ - A potentially owned object coming from here +16 │ let arg = o; +17 │ transfer::public_share_object(arg); + │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + │ │ │ + │ │ Creating a fresh object and sharing it within the same function will ensure this does not abort. + │ Potential abort from a (potentially) owned object created by a different transaction. + │ + = This warning can be suppressed with '#[allow(lint(share_owned))]' applied to the 'module' or module member ('const', 'fun', or 'struct') + +warning[Lint W99000]: possible owned object share + ┌─ tests/sui_mode/linter/true_positive_share_owned.move:42:9 + │ +37 │ transfer::transfer(arg, tx_context::sender(ctx)); + │ ------------------------------------------------ Transferred as an owned object here + · +40 │ public fun share(o: Obj) { + │ - A potentially owned object coming from here +41 │ let arg = o; +42 │ transfer::share_object(arg); + │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^ + │ │ │ + │ │ Creating a fresh object and sharing it within the same function will ensure this does not abort. + │ Potential abort from a (potentially) owned object created by a different transaction. + │ + = This warning can be suppressed with '#[allow(lint(share_owned))]' applied to the 'module' or module member ('const', 'fun', or 'struct') +
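Both reports carry the lint's remedy in their notes: create the object and share it inside the same function, so the analysis can see it was never owned by an earlier transaction. A minimal sketch of that pattern (an illustrative module written against the test stubs above, not part of this change):

module a::fresh_share {
    use sui::transfer;
    use sui::tx_context::TxContext;
    use sui::object::UID;

    struct Obj has key {
        id: UID
    }

    // Created and shared in the same function, so the share_owned lint
    // can prove the object was never owned by another transaction.
    public fun create_and_share(ctx: &mut TxContext) {
        let o = Obj { id: sui::object::new(ctx) };
        transfer::share_object(o);
    }
}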
diff --git a/external-crates/move/crates/move-compiler/tests/sui_mode/linter/true_positive_share_owned.move b/external-crates/move/crates/move-compiler/tests/sui_mode/linter/true_positive_share_owned.move new file mode 100644 index 0000000000000..6e70d5f3eec0c --- /dev/null +++ b/external-crates/move/crates/move-compiler/tests/sui_mode/linter/true_positive_share_owned.move @@ -0,0 +1,77 @@ +// object has store, might be transferred elsewhere +module a::has_store { + use sui::transfer; + use sui::tx_context::TxContext; + use sui::object::UID; + + struct Obj has key, store { + id: UID + } + + public fun make_obj(ctx: &mut TxContext): Obj { + Obj { id: sui::object::new(ctx) } + } + + public fun share(o: Obj) { + let arg = o; + transfer::public_share_object(arg); + } +} + +// object does not have store and is transferred +module a::is_transferred { + use sui::transfer; + use sui::tx_context::{Self, TxContext}; + use sui::object::UID; + + struct Obj has key { + id: UID + } + + public fun make_obj(ctx: &mut TxContext): Obj { + Obj { id: sui::object::new(ctx) } + } + + public fun transfer(o: Obj, ctx: &mut TxContext) { + let arg = o; + transfer::transfer(arg, tx_context::sender(ctx)); + } + + public fun share(o: Obj) { + let arg = o; + transfer::share_object(arg); + } +} + +module sui::tx_context { + struct TxContext has drop {} + public fun sender(_: &TxContext): address { + @0 + } +} + +module sui::object { + const ZERO: u64 = 0; + struct UID has store { + id: address, + } + public fun delete(_: UID) { + abort ZERO + } + public fun new(_: &mut sui::tx_context::TxContext): UID { + abort ZERO + } +} + +module sui::transfer { + const ZERO: u64 = 0; + public fun transfer<T: key>(_: T, _: address) { + abort ZERO + } + public fun share_object<T: key>(_: T) { + abort ZERO + } + public fun public_share_object<T: key + store>(_: T) { + abort ZERO + } +} diff --git a/external-crates/move/crates/move-core-types/src/annotated_value.rs b/external-crates/move/crates/move-core-types/src/annotated_value.rs index 602c4ee050408..b3624fc398741 100644 --- a/external-crates/move/crates/move-core-types/src/annotated_value.rs +++ b/external-crates/move/crates/move-core-types/src/annotated_value.rs @@ -87,7 +87,7 @@ impl MoveFieldLayout { pub struct MoveStructLayout { /// A decorated representation with both types and human-readable field names pub type_: StructTag, - pub fields: Vec<MoveFieldLayout>, + pub fields: Box<Vec<MoveFieldLayout>>, } #[derive(Debug, Clone, Serialize, Deserialize)] @@ -98,8 +98,8 @@ pub struct MoveEnumLayout { #[derive(Debug, Clone, Serialize, Deserialize)] pub enum MoveDatatypeLayout { - Struct(MoveStructLayout), - Enum(MoveEnumLayout), + Struct(Box<MoveStructLayout>), + Enum(Box<MoveEnumLayout>), } impl MoveDatatypeLayout { @@ -126,7 +126,7 @@ pub enum MoveTypeLayout { #[serde(rename(serialize = "vector", deserialize = "vector"))] Vector(Box<MoveTypeLayout>), #[serde(rename(serialize = "struct", deserialize = "struct"))] - Struct(MoveStructLayout), + Struct(Box<MoveStructLayout>), #[serde(rename(serialize = "signer", deserialize = "signer"))] Signer, @@ -138,7 +138,7 @@ pub enum MoveTypeLayout { #[serde(rename(serialize = "u256", deserialize = "u256"))] U256, #[serde(rename(serialize = "enum", deserialize = "enum"))] - Enum(MoveEnumLayout), + Enum(Box<MoveEnumLayout>), } impl MoveValue { @@ -298,7 +298,10 @@ impl MoveVariant { impl MoveStructLayout { pub fn new(type_: StructTag, fields: Vec<MoveFieldLayout>) -> Self { - Self { type_, fields } + Self { + type_, + fields: Box::new(fields), + } } pub fn
into_fields(self) -> Vec<MoveFieldLayout> { @@ -585,7 +588,7 @@ impl fmt::Display for MoveStructLayout { write!(f, "struct ")?; write!(f, "{} ", self.type_)?; let mut map = f.debug_map(); - for field in &self.fields { + for field in &*self.fields { map.entry(&DD(&field.name), &DD(&field.layout)); } map.finish() @@ -633,8 +636,8 @@ impl From<&MoveTypeLayout> for TypeTag { let inner_type = &**v; TypeTag::Vector(Box::new(inner_type.into())) } - MoveTypeLayout::Struct(v) => TypeTag::Struct(Box::new(v.into())), - MoveTypeLayout::Enum(e) => TypeTag::Struct(Box::new(e.into())), + MoveTypeLayout::Struct(v) => TypeTag::Struct(Box::new(v.as_ref().into())), + MoveTypeLayout::Enum(e) => TypeTag::Struct(Box::new(e.as_ref().into())), } } } diff --git a/external-crates/move/crates/move-core-types/src/runtime_value.rs b/external-crates/move/crates/move-core-types/src/runtime_value.rs index ddeec601cd801..2c089b3ed8f80 100644 --- a/external-crates/move/crates/move-core-types/src/runtime_value.rs +++ b/external-crates/move/crates/move-core-types/src/runtime_value.rs @@ -50,15 +50,15 @@ pub enum MoveValue { } #[derive(Debug, Clone, Serialize, Deserialize)] -pub struct MoveStructLayout(pub Vec<MoveTypeLayout>); +pub struct MoveStructLayout(pub Box<Vec<MoveTypeLayout>>); #[derive(Debug, Clone, Serialize, Deserialize)] -pub struct MoveEnumLayout(pub Vec<Vec<MoveTypeLayout>>); +pub struct MoveEnumLayout(pub Box<Vec<Vec<MoveTypeLayout>>>); #[derive(Debug, Clone, Serialize, Deserialize)] pub enum MoveDatatypeLayout { - Struct(MoveStructLayout), - Enum(MoveEnumLayout), + Struct(Box<MoveStructLayout>), + Enum(Box<MoveEnumLayout>), } impl MoveDatatypeLayout { @@ -86,7 +86,7 @@ pub enum MoveTypeLayout { #[serde(rename(serialize = "vector", deserialize = "vector"))] Vector(Box<MoveTypeLayout>), #[serde(rename(serialize = "struct", deserialize = "struct"))] - Struct(MoveStructLayout), + Struct(Box<MoveStructLayout>), #[serde(rename(serialize = "signer", deserialize = "signer"))] Signer, @@ -98,7 +98,7 @@ pub enum MoveTypeLayout { #[serde(rename(serialize = "u256", deserialize = "u256"))] U256, #[serde(rename(serialize = "enum", deserialize = "enum"))] - Enum(MoveEnumLayout), + Enum(Box<MoveEnumLayout>), } impl MoveValue { @@ -192,7 +192,7 @@ impl MoveStruct { type_: type_.clone(), fields: vals .into_iter() - .zip(fields) + .zip(fields.iter()) .map(|(v, l)| (l.name.clone(), v.decorate(&l.layout))) .collect(), } @@ -228,7 +228,7 @@ impl MoveVariant { tag, fields: fields .into_iter() - .zip(v_layout) + .zip(v_layout.iter()) .map(|(v, l)| (l.name.clone(), v.decorate(&l.layout))) .collect(), variant_name: v_name.clone(), @@ -246,7 +246,7 @@ impl MoveVariant { impl MoveStructLayout { pub fn new(types: Vec<MoveTypeLayout>) -> Self { - Self(types) + Self(Box::new(types)) } pub fn fields(&self) -> &[MoveTypeLayout] { @@ -254,7 +254,7 @@ impl MoveStructLayout { } pub fn into_fields(self) -> Vec<MoveTypeLayout> { - self.0 + *self.0 } } diff --git a/external-crates/move/crates/move-core-types/src/unit_tests/value_test.rs b/external-crates/move/crates/move-core-types/src/unit_tests/value_test.rs index ffa09a0b063e2..ccaaa35e1ad56 100644 --- a/external-crates/move/crates/move-core-types/src/unit_tests/value_test.rs +++ b/external-crates/move/crates/move-core-types/src/unit_tests/value_test.rs @@ -11,6 +11,12 @@ use crate::{ }; use serde_json::json; +#[test] +fn check_layout_size() { + assert_eq!(std::mem::size_of::<R::MoveTypeLayout>(), 16); + assert_eq!(std::mem::size_of::<A::MoveTypeLayout>(), 16); +} + #[test] fn struct_deserialization() { let struct_type = StructTag { @@ -37,17 +43,21 @@ fn struct_deserialization() { let struct_type_layout = A::MoveStructLayout { type_: struct_type.clone(), - fields: vec![ - A::MoveFieldLayout::new(ident_str!("f").to_owned(),
A::MoveTypeLayout::U64), - A::MoveFieldLayout::new(ident_str!("g").to_owned(), A::MoveTypeLayout::Bool), - ] - .into_iter() - .collect(), + fields: Box::new( + vec![ + A::MoveFieldLayout::new(ident_str!("f").to_owned(), A::MoveTypeLayout::U64), + A::MoveFieldLayout::new(ident_str!("g").to_owned(), A::MoveTypeLayout::Bool), + ] + .into_iter() + .collect(), + ), }; - let deser_typed_value = - A::MoveValue::simple_deserialize(&ser, &A::MoveTypeLayout::Struct(struct_type_layout)) - .unwrap(); + let deser_typed_value = A::MoveValue::simple_deserialize( + &ser, + &A::MoveTypeLayout::Struct(Box::new(struct_type_layout)), + ) + .unwrap(); let typed_value = A::MoveStruct::new(struct_type, field_values); assert_eq!( @@ -96,8 +106,8 @@ fn enum_deserialization() { R::MoveTypeLayout::Bool, R::MoveTypeLayout::U8, ]; - let enum_layout = R::MoveEnumLayout(vec![variant_layout1, variant_layout2]); - R::MoveTypeLayout::Enum(enum_layout) + let enum_layout = R::MoveEnumLayout(Box::new(vec![variant_layout1, variant_layout2])); + R::MoveTypeLayout::Enum(Box::new(enum_layout)) }; // test each deserialization scheme @@ -156,9 +166,11 @@ fn enum_deserialization() { json!([1, [8, false, 0]]) ); - let deser_typed_value = - A::MoveValue::simple_deserialize(&ser, &A::MoveTypeLayout::Enum(enum_type_layout.clone())) - .unwrap(); + let deser_typed_value = A::MoveValue::simple_deserialize( + &ser, + &A::MoveTypeLayout::Enum(Box::new(enum_type_layout.clone())), + ) + .unwrap(); let typed_value = A::MoveVariant { type_: enum_type.clone(), variant_name: ident_str!("Variant1").to_owned(), @@ -182,9 +194,11 @@ fn enum_deserialization() { let ser1 = R::MoveValue::Variant(runtime_value.clone()) .simple_serialize() .unwrap(); - let deser1_typed_value = - A::MoveValue::simple_deserialize(&ser1, &A::MoveTypeLayout::Enum(enum_type_layout)) - .unwrap(); + let deser1_typed_value = A::MoveValue::simple_deserialize( + &ser1, + &A::MoveTypeLayout::Enum(Box::new(enum_type_layout)), + ) + .unwrap(); let typed_value = A::MoveVariant { type_: enum_type, variant_name: ident_str!("Variant2").to_owned(), @@ -220,10 +234,10 @@ fn enum_deserialization_vec_option_runtime_layout_equiv() { let vec_ser = vec_option.simple_serialize().unwrap(); let enum_ser = enum_option.simple_serialize().unwrap(); - let enum_layout = R::MoveTypeLayout::Enum(R::MoveEnumLayout(vec![ + let enum_layout = R::MoveTypeLayout::Enum(Box::new(R::MoveEnumLayout(Box::new(vec![ vec![], vec![R::MoveTypeLayout::U64], - ])); + ])))); let vec_layout = R::MoveTypeLayout::Vector(Box::new(R::MoveTypeLayout::U64)); diff --git a/external-crates/move/crates/move-core-types/src/unit_tests/visitor_test.rs b/external-crates/move/crates/move-core-types/src/unit_tests/visitor_test.rs index 3edbb4e4c20ae..2daee77142270 100644 --- a/external-crates/move/crates/move-core-types/src/unit_tests/visitor_test.rs +++ b/external-crates/move/crates/move-core-types/src/unit_tests/visitor_test.rs @@ -689,7 +689,10 @@ fn struct_layout_(rep: &str, fields: Vec<(&str, MoveTypeLayout)>) -> MoveTypeLay .map(|(name, layout)| MoveFieldLayout::new(Identifier::new(name).unwrap(), layout)) .collect(); - MoveTypeLayout::Struct(MoveStructLayout { type_, fields }) + MoveTypeLayout::Struct(Box::new(MoveStructLayout { + type_, + fields: Box::new(fields), + })) } /// Create a variant value for test purposes. 
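The Box::new wrapping threaded through these test fixtures is the mechanical fallout of boxing the Struct and Enum payloads in annotated_value.rs and runtime_value.rs above, and the payoff is what the new check_layout_size test pins down. As a rough accounting, assuming a 64-bit target: a Box is a single 8-byte pointer, so a MoveTypeLayout value is at most a discriminant (8 bytes with padding) plus one pointer (8 bytes), 8 + 8 = 16 bytes, rather than the size of its largest inline variant.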
@@ -723,7 +726,7 @@ fn enum_layout_(rep: &str, variants: Vec<(&str, Vec<(&str, MoveTypeLayout)>)>) - }) .collect(); - MoveTypeLayout::Enum(MoveEnumLayout { type_, variants }) + MoveTypeLayout::Enum(Box::new(MoveEnumLayout { type_, variants })) } /// BCS encode Move value. diff --git a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/declarations/let.exp b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/declarations/let.exp index aa7fce81d80be..b19062a78956e 100644 --- a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/declarations/let.exp +++ b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/declarations/let.exp @@ -3,7 +3,7 @@ processed 2 tasks task 0, lines 1-9: //# print-bytecode // Move bytecode v6 -module 1.m { +module 6.m { diff --git a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/declarations/let.mvir b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/declarations/let.mvir index 5f699b22e8316..2a593eff0cb0c 100644 --- a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/declarations/let.mvir +++ b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/declarations/let.mvir @@ -1,5 +1,5 @@ //# print-bytecode -module 0x1.m { +module 0x6.m { // `let` generates a local with the given type. entry foo() { let x: u64; @@ -9,7 +9,7 @@ label b0: } //# print-bytecode -module 0x1.m { +module 0x6.m { // Two `let` cannot use the same name for a local. entry foo() { let x: u64; diff --git a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/expressions/borrow.exp b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/expressions/borrow.exp index c69235c0fde3c..842feca66f97c 100644 --- a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/expressions/borrow.exp +++ b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/expressions/borrow.exp @@ -3,7 +3,7 @@ processed 9 tasks task 0, lines 1-13: //# print-bytecode // Move bytecode v6 -module 1.m { +module 6.m { @@ -26,7 +26,7 @@ B0: task 1, lines 15-28: //# print-bytecode // Move bytecode v6 -module 2.m { +module 7.m { diff --git a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/expressions/borrow.mvir b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/expressions/borrow.mvir index d73a45b61f9ca..275e6c0fbf9f6 100644 --- a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/expressions/borrow.mvir +++ b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/expressions/borrow.mvir @@ -1,5 +1,5 @@ //# print-bytecode -module 0x1.m { +module 0x6.m { // Borrowing a local produces a reference type. entry foo() { let x: u64; @@ -13,7 +13,7 @@ label b0: } //# print-bytecode -module 0x2.m { +module 0x7.m { // Borrowing a local reference produces a reference type. 
entry foo() { let x: u64; @@ -112,7 +112,7 @@ module 0x3d4.N } //# print-bytecode -module 0x1.m { +module 0x8.m { import 0x3d4.M; entry foo(account: signer) { let t: M.T; @@ -142,7 +142,7 @@ module 0x3d4.O } //# print-bytecode -module 0x1.m { +module 0x9.m { entry foo(account: signer) { let t: M.T; let r: &bool; diff --git a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/expressions/borrow_mut.exp b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/expressions/borrow_mut.exp index d0f315319959a..deab19316666a 100644 --- a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/expressions/borrow_mut.exp +++ b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/expressions/borrow_mut.exp @@ -3,7 +3,7 @@ processed 3 tasks task 0, lines 1-12: //# print-bytecode // Move bytecode v6 -module 1.m { +module 6.m { diff --git a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/expressions/borrow_mut.mvir b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/expressions/borrow_mut.mvir index ee34b463c58fa..d72757a32f2a6 100644 --- a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/expressions/borrow_mut.mvir +++ b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/expressions/borrow_mut.mvir @@ -1,5 +1,5 @@ //# print-bytecode -module 0x1.m { +module 0x6.m { entry foo() { let x: u64; let ref_x: &mut u64; diff --git a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/expressions/builtins/vector.exp b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/expressions/builtins/vector.exp index 1ac319a8abb53..2a629059b8bbc 100644 --- a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/expressions/builtins/vector.exp +++ b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/expressions/builtins/vector.exp @@ -3,7 +3,7 @@ processed 1 task task 0, lines 1-33: //# print-bytecode // Move bytecode v6 -module 1.m { +module 6.m { diff --git a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/expressions/builtins/vector.mvir b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/expressions/builtins/vector.mvir index 607bb21e07ca4..32ba979b4e815 100644 --- a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/expressions/builtins/vector.mvir +++ b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/expressions/builtins/vector.mvir @@ -1,5 +1,5 @@ //# print-bytecode -module 0x1.m { +module 0x6.m { entry foo() { let v: vector<u64>; let v_imm: &vector<u64>; diff --git a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/expressions/combined.exp b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/expressions/combined.exp index a005304454356..3f0f88fddfba0 100644 --- a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/expressions/combined.exp +++ b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/expressions/combined.exp @@ -3,7 +3,7 @@
processed 1 task task 0, lines 1-13: //# print-bytecode // Move bytecode v6 -module 1.m { +module 6.m { diff --git a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/expressions/combined.mvir b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/expressions/combined.mvir index 103f0b2bd9df8..8e825e5a2965e 100644 --- a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/expressions/combined.mvir +++ b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/expressions/combined.mvir @@ -1,5 +1,5 @@ //# print-bytecode -module 0x1.m { +module 0x6.m { entry foo() { let x: u64; let y: u64; diff --git a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/expressions/unpack.mvir b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/expressions/unpack.mvir index e4521b5c70977..85709fb5206eb 100644 --- a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/expressions/unpack.mvir +++ b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/expressions/unpack.mvir @@ -30,7 +30,7 @@ module 0x2d12.M { //# print-bytecode // An unpack expression cannot reference a struct type defined in a separate module. -module 0x2d12.N { +module 0x2d13.N { import 0x2d12.M; f() { diff --git a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/statements/assert.exp b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/statements/assert.exp index a86934cc2fc84..1665f09a29f74 100644 --- a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/statements/assert.exp +++ b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/statements/assert.exp @@ -3,7 +3,7 @@ processed 1 task task 0, lines 1-10: //# print-bytecode // Move bytecode v6 -module 1.m { +module 6.m { diff --git a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/statements/assert.mvir b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/statements/assert.mvir index b68fffd474d5d..a5b06fa0c2e94 100644 --- a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/statements/assert.mvir +++ b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/statements/assert.mvir @@ -1,5 +1,5 @@ //# print-bytecode -module 0x1.m { +module 0x6.m { entry foo() { let x: u64; label b0: diff --git a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/statements/assign.mvir b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/statements/assign.mvir index 01d67d43a7ef6..46c1aa23ba6ed 100644 --- a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/statements/assign.mvir +++ b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/statements/assign.mvir @@ -1,5 +1,5 @@ //# print-bytecode -module 0x1.m { +module 0x6.m { // Un-declared locals cannot be assigned to. 
entry foo() { label b0: diff --git a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/statements/jump.exp b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/statements/jump.exp index 93db6005b0807..55f5bd61bc21b 100644 --- a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/statements/jump.exp +++ b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/statements/jump.exp @@ -3,7 +3,7 @@ processed 3 tasks task 0, lines 1-8: //# print-bytecode // Move bytecode v6 -module 1.m { +module 6.m { @@ -18,7 +18,7 @@ B0: task 1, lines 10-19: //# print-bytecode // Move bytecode v6 -module 2.m { +module 7.m { diff --git a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/statements/jump.mvir b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/statements/jump.mvir index 098073027c713..d6111b7079d78 100644 --- a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/statements/jump.mvir +++ b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/statements/jump.mvir @@ -1,5 +1,5 @@ //# print-bytecode -module 0x1.m { +module 0x6.m { // An endless loop is valid Move IR. entry foo() { label loop: @@ -8,7 +8,7 @@ label loop: } //# print-bytecode -module 0x2.m { +module 0x7.m { // Jumping to a label that is declared after the jump statement itself is valid. entry foo() { label foo: @@ -20,7 +20,7 @@ label bar: //# print-bytecode // Jumping to a label that isn't declared in the same function is invalid. -module 0x1.M { +module 0x6.M { f() { label foo: jump bar; diff --git a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/statements/jump_if.exp b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/statements/jump_if.exp index fc7789076eb48..452a823987063 100644 --- a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/statements/jump_if.exp +++ b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/statements/jump_if.exp @@ -3,7 +3,7 @@ processed 4 tasks task 0, lines 1-24: //# print-bytecode // Move bytecode v6 -module 1.m { +module 6.m { @@ -34,7 +34,7 @@ B4: task 1, lines 26-45: //# print-bytecode // Move bytecode v6 -module 2.m { +module 7.m { @@ -62,7 +62,7 @@ B3: task 2, lines 47-65: //# print-bytecode // Move bytecode v6 -module 3.m { +module 8.m { @@ -99,7 +99,7 @@ B5: task 3, lines 67-82: //# print-bytecode // Move bytecode v6 -module 4.m { +module 9.m { diff --git a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/statements/jump_if.mvir b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/statements/jump_if.mvir index f28d2c2cb741e..2fa7ef1e7d428 100644 --- a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/statements/jump_if.mvir +++ b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/statements/jump_if.mvir @@ -1,5 +1,5 @@ //# print-bytecode -module 0x1.m { +module 0x6.m { // This is roughly equivalent to the following Move program: // ``` // if (42 > 0) { x = 1; } else { x = 2; } @@ -24,7 +24,7 @@ label b3: } //# print-bytecode -module 0x2.m { +module 0x7.m { // This 
is roughly equivalent to the following Move program: // ``` // if (42 > 0) { x = 1; } else { x = 2; } @@ -45,7 +45,7 @@ label b2: } //# print-bytecode -module 0x3.m { +module 0x8.m { // `jump_if` statements can be sequenced, akin to "nested" `if` statements in Move. entry foo() { let x: u64; @@ -65,7 +65,7 @@ label end: } //# print-bytecode -module 0x4.m { +module 0x9.m { // `return` statements can appear anywhere, including within a jump destination's block. entry foo() { let x: u64; diff --git a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/statements/jump_if_false.exp b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/statements/jump_if_false.exp index 427c1c557b28b..5272473a59c66 100644 --- a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/statements/jump_if_false.exp +++ b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/statements/jump_if_false.exp @@ -3,7 +3,7 @@ processed 4 tasks task 0, lines 1-20: //# print-bytecode // Move bytecode v6 -module 1.m { +module 6.m { @@ -33,7 +33,7 @@ B3: task 1, lines 22-36: //# print-bytecode // Move bytecode v6 -module 2.m { +module 7.m { @@ -59,7 +59,7 @@ B4: task 2, lines 38-48: //# print-bytecode // Move bytecode v6 -module 3.m { +module 8.m { @@ -79,7 +79,7 @@ B2: task 3, lines 50-63: //# print-bytecode // Move bytecode v6 -module 4.m { +module 9.m { diff --git a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/statements/jump_if_false.mvir b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/statements/jump_if_false.mvir index 0df1612cc7fde..a112b54aa0099 100644 --- a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/statements/jump_if_false.mvir +++ b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/bytecode-generation/statements/jump_if_false.mvir @@ -1,5 +1,5 @@ //# print-bytecode -module 0x1.m { +module 0x6.m { // This is roughly equivalent to the following Move program: // ``` // let x: u64 = 0; @@ -20,7 +20,7 @@ label b2: } //# print-bytecode -module 0x2.m { +module 0x7.m { // `return` statements can appear anywhere, even within a loop. entry foo() { let x: u64; @@ -36,7 +36,7 @@ label b2: } //# print-bytecode -module 0x3.m { +module 0x8.m { // This is roughly equivalent to the following Move code: `while (true) { break; }`. entry foo() { label b0: @@ -48,7 +48,7 @@ label b2: } //# print-bytecode -module 0x4.m { +module 0x9.m { // This is roughly equivalent to the following Move code: `loop { loop { break; } break; }`. 
entry foo() { label b0: diff --git a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/parsing/comments.exp b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/parsing/comments.exp index 659464597357b..c8fda79caca85 100644 --- a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/parsing/comments.exp +++ b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/parsing/comments.exp @@ -3,7 +3,7 @@ processed 6 tasks task 0, lines 1-8: //# print-bytecode // Move bytecode v6 -module 1.m { +module 6.m { @@ -18,7 +18,7 @@ B0: task 1, lines 10-18: //# print-bytecode // Move bytecode v6 -module 2.m { +module 7.m { @@ -33,7 +33,7 @@ B0: task 2, lines 20-26: //# print-bytecode // Move bytecode v6 -module 3.m { +module 8.m { @@ -48,7 +48,7 @@ B0: task 3, lines 28-35: //# print-bytecode // Move bytecode v6 -module 4.m { +module 9.m { @@ -67,7 +67,7 @@ Error: ParserError: Invalid Token: invalid token kind for statement Slash task 5, lines 48-58: //# print-bytecode // Move bytecode v6 -module 6.m { +module b.m { diff --git a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/parsing/comments.mvir b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/parsing/comments.mvir index 91441a9864964..f2e5a139ad1ea 100644 --- a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/parsing/comments.mvir +++ b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/parsing/comments.mvir @@ -1,5 +1,5 @@ //# print-bytecode -module 0x1.m { +module 0x6.m { entry foo() { label b0: // return; @@ -8,7 +8,7 @@ label b0: } //# print-bytecode -module 0x2.m { +module 0x7.m { entry foo() { label b0: // return; @@ -18,7 +18,7 @@ label b0: } //# print-bytecode -module 0x3.m { +module 0x8.m { entry foo() { label b0: return; // return; @@ -26,7 +26,7 @@ label b0: } //# print-bytecode -module 0x4.m { +module 0x9.m { entry foo() { label b0: // return; @@ -35,7 +35,7 @@ label b0: } //# print-bytecode -module 0x5.m { +module 0xa.m { // In Move, /* */ are block comment delimiters. Not so in Move IR, so the `/*` below // cannot be parsed. entry foo() { @@ -46,7 +46,7 @@ label b0: } //# print-bytecode -module 0x6.m { +module 0xb.m { // Since /* */ are not block comment delimiters, they do not behave in any unique way when // they appear within comments. 
entry foo() { diff --git a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/parsing/crlf.exp b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/parsing/crlf.exp index 659464597357b..c8fda79caca85 100644 --- a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/parsing/crlf.exp +++ b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/parsing/crlf.exp @@ -3,7 +3,7 @@ processed 6 tasks task 0, lines 1-8: //# print-bytecode // Move bytecode v6 -module 1.m { +module 6.m { @@ -18,7 +18,7 @@ B0: task 1, lines 10-18: //# print-bytecode // Move bytecode v6 -module 2.m { +module 7.m { @@ -33,7 +33,7 @@ B0: task 2, lines 20-26: //# print-bytecode // Move bytecode v6 -module 3.m { +module 8.m { @@ -48,7 +48,7 @@ B0: task 3, lines 28-35: //# print-bytecode // Move bytecode v6 -module 4.m { +module 9.m { @@ -67,7 +67,7 @@ Error: ParserError: Invalid Token: invalid token kind for statement Slash task 5, lines 48-58: //# print-bytecode // Move bytecode v6 -module 6.m { +module b.m { diff --git a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/parsing/crlf.mvir b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/parsing/crlf.mvir index 91441a9864964..f2e5a139ad1ea 100644 --- a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/parsing/crlf.mvir +++ b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/parsing/crlf.mvir @@ -1,5 +1,5 @@ //# print-bytecode -module 0x1.m { +module 0x6.m { entry foo() { label b0: // return; @@ -8,7 +8,7 @@ label b0: } //# print-bytecode -module 0x2.m { +module 0x7.m { entry foo() { label b0: // return; @@ -18,7 +18,7 @@ label b0: } //# print-bytecode -module 0x3.m { +module 0x8.m { entry foo() { label b0: return; // return; @@ -26,7 +26,7 @@ label b0: } //# print-bytecode -module 0x4.m { +module 0x9.m { entry foo() { label b0: // return; @@ -35,7 +35,7 @@ label b0: } //# print-bytecode -module 0x5.m { +module 0xa.m { // In Move, /* */ are block comment delimiters. Not so in Move IR, so the `/*` below // cannot be parsed. entry foo() { @@ -46,7 +46,7 @@ label b0: } //# print-bytecode -module 0x6.m { +module 0xb.m { // Since /* */ are not block comment delimiters, they do not behave in any unique way when // they appear within comments. entry foo() { diff --git a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/parsing/keywords.mvir b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/parsing/keywords.mvir index b64145bb9c789..4ecfc4cca200b 100644 --- a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/parsing/keywords.mvir +++ b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/parsing/keywords.mvir @@ -1,5 +1,5 @@ //# print-bytecode -module 0x1.m { +module 0x6.m { // `label` is a keyword so it cannot be used as a parameter name. entry foo(label: address) { label b0: diff --git a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/parsing/types.mvir b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/parsing/types.mvir index 068ba2dbbf669..2a405f6ebeac6 100644 --- a/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/parsing/types.mvir +++ b/external-crates/move/crates/move-ir-compiler-transactional-tests/tests/parsing/types.mvir @@ -1,5 +1,5 @@ //# print-bytecode -module 0x1.m { +module 0x6.m { // Struct field specifiers in immutable borrows must not be qualified. 
entry foo() { label b0: @@ -8,7 +8,7 @@ label b0: } //# print-bytecode -module 0x2.m { +module 0x7.m { // Struct field specifiers in mutable borrows must not be qualified. entry foo() { label b0: diff --git a/external-crates/move/crates/move-package/src/resolution/dependency_cache.rs b/external-crates/move/crates/move-package/src/resolution/dependency_cache.rs index 6752f3f2fe39d..4cd38aa334b9e 100644 --- a/external-crates/move/crates/move-package/src/resolution/dependency_cache.rs +++ b/external-crates/move/crates/move-package/src/resolution/dependency_cache.rs @@ -66,6 +66,12 @@ impl DependencyCache { if !self.fetched_deps.insert(repository_path.clone()) { return Ok(()); } + + if Command::new("git").arg("--version").output().is_err() { + writeln!(progress_output, "Git is not installed or not in the PATH.")?; + return Err(anyhow::anyhow!("Git is not installed or not in the PATH.")); + } + let git_path = repository_path; let os_git_url = OsStr::new(git_url.as_str()); let os_git_rev = OsStr::new(git_rev.as_str()); @@ -78,15 +84,25 @@ impl DependencyCache { git_url, )?; // If the cached folder does not exist, download and clone accordingly - Command::new("git") + if let Ok(mut output) = Command::new("git") .args([OsStr::new("clone"), os_git_url, git_path.as_os_str()]) - .output() - .map_err(|_| { + .spawn() + { + output.wait().map_err(|_| { anyhow::anyhow!( "Failed to clone Git repository for package '{}'", dep_name ) })?; + if output.stdout.is_some() { + writeln!(progress_output, "{:?}", output)?; + } + } else { + return Err(anyhow::anyhow!( + "Failed to clone Git repository for package '{}'", + dep_name + )); + } Command::new("git") .args([ @@ -158,31 +174,31 @@ impl DependencyCache { // // NOTE: this means that you must run the package system with a working network // connection. - let status = Command::new("git") + + if let Ok(mut output) = Command::new("git") .args([ OsStr::new("-C"), git_path.as_os_str(), OsStr::new("fetch"), OsStr::new("origin"), ]) - .stdin(Stdio::null()) - .stdout(Stdio::null()) - .stderr(Stdio::null()) - .status() - .map_err(|_| { + .spawn() + { + output.wait().map_err(|_| { anyhow::anyhow!( "Failed to fetch latest Git state for package '{}', to skip set \ --skip-fetch-latest-git-deps", dep_name ) })?; - - if !status.success() { + if output.stdout.is_some() { + writeln!(progress_output, "{:?}", output)?; + } + } else { return Err(anyhow::anyhow!( - "Failed to fetch to latest Git state for package '{}', to skip set \ - --skip-fetch-latest-git-deps | Exit status: {}", - dep_name, - status + "Failed to fetch latest Git state for package '{}', to skip set \ + --skip-fetch-latest-git-deps", + dep_name )); } diff --git a/external-crates/move/crates/move-package/src/resolution/resolution_graph.rs b/external-crates/move/crates/move-package/src/resolution/resolution_graph.rs index 603d4c07f7493..f8979af95857a 100644 --- a/external-crates/move/crates/move-package/src/resolution/resolution_graph.rs +++ b/external-crates/move/crates/move-package/src/resolution/resolution_graph.rs @@ -557,6 +557,16 @@ impl Package { .collect()) } + pub fn get_bytecodes_bytes(&self) -> Result>> { + let mut ret = vec![]; + for path in self.get_bytecodes()? 
{ + let bytes = std::fs::read(path.to_string())?; + ret.push(bytes); + } + + Ok(ret) + } + pub(crate) fn compiler_config( &self, is_dependency: bool, @@ -600,6 +610,9 @@ fn source_paths_for_config(package_path: &Path, config: &BuildConfig) -> Vec Result { diff --git a/external-crates/move/crates/move-package/tests/test_sources/basic_no_deps/sources/A.move b/external-crates/move/crates/move-package/tests/test_sources/basic_no_deps/sources/A.move index 925cbe1fef94d..93f3094bd55ec 100644 --- a/external-crates/move/crates/move-package/tests/test_sources/basic_no_deps/sources/A.move +++ b/external-crates/move/crates/move-package/tests/test_sources/basic_no_deps/sources/A.move @@ -1,3 +1,3 @@ -module 0x1::M { +module 0x6::M { public fun foo() { } } diff --git a/external-crates/move/crates/move-package/tests/test_sources/basic_no_deps_test_mode/sources/A.move b/external-crates/move/crates/move-package/tests/test_sources/basic_no_deps_test_mode/sources/A.move index 925cbe1fef94d..93f3094bd55ec 100644 --- a/external-crates/move/crates/move-package/tests/test_sources/basic_no_deps_test_mode/sources/A.move +++ b/external-crates/move/crates/move-package/tests/test_sources/basic_no_deps_test_mode/sources/A.move @@ -1,3 +1,3 @@ -module 0x1::M { +module 0x6::M { public fun foo() { } } diff --git a/external-crates/move/crates/move-package/tests/test_sources/basic_no_deps_test_mode/sources/ATest.move b/external-crates/move/crates/move-package/tests/test_sources/basic_no_deps_test_mode/sources/ATest.move index 63f212d0efd03..70c725c733e28 100644 --- a/external-crates/move/crates/move-package/tests/test_sources/basic_no_deps_test_mode/sources/ATest.move +++ b/external-crates/move/crates/move-package/tests/test_sources/basic_no_deps_test_mode/sources/ATest.move @@ -1,4 +1,4 @@ #[test_only] -module 0x1::MTest { +module 0x6::MTest { public fun foo() { } } diff --git a/external-crates/move/crates/move-package/tests/test_sources/case_insensitive_check/sources/Set.move b/external-crates/move/crates/move-package/tests/test_sources/case_insensitive_check/sources/Set.move index be1b434459893..9c2e23466fbd3 100644 --- a/external-crates/move/crates/move-package/tests/test_sources/case_insensitive_check/sources/Set.move +++ b/external-crates/move/crates/move-package/tests/test_sources/case_insensitive_check/sources/Set.move @@ -1,3 +1,3 @@ -module 0x1::Set { +module 0x6::Set { public fun foo() { } } diff --git a/external-crates/move/crates/move-package/tests/test_sources/case_insensitive_check/sources/a.move b/external-crates/move/crates/move-package/tests/test_sources/case_insensitive_check/sources/a.move index da5a1b5ec5c5b..f4ebb20a82c40 100644 --- a/external-crates/move/crates/move-package/tests/test_sources/case_insensitive_check/sources/a.move +++ b/external-crates/move/crates/move-package/tests/test_sources/case_insensitive_check/sources/a.move @@ -1 +1 @@ -module 0x1::A { } +module 0x6::A { } diff --git a/external-crates/move/crates/move-package/tests/test_sources/case_insensitive_check/sources/foo.move b/external-crates/move/crates/move-package/tests/test_sources/case_insensitive_check/sources/foo.move index e8c3657afccd4..2124b3a3262d9 100644 --- a/external-crates/move/crates/move-package/tests/test_sources/case_insensitive_check/sources/foo.move +++ b/external-crates/move/crates/move-package/tests/test_sources/case_insensitive_check/sources/foo.move @@ -1,3 +1,3 @@ -module 0x1::set { +module 0x6::set { public fun foo() { } } diff --git 
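The git handling added in dependency_cache.rs above probes `git --version` before any clone or fetch, so a missing binary produces a clear error instead of a confusing spawn failure. A minimal self-contained sketch of the same probe (plain std, illustrative function names, not the crate's API):

use std::process::Command;

// `output()` only succeeds if the `git` binary could actually be spawned,
// so an Err here means Git is not installed or not on the PATH.
fn git_is_available() -> bool {
    Command::new("git").arg("--version").output().is_ok()
}

fn main() {
    if !git_is_available() {
        eprintln!("Git is not installed or not in the PATH.");
        std::process::exit(1);
    }
}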
a/external-crates/move/crates/move-package/tests/test_sources/case_insensitive_check/sources/otherModule.move b/external-crates/move/crates/move-package/tests/test_sources/case_insensitive_check/sources/otherModule.move index 1e6c517d0c71b..57951cd4257f7 100644 --- a/external-crates/move/crates/move-package/tests/test_sources/case_insensitive_check/sources/otherModule.move +++ b/external-crates/move/crates/move-package/tests/test_sources/case_insensitive_check/sources/otherModule.move @@ -1,2 +1,2 @@ -module 0x2::seT { +module 0x7::seT { } diff --git a/external-crates/move/crates/move-package/tests/test_sources/diamond_problem_backflow_resolution/deps_only/B/sources/AA.move b/external-crates/move/crates/move-package/tests/test_sources/diamond_problem_backflow_resolution/deps_only/B/sources/AA.move index 411a75ce7d3f9..bd117ac28656e 100644 --- a/external-crates/move/crates/move-package/tests/test_sources/diamond_problem_backflow_resolution/deps_only/B/sources/AA.move +++ b/external-crates/move/crates/move-package/tests/test_sources/diamond_problem_backflow_resolution/deps_only/B/sources/AA.move @@ -1,3 +1,3 @@ -module 0x1::AAA { +module 0x6::AAA { public fun foo() {} } diff --git a/external-crates/move/crates/move-package/tests/test_sources/diamond_problem_no_conflict/deps_only/B/sources/AA.move b/external-crates/move/crates/move-package/tests/test_sources/diamond_problem_no_conflict/deps_only/B/sources/AA.move index 411a75ce7d3f9..bd117ac28656e 100644 --- a/external-crates/move/crates/move-package/tests/test_sources/diamond_problem_no_conflict/deps_only/B/sources/AA.move +++ b/external-crates/move/crates/move-package/tests/test_sources/diamond_problem_no_conflict/deps_only/B/sources/AA.move @@ -1,3 +1,3 @@ -module 0x1::AAA { +module 0x6::AAA { public fun foo() {} } diff --git a/external-crates/move/crates/move-package/tests/test_sources/resolve_pkg_name/deps_only/B-rename/sources/AA.move b/external-crates/move/crates/move-package/tests/test_sources/resolve_pkg_name/deps_only/B-rename/sources/AA.move index 411a75ce7d3f9..bd117ac28656e 100644 --- a/external-crates/move/crates/move-package/tests/test_sources/resolve_pkg_name/deps_only/B-rename/sources/AA.move +++ b/external-crates/move/crates/move-package/tests/test_sources/resolve_pkg_name/deps_only/B-rename/sources/AA.move @@ -1,3 +1,3 @@ -module 0x1::AAA { +module 0x6::AAA { public fun foo() {} } diff --git a/external-crates/move/crates/move-package/tests/thread_safety_package_test_sources/Package1/sources/Dummy.move b/external-crates/move/crates/move-package/tests/thread_safety_package_test_sources/Package1/sources/Dummy.move index a62a377b51b4f..d920a96fb016a 100644 --- a/external-crates/move/crates/move-package/tests/thread_safety_package_test_sources/Package1/sources/Dummy.move +++ b/external-crates/move/crates/move-package/tests/thread_safety_package_test_sources/Package1/sources/Dummy.move @@ -1 +1 @@ -module 0x1::Dummy { } +module 0x6::Dummy { } diff --git a/external-crates/move/crates/move-package/tests/thread_safety_package_test_sources/Package2/sources/Dummy.move b/external-crates/move/crates/move-package/tests/thread_safety_package_test_sources/Package2/sources/Dummy.move index a62a377b51b4f..d920a96fb016a 100644 --- a/external-crates/move/crates/move-package/tests/thread_safety_package_test_sources/Package2/sources/Dummy.move +++ b/external-crates/move/crates/move-package/tests/thread_safety_package_test_sources/Package2/sources/Dummy.move @@ -1 +1 @@ -module 0x1::Dummy { } +module 0x6::Dummy { } diff --git 
a/external-crates/move/crates/move-stackless-bytecode/tests/borrow/function_call.move b/external-crates/move/crates/move-stackless-bytecode/tests/borrow/function_call.move index b2f766c47e846..ed1d73a0aa363 100644 --- a/external-crates/move/crates/move-stackless-bytecode/tests/borrow/function_call.move +++ b/external-crates/move/crates/move-stackless-bytecode/tests/borrow/function_call.move @@ -5,7 +5,7 @@ // dep: ../move-stdlib/sources/string.move // dep: ../move-stdlib/sources/vector.move -module 0x2::MultiLayerCalling { +module 0x7::MultiLayerCalling { public struct HasVector { v: vector, diff --git a/external-crates/move/crates/move-stackless-bytecode/tests/borrow/hyper_edge.move b/external-crates/move/crates/move-stackless-bytecode/tests/borrow/hyper_edge.move index a0f97ccdc06d9..74b62f008293d 100644 --- a/external-crates/move/crates/move-stackless-bytecode/tests/borrow/hyper_edge.move +++ b/external-crates/move/crates/move-stackless-bytecode/tests/borrow/hyper_edge.move @@ -5,7 +5,7 @@ // dep: ../move-stdlib/sources/string.move // dep: ../move-stdlib/sources/vector.move -module 0x2::Collection { +module 0x7::Collection { public struct Collection has drop { items: vector, owner: address, @@ -23,8 +23,8 @@ module 0x2::Collection { } } -module 0x2::Test { - use 0x2::Collection; +module 0x8::Test { + use 0x7::Collection; public struct Token has drop { value: u64 } diff --git a/external-crates/move/crates/move-stackless-bytecode/tests/escape_analysis/return_internal_refs.move b/external-crates/move/crates/move-stackless-bytecode/tests/escape_analysis/return_internal_refs.move index 15df8e2cbee95..69664580c8908 100644 --- a/external-crates/move/crates/move-stackless-bytecode/tests/escape_analysis/return_internal_refs.move +++ b/external-crates/move/crates/move-stackless-bytecode/tests/escape_analysis/return_internal_refs.move @@ -1,4 +1,4 @@ -module 0x1::LeakInternalRefs { +module 0x6::LeakInternalRefs { public struct S { f: u64, g: u64 } diff --git a/external-crates/move/crates/move-stackless-bytecode/tests/escape_analysis/return_refs_into_vec.exp b/external-crates/move/crates/move-stackless-bytecode/tests/escape_analysis/return_refs_into_vec.exp index fa7d8f9228b5d..a12c7aae4d3d1 100644 --- a/external-crates/move/crates/move-stackless-bytecode/tests/escape_analysis/return_refs_into_vec.exp +++ b/external-crates/move/crates/move-stackless-bytecode/tests/escape_analysis/return_refs_into_vec.exp @@ -831,30 +831,6 @@ public fun vector::swap_remove<#0>($t0|v: &mut vector<#0>, $t1|i: u64): #0 { } -[variant baseline] -fun ReturnRefsIntoVec::return_vec_index_immut($t0|v: &vector): &u64 { - var $t1: &vector - var $t2: u64 - var $t3: &u64 - 0: $t1 := move($t0) - 1: $t2 := 0 - 2: $t3 := vector::borrow($t1, $t2) - 3: return $t3 -} - - -[variant baseline] -fun ReturnRefsIntoVec::return_vec_index_mut($t0|v: &mut vector): &mut u64 { - var $t1: &mut vector - var $t2: u64 - var $t3: &mut u64 - 0: $t1 := move($t0) - 1: $t2 := 0 - 2: $t3 := vector::borrow_mut($t1, $t2) - 3: return $t3 -} - - [variant baseline] public fun option::borrow<#0>($t0|t: &option::Option<#0>): � { var $t1: &option::Option<#0> @@ -2672,6 +2648,30 @@ public fun string::utf8($t0|bytes: vector): string::String { 11: return $t5 } + +[variant baseline] +fun ReturnRefsIntoVec::return_vec_index_immut($t0|v: &vector): &u64 { + var $t1: &vector + var $t2: u64 + var $t3: &u64 + 0: $t1 := move($t0) + 1: $t2 := 0 + 2: $t3 := vector::borrow($t1, $t2) + 3: return $t3 +} + + +[variant baseline] +fun ReturnRefsIntoVec::return_vec_index_mut($t0|v: 
&mut vector): &mut u64 { + var $t1: &mut vector + var $t2: u64 + var $t3: &mut u64 + 0: $t1 := move($t0) + 1: $t2 := 0 + 2: $t3 := vector::borrow_mut($t1, $t2) + 3: return $t3 +} + ============ after pipeline `escape_analysis` ================ [variant baseline] @@ -3505,30 +3505,6 @@ public fun vector::swap_remove<#0>($t0|v: &mut vector<#0>, $t1|i: u64): #0 { } -[variant baseline] -fun ReturnRefsIntoVec::return_vec_index_immut($t0|v: &vector): &u64 { - var $t1: &vector - var $t2: u64 - var $t3: &u64 - 0: $t1 := move($t0) - 1: $t2 := 0 - 2: $t3 := vector::borrow($t1, $t2) - 3: return $t3 -} - - -[variant baseline] -fun ReturnRefsIntoVec::return_vec_index_mut($t0|v: &mut vector): &mut u64 { - var $t1: &mut vector - var $t2: u64 - var $t3: &mut u64 - 0: $t1 := move($t0) - 1: $t2 := 0 - 2: $t3 := vector::borrow_mut($t1, $t2) - 3: return $t3 -} - - [variant baseline] public fun option::borrow<#0>($t0|t: &option::Option<#0>): � { var $t1: &option::Option<#0> @@ -5345,3 +5321,27 @@ public fun string::utf8($t0|bytes: vector): string::String { 10: $t5 := pack string::String($t4) 11: return $t5 } + + +[variant baseline] +fun ReturnRefsIntoVec::return_vec_index_immut($t0|v: &vector): &u64 { + var $t1: &vector + var $t2: u64 + var $t3: &u64 + 0: $t1 := move($t0) + 1: $t2 := 0 + 2: $t3 := vector::borrow($t1, $t2) + 3: return $t3 +} + + +[variant baseline] +fun ReturnRefsIntoVec::return_vec_index_mut($t0|v: &mut vector): &mut u64 { + var $t1: &mut vector + var $t2: u64 + var $t3: &mut u64 + 0: $t1 := move($t0) + 1: $t2 := 0 + 2: $t3 := vector::borrow_mut($t1, $t2) + 3: return $t3 +} diff --git a/external-crates/move/crates/move-stackless-bytecode/tests/escape_analysis/return_refs_into_vec.move b/external-crates/move/crates/move-stackless-bytecode/tests/escape_analysis/return_refs_into_vec.move index 7c6ac51c5377a..353de1931b777 100644 --- a/external-crates/move/crates/move-stackless-bytecode/tests/escape_analysis/return_refs_into_vec.move +++ b/external-crates/move/crates/move-stackless-bytecode/tests/escape_analysis/return_refs_into_vec.move @@ -5,7 +5,7 @@ // dep: ../move-stdlib/sources/string.move // dep: ../move-stdlib/sources/vector.move -module 0x1::ReturnRefsIntoVec { +module 0x6::ReturnRefsIntoVec { // should not complain fun return_vec_index_immut(v: &vector): &u64 { diff --git a/external-crates/move/crates/move-stackless-bytecode/tests/escape_analysis/return_refs_safe.move b/external-crates/move/crates/move-stackless-bytecode/tests/escape_analysis/return_refs_safe.move index 0398e90191b02..00c5c69b3cc22 100644 --- a/external-crates/move/crates/move-stackless-bytecode/tests/escape_analysis/return_refs_safe.move +++ b/external-crates/move/crates/move-stackless-bytecode/tests/escape_analysis/return_refs_safe.move @@ -1,4 +1,4 @@ -module 0x1::ReturnRefsSafe { +module 0x6::ReturnRefsSafe { // Make sure the analysis doesn't complain about returning // refs to formals or their children diff --git a/external-crates/move/crates/move-stackless-bytecode/tests/escape_analysis/struct_eq.move b/external-crates/move/crates/move-stackless-bytecode/tests/escape_analysis/struct_eq.move index 216c7b3074f1a..49d4ed5fd4e97 100644 --- a/external-crates/move/crates/move-stackless-bytecode/tests/escape_analysis/struct_eq.move +++ b/external-crates/move/crates/move-stackless-bytecode/tests/escape_analysis/struct_eq.move @@ -1,4 +1,4 @@ -module 0x1::StructEq { +module 0x6::StructEq { public struct S { f: u64 } diff --git a/external-crates/move/crates/move-transactional-test-runner/tests/vm_test_harness/example.move 
b/external-crates/move/crates/move-transactional-test-runner/tests/vm_test_harness/example.move index 814a83e5261e8..90eb4b882ea53 100644 --- a/external-crates/move/crates/move-transactional-test-runner/tests/vm_test_harness/example.move +++ b/external-crates/move/crates/move-transactional-test-runner/tests/vm_test_harness/example.move @@ -2,7 +2,7 @@ //# run -module 0x1::m { +module 0x5::m { fun main() {} } @@ -30,7 +30,7 @@ module A::N { //# run --args 0 -module 0x2::m { +module 0x6::m { entry fun main(v: u64) { helper(v) } @@ -41,7 +41,7 @@ module 0x2::m { //# run --args 42 --syntax=mvir -module 0x3.m { +module 0x7.m { import 0x42.N; entry foo(v: u64) { label b0: diff --git a/external-crates/move/crates/move-transactional-test-runner/tests/vm_test_harness/named_addresses_in_commands.exp b/external-crates/move/crates/move-transactional-test-runner/tests/vm_test_harness/named_addresses_in_commands.exp index 7cdb1a383ce4f..00e1910f1c0b4 100644 --- a/external-crates/move/crates/move-transactional-test-runner/tests/vm_test_harness/named_addresses_in_commands.exp +++ b/external-crates/move/crates/move-transactional-test-runner/tests/vm_test_harness/named_addresses_in_commands.exp @@ -1,5 +1,5 @@ processed 4 tasks task 3, line 21: -//# run 42::M::test -return values: { 42 } +//# run 43::M::test +return values: { 43 } diff --git a/external-crates/move/crates/move-transactional-test-runner/tests/vm_test_harness/named_addresses_in_commands.move b/external-crates/move/crates/move-transactional-test-runner/tests/vm_test_harness/named_addresses_in_commands.move index aef3acd8611c9..a2f3b4edabb49 100644 --- a/external-crates/move/crates/move-transactional-test-runner/tests/vm_test_harness/named_addresses_in_commands.move +++ b/external-crates/move/crates/move-transactional-test-runner/tests/vm_test_harness/named_addresses_in_commands.move @@ -1,9 +1,9 @@ -//# init --addresses A=42 +//# init --addresses A=43 //# run --args @A module 0x42::m { fun main(a: &address) { - assert!(*a == @42, 1000); + assert!(*a == @43, 1000); } } @@ -14,8 +14,8 @@ module A::M { } public fun test(): Foo { - Foo { x: 42 } + Foo { x: 43 } } } -//# run 42::M::test +//# run 43::M::test diff --git a/external-crates/move/crates/move-transactional-test-runner/tests/vm_test_harness/print_bytecode.exp b/external-crates/move/crates/move-transactional-test-runner/tests/vm_test_harness/print_bytecode.exp index ffe8b4bf2392a..b8f2549843d85 100644 --- a/external-crates/move/crates/move-transactional-test-runner/tests/vm_test_harness/print_bytecode.exp +++ b/external-crates/move/crates/move-transactional-test-runner/tests/vm_test_harness/print_bytecode.exp @@ -18,7 +18,7 @@ B0: task 1, lines 9-13: //# print-bytecode // Move bytecode v6 -module 42.M { +module 43.M { diff --git a/external-crates/move/crates/move-transactional-test-runner/tests/vm_test_harness/print_bytecode.move b/external-crates/move/crates/move-transactional-test-runner/tests/vm_test_harness/print_bytecode.move index 054f68f813c24..ef1952472bc03 100644 --- a/external-crates/move/crates/move-transactional-test-runner/tests/vm_test_harness/print_bytecode.move +++ b/external-crates/move/crates/move-transactional-test-runner/tests/vm_test_harness/print_bytecode.move @@ -7,7 +7,7 @@ label b0: } //# print-bytecode -module 0x42::M { +module 0x43::M { entry fun foo() { } } diff --git a/external-crates/move/crates/move-unit-test/src/lib.rs b/external-crates/move/crates/move-unit-test/src/lib.rs index 9043af54dd6d9..4b40c47cbb208 100644 --- 
a/external-crates/move/crates/move-unit-test/src/lib.rs +++ b/external-crates/move/crates/move-unit-test/src/lib.rs @@ -10,6 +10,7 @@ pub mod test_runner; use crate::test_runner::TestRunner; use anyhow::{bail, Result}; use clap::*; +use move_binary_format::CompiledModule; use move_command_line_common::files::verify_and_create_named_address_mapping; use move_compiler::{ self, @@ -67,6 +68,15 @@ pub struct UnitTestingConfig { )] pub dep_files: Vec<String>, + /// Bytecode dependency files + #[clap( + name = "bytecode-dependencies", + long = "bytecode-dependencies", + num_args(1..), + action = clap::ArgAction::Append, + )] + pub bytecode_deps_files: Vec<String>, + /// Report test statistics at the end of testing. CSV report generated if 'csv' passed #[clap(name = "report-statistics", short = 's', long = "statistics")] pub report_statistics: Option<Option<String>>, @@ -135,6 +145,7 @@ impl UnitTestingConfig { report_stacktrace_on_abort: false, source_files: vec![], dep_files: vec![], + bytecode_deps_files: vec![], verbose: false, list: false, named_address_values: vec![], @@ -157,6 +168,7 @@ impl UnitTestingConfig { &self, source_files: Vec<String>, deps: Vec<String>, + bytecode_deps_files: Vec<String>, ) -> Option<TestPlan> { let addresses = verify_and_create_named_address_mapping(self.named_address_values.clone()).ok()?; @@ -179,16 +191,30 @@ impl UnitTestingConfig { diagnostics::unwrap_or_report_pass_diagnostics(&files, compilation_result); diagnostics::report_warnings(&files, warnings); let units: Vec<_> = units.into_iter().map(|unit| unit.named_module).collect(); - test_plan.map(|tests| TestPlan::new(tests, mapped_files, units)) + + let bytecode_deps_modules = bytecode_deps_files + .iter() + .map(|path| { + let bytes = std::fs::read(path).unwrap(); + CompiledModule::deserialize_with_defaults(&bytes).unwrap() + }) + .collect::<Vec<_>>(); + + test_plan.map(|tests| TestPlan::new(tests, mapped_files, units, bytecode_deps_modules)) } /// Build a test plan from a unit test config pub fn build_test_plan(&self) -> Option<TestPlan> { let deps = self.dep_files.clone(); - let TestPlan { module_info, .. } = self.compile_to_test_plan(deps.clone(), vec![])?; + let TestPlan { module_info, .. } = + self.compile_to_test_plan(deps.clone(), vec![], vec![])?; - let mut test_plan = self.compile_to_test_plan(self.source_files.clone(), deps)?; + let mut test_plan = self.compile_to_test_plan( + self.source_files.clone(), + deps, + self.bytecode_deps_files.clone(), + )?; test_plan.module_info.extend(module_info); Some(test_plan) } diff --git a/external-crates/move/crates/move-unit-test/src/test_runner.rs b/external-crates/move/crates/move-unit-test/src/test_runner.rs index 9cc5b87c9ca0f..dd386ac52bf2f 100644 --- a/external-crates/move/crates/move-unit-test/src/test_runner.rs +++ b/external-crates/move/crates/move-unit-test/src/test_runner.rs @@ -62,13 +62,11 @@ pub struct TestRunner { /// Setup storage state with the set of modules that will be needed for all tests fn setup_test_storage<'a>( modules: impl Iterator<Item = &'a CompiledModule>, + bytecode_deps_modules: impl Iterator<Item = &'a CompiledModule>, ) -> Result<InMemoryStorage> { let mut storage = InMemoryStorage::new(); - let modules = Modules::new(modules); - for module in modules - .compute_dependency_graph() - .compute_topological_order()? - { + let modules = Modules::new(modules.chain(bytecode_deps_modules)); + for module in modules.compute_topological_order()?
{ let module_id = module.self_id(); let mut module_bytes = Vec::new(); module.serialize_with_version(module.version, &mut module_bytes)?; @@ -121,7 +119,8 @@ impl TestRunner { cost_table: Option, ) -> Result { let modules = tests.module_info.values().map(|info| &info.module); - let starting_storage_state = setup_test_storage(modules)?; + let starting_storage_state = + setup_test_storage(modules, tests.bytecode_deps_modules.iter())?; let native_function_table = native_function_table.unwrap_or_else(|| { move_stdlib_natives::all_natives( AccountAddress::from_hex_literal("0x1").unwrap(), diff --git a/external-crates/move/crates/move-unit-test/tests/sources/A.move b/external-crates/move/crates/move-unit-test/tests/sources/A.move index 75d32f13466f9..98b029b144784 100644 --- a/external-crates/move/crates/move-unit-test/tests/sources/A.move +++ b/external-crates/move/crates/move-unit-test/tests/sources/A.move @@ -1,4 +1,4 @@ -module 0x1::A { +module 0x6::A { #[test] fun a() { } diff --git a/external-crates/move/crates/move-unit-test/tests/sources/B.move b/external-crates/move/crates/move-unit-test/tests/sources/B.move index 8e37c3b6086a7..2707c3f250044 100644 --- a/external-crates/move/crates/move-unit-test/tests/sources/B.move +++ b/external-crates/move/crates/move-unit-test/tests/sources/B.move @@ -1,6 +1,6 @@ -module 0x1::B { +module 0x6::B { #[test_only] - use 0x1::A; + use 0x6::A; #[test] fun b() { } @@ -11,13 +11,13 @@ module 0x1::B { } #[test] - #[expected_failure(abort_code = 0, location=0x1::A)] + #[expected_failure(abort_code = 0, location=0x6::A)] public fun b_other0() { A::a_call() } #[test] - #[expected_failure(abort_code = 1, location=0x1::A)] + #[expected_failure(abort_code = 1, location=0x6::A)] public fun b_other1() { A::a_call() } diff --git a/external-crates/move/crates/move-unit-test/tests/test_deps.rs b/external-crates/move/crates/move-unit-test/tests/test_deps.rs index 50437e043997a..fece66529799c 100644 --- a/external-crates/move/crates/move-unit-test/tests/test_deps.rs +++ b/external-crates/move/crates/move-unit-test/tests/test_deps.rs @@ -27,7 +27,7 @@ fn test_deps_arent_tested() { let mut iter = test_plan.module_tests.into_iter(); let (mod_id, _) = iter.next().unwrap(); let expected_mod_id = ModuleId::new( - AccountAddress::from_hex_literal("0x1").unwrap(), + AccountAddress::from_hex_literal("0x6").unwrap(), Identifier::new("B").unwrap(), ); assert!(mod_id == expected_mod_id); diff --git a/external-crates/move/crates/move-unit-test/tests/test_sources/address_args.exp b/external-crates/move/crates/move-unit-test/tests/test_sources/address_args.exp index f259c8e260578..fd3f5da0d1530 100644 --- a/external-crates/move/crates/move-unit-test/tests/test_sources/address_args.exp +++ b/external-crates/move/crates/move-unit-test/tests/test_sources/address_args.exp @@ -1,5 +1,5 @@ Running Move unit tests -[ PASS ] 0x1::M::correct_address -[ PASS ] 0x1::M::correct_addresses -[ PASS ] 0x1::M::wrong_address +[ PASS ] 0x6::M::correct_address +[ PASS ] 0x6::M::correct_addresses +[ PASS ] 0x6::M::wrong_address Test result: OK. 
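The `setup_test_storage` rework above chains the compiled test modules with the pre-compiled bytecode dependencies before computing a topological order, so every module is published only after its dependencies. A toy illustration of that ordering requirement (hypothetical names, plain std, assumes an acyclic graph; the real code gets this from `Modules::compute_topological_order`):

use std::collections::HashMap;

// Depth-first post-order: dependencies are pushed before dependents,
// which is exactly the property the publishing loop relies on.
fn publish_order<'a>(
    deps: &HashMap<&'a str, Vec<&'a str>>,
    name: &'a str,
    out: &mut Vec<&'a str>,
) {
    for &dep in deps.get(name).into_iter().flatten() {
        if !out.contains(&dep) {
            publish_order(deps, dep, out);
        }
    }
    if !out.contains(&name) {
        out.push(name);
    }
}

fn main() {
    // `B` is a source test module depending on a bytecode-only dep `A`;
    // chaining both sets before ordering ensures `A` is published first.
    let deps = HashMap::from([("B", vec!["A"]), ("A", vec![])]);
    let mut order = Vec::new();
    publish_order(&deps, "B", &mut order);
    assert_eq!(order, ["A", "B"]);
}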
Total tests: 3; passed: 3; failed: 0 diff --git a/external-crates/move/crates/move-unit-test/tests/test_sources/address_args.move b/external-crates/move/crates/move-unit-test/tests/test_sources/address_args.move index bb56acb2f988e..a8375f7b236a2 100644 --- a/external-crates/move/crates/move-unit-test/tests/test_sources/address_args.move +++ b/external-crates/move/crates/move-unit-test/tests/test_sources/address_args.move @@ -1,4 +1,4 @@ -module 0x1::M { +module 0x6::M { const ErrorCode: u64 = 100; #[test(a = @0x42)] diff --git a/external-crates/move/crates/move-unit-test/tests/test_sources/arithmetic_errors.exp b/external-crates/move/crates/move-unit-test/tests/test_sources/arithmetic_errors.exp index bdcc00dec3755..e2e58bf110a82 100644 --- a/external-crates/move/crates/move-unit-test/tests/test_sources/arithmetic_errors.exp +++ b/external-crates/move/crates/move-unit-test/tests/test_sources/arithmetic_errors.exp @@ -1,6 +1,6 @@ Running Move unit tests -[ PASS ] 0x1::M::u64_add_overflow -[ PASS ] 0x1::M::u64_div_by_zero -[ PASS ] 0x1::M::u64_mul_overflow -[ PASS ] 0x1::M::u64_sub_underflow +[ PASS ] 0x6::M::u64_add_overflow +[ PASS ] 0x6::M::u64_div_by_zero +[ PASS ] 0x6::M::u64_mul_overflow +[ PASS ] 0x6::M::u64_sub_underflow Test result: OK. Total tests: 4; passed: 4; failed: 0 diff --git a/external-crates/move/crates/move-unit-test/tests/test_sources/arithmetic_errors.move b/external-crates/move/crates/move-unit-test/tests/test_sources/arithmetic_errors.move index d4a9cb199d735..ada0e4b772c77 100644 --- a/external-crates/move/crates/move-unit-test/tests/test_sources/arithmetic_errors.move +++ b/external-crates/move/crates/move-unit-test/tests/test_sources/arithmetic_errors.move @@ -1,4 +1,4 @@ -module 0x1::M { +module 0x6::M { #[test] #[expected_failure] fun u64_sub_underflow() { diff --git a/external-crates/move/crates/move-unit-test/tests/test_sources/const_abort.exp b/external-crates/move/crates/move-unit-test/tests/test_sources/const_abort.exp index d30bf07e8c331..4a96ee000ca60 100644 --- a/external-crates/move/crates/move-unit-test/tests/test_sources/const_abort.exp +++ b/external-crates/move/crates/move-unit-test/tests/test_sources/const_abort.exp @@ -1,4 +1,4 @@ Running Move unit tests -[ PASS ] 0x1::M::test_local_abort -[ PASS ] 0x1::M::test_local_abort_invalid_code +[ PASS ] 0x6::M::test_local_abort +[ PASS ] 0x6::M::test_local_abort_invalid_code Test result: OK. 
Total tests: 2; passed: 2; failed: 0 diff --git a/external-crates/move/crates/move-unit-test/tests/test_sources/const_abort.move b/external-crates/move/crates/move-unit-test/tests/test_sources/const_abort.move index 956b0698ccb7e..464af3ba3f328 100644 --- a/external-crates/move/crates/move-unit-test/tests/test_sources/const_abort.move +++ b/external-crates/move/crates/move-unit-test/tests/test_sources/const_abort.move @@ -1,4 +1,4 @@ -module 0x1::M { +module 0x6::M { const ErrorCode: u64 = 42; const DifferentErrorCode: u64 = 42; diff --git a/external-crates/move/crates/move-unit-test/tests/test_sources/construct_data.exp b/external-crates/move/crates/move-unit-test/tests/test_sources/construct_data.exp index 06604b2134193..ae41e61ef9d20 100644 --- a/external-crates/move/crates/move-unit-test/tests/test_sources/construct_data.exp +++ b/external-crates/move/crates/move-unit-test/tests/test_sources/construct_data.exp @@ -1,4 +1,4 @@ Running Move unit tests -[ PASS ] 0x1::M::make_sure_not_other_number -[ PASS ] 0x1::M::make_sure_number_matches +[ PASS ] 0x7::M::make_sure_not_other_number +[ PASS ] 0x7::M::make_sure_number_matches Test result: OK. Total tests: 2; passed: 2; failed: 0 diff --git a/external-crates/move/crates/move-unit-test/tests/test_sources/construct_data.move b/external-crates/move/crates/move-unit-test/tests/test_sources/construct_data.move index 8876a204089e9..8a8eabb521a5d 100644 --- a/external-crates/move/crates/move-unit-test/tests/test_sources/construct_data.move +++ b/external-crates/move/crates/move-unit-test/tests/test_sources/construct_data.move @@ -1,4 +1,4 @@ -module 0x1::B { +module 0x6::B { #[test_only] public struct TestingStruct has drop { x: u64 } @@ -13,9 +13,9 @@ module 0x1::B { } } -module 0x1::M { +module 0x7::M { #[test_only] - use 0x1::B; + use 0x6::B; #[test] fun make_sure_number_matches() { diff --git a/external-crates/move/crates/move-unit-test/tests/test_sources/cross_module_aborts.exp b/external-crates/move/crates/move-unit-test/tests/test_sources/cross_module_aborts.exp index 6e7e37be0ed86..58e8e8902dbd5 100644 --- a/external-crates/move/crates/move-unit-test/tests/test_sources/cross_module_aborts.exp +++ b/external-crates/move/crates/move-unit-test/tests/test_sources/cross_module_aborts.exp @@ -1,19 +1,19 @@ Running Move unit tests -[ FAIL ] 0x1::B::failing_test -[ PASS ] 0x1::M::dummy_test +[ PASS ] 0x6::M::dummy_test +[ FAIL ] 0x7::B::failing_test Test failures: -Failures in 0x1::B: +Failures in 0x7::B: ┌── failing_test ────── │ error[E11001]: test failure │ ┌─ cross_module_aborts.move:4:9 │ │ │ 3 │ public fun this_aborts() { -│ │ ----------- In this function in 0x1::M +│ │ ----------- In this function in 0x6::M │ 4 │ abort 0 -│ │ ^^^^^^^ Test was not expected to error, but it aborted with code 0 originating in the module 0x1::M rooted here +│ │ ^^^^^^^ Test was not expected to error, but it aborted with code 0 originating in the module 0x6::M rooted here │ │ │ stack trace diff --git a/external-crates/move/crates/move-unit-test/tests/test_sources/cross_module_aborts.move b/external-crates/move/crates/move-unit-test/tests/test_sources/cross_module_aborts.move index 2ab099fc48c06..21a395f831944 100644 --- a/external-crates/move/crates/move-unit-test/tests/test_sources/cross_module_aborts.move +++ b/external-crates/move/crates/move-unit-test/tests/test_sources/cross_module_aborts.move @@ -1,4 +1,4 @@ -module 0x1::M { +module 0x6::M { #[test_only] public fun this_aborts() { abort 0 @@ -8,10 +8,10 @@ module 0x1::M { fun dummy_test() { } } -module 
0x1::B { +module 0x7::B { #[test_only] - use 0x1::M; + use 0x6::M; #[test] fun failing_test() { diff --git a/external-crates/move/crates/move-unit-test/tests/test_sources/do_nothing.exp b/external-crates/move/crates/move-unit-test/tests/test_sources/do_nothing.exp index 3a1e8fabffbea..0d2c2319fad25 100644 --- a/external-crates/move/crates/move-unit-test/tests/test_sources/do_nothing.exp +++ b/external-crates/move/crates/move-unit-test/tests/test_sources/do_nothing.exp @@ -1,3 +1,3 @@ Running Move unit tests -[ PASS ] 0x1::M::do_nothing +[ PASS ] 0x6::M::do_nothing Test result: OK. Total tests: 1; passed: 1; failed: 0 diff --git a/external-crates/move/crates/move-unit-test/tests/test_sources/do_nothing.move b/external-crates/move/crates/move-unit-test/tests/test_sources/do_nothing.move index 15fe4a72923ed..3b6b4c400555a 100644 --- a/external-crates/move/crates/move-unit-test/tests/test_sources/do_nothing.move +++ b/external-crates/move/crates/move-unit-test/tests/test_sources/do_nothing.move @@ -1,4 +1,4 @@ -module 0x1::M { +module 0x6::M { #[test] fun do_nothing() {} } diff --git a/external-crates/move/crates/move-unit-test/tests/test_sources/expected_abort_no_abort.exp b/external-crates/move/crates/move-unit-test/tests/test_sources/expected_abort_no_abort.exp index feefbb17334d8..3b0cb6f4004b8 100644 --- a/external-crates/move/crates/move-unit-test/tests/test_sources/expected_abort_no_abort.exp +++ b/external-crates/move/crates/move-unit-test/tests/test_sources/expected_abort_no_abort.exp @@ -1,10 +1,10 @@ Running Move unit tests -[ FAIL ] 0x1::M::fail -[ FAIL ] 0x1::M::fail_with_code +[ FAIL ] 0x6::M::fail +[ FAIL ] 0x6::M::fail_with_code Test failures: -Failures in 0x1::M: +Failures in 0x6::M: ┌── fail ────── │ Test did not error as expected diff --git a/external-crates/move/crates/move-unit-test/tests/test_sources/expected_abort_no_abort.move b/external-crates/move/crates/move-unit-test/tests/test_sources/expected_abort_no_abort.move index fc94d10c56240..c991918298689 100644 --- a/external-crates/move/crates/move-unit-test/tests/test_sources/expected_abort_no_abort.move +++ b/external-crates/move/crates/move-unit-test/tests/test_sources/expected_abort_no_abort.move @@ -1,7 +1,7 @@ -module 0x1::M { +module 0x6::M { #[test, expected_failure] fun fail() { } - #[test, expected_failure(abort_code=0, location=0x1::M)] + #[test, expected_failure(abort_code=0, location=0x6::M)] fun fail_with_code() { } } diff --git a/external-crates/move/crates/move-unit-test/tests/test_sources/native_abort.exp b/external-crates/move/crates/move-unit-test/tests/test_sources/native_abort.exp index e62e47b3f0368..854c6b058232e 100644 --- a/external-crates/move/crates/move-unit-test/tests/test_sources/native_abort.exp +++ b/external-crates/move/crates/move-unit-test/tests/test_sources/native_abort.exp @@ -1,20 +1,20 @@ Running Move unit tests -[ PASS ] 0x1::A::native_abort_good_right_code -[ FAIL ] 0x1::A::native_abort_good_wrong_code -[ FAIL ] 0x1::A::native_abort_unexpected_abort +[ PASS ] 0x6::A::native_abort_good_right_code +[ FAIL ] 0x6::A::native_abort_good_wrong_code +[ FAIL ] 0x6::A::native_abort_unexpected_abort Test failures: -Failures in 0x1::A: +Failures in 0x6::A: ┌── native_abort_good_wrong_code ────── │ error[E11001]: test failure │ ┌─ native_abort.move:11:9 │ │ │ 10 │ fun native_abort_good_wrong_code() { -│ │ ---------------------------- In this function in 0x1::A +│ │ ---------------------------- In this function in 0x6::A │ 11 │ vector::borrow(&vector::empty(), 1); -│ │ 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Test did not error as expected. Expected test to give a vector operation error with sub-status 0 originating in the module 0x1::A but instead it gave a vector operation error with sub-status 1 originating in the module 0x1::A rooted here +│ │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Test did not error as expected. Expected test to give a vector operation error with sub-status 0 originating in the module 0x6::A but instead it gave a vector operation error with sub-status 1 originating in the module 0x6::A rooted here │ │ └────────────────── @@ -25,9 +25,9 @@ Failures in 0x1::A: │ ┌─ native_abort.move:5:9 │ │ │ 4 │ fun native_abort_unexpected_abort() { -│ │ ----------------------------- In this function in 0x1::A +│ │ ----------------------------- In this function in 0x6::A │ 5 │ vector::borrow(&vector::empty(), 1); -│ │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Test was not expected to error, but it gave a vector operation error with sub-status 1 originating in the module 0x1::A rooted here +│ │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Test was not expected to error, but it gave a vector operation error with sub-status 1 originating in the module 0x6::A rooted here │ │ └────────────────── diff --git a/external-crates/move/crates/move-unit-test/tests/test_sources/native_abort.move b/external-crates/move/crates/move-unit-test/tests/test_sources/native_abort.move index 53203d68f1a84..6f5ebf4b53a10 100644 --- a/external-crates/move/crates/move-unit-test/tests/test_sources/native_abort.move +++ b/external-crates/move/crates/move-unit-test/tests/test_sources/native_abort.move @@ -1,4 +1,4 @@ -module 0x1::A { +module 0x6::A { #[test] fun native_abort_unexpected_abort() { @@ -6,13 +6,13 @@ module 0x1::A { } #[test] - #[expected_failure(vector_error, minor_status=0, location=0x1::A)] + #[expected_failure(vector_error, minor_status=0, location=0x6::A)] fun native_abort_good_wrong_code() { vector::borrow(&vector::empty(), 1); } #[test] - #[expected_failure(vector_error, minor_status=1, location=0x1::A)] + #[expected_failure(vector_error, minor_status=1, location=0x6::A)] fun native_abort_good_right_code() { vector::borrow(&vector::empty(), 1); } diff --git a/external-crates/move/crates/move-unit-test/tests/test_sources/non_exsistent_native.exp b/external-crates/move/crates/move-unit-test/tests/test_sources/non_exsistent_native.exp index 09223ba5484c6..e844b1cbe3d67 100644 --- a/external-crates/move/crates/move-unit-test/tests/test_sources/non_exsistent_native.exp +++ b/external-crates/move/crates/move-unit-test/tests/test_sources/non_exsistent_native.exp @@ -1,13 +1,13 @@ Running Move unit tests -[ FAIL ] 0x1::M::non_existent_native +[ FAIL ] 0x6::M::non_existent_native Test failures: -Failures in 0x1::M: +Failures in 0x6::M: ┌── non_existent_native ────── │ INTERNAL TEST ERROR: INTERNAL VM INVARIANT VIOLATION. -│ Test was not expected to error, but it gave a UNEXPECTED_VERIFIER_ERROR (code 2017) error originating in the module 0x1::M rooted here +│ Test was not expected to error, but it gave a UNEXPECTED_VERIFIER_ERROR (code 2017) error originating in the module 0x6::M rooted here └────────────────── Test result: FAILED. 
Total tests: 1; passed: 0; failed: 1 diff --git a/external-crates/move/crates/move-unit-test/tests/test_sources/non_exsistent_native.move b/external-crates/move/crates/move-unit-test/tests/test_sources/non_exsistent_native.move index b623fa3170c51..ba90e05eedc3a 100644 --- a/external-crates/move/crates/move-unit-test/tests/test_sources/non_exsistent_native.move +++ b/external-crates/move/crates/move-unit-test/tests/test_sources/non_exsistent_native.move @@ -1,4 +1,4 @@ -module 0x1::M { +module 0x6::M { native fun foo(); #[test] diff --git a/external-crates/move/crates/move-unit-test/tests/test_sources/proposal_test.exp b/external-crates/move/crates/move-unit-test/tests/test_sources/proposal_test.exp index 74debdf69ea99..405edfc621a54 100644 --- a/external-crates/move/crates/move-unit-test/tests/test_sources/proposal_test.exp +++ b/external-crates/move/crates/move-unit-test/tests/test_sources/proposal_test.exp @@ -1,5 +1,5 @@ Running Move unit tests -[ PASS ] 0x1::Module::other_module_aborts -[ PASS ] 0x1::Module::tests_a -[ PASS ] 0x1::Module::tests_aborts +[ PASS ] 0x7::Module::other_module_aborts +[ PASS ] 0x7::Module::tests_a +[ PASS ] 0x7::Module::tests_aborts Test result: OK. Total tests: 3; passed: 3; failed: 0 diff --git a/external-crates/move/crates/move-unit-test/tests/test_sources/proposal_test.move b/external-crates/move/crates/move-unit-test/tests/test_sources/proposal_test.move index 40d74305afa16..1a4cac0c9d098 100644 --- a/external-crates/move/crates/move-unit-test/tests/test_sources/proposal_test.move +++ b/external-crates/move/crates/move-unit-test/tests/test_sources/proposal_test.move @@ -1,12 +1,12 @@ // This is a test based on the example in the unit testing proposal -module 0x1::TestonlyModule { +module 0x6::TestonlyModule { #[test_only] public fun aborts() { abort 42 } } -module 0x1::Module { +module 0x7::Module { fun a(a: u64): bool { a == 10 } @@ -21,7 +21,7 @@ module 0x1::Module { // A test-only module import #[test_only] - use 0x1::TestonlyModule; + use 0x6::TestonlyModule; // A test only struct. This will only be included in test mode. 
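The `#[test_only]` attribute used throughout these fixtures is Move's analogue of Rust's `#[cfg(test)]` gating: the annotated module, import, or struct exists only in test builds. A rough Rust counterpart (illustrative names only, not the Move semantics one-for-one):

// Compiled only for `cargo test`, like a Move `#[test_only]` module.
#[cfg(test)]
mod testonly_module {
    pub fn aborts() {
        panic!("abort 42");
    }
}

#[cfg(test)]
mod tests {
    // A test-only import, mirroring a `#[test_only] use ...;` in Move.
    use super::testonly_module;

    #[test]
    #[should_panic(expected = "abort 42")]
    fn other_module_aborts() {
        testonly_module::aborts();
    }
}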
#[test_only, allow(unused_field)] @@ -41,7 +41,7 @@ module 0x1::Module { } #[test] - #[expected_failure(abort_code=42, location=0x1::TestonlyModule)] + #[expected_failure(abort_code=42, location=0x6::TestonlyModule)] fun other_module_aborts() { TestonlyModule::aborts() } diff --git a/external-crates/move/crates/move-unit-test/tests/test_sources/random_test.exp b/external-crates/move/crates/move-unit-test/tests/test_sources/random_test.exp index 6a1375e628989..57b5a4fe5348d 100644 --- a/external-crates/move/crates/move-unit-test/tests/test_sources/random_test.exp +++ b/external-crates/move/crates/move-unit-test/tests/test_sources/random_test.exp @@ -1,22 +1,22 @@ Running Move unit tests -[ FAIL ] 0x1::random_test::should_fail_test_div_mod_10 -[ FAIL ] 0x1::random_test::should_fail_test_div_mod_10_2_vec -[ FAIL ] 0x1::random_test::should_fail_test_expected_failure -[ PASS ] 0x1::random_test::should_pass_test_expected_failure_pass -[ TIMEOUT ] 0x1::random_test::should_timeout_test_timeout +[ FAIL ] 0x6::random_test::should_fail_test_div_mod_10 +[ FAIL ] 0x6::random_test::should_fail_test_div_mod_10_2_vec +[ FAIL ] 0x6::random_test::should_fail_test_expected_failure +[ PASS ] 0x6::random_test::should_pass_test_expected_failure_pass +[ TIMEOUT ] 0x6::random_test::should_timeout_test_timeout Test failures: -Failures in 0x1::random_test: +Failures in 0x6::random_test: ┌── should_fail_test_div_mod_10 ────── (seed = 8) │ error[E11001]: test failure │ ┌─ random_test.move:4:11 │ │ │ 3 │ fun should_fail_test_div_mod_10(x: u64) { -│ │ --------------------------- In this function in 0x1::random_test +│ │ --------------------------- In this function in 0x6::random_test │ 4 │ x / (x % 10); -│ │ ^ Test was not expected to error, but it gave an arithmetic error originating in the module 0x1::random_test rooted here +│ │ ^ Test was not expected to error, but it gave an arithmetic error originating in the module 0x6::random_test rooted here │ │ │ This test uses randomly generated inputs. Rerun with `test should_fail_test_div_mod_10 --seed 8` to recreate this test failure. @@ -29,9 +29,9 @@ Failures in 0x1::random_test: │ ┌─ random_test.move:9:33 │ │ │ 8 │ fun should_fail_test_div_mod_10_2_vec(x: vector>) { -│ │ --------------------------------- In this function in 0x1::random_test +│ │ --------------------------------- In this function in 0x6::random_test │ 9 │ std::vector::length(&x) / (std::vector::length(&x) % 10); -│ │ ^ Test was not expected to error, but it gave an arithmetic error originating in the module 0x1::random_test rooted here +│ │ ^ Test was not expected to error, but it gave an arithmetic error originating in the module 0x6::random_test rooted here │ │ │ This test uses randomly generated inputs. Rerun with `test should_fail_test_div_mod_10_2_vec --seed 5` to recreate this test failure. 
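The `--seed` rerun hints in the expected output above work because each random input is drawn from a deterministic RNG, so the printed seed fully determines the failing input. A minimal sketch of that property, assuming the `rand` crate (not the harness's actual generator):

use rand::{rngs::StdRng, Rng, SeedableRng};

// The same seed always regenerates the same "random" input, which is what
// makes a rerun with `--seed 8` reproduce the failure exactly.
fn input_for_seed(seed: u64) -> u64 {
    let mut rng = StdRng::seed_from_u64(seed);
    rng.gen()
}

fn main() {
    assert_eq!(input_for_seed(8), input_for_seed(8));
}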
diff --git a/external-crates/move/crates/move-unit-test/tests/test_sources/random_test.move b/external-crates/move/crates/move-unit-test/tests/test_sources/random_test.move index 39824f023625d..a74f9fde285fc 100644 --- a/external-crates/move/crates/move-unit-test/tests/test_sources/random_test.move +++ b/external-crates/move/crates/move-unit-test/tests/test_sources/random_test.move @@ -1,4 +1,4 @@ -module 0x1::random_test { +module 0x6::random_test { #[random_test] fun should_fail_test_div_mod_10(x: u64) { x / (x % 10); diff --git a/external-crates/move/crates/move-unit-test/tests/test_sources/signer_args.exp b/external-crates/move/crates/move-unit-test/tests/test_sources/signer_args.exp index b4f7f0f84b3f6..a8f2b1104448f 100644 --- a/external-crates/move/crates/move-unit-test/tests/test_sources/signer_args.exp +++ b/external-crates/move/crates/move-unit-test/tests/test_sources/signer_args.exp @@ -1,14 +1,14 @@ Running Move unit tests -[ FAIL ] 0x1::M::multi_signer_fail -[ PASS ] 0x1::M::multi_signer_pass -[ PASS ] 0x1::M::multi_signer_pass_expected_failure -[ FAIL ] 0x1::M::single_signer_fail -[ PASS ] 0x1::M::single_signer_pass -[ PASS ] 0x1::M::test_correct_signer_arg_addrs +[ FAIL ] 0x6::M::multi_signer_fail +[ PASS ] 0x6::M::multi_signer_pass +[ PASS ] 0x6::M::multi_signer_pass_expected_failure +[ FAIL ] 0x6::M::single_signer_fail +[ PASS ] 0x6::M::single_signer_pass +[ PASS ] 0x6::M::test_correct_signer_arg_addrs Test failures: -Failures in 0x1::M: +Failures in 0x6::M: ┌── multi_signer_fail ────── │ Test did not error as expected @@ -20,9 +20,9 @@ Failures in 0x1::M: │ ┌─ signer_args.move:8:9 │ │ │ 7 │ fun single_signer_fail(_a: signer) { -│ │ ------------------ In this function in 0x1::M +│ │ ------------------ In this function in 0x6::M │ 8 │ abort 0 -│ │ ^^^^^^^ Test was not expected to error, but it aborted with code 0 originating in the module 0x1::M rooted here +│ │ ^^^^^^^ Test was not expected to error, but it aborted with code 0 originating in the module 0x6::M rooted here │ │ └────────────────── diff --git a/external-crates/move/crates/move-unit-test/tests/test_sources/signer_args.move b/external-crates/move/crates/move-unit-test/tests/test_sources/signer_args.move index 9da46ca0a99e5..24a82fc7e7447 100644 --- a/external-crates/move/crates/move-unit-test/tests/test_sources/signer_args.move +++ b/external-crates/move/crates/move-unit-test/tests/test_sources/signer_args.move @@ -1,4 +1,4 @@ -module 0x1::M { +module 0x6::M { #[test(_a=@0x1)] fun single_signer_pass(_a: signer) { } diff --git a/external-crates/move/crates/move-unit-test/tests/test_sources/timeout.exp b/external-crates/move/crates/move-unit-test/tests/test_sources/timeout.exp index 05c32d5cdd88e..d9eedbe0eb1b7 100644 --- a/external-crates/move/crates/move-unit-test/tests/test_sources/timeout.exp +++ b/external-crates/move/crates/move-unit-test/tests/test_sources/timeout.exp @@ -1,22 +1,22 @@ Running Move unit tests -[ PASS ] 0x1::M::no_timeout -[ FAIL ] 0x1::M::no_timeout_fail -[ PASS ] 0x1::M::no_timeout_while_loop -[ TIMEOUT ] 0x1::M::timeout_fail -[ PASS ] 0x1::M::timeout_fail_with_expected_failure +[ PASS ] 0x6::M::no_timeout +[ FAIL ] 0x6::M::no_timeout_fail +[ PASS ] 0x6::M::no_timeout_while_loop +[ TIMEOUT ] 0x6::M::timeout_fail +[ PASS ] 0x6::M::timeout_fail_with_expected_failure Test failures: -Failures in 0x1::M: +Failures in 0x6::M: ┌── no_timeout_fail ────── │ error[E11001]: test failure │ ┌─ timeout.move:17:29 │ │ │ 17 │ fun no_timeout_fail() { abort 0 } -│ │ --------------- ^^^^^^^ Test was not 
expected to error, but it aborted with code 0 originating in the module 0x1::M rooted here +│ │ --------------- ^^^^^^^ Test was not expected to error, but it aborted with code 0 originating in the module 0x6::M rooted here │ │ │ -│ │ In this function in 0x1::M +│ │ In this function in 0x6::M │ │ └────────────────── diff --git a/external-crates/move/crates/move-unit-test/tests/test_sources/timeout.move b/external-crates/move/crates/move-unit-test/tests/test_sources/timeout.move index b772e68a35d88..d516c5e0beb30 100644 --- a/external-crates/move/crates/move-unit-test/tests/test_sources/timeout.move +++ b/external-crates/move/crates/move-unit-test/tests/test_sources/timeout.move @@ -1,4 +1,4 @@ -module 0x1::M { +module 0x6::M { #[test] fun timeout_fail() { while (true) {} diff --git a/external-crates/move/crates/move-unit-test/tests/test_sources/unexpected_abort.exp b/external-crates/move/crates/move-unit-test/tests/test_sources/unexpected_abort.exp index 7aeae31d736f8..809acaecc0268 100644 --- a/external-crates/move/crates/move-unit-test/tests/test_sources/unexpected_abort.exp +++ b/external-crates/move/crates/move-unit-test/tests/test_sources/unexpected_abort.exp @@ -1,23 +1,23 @@ Running Move unit tests -[ PASS ] 0x1::M::correct_abort_code -[ PASS ] 0x1::M::just_test_failure -[ FAIL ] 0x1::M::unexpected_abort -[ FAIL ] 0x1::M::unexpected_abort_in_native_function -[ FAIL ] 0x1::M::unexpected_abort_in_other_function -[ FAIL ] 0x1::M::wrong_abort_code +[ PASS ] 0x6::M::correct_abort_code +[ PASS ] 0x6::M::just_test_failure +[ FAIL ] 0x6::M::unexpected_abort +[ FAIL ] 0x6::M::unexpected_abort_in_native_function +[ FAIL ] 0x6::M::unexpected_abort_in_other_function +[ FAIL ] 0x6::M::wrong_abort_code Test failures: -Failures in 0x1::M: +Failures in 0x6::M: ┌── unexpected_abort ────── │ error[E11001]: test failure │ ┌─ unexpected_abort.move:4:9 │ │ │ 3 │ public fun unexpected_abort() { -│ │ ---------------- In this function in 0x1::M +│ │ ---------------- In this function in 0x6::M │ 4 │ abort 0 -│ │ ^^^^^^^ Test was not expected to error, but it aborted with code 0 originating in the module 0x1::M rooted here +│ │ ^^^^^^^ Test was not expected to error, but it aborted with code 0 originating in the module 0x6::M rooted here │ │ └────────────────── @@ -46,9 +46,9 @@ Failures in 0x1::M: │ ┌─ unexpected_abort.move:27:9 │ │ │ 26 │ fun abort_in_other_function() { -│ │ ----------------------- In this function in 0x1::M +│ │ ----------------------- In this function in 0x6::M │ 27 │ abort 1 -│ │ ^^^^^^^ Test was not expected to error, but it aborted with code 1 originating in the module 0x1::M rooted here +│ │ ^^^^^^^ Test was not expected to error, but it aborted with code 1 originating in the module 0x6::M rooted here │ │ │ stack trace @@ -62,9 +62,9 @@ Failures in 0x1::M: │ ┌─ unexpected_abort.move:10:9 │ │ │ 9 │ public fun wrong_abort_code() { -│ │ ---------------- In this function in 0x1::M +│ │ ---------------- In this function in 0x6::M │ 10 │ abort 0 -│ │ ^^^^^^^ Test did not error as expected. Expected test to abort with code 1 originating in the module 0x1::M but instead it aborted with code 0 originating in the module 0x1::M rooted here +│ │ ^^^^^^^ Test did not error as expected. 
Expected test to abort with code 1 originating in the module 0x6::M but instead it aborted with code 0 originating in the module 0x6::M rooted here │ │ └────────────────── diff --git a/external-crates/move/crates/move-unit-test/tests/test_sources/unexpected_abort.move b/external-crates/move/crates/move-unit-test/tests/test_sources/unexpected_abort.move index 1b81628abfd15..b88279071c7e0 100644 --- a/external-crates/move/crates/move-unit-test/tests/test_sources/unexpected_abort.move +++ b/external-crates/move/crates/move-unit-test/tests/test_sources/unexpected_abort.move @@ -1,17 +1,17 @@ -module 0x1::M { +module 0x6::M { #[test] public fun unexpected_abort() { abort 0 } #[test] - #[expected_failure(abort_code=1, location=0x1::M)] + #[expected_failure(abort_code=1, location=0x6::M)] public fun wrong_abort_code() { abort 0 } #[test] - #[expected_failure(abort_code=0, location=0x1::M)] + #[expected_failure(abort_code=0, location=0x6::M)] public fun correct_abort_code() { abort 0 } diff --git a/external-crates/move/crates/move-unit-test/tests/test_sources/use_unit_test_module.exp b/external-crates/move/crates/move-unit-test/tests/test_sources/use_unit_test_module.exp index 1b5eb5fc70d8c..e7e6223899027 100644 --- a/external-crates/move/crates/move-unit-test/tests/test_sources/use_unit_test_module.exp +++ b/external-crates/move/crates/move-unit-test/tests/test_sources/use_unit_test_module.exp @@ -1,4 +1,4 @@ Running Move unit tests -[ PASS ] 0x1::M::poison_call -[ PASS ] 0x1::M::poison_call_OLD +[ PASS ] 0x6::M::poison_call +[ PASS ] 0x6::M::poison_call_OLD Test result: OK. Total tests: 2; passed: 2; failed: 0 diff --git a/external-crates/move/crates/move-unit-test/tests/test_sources/use_unit_test_module.move b/external-crates/move/crates/move-unit-test/tests/test_sources/use_unit_test_module.move index 7f92796a3a180..b52e3ece51969 100644 --- a/external-crates/move/crates/move-unit-test/tests/test_sources/use_unit_test_module.move +++ b/external-crates/move/crates/move-unit-test/tests/test_sources/use_unit_test_module.move @@ -1,4 +1,4 @@ -module 0x1::M { +module 0x6::M { use std::unit_test; #[test] diff --git a/external-crates/move/crates/move-vm-config/src/runtime.rs b/external-crates/move/crates/move-vm-config/src/runtime.rs index d51578a3d208d..a3c763e760ed3 100644 --- a/external-crates/move/crates/move-vm-config/src/runtime.rs +++ b/external-crates/move/crates/move-vm-config/src/runtime.rs @@ -36,6 +36,9 @@ pub struct VMConfig { // Whether value serialization errors when generating type layouts should be rethrown or // converted to a different error. pub rethrow_serialization_type_layout_errors: bool, + /// Maximal nodes which are allowed when converting to layout. This includes the types of + /// fields for struct types. 
+ pub max_type_to_layout_nodes: Option<u64>, } impl Default for VMConfig { @@ -50,6 +53,7 @@ impl Default for VMConfig { error_execution_state: true, binary_config: BinaryConfig::with_extraneous_bytes_check(false), rethrow_serialization_type_layout_errors: false, + max_type_to_layout_nodes: Some(512), } } } diff --git a/external-crates/move/crates/move-vm-integration-tests/src/compiler.rs b/external-crates/move/crates/move-vm-integration-tests/src/compiler.rs index ecc9af9ffe333..2bf2856ab3d03 100644 --- a/external-crates/move/crates/move-vm-integration-tests/src/compiler.rs +++ b/external-crates/move/crates/move-vm-integration-tests/src/compiler.rs @@ -3,7 +3,7 @@ // SPDX-License-Identifier: Apache-2.0 use anyhow::Result; -use move_binary_format::file_format::CompiledModule; +use move_binary_format::{file_format::CompiledModule, file_format_common::VERSION_MAX}; use move_compiler::{compiled_unit::AnnotatedCompiledUnit, Compiler as MoveCompiler}; use std::{fs::File, io::Write, path::Path}; use tempfile::tempdir; @@ -58,3 +58,10 @@ pub fn compile_modules(s: &str) -> Result<Vec<CompiledModule>> { pub fn as_module(unit: AnnotatedCompiledUnit) -> CompiledModule { unit.named_module.module } + +pub fn serialize_module_at_max_version( + module: &CompiledModule, + binary: &mut Vec<u8>, +) -> Result<()> { + module.serialize_with_version(VERSION_MAX, binary) +} diff --git a/external-crates/move/crates/move-vm-integration-tests/src/lib.rs b/external-crates/move/crates/move-vm-integration-tests/src/lib.rs index ed93b8f6f3543..5130b0f424e53 100644 --- a/external-crates/move/crates/move-vm-integration-tests/src/lib.rs +++ b/external-crates/move/crates/move-vm-integration-tests/src/lib.rs @@ -2,7 +2,7 @@ // Copyright (c) The Move Contributors // SPDX-License-Identifier: Apache-2.0 -#![cfg(test)] - +#[cfg(test)] mod compiler; +#[cfg(test)] mod tests diff --git a/external-crates/move/crates/move-vm-integration-tests/src/tests/bad_entry_point_tests.rs b/external-crates/move/crates/move-vm-integration-tests/src/tests/bad_entry_point_tests.rs index bde1cfa1b6844..9edadaf020c36 100644 --- a/external-crates/move/crates/move-vm-integration-tests/src/tests/bad_entry_point_tests.rs +++ b/external-crates/move/crates/move-vm-integration-tests/src/tests/bad_entry_point_tests.rs @@ -2,7 +2,7 @@ // Copyright (c) The Move Contributors // SPDX-License-Identifier: Apache-2.0 -use crate::compiler::{as_module, compile_units}; +use crate::compiler::{as_module, compile_units, serialize_module_at_max_version}; use move_core_types::{ account_address::AccountAddress, identifier::Identifier, @@ -48,7 +48,7 @@ fn call_non_existent_function() { let mut units = compile_units(&code).unwrap(); let m = as_module(units.pop().unwrap()); let mut blob = vec![]; - m.serialize(&mut blob).unwrap(); + serialize_module_at_max_version(&m, &mut blob).unwrap(); let mut storage = InMemoryStorage::new(); let module_id = ModuleId::new(TEST_ADDR, Identifier::new("M").unwrap()); diff --git a/external-crates/move/crates/move-vm-integration-tests/src/tests/bad_storage_tests.rs b/external-crates/move/crates/move-vm-integration-tests/src/tests/bad_storage_tests.rs index 923b50e4b3437..82dfe78f4f565 100644 --- a/external-crates/move/crates/move-vm-integration-tests/src/tests/bad_storage_tests.rs +++ b/external-crates/move/crates/move-vm-integration-tests/src/tests/bad_storage_tests.rs @@ -2,7 +2,7 @@ // Copyright (c) The Move Contributors // SPDX-License-Identifier: Apache-2.0 -use crate::compiler::{as_module, compile_units}; +use crate::compiler::{as_module, compile_units,
serialize_module_at_max_version}; use move_binary_format::errors::{Location, PartialVMError, VMError}; use move_core_types::{ account_address::AccountAddress, @@ -32,7 +32,7 @@ fn test_malformed_module() { let m = as_module(units.pop().unwrap()); let mut blob = vec![]; - m.serialize(&mut blob).unwrap(); + serialize_module_at_max_version(&m, &mut blob).unwrap(); let module_id = ModuleId::new(TEST_ADDR, Identifier::new("M").unwrap()); let fun_name = Identifier::new("foo").unwrap(); @@ -102,7 +102,7 @@ fn test_unverifiable_module() { let mut storage = InMemoryStorage::new(); let mut blob = vec![]; - m.serialize(&mut blob).unwrap(); + serialize_module_at_max_version(&m, &mut blob).unwrap(); storage.publish_or_overwrite_module(m.self_id(), blob); let vm = MoveVM::new(vec![]).unwrap(); @@ -126,7 +126,7 @@ fn test_unverifiable_module() { let mut m = m; m.function_defs[0].code.as_mut().unwrap().code = vec![]; let mut blob = vec![]; - m.serialize(&mut blob).unwrap(); + serialize_module_at_max_version(&m, &mut blob).unwrap(); storage.publish_or_overwrite_module(m.self_id(), blob); let vm = MoveVM::new(vec![]).unwrap(); @@ -166,9 +166,9 @@ fn test_missing_module_dependency() { let m = as_module(units.pop().unwrap()); let mut blob_m = vec![]; - m.serialize(&mut blob_m).unwrap(); + serialize_module_at_max_version(&m, &mut blob_m).unwrap(); let mut blob_n = vec![]; - n.serialize(&mut blob_n).unwrap(); + serialize_module_at_max_version(&n, &mut blob_n).unwrap(); let module_id = ModuleId::new(TEST_ADDR, Identifier::new("N").unwrap()); let fun_name = Identifier::new("bar").unwrap(); @@ -236,9 +236,9 @@ fn test_malformed_module_dependency() { let m = as_module(units.pop().unwrap()); let mut blob_m = vec![]; - m.serialize(&mut blob_m).unwrap(); + serialize_module_at_max_version(&m, &mut blob_m).unwrap(); let mut blob_n = vec![]; - n.serialize(&mut blob_n).unwrap(); + serialize_module_at_max_version(&n, &mut blob_n).unwrap(); let module_id = ModuleId::new(TEST_ADDR, Identifier::new("N").unwrap()); let fun_name = Identifier::new("bar").unwrap(); @@ -312,7 +312,7 @@ fn test_unverifiable_module_dependency() { let m = as_module(units.pop().unwrap()); let mut blob_n = vec![]; - n.serialize(&mut blob_n).unwrap(); + serialize_module_at_max_version(&n, &mut blob_n).unwrap(); let module_id = ModuleId::new(TEST_ADDR, Identifier::new("N").unwrap()); let fun_name = Identifier::new("bar").unwrap(); @@ -320,7 +320,7 @@ fn test_unverifiable_module_dependency() { // Publish M and N and call N::bar. Everything should work. 
{ let mut blob_m = vec![]; - m.serialize(&mut blob_m).unwrap(); + serialize_module_at_max_version(&m, &mut blob_m).unwrap(); let mut storage = InMemoryStorage::new(); @@ -345,7 +345,7 @@ fn test_unverifiable_module_dependency() { let mut m = m; m.function_defs[0].code.as_mut().unwrap().code = vec![]; let mut blob_m = vec![]; - m.serialize(&mut blob_m).unwrap(); + serialize_module_at_max_version(&m, &mut blob_m).unwrap(); let mut storage = InMemoryStorage::new(); diff --git a/external-crates/move/crates/move-vm-integration-tests/src/tests/depth_tests_modules.move b/external-crates/move/crates/move-vm-integration-tests/src/tests/depth_tests_modules.move index 639d337290280..4b0cef722cbb5 100644 --- a/external-crates/move/crates/move-vm-integration-tests/src/tests/depth_tests_modules.move +++ b/external-crates/move/crates/move-vm-integration-tests/src/tests/depth_tests_modules.move @@ -1,7 +1,7 @@ -module 0x2::A { +module 0x7::A { public struct S has copy, drop { - f1: 0x2::B::S, - f2: 0x2::C::S, + f1: 0x7::B::S, + f2: 0x7::C::S, } public struct Box has copy, drop, store { x: T } @@ -13,58 +13,58 @@ module 0x2::A { public struct Box127 has copy, drop, store { x: Box63> } } -module 0x2::B { +module 0x7::B { public struct S has copy, drop { f1: u64, f2: u128, } } -module 0x2::C { +module 0x7::C { public struct S has copy, drop { f1: address, f2: bool, } } -module 0x2::D { +module 0x7::D { public struct S has copy, drop { - f1: 0x2::B::S, + f1: 0x7::B::S, } } -module 0x2::E { +module 0x7::E { public struct S has copy, drop { - f1: 0x2::F::S, + f1: 0x7::F::S, f2: u64, } } -module 0x2::F { +module 0x7::F { public struct S has copy, drop { f1: T, f2: u64, } } -module 0x2::G { +module 0x7::G { public struct S has copy, drop { - f1: 0x2::H::S, + f1: 0x7::H::S, f2: u64, } } -module 0x2::H { +module 0x7::H { public struct S has copy, drop { - f1: 0x2::F::S, - f2: 0x2::E::S, - f3: 0x2::E::S<0x2::F::S>, + f1: 0x7::F::S, + f2: 0x7::E::S, + f3: 0x7::E::S<0x7::F::S>, f4: A, f5: B, f6: u64, } } -module 0x2::I { +module 0x7::I { public struct S { f1: F, f2: E, diff --git a/external-crates/move/crates/move-vm-integration-tests/src/tests/exec_func_effects_tests.rs b/external-crates/move/crates/move-vm-integration-tests/src/tests/exec_func_effects_tests.rs index eee44028d5ac5..eb5145f1b24e3 100644 --- a/external-crates/move/crates/move-vm-integration-tests/src/tests/exec_func_effects_tests.rs +++ b/external-crates/move/crates/move-vm-integration-tests/src/tests/exec_func_effects_tests.rs @@ -4,7 +4,7 @@ use std::convert::TryInto; -use crate::compiler::{as_module, compile_units}; +use crate::compiler::{as_module, compile_units, serialize_module_at_max_version}; use move_binary_format::errors::VMResult; use move_core_types::{ account_address::AccountAddress, @@ -127,7 +127,7 @@ fn compile_module(storage: &mut InMemoryStorage, mod_id: &ModuleId, code: &str) let mut units = compile_units(code).unwrap(); let module = as_module(units.pop().unwrap()); let mut blob = vec![]; - module.serialize(&mut blob).unwrap(); + serialize_module_at_max_version(&module, &mut blob).unwrap(); storage.publish_or_overwrite_module(mod_id.clone(), blob); } diff --git a/external-crates/move/crates/move-vm-integration-tests/src/tests/function_arg_tests.rs b/external-crates/move/crates/move-vm-integration-tests/src/tests/function_arg_tests.rs index 1953109d2c136..bb7448be1e59d 100644 --- a/external-crates/move/crates/move-vm-integration-tests/src/tests/function_arg_tests.rs +++ 
b/external-crates/move/crates/move-vm-integration-tests/src/tests/function_arg_tests.rs @@ -2,7 +2,7 @@ // Copyright (c) The Move Contributors // SPDX-License-Identifier: Apache-2.0 -use crate::compiler::{as_module, compile_units}; +use crate::compiler::{as_module, compile_units, serialize_module_at_max_version}; use move_binary_format::errors::VMResult; use move_core_types::{ account_address::AccountAddress, @@ -51,7 +51,7 @@ fn run( let mut units = compile_units(&code).unwrap(); let m = as_module(units.pop().unwrap()); let mut blob = vec![]; - m.serialize(&mut blob).unwrap(); + serialize_module_at_max_version(&m, &mut blob).unwrap(); let mut storage = InMemoryStorage::new(); let module_id = ModuleId::new(TEST_ADDR, Identifier::new("M").unwrap()); diff --git a/external-crates/move/crates/move-vm-integration-tests/src/tests/invariant_violation_tests.rs b/external-crates/move/crates/move-vm-integration-tests/src/tests/invariant_violation_tests.rs index 17b696cce8feb..5e9e006e81dc0 100644 --- a/external-crates/move/crates/move-vm-integration-tests/src/tests/invariant_violation_tests.rs +++ b/external-crates/move/crates/move-vm-integration-tests/src/tests/invariant_violation_tests.rs @@ -10,6 +10,8 @@ use move_core_types::{account_address::AccountAddress, vm_status::StatusCode}; use move_vm_runtime::move_vm::MoveVM; use move_vm_test_utils::{gas_schedule::GasStatus, InMemoryStorage}; +use crate::compiler::serialize_module_at_max_version; + #[test] fn merge_borrow_states_infinite_loop() { let mut m = empty_module(); @@ -80,7 +82,7 @@ fn merge_borrow_states_infinite_loop() { let storage: InMemoryStorage = InMemoryStorage::new(); let mut session = vm.new_session(&storage); let mut module_bytes = vec![]; - m.serialize(&mut module_bytes).unwrap(); + serialize_module_at_max_version(&m, &mut module_bytes).unwrap(); let meter = &mut GasStatus::new_unmetered(); session .publish_module(module_bytes, AccountAddress::ZERO, meter) diff --git a/external-crates/move/crates/move-vm-integration-tests/src/tests/leak_tests.rs b/external-crates/move/crates/move-vm-integration-tests/src/tests/leak_tests.rs index ee323c7afd1df..fc89777a46cfb 100644 --- a/external-crates/move/crates/move-vm-integration-tests/src/tests/leak_tests.rs +++ b/external-crates/move/crates/move-vm-integration-tests/src/tests/leak_tests.rs @@ -9,6 +9,8 @@ use move_core_types::account_address::AccountAddress; use move_vm_runtime::move_vm::MoveVM; use move_vm_test_utils::{gas_schedule::GasStatus, InMemoryStorage}; +use crate::compiler::serialize_module_at_max_version; + #[test] fn leak_with_abort() { let mut locals = vec![U128, MutableReference(Box::new(U128))]; @@ -56,7 +58,7 @@ fn leak_with_abort() { let storage: InMemoryStorage = InMemoryStorage::new(); let mut session = vm.new_session(&storage); let mut module_bytes = vec![]; - m.serialize(&mut module_bytes).unwrap(); + serialize_module_at_max_version(&m, &mut module_bytes).unwrap(); let meter = &mut GasStatus::new_unmetered(); session .publish_module(module_bytes, AccountAddress::ZERO, meter) diff --git a/external-crates/move/crates/move-vm-integration-tests/src/tests/loader_tests.rs b/external-crates/move/crates/move-vm-integration-tests/src/tests/loader_tests.rs index bc99f58f1932a..ee61b9abe1cf7 100644 --- a/external-crates/move/crates/move-vm-integration-tests/src/tests/loader_tests.rs +++ b/external-crates/move/crates/move-vm-integration-tests/src/tests/loader_tests.rs @@ -5,7 +5,7 @@ // Simplifies logic around re-using ModuleIds. 
#![allow(clippy::redundant_clone)] -use crate::compiler::{compile_modules_in_file, expect_modules}; +use crate::compiler::{compile_modules_in_file, expect_modules, serialize_module_at_max_version}; use move_binary_format::{ file_format::{ empty_module, AddressIdentifierIndex, IdentifierIndex, ModuleHandle, TableIndex, @@ -32,15 +32,19 @@ use move_vm_types::{ use std::{collections::BTreeMap, path::PathBuf, str::FromStr, sync::Arc, thread}; -const DEFAULT_ACCOUNT: AccountAddress = AccountAddress::TWO; +const DEFAULT_ACCOUNT: AccountAddress = { + let mut address = [0u8; AccountAddress::LENGTH]; + address[AccountAddress::LENGTH - 1] = 7u8; + AccountAddress::new(address) +}; const UPGRADE_ACCOUNT: AccountAddress = { let mut address = [0u8; AccountAddress::LENGTH]; - address[AccountAddress::LENGTH - 1] = 3u8; + address[AccountAddress::LENGTH - 1] = 8u8; AccountAddress::new(address) }; const UPGRADE_ACCOUNT_2: AccountAddress = { let mut address = [0u8; AccountAddress::LENGTH]; - address[AccountAddress::LENGTH - 1] = 4u8; + address[AccountAddress::LENGTH - 1] = 9u8; AccountAddress::new(address) }; @@ -129,7 +133,7 @@ impl Adapter { for module in modules { let mut binary = vec![]; - module.serialize(&mut binary).unwrap_or_else(|e| { + serialize_module_at_max_version(&module, &mut binary).unwrap_or_else(|e| { panic!("failure in module serialization: {e:?}\n{:#?}", module) }); session @@ -147,7 +151,7 @@ impl Adapter { for module in modules { let mut binary = vec![]; - module.serialize(&mut binary).unwrap_or_else(|e| { + serialize_module_at_max_version(&module, &mut binary).unwrap_or_else(|e| { panic!("failure in module serialization: {e:?}\n{:#?}", module) }); session @@ -162,7 +166,7 @@ impl Adapter { .into_iter() .map(|module| { let mut binary = vec![]; - module.serialize(&mut binary).unwrap_or_else(|e| { + serialize_module_at_max_version(&module, &mut binary).unwrap_or_else(|e| { panic!("failure in module serialization: {e:?}\n{:#?}", module) }); binary @@ -185,7 +189,7 @@ impl Adapter { .into_iter() .map(|module| { let mut binary = vec![]; - module.serialize(&mut binary).unwrap_or_else(|e| { + serialize_module_at_max_version(&module, &mut binary).unwrap_or_else(|e| { panic!("failure in module serialization: {e:?}\n{:#?}", module) }); binary @@ -774,7 +778,7 @@ fn relink_type_identity() { let b1_modules = get_relinker_tests_modules_with_deps("b_v1", ["c_v1"]).unwrap(); adapter.publish_modules(c0_modules); - let c0_s = adapter.load_type(&TypeTag::from_str("0x2::c::S").unwrap()); + let c0_s = adapter.load_type(&TypeTag::from_str("0x7::c::S").unwrap()); let mut adapter = adapter.relink( UPGRADE_ACCOUNT, @@ -791,8 +795,8 @@ fn relink_type_identity() { adapter.publish_modules(c1_modules); adapter.publish_modules(b1_modules); - let c1_s = adapter.load_type(&TypeTag::from_str("0x2::c::S").unwrap()); - let b1_s = adapter.load_type(&TypeTag::from_str("0x2::b::S").unwrap()); + let c1_s = adapter.load_type(&TypeTag::from_str("0x7::c::S").unwrap()); + let b1_s = adapter.load_type(&TypeTag::from_str("0x7::b::S").unwrap()); assert_eq!(c0_s, c1_s); assert_ne!(c1_s, b1_s); @@ -813,7 +817,7 @@ fn relink_defining_module_successive() { let c2_modules = get_relinker_tests_modules_with_deps("c_v2", []).unwrap(); adapter.publish_modules(c0_modules); - let c0_s = adapter.load_type(&TypeTag::from_str("0x2::c::S").unwrap()); + let c0_s = adapter.load_type(&TypeTag::from_str("0x7::c::S").unwrap()); let mut adapter = adapter.relink( UPGRADE_ACCOUNT, @@ -826,8 +830,8 @@ fn relink_defining_module_successive() { ); 
adapter.publish_modules(c1_modules); - let c1_s = adapter.load_type(&TypeTag::from_str("0x2::c::S").unwrap()); - let c1_r = adapter.load_type(&TypeTag::from_str("0x2::c::R").unwrap()); + let c1_s = adapter.load_type(&TypeTag::from_str("0x7::c::S").unwrap()); + let c1_r = adapter.load_type(&TypeTag::from_str("0x7::c::R").unwrap()); let mut adapter = adapter.relink( UPGRADE_ACCOUNT_2, @@ -841,9 +845,9 @@ fn relink_defining_module_successive() { ); adapter.publish_modules(c2_modules); - let c2_s = adapter.load_type(&TypeTag::from_str("0x2::c::S").unwrap()); - let c2_r = adapter.load_type(&TypeTag::from_str("0x2::c::R").unwrap()); - let c2_q = adapter.load_type(&TypeTag::from_str("0x2::c::Q").unwrap()); + let c2_s = adapter.load_type(&TypeTag::from_str("0x7::c::S").unwrap()); + let c2_r = adapter.load_type(&TypeTag::from_str("0x7::c::R").unwrap()); + let c2_q = adapter.load_type(&TypeTag::from_str("0x7::c::Q").unwrap()); for s in &[c0_s, c1_s, c2_s] { let TypeTag::Struct(st) = adapter.get_type_tag(s) else { @@ -895,9 +899,9 @@ fn relink_defining_module_oneshot() { ); adapter.publish_modules(c2_modules); - let s = adapter.load_type(&TypeTag::from_str("0x2::c::S").unwrap()); - let r = adapter.load_type(&TypeTag::from_str("0x2::c::R").unwrap()); - let q = adapter.load_type(&TypeTag::from_str("0x2::c::Q").unwrap()); + let s = adapter.load_type(&TypeTag::from_str("0x7::c::S").unwrap()); + let r = adapter.load_type(&TypeTag::from_str("0x7::c::R").unwrap()); + let q = adapter.load_type(&TypeTag::from_str("0x7::c::Q").unwrap()); let TypeTag::Struct(s) = adapter.get_type_tag(&s) else { panic!("Not a struct: {s:?}") diff --git a/external-crates/move/crates/move-vm-integration-tests/src/tests/loader_tests_modules.move b/external-crates/move/crates/move-vm-integration-tests/src/tests/loader_tests_modules.move index 74d980a6c0e28..1e35583dfeaec 100644 --- a/external-crates/move/crates/move-vm-integration-tests/src/tests/loader_tests_modules.move +++ b/external-crates/move/crates/move-vm-integration-tests/src/tests/loader_tests_modules.move @@ -1,14 +1,14 @@ -module 0x2::A { +module 0x7::A { public struct S has copy, drop { - f1: 0x2::B::S, - f2: 0x2::C::S, + f1: 0x7::B::S, + f2: 0x7::C::S, } - public fun new(f1: 0x2::B::S, f2: 0x2::C::S): S { + public fun new(f1: 0x7::B::S, f2: 0x7::C::S): S { Self::S { f1, f2 } } - public fun destroy(v: S): (0x2::B::S, 0x2::C::S) { + public fun destroy(v: S): (0x7::B::S, 0x7::C::S) { let S { f1: v1, f2: v2 } = v; (v1, v2) } @@ -16,22 +16,22 @@ module 0x2::A { public fun entry_a() { let mut i = 0; while (i < 10) { - let b = 0x2::B::new(20, 100); - let c = 0x2::C::new(@0x42, true); - let another_b = 0x2::B::b_and_c(&b, c); - let (_, _) = 0x2::B::destroy(another_b); - let another_c = 0x2::C::new(@0x42, false); - 0x2::C::destroy(another_c); + let b = 0x7::B::new(20, 100); + let c = 0x7::C::new(@0x42, true); + let another_b = 0x7::B::b_and_c(&b, c); + let (_, _) = 0x7::B::destroy(another_b); + let another_c = 0x7::C::new(@0x42, false); + 0x7::C::destroy(another_c); i = i + 1; } } - public fun get_field_1(s: &S): 0x2::B::S { + public fun get_field_1(s: &S): 0x7::B::S { *&s.f1 } } -module 0x2::B { +module 0x7::B { public struct S has copy, drop { f1: u64, f2: u128, @@ -44,8 +44,8 @@ module 0x2::B { (val1, val2) } - public fun b_and_c(b: &S, c: 0x2::C::S): S { - let _ = 0x2::C::destroy(c); + public fun b_and_c(b: &S, c: 0x7::C::S): S { + let _ = 0x7::C::destroy(c); let another_b = S { f1: 0, f2: b.f2, @@ -54,7 +54,7 @@ module 0x2::B { } } -module 0x2::C { +module 0x7::C { public 
struct S has copy, drop { f1: address, f2: bool, @@ -83,65 +83,65 @@ module 0x2::C { } -module 0x2::D { +module 0x7::D { public struct S has copy, drop { - f1: 0x2::B::S, + f1: 0x7::B::S, } - public fun new(): 0x2::D::S { + public fun new(): 0x7::D::S { Self::S { - f1: 0x2::B::new(20, 100), + f1: 0x7::B::new(20, 100), } } public fun entry_d() { let mut i = 0; while (i < 10) { - let b = 0x2::B::new(20, 100); - let c = 0x2::C::new(@0x45, false); - let another_b = 0x2::B::b_and_c(&b, c); - let (_, _) = 0x2::B::destroy(another_b); - let another_c = 0x2::C::new(@0x46, true); - 0x2::C::destroy(another_c); + let b = 0x7::B::new(20, 100); + let c = 0x7::C::new(@0x45, false); + let another_b = 0x7::B::b_and_c(&b, c); + let (_, _) = 0x7::B::destroy(another_b); + let another_c = 0x7::C::new(@0x46, true); + 0x7::C::destroy(another_c); i = i + 1; } } } -module 0x2::E { +module 0x7::E { public struct S { f1: u64, } - public fun new(): 0x2::E::S { Self::S { f1: 20 } } + public fun new(): 0x7::E::S { Self::S { f1: 20 } } public fun entry_e() { let mut i = 0; while (i < 10) { - let b = 0x2::B::new(20, 100); - let c = 0x2::C::new(@0x100, false); - let another_b = 0x2::B::b_and_c(&b, c); - let (_, _) = 0x2::B::destroy(another_b); - let another_c = 0x2::C::new(@0x101, true); - 0x2::C::destroy(another_c); + let b = 0x7::B::new(20, 100); + let c = 0x7::C::new(@0x100, false); + let another_b = 0x7::B::b_and_c(&b, c); + let (_, _) = 0x7::B::destroy(another_b); + let another_c = 0x7::C::new(@0x101, true); + 0x7::C::destroy(another_c); i = i + 1; }; } } -module 0x2::F { +module 0x7::F { public struct S { f1: u64, } - public fun new(): 0x2::F::S { Self::S { f1: 20 } } + public fun new(): 0x7::F::S { Self::S { f1: 20 } } public fun entry_f() { - 0x2::A::entry_a(); + 0x7::A::entry_a(); } } -module 0x2::G { +module 0x7::G { public struct S has copy, drop { f1: u64, f2: u128, @@ -157,8 +157,8 @@ module 0x2::G { (val1, val2, val3, val4, val5) } - public fun b_and_c(b: &S, c: 0x2::C::S): S { - let _ = 0x2::C::destroy(c); + public fun b_and_c(b: &S, c: 0x7::C::S): S { + let _ = 0x7::C::destroy(c); let another_b = S { f1: 0, f2: b.f2, diff --git a/external-crates/move/crates/move-vm-integration-tests/src/tests/nested_loop_tests.rs b/external-crates/move/crates/move-vm-integration-tests/src/tests/nested_loop_tests.rs index 05b5110ad5bad..fcb4c3ea9ac10 100644 --- a/external-crates/move/crates/move-vm-integration-tests/src/tests/nested_loop_tests.rs +++ b/external-crates/move/crates/move-vm-integration-tests/src/tests/nested_loop_tests.rs @@ -1,7 +1,7 @@ // Copyright (c) The Move Contributors // SPDX-License-Identifier: Apache-2.0 -use crate::compiler::{as_module, compile_units}; +use crate::compiler::{as_module, compile_units, serialize_module_at_max_version}; use move_core_types::account_address::AccountAddress; use move_vm_config::{runtime::VMConfig, verifier::VerifierConfig}; use move_vm_runtime::move_vm::MoveVM; @@ -33,7 +33,7 @@ fn test_publish_module_with_nested_loops() { let m = as_module(units.pop().unwrap()); let mut m_blob = vec![]; - m.serialize(&mut m_blob).unwrap(); + serialize_module_at_max_version(&m, &mut m_blob).unwrap(); // Should succeed with max_loop_depth = 2 { diff --git a/external-crates/move/crates/move-vm-integration-tests/src/tests/relinking_tests_a_v0.move b/external-crates/move/crates/move-vm-integration-tests/src/tests/relinking_tests_a_v0.move index 270ccc213d43d..7a15a696273bb 100644 --- a/external-crates/move/crates/move-vm-integration-tests/src/tests/relinking_tests_a_v0.move +++ 
b/external-crates/move/crates/move-vm-integration-tests/src/tests/relinking_tests_a_v0.move @@ -1,6 +1,6 @@ /// Dependencies: [B v0+, C v1+] -module 0x2::a { +module 0x7::a { public fun a(): u64 { - 0x2::b::b() + 0x2::c::d() + 0x7::b::b() + 0x7::c::d() } } diff --git a/external-crates/move/crates/move-vm-integration-tests/src/tests/relinking_tests_b_v0.move b/external-crates/move/crates/move-vm-integration-tests/src/tests/relinking_tests_b_v0.move index abbc19add9e04..9395c6f183ecf 100644 --- a/external-crates/move/crates/move-vm-integration-tests/src/tests/relinking_tests_b_v0.move +++ b/external-crates/move/crates/move-vm-integration-tests/src/tests/relinking_tests_b_v0.move @@ -1,6 +1,6 @@ /// Dependencies: [C v0+] -module 0x2::b { +module 0x7::b { public fun b(): u64 { - 0x2::c::c() + 1 + 0x7::c::c() + 1 } } diff --git a/external-crates/move/crates/move-vm-integration-tests/src/tests/relinking_tests_b_v1.move b/external-crates/move/crates/move-vm-integration-tests/src/tests/relinking_tests_b_v1.move index bf93efa2b63d3..a24c27d5e46b7 100644 --- a/external-crates/move/crates/move-vm-integration-tests/src/tests/relinking_tests_b_v1.move +++ b/external-crates/move/crates/move-vm-integration-tests/src/tests/relinking_tests_b_v1.move @@ -1,8 +1,8 @@ /// Dependencies: [C v0+] -module 0x2::b { +module 0x7::b { public struct S { x: u64 } public fun b(): u64 { - 0x2::c::c() * 0x2::c::d() + 0x7::c::c() * 0x7::c::d() } } diff --git a/external-crates/move/crates/move-vm-integration-tests/src/tests/relinking_tests_c_v0.move b/external-crates/move/crates/move-vm-integration-tests/src/tests/relinking_tests_c_v0.move index 6efe3748c3795..ebd04f1507119 100644 --- a/external-crates/move/crates/move-vm-integration-tests/src/tests/relinking_tests_c_v0.move +++ b/external-crates/move/crates/move-vm-integration-tests/src/tests/relinking_tests_c_v0.move @@ -1,5 +1,5 @@ /// Dependencies: [] -module 0x2::c { +module 0x7::c { public struct S { x: u64 } public fun c(): u64 { diff --git a/external-crates/move/crates/move-vm-integration-tests/src/tests/relinking_tests_c_v1.move b/external-crates/move/crates/move-vm-integration-tests/src/tests/relinking_tests_c_v1.move index b36c4e9549b78..446fc1ebcfe59 100644 --- a/external-crates/move/crates/move-vm-integration-tests/src/tests/relinking_tests_c_v1.move +++ b/external-crates/move/crates/move-vm-integration-tests/src/tests/relinking_tests_c_v1.move @@ -1,5 +1,5 @@ /// Dependencies: [] -module 0x2::c { +module 0x7::c { public struct S { x: u64 } public struct R { x: u64, y: u64 } diff --git a/external-crates/move/crates/move-vm-integration-tests/src/tests/relinking_tests_c_v2.move b/external-crates/move/crates/move-vm-integration-tests/src/tests/relinking_tests_c_v2.move index ba0318bef1cfe..826d4dd5fd943 100644 --- a/external-crates/move/crates/move-vm-integration-tests/src/tests/relinking_tests_c_v2.move +++ b/external-crates/move/crates/move-vm-integration-tests/src/tests/relinking_tests_c_v2.move @@ -1,5 +1,5 @@ /// Dependencies: [] -module 0x2::c { +module 0x7::c { public struct S { x: u64 } public struct R { x: u64, y: u64 } public struct Q { x: u64, y: u64, z: u64 } diff --git a/external-crates/move/crates/move-vm-integration-tests/src/tests/return_value_tests.rs b/external-crates/move/crates/move-vm-integration-tests/src/tests/return_value_tests.rs index 2a13acbdd94e8..bac4dc209d5d3 100644 --- a/external-crates/move/crates/move-vm-integration-tests/src/tests/return_value_tests.rs +++ 
b/external-crates/move/crates/move-vm-integration-tests/src/tests/return_value_tests.rs @@ -2,7 +2,7 @@ // Copyright (c) The Move Contributors // SPDX-License-Identifier: Apache-2.0 -use crate::compiler::{as_module, compile_units}; +use crate::compiler::{as_module, compile_units, serialize_module_at_max_version}; use move_binary_format::errors::VMResult; use move_core_types::{ account_address::AccountAddress, @@ -41,7 +41,7 @@ fn run( let mut units = compile_units(&code).unwrap(); let m = as_module(units.pop().unwrap()); let mut blob = vec![]; - m.serialize(&mut blob).unwrap(); + serialize_module_at_max_version(&m, &mut blob).unwrap(); let mut storage = InMemoryStorage::new(); let module_id = ModuleId::new(TEST_ADDR, Identifier::new("M").unwrap()); diff --git a/external-crates/move/crates/move-vm-runtime/src/loader.rs b/external-crates/move/crates/move-vm-runtime/src/loader.rs index dd23015d2ab64..b0c1a24dee64d 100644 --- a/external-crates/move/crates/move-vm-runtime/src/loader.rs +++ b/external-crates/move/crates/move-vm-runtime/src/loader.rs @@ -2353,11 +2353,9 @@ impl TypeCache { /// Maximal depth of a value in terms of type depth. pub const VALUE_DEPTH_MAX: u64 = 128; -/// Maximal nodes which are allowed when converting to layout. This includes the types of +/// [Historical] Maximal nodes which are allowed when converting to layout. This includes the types of /// fields for struct types. -/// Maximal nodes which are allowed when converting to layout. This includes the types of -/// fields for datatypes. -const MAX_TYPE_TO_LAYOUT_NODES: u64 = 256; +const HISTORICAL_MAX_TYPE_TO_LAYOUT_NODES: u64 = 256; /// Maximal nodes which are all allowed when instantiating a generic type. This does not include /// field types of datatypes. @@ -2526,7 +2524,7 @@ impl Loader { .collect::<PartialVMResult<Vec<_>>>()?; variant_layouts.push(field_layouts); } - R::MoveDatatypeLayout::Enum(R::MoveEnumLayout(variant_layouts)) + R::MoveDatatypeLayout::Enum(Box::new(R::MoveEnumLayout(Box::new(variant_layouts)))) } Datatype::Struct(ref sinfo) => { let field_tys = sinfo @@ -2539,7 +2537,7 @@ .map(|ty| self.type_to_type_layout_impl(ty, count, depth + 1)) .collect::<PartialVMResult<Vec<_>>>()?; - R::MoveDatatypeLayout::Struct(R::MoveStructLayout::new(field_layouts)) + R::MoveDatatypeLayout::Struct(Box::new(R::MoveStructLayout::new(field_layouts))) } }; @@ -2564,14 +2562,19 @@ count: &mut u64, depth: u64, ) -> PartialVMResult<R::MoveTypeLayout> { - if *count > MAX_TYPE_TO_LAYOUT_NODES { + if *count + > self + .vm_config() + .max_type_to_layout_nodes + .unwrap_or(HISTORICAL_MAX_TYPE_TO_LAYOUT_NODES) + { return Err(PartialVMError::new(StatusCode::TOO_MANY_TYPE_NODES)); } if depth > VALUE_DEPTH_MAX { return Err(PartialVMError::new(StatusCode::VM_MAX_VALUE_DEPTH_REACHED)); } *count += 1; - Ok(match ty { + let ty = match ty { Type::Bool => R::MoveTypeLayout::Bool, Type::U8 => R::MoveTypeLayout::U8, Type::U16 => R::MoveTypeLayout::U16, @@ -2598,7 +2601,8 @@ .with_message(format!("no type layout for {:?}", ty)), ); } - }) + }; + Ok(ty) } fn datatype_gidx_to_fully_annotated_layout( @@ -2651,10 +2655,10 @@ field_layouts, ); } - A::MoveDatatypeLayout::Enum(A::MoveEnumLayout { + A::MoveDatatypeLayout::Enum(Box::new(A::MoveEnumLayout { type_: struct_tag.clone(), variants: variant_layouts, - }) + })) } Datatype::Struct(struct_type) => { if struct_type.fields.len() != struct_type.field_names.len() { @@ -2676,7 +2680,10 @@ Ok(A::MoveFieldLayout::new(n.clone(), l)) }) .collect::<PartialVMResult<Vec<_>>>()?; - A::MoveDatatypeLayout::Struct(A::MoveStructLayout::new(struct_tag, field_layouts)) + A::MoveDatatypeLayout::Struct(Box::new(A::MoveStructLayout::new( + struct_tag, + field_layouts, + ))) } }; @@ -2701,7 +2708,12 @@ count: &mut u64, depth: u64, ) -> PartialVMResult<A::MoveTypeLayout> { - if *count > MAX_TYPE_TO_LAYOUT_NODES { + if *count + > self + .vm_config() + .max_type_to_layout_nodes + .unwrap_or(HISTORICAL_MAX_TYPE_TO_LAYOUT_NODES) + { return Err(PartialVMError::new(StatusCode::TOO_MANY_TYPE_NODES)); } if depth > VALUE_DEPTH_MAX { diff --git a/external-crates/move/crates/move-vm-runtime/src/native_functions.rs b/external-crates/move/crates/move-vm-runtime/src/native_functions.rs index 631168c8091f8..436730074ddee 100644 --- a/external-crates/move/crates/move-vm-runtime/src/native_functions.rs +++ b/external-crates/move/crates/move-vm-runtime/src/native_functions.rs @@ -5,7 +5,10 @@ use crate::{ interpreter::Interpreter, loader::Resolver, native_extensions::NativeContextExtensions, }; -use move_binary_format::errors::{ExecutionState, PartialVMError, PartialVMResult}; +use move_binary_format::{ + errors::{ExecutionState, PartialVMError, PartialVMResult}, + file_format::AbilitySet, +}; use move_core_types::{ account_address::AccountAddress, annotated_value as A, @@ -155,6 +158,10 @@ impl<'a, 'b> NativeContext<'a, 'b> { } } + pub fn type_to_abilities(&self, ty: &Type) -> PartialVMResult<AbilitySet> { + self.resolver.loader().abilities(ty) + } + pub fn extensions(&self) -> &NativeContextExtensions<'b> { self.extensions } diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/signer_runtime_dummy.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/signer_runtime_dummy.mvir index f4ca4998555bb..d379360088195 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/signer_runtime_dummy.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/signer_runtime_dummy.mvir @@ -1,5 +1,5 @@ //# run --signers 0x1 -module 0x1.m { +module 0x5.m { entry foo(s: signer) { label b0: @@ -8,7 +8,7 @@ label b0: } //# run --signers 0x1 --args 0 -module 0x2.m { +module 0x6.m { entry foo(s: signer, u: u64) { label b0: @@ -17,7 +17,7 @@ label b0: } //# run --signers 0x1 --args 0 @0x1 -module 0x3.m { +module 0x7.m { entry foo(s: signer, u: u64, b: address) { label b0: diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vec_borrow_and_modify.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vec_borrow_and_modify.mvir index 85033dc4c0705..96b245fddef62 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vec_borrow_and_modify.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vec_borrow_and_modify.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.M { +module 0x5.M { import 0x1.vector; public new(): vector { @@ -25,8 +25,8 @@ } //# publish -module 0x2.test { - import 0x1.M; +module 0x6.test { + import 0x5.M; public test() { let v: vector; label b0: @@ -37,4 +37,4 @@ } } -//# run 0x2::test::test +//# run 0x6::test::test diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vec_pop.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vec_pop.mvir index 55c59d4175682..08feb8f824aee 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vec_pop.mvir +++ 
b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vec_pop.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.M { +module 0x5.M { import 0x1.vector; public new(): vector { @@ -25,8 +25,8 @@ module 0x1.M { } //# publish -module 0x2.test { - import 0x1.M; +module 0x6.test { + import 0x5.M; public test() { let v: vector; label b0: @@ -37,4 +37,4 @@ module 0x2.test { } } -//# run 0x2::test::test +//# run 0x6::test::test diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vec_push.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vec_push.mvir index de00433effc0b..eac2be70ec19a 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vec_push.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vec_push.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.M { +module 0x5.M { import 0x1.vector; public new(): vector { @@ -23,8 +23,8 @@ module 0x1.M { } //# publish -module 0x2.test { - import 0x1.M; +module 0x6.test { + import 0x5.M; public test() { let v: vector; label b0: @@ -35,4 +35,4 @@ module 0x2.test { } } -//# run 0x2::test::test +//# run 0x6::test::test diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vec_swap.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vec_swap.mvir index 2532ed5599aef..91079e9dc4baf 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vec_swap.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vec_swap.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.M { +module 0x5.M { import 0x1.vector; public new(): vector { @@ -26,8 +26,8 @@ module 0x1.M { } //# publish -module 0x2.test { - import 0x1.M; +module 0x6.test { + import 0x5.M; public test() { let v: vector; label b0: @@ -38,4 +38,4 @@ module 0x2.test { } } -//# run 0x2::test::test +//# run 0x6::test::test diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_borrow_and_modify_ok.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_borrow_and_modify_ok.mvir index 692081b64bd73..29a79e2c8f9af 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_borrow_and_modify_ok.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_borrow_and_modify_ok.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.M { +module 0x5.M { public new(): vector { let v: vector; @@ -23,8 +23,8 @@ module 0x1.M { } //# publish -module 0x2.test { - import 0x1.M; +module 0x6.test { + import 0x5.M; public test() { let v: vector; label b0: @@ -35,4 +35,4 @@ module 0x2.test { } } -//# run 0x2::test::test +//# run 0x6::test::test diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_bound_ok.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_bound_ok.mvir index 9e82c815f1906..9f3c5601a6a11 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_bound_ok.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_bound_ok.mvir @@ -1,5 +1,5 @@ //# run -module 0x1.m { +module 0x6.m { entry foo() { let v: vector; @@ -13,7 +13,7 @@ label b0: } //# run -module 0x2.m { +module 0x7.m { entry foo() { let v: vector; @@ -26,7 +26,7 @@ label b0: } //# run -module 0x3.m { +module 0x8.m { entry foo() { let v: vector; @@ -40,7 +40,7 @@ 
label b0: } //# run -module 0x4.m { +module 0x9.m { entry foo() { let v: vector; @@ -53,7 +53,7 @@ label b0: } //# run -module 0x5.m { +module 0xa.m { entry foo() { let v: vector; @@ -67,7 +67,7 @@ label b0: } //# run -module 0x6.m { +module 0xb.m { entry foo() { let v: vector; diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_len_ok.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_len_ok.mvir index 578ea3dfe63a1..dd5442a954828 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_len_ok.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_len_ok.mvir @@ -1,5 +1,5 @@ //# run -module 0x1.m { +module 0x5.m { entry foo() { let v: vector; @@ -12,7 +12,7 @@ label b0: } //# run -module 0x2.m { +module 0x6.m { entry foo() { let v: vector; @@ -25,7 +25,7 @@ label b0: } //# run -module 0x3.m { +module 0x7.m { entry foo() { let v: vector; diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_out_of_bound.exp b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_out_of_bound.exp index 7762627f9d1c1..451d2e9a19e6e 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_out_of_bound.exp +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_out_of_bound.exp @@ -5,7 +5,7 @@ task 0, lines 1-14: Error: Function execution failed with VMError: { major_status: VECTOR_OPERATION_ERROR, sub_status: Some(1), - location: 0x1::m, + location: 0x5::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 7)], } @@ -15,7 +15,7 @@ task 1, lines 15-28: Error: Function execution failed with VMError: { major_status: VECTOR_OPERATION_ERROR, sub_status: Some(1), - location: 0x2::m, + location: 0x6::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 7)], } @@ -25,7 +25,7 @@ task 2, lines 29-41: Error: Function execution failed with VMError: { major_status: VECTOR_OPERATION_ERROR, sub_status: Some(1), - location: 0x3::m, + location: 0x7::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 5)], } diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_out_of_bound.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_out_of_bound.mvir index 8ee3e8c82c873..d67ae9e94a433 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_out_of_bound.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_out_of_bound.mvir @@ -1,5 +1,5 @@ //# run -module 0x1.m { +module 0x5.m { entry foo() { let v: vector; @@ -13,7 +13,7 @@ label b0: } //# run -module 0x2.m { +module 0x6.m { entry foo() { let v: vector; @@ -27,7 +27,7 @@ label b0: } //# run -module 0x3.m { +module 0x7.m { entry foo() { let v: vector; diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_pack_unpack_ok.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_pack_unpack_ok.mvir index c22505d68cf36..a1b6643bab666 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_pack_unpack_ok.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_pack_unpack_ok.mvir @@ -1,5 +1,5 @@ //# run -module 0x1.m { +module 0x5.m { entry foo() { let v1: vector; @@ -16,7 +16,7 @@ label b0: } 
//# run -module 0x2.m { +module 0x6.m { entry foo() { let v1: vector; diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_pop_empty.exp b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_pop_empty.exp index c8d9fff04e741..bbb0c744ee81f 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_pop_empty.exp +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_pop_empty.exp @@ -5,7 +5,7 @@ task 0, lines 1-13: Error: Function execution failed with VMError: { major_status: VECTOR_OPERATION_ERROR, sub_status: Some(2), - location: 0x1::m, + location: 0x5::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 3)], } @@ -15,7 +15,7 @@ task 1, lines 14-27: Error: Function execution failed with VMError: { major_status: VECTOR_OPERATION_ERROR, sub_status: Some(2), - location: 0x2::m, + location: 0x6::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 7)], } diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_pop_empty.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_pop_empty.mvir index 3f85985204ff7..f109463eb5e50 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_pop_empty.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_pop_empty.mvir @@ -1,5 +1,5 @@ //# run -module 0x1.m { +module 0x5.m { entry foo() { let v: vector; @@ -12,7 +12,7 @@ label b0: } //# run -module 0x2.m { +module 0x6.m { entry foo() { let v: vector; diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_pop_ok.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_pop_ok.mvir index e3c5ef62fcc2b..9cfe0f5dd0112 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_pop_ok.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_pop_ok.mvir @@ -1,5 +1,5 @@ //# run -module 0x1.m { +module 0x5.m { entry foo() { let v: vector; @@ -11,8 +11,9 @@ label b0: } } + //# run -module 0x2.m { +module 0x6.m { entry foo() { let v: vector; diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_unpack_less.exp b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_unpack_less.exp index d3a05993394d4..75552becc8fc7 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_unpack_less.exp +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_unpack_less.exp @@ -5,7 +5,7 @@ task 0, lines 1-13: Error: Function execution failed with VMError: { major_status: VECTOR_OPERATION_ERROR, sub_status: Some(3), - location: 0x1::m, + location: 0x5::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 4)], } @@ -15,7 +15,7 @@ task 1, lines 14-26: Error: Function execution failed with VMError: { major_status: VECTOR_OPERATION_ERROR, sub_status: Some(3), - location: 0x2::m, + location: 0x6::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 5)], } @@ -25,7 +25,7 @@ task 2, lines 27-40: Error: Function execution failed with VMError: { major_status: VECTOR_OPERATION_ERROR, sub_status: Some(3), - location: 0x3::m, + location: 0x7::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 7)], } diff --git 
a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_unpack_less.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_unpack_less.mvir index ea0d0a56b243f..e8ec936fd7890 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_unpack_less.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_unpack_less.mvir @@ -1,5 +1,5 @@ //# run -module 0x1.m { +module 0x5.m { entry foo() { let v: vector; @@ -12,7 +12,7 @@ label b0: } //# run -module 0x2.m { +module 0x6.m { entry foo() { let v: vector; @@ -25,7 +25,7 @@ label b0: } //# run -module 0x3.m { +module 0x7.m { entry foo() { let v: vector; diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_unpack_more.exp b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_unpack_more.exp index 0e73733403bb2..06dd5011d5fe3 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_unpack_more.exp +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_unpack_more.exp @@ -5,7 +5,7 @@ task 0, lines 1-13: Error: Function execution failed with VMError: { major_status: VECTOR_OPERATION_ERROR, sub_status: Some(3), - location: 0x1::m, + location: 0x5::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 3)], } @@ -15,7 +15,7 @@ task 1, lines 14-26: Error: Function execution failed with VMError: { major_status: VECTOR_OPERATION_ERROR, sub_status: Some(3), - location: 0x2::m, + location: 0x6::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 4)], } @@ -25,7 +25,7 @@ task 2, lines 27-40: Error: Function execution failed with VMError: { major_status: VECTOR_OPERATION_ERROR, sub_status: Some(3), - location: 0x3::m, + location: 0x7::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 7)], } diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_unpack_more.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_unpack_more.mvir index 1435bd8a8ce7c..d407be0d3d66f 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_unpack_more.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_unpack_more.mvir @@ -1,5 +1,5 @@ //# run -module 0x1.m { +module 0x5.m { entry foo() { let v: vector; @@ -12,7 +12,7 @@ label b0: } //# run -module 0x2.m { +module 0x6.m { entry foo() { let v: vector; @@ -25,7 +25,7 @@ label b0: } //# run -module 0x3.m { +module 0x7.m { entry foo() { let v: vector; diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_unpack_ok.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_unpack_ok.mvir index a9112e3c85ee8..317bb3e21943c 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_unpack_ok.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/builtins/vector_ops_unpack_ok.mvir @@ -1,5 +1,5 @@ //# run -module 0x1.m { +module 0x5.m { entry foo() { let v: vector; @@ -12,7 +12,7 @@ label b0: } //# run -module 0x2.m { +module 0x6.m { entry foo() { let v: vector; @@ -25,7 +25,7 @@ label b0: } //# run -module 0x3.m { +module 0x7.m { entry foo() { let v: vector; @@ -40,7 +40,7 @@ label b0: } //# run -module 0x4.m { +module 0x8.m { entry foo() { let v: vector; diff --git 
a/external-crates/move/crates/move-vm-transactional-tests/tests/commands/abort_in_module.exp b/external-crates/move/crates/move-vm-transactional-tests/tests/commands/abort_in_module.exp index 2fdf39797a6e2..9124a5b50c95d 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/commands/abort_in_module.exp +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/commands/abort_in_module.exp @@ -5,7 +5,7 @@ task 1, lines 10-20: Error: Function execution failed with VMError: { major_status: ABORTED, sub_status: Some(22), - location: 0x1::M, + location: 0x5::M, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/commands/abort_in_module.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/commands/abort_in_module.mvir index 014f4d67c67a3..f723a0f073228 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/commands/abort_in_module.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/commands/abort_in_module.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.M { +module 0x5.M { public foo() { label b0: abort 22; @@ -9,7 +9,7 @@ module 0x1.M { //# run module 0x42.m { -import 0x1.M; +import 0x5.M; entry foo() { label b0: diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/control_flow/fields_packed_in_order.exp b/external-crates/move/crates/move-vm-transactional-tests/tests/control_flow/fields_packed_in_order.exp index b404d5e02081d..ebd37c53c3422 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/control_flow/fields_packed_in_order.exp +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/control_flow/fields_packed_in_order.exp @@ -5,7 +5,7 @@ task 1, lines 19-28: Error: Function execution failed with VMError: { major_status: ABORTED, sub_status: Some(42), - location: 0x1::M, + location: 0x6::M, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/control_flow/fields_packed_in_order.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/control_flow/fields_packed_in_order.mvir index 9d65194b5673d..6052b38836559 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/control_flow/fields_packed_in_order.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/control_flow/fields_packed_in_order.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.M { +module 0x6.M { struct T has drop { b: u64, a: u64 @@ -18,7 +18,7 @@ module 0x1.M { //# run module 0x42.m { -import 0x1.M; +import 0x6.M; entry foo() { let m: M.T; label b0: diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/control_flow/return_in_if_branch_taken.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/control_flow/return_in_if_branch_taken.mvir index cd9253e1ffe48..705f6f63ca69e 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/control_flow/return_in_if_branch_taken.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/control_flow/return_in_if_branch_taken.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.Test { +module 0x6.Test { public t(): u64 { let x: u64; label b0: @@ -14,7 +14,7 @@ module 0x1.Test { //# run module 0x42.m { -import 0x1.Test; +import 0x6.Test; entry foo() { let x: u64; diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/control_flow/return_in_if_branch_taken_no_else.mvir 
b/external-crates/move/crates/move-vm-transactional-tests/tests/control_flow/return_in_if_branch_taken_no_else.mvir index 98c2d044950de..5e67212b772c7 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/control_flow/return_in_if_branch_taken_no_else.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/control_flow/return_in_if_branch_taken_no_else.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.Test { +module 0x6.Test { public t(): u64 { label b0: jump_if_false (true) b2; @@ -12,7 +12,7 @@ module 0x1.Test { //# run module 0x42.m { -import 0x1.Test; +import 0x6.Test; entry foo() { let x: u64; diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/address_arg_is_not_signer.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/address_arg_is_not_signer.mvir index 8ab5af8cd1229..83e408cfd7db3 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/address_arg_is_not_signer.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/address_arg_is_not_signer.mvir @@ -1,5 +1,5 @@ //# run --args @0x1 -module 0x1.m { +module 0x5.m { entry foo(s: signer) { label b0: @@ -9,7 +9,7 @@ label b0: }// DEPRECATED signers can now be passed in as addrs //# run --args @0x0 @0x2 -module 0x2.m { +module 0x6.m { entry foo(s: signer, s2: signer) { label b0: diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/expected_2_signer_args_got_1.move b/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/expected_2_signer_args_got_1.move index 32c12bd69ec42..d0ccfd8bf9d2c 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/expected_2_signer_args_got_1.move +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/expected_2_signer_args_got_1.move @@ -1,6 +1,6 @@ //# run --signers 0x1 // should fail, missing signer -module 0x42::m { +module 0x47::m { fun main(_s1: signer, _s2: signer) { } } diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/mixed_signer_inputs_scripts.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/mixed_signer_inputs_scripts.mvir index a6cb00ffc6361..9ff4b7695ec8c 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/mixed_signer_inputs_scripts.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/mixed_signer_inputs_scripts.mvir @@ -1,5 +1,5 @@ //# run --args @0x1 @0x1 @0x1 -module 0x1.m { +module 0x5.m { // tests various mixed usage of signer in script arguments entry foo(s1: signer, s2: &signer, s3: signer) { @@ -9,7 +9,7 @@ entry foo(s1: signer, s2: &signer, s3: signer) { } //# run --args @0x1 0 @0x1 false @0x1 -module 0x2.m { +module 0x6.m { entry foo(s1: &signer, u: u64, s2: signer, f: bool, s3: &signer) { label l0: @@ -18,7 +18,7 @@ entry foo(s1: &signer, u: u64, s2: signer, f: bool, s3: &signer) { } //# run --args 0 false @0x1 @0x2 @0x3 -module 0x3.m { +module 0x7.m { entry foo(u: u64, f: bool, a: address, s1: signer, s2: &signer) { label l0: diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/modify_mutable_ref_inputs.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/modify_mutable_ref_inputs.mvir index 7fb30e4fa309d..13fb4e1f60e98 100644 --- 
a/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/modify_mutable_ref_inputs.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/modify_mutable_ref_inputs.mvir @@ -26,7 +26,7 @@ module 0x42.M { } //# run --args 0 1 b"xyz" -module 0x1.m { +module 0x5.m { import 0x42.M; entry foo( u: &mut u64, diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/ref_inputs.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/ref_inputs.mvir index 80d481d2b67e6..8d0b5d13f8eb7 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/ref_inputs.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/ref_inputs.mvir @@ -27,7 +27,7 @@ module 0x42.M { } //# run --args 0 -module 0x2.m { +module 0x6.m { entry foo(r1: &u64) { label l0: @@ -36,7 +36,7 @@ entry foo(r1: &u64) { } //# run --args false -module 0x3.m { +module 0x7.m { entry foo(r1: &mut bool) { label l0: @@ -45,7 +45,7 @@ entry foo(r1: &mut bool) { } //# run --args 0 false 0 0 -module 0x5.m { +module 0x8.m { import 0x42.M; entry foo(r1: &u64, r2: &mut bool, r3: &M.S, r4: &mut M.S) { label l0: @@ -54,7 +54,7 @@ entry foo(r1: &u64, r2: &mut bool, r3: &M.S, r4: &mut M.S) { } //# run --type-args u64 bool 0x42::M::S 0x42::M::S --args 0 false 0 0 -module 0x6.m { +module 0x9.m { entry foo(r1: &T1, r2: &mut T2, r3: &T3, r4: &mut T4) { label l0: diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/script_too_few_type_args_inner.exp b/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/script_too_few_type_args_inner.exp index dd7e266900f13..4adf6d72dde37 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/script_too_few_type_args_inner.exp +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/script_too_few_type_args_inner.exp @@ -1,7 +1,7 @@ processed 2 tasks task 1, lines 6-14: -//# run --type-args 0x1::M::Ex +//# run --type-args 0x5::M::Ex Error: Function execution failed with VMError: { major_status: NUMBER_OF_TYPE_ARGUMENTS_MISMATCH, sub_status: None, diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/script_too_few_type_args_inner.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/script_too_few_type_args_inner.mvir index ba93f6dd42e1f..332454bcf03d3 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/script_too_few_type_args_inner.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/script_too_few_type_args_inner.mvir @@ -1,9 +1,9 @@ //# publish -module 0x1.M { +module 0x5.M { struct Ex { flag: bool } } -//# run --type-args 0x1::M::Ex +//# run --type-args 0x5::M::Ex module 0x42.m { entry foo() { // too few type args for inner type diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/script_too_many_type_args_inner.exp b/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/script_too_many_type_args_inner.exp index 5984802c3ed5b..303587fef3fd5 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/script_too_many_type_args_inner.exp +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/script_too_many_type_args_inner.exp @@ -1,7 +1,7 @@ processed 2 tasks task 1, lines 6-14: -//# run --type-args 0x1::M::Ex +//# 
run --type-args 0x5::M::Ex Error: Function execution failed with VMError: { major_status: NUMBER_OF_TYPE_ARGUMENTS_MISMATCH, sub_status: None, diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/script_too_many_type_args_inner.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/script_too_many_type_args_inner.mvir index e399c87303506..138dd43fcab57 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/script_too_many_type_args_inner.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/script_too_many_type_args_inner.mvir @@ -1,9 +1,9 @@ //# publish -module 0x1.M { +module 0x5.M { struct Ex { flag: bool } } -//# run --type-args 0x1::M::Ex +//# run --type-args 0x5::M::Ex module 0x42.m { entry foo() { // too many type args diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/script_type_arg_kind_mismatch_1.exp b/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/script_type_arg_kind_mismatch_1.exp index ffe63af74549c..d5b446b5c9207 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/script_type_arg_kind_mismatch_1.exp +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/script_type_arg_kind_mismatch_1.exp @@ -1,7 +1,7 @@ processed 2 tasks task 1, lines 20-28: -//# run --type-args 0x1::Coin::Coin +//# run --type-args 0x5::Coin::Coin Error: Function execution failed with VMError: { major_status: CONSTRAINT_NOT_SATISFIED, sub_status: None, diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/script_type_arg_kind_mismatch_1.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/script_type_arg_kind_mismatch_1.mvir index 334277b0f9cd5..d6cc95f7347a7 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/script_type_arg_kind_mismatch_1.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/script_type_arg_kind_mismatch_1.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.Coin { +module 0x5.Coin { struct Coin { value: u64 } public value(c: &Self.Coin): u64 { label b0: @@ -17,7 +17,7 @@ module 0x1.Coin { } } -//# run --type-args 0x1::Coin::Coin +//# run --type-args 0x5::Coin::Coin module 0x42.m { entry foo() { // coin does not have copy or drop diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/struct_arguments.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/struct_arguments.mvir index dba8e75bc13c0..c05c46dded88e 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/struct_arguments.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/entry_points/struct_arguments.mvir @@ -17,7 +17,7 @@ module 0x42.M { } //# run --args 0 0 0 -module 0x1.m { +module 0x5.m { import 0x42.M; entry foo(s: M.S, i: &M.S, m: &mut M.S) { label l0: @@ -26,7 +26,7 @@ entry foo(s: M.S, i: &M.S, m: &mut M.S) { } //# run --type-args 0x42::M::S --args 0 0 0 -module 0x2.m { +module 0x6.m { entry foo(s: T, i: &T, m: &mut T) { label l0: return; diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/basic_poly_enum_type_mismatch.exp b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/basic_poly_enum_type_mismatch.exp index efd9aedd8abdb..25a39137befbe 100644 --- 
a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/basic_poly_enum_type_mismatch.exp +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/basic_poly_enum_type_mismatch.exp @@ -3,7 +3,7 @@ processed 2 tasks task 0, lines 1-24: //# print-bytecode // Move bytecode v7 -module 1.MonomorphicEnums { +module 6.MonomorphicEnums { enum EnumWithTwoVariants { @@ -39,10 +39,10 @@ Jump tables: task 1, lines 26-49: //# publish -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000001::MonomorphicEnums'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000006::MonomorphicEnums'. Got VMError: { major_status: PACK_TYPE_MISMATCH_ERROR, sub_status: None, - location: 0x1::MonomorphicEnums, + location: 0x6::MonomorphicEnums, indices: [(FunctionDefinition, 0)], offsets: [(FunctionDefinitionIndex(0), 1)], } diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/basic_poly_enum_type_mismatch.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/basic_poly_enum_type_mismatch.mvir index efef81018cc5e..df6e0a0e20ada 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/basic_poly_enum_type_mismatch.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/basic_poly_enum_type_mismatch.mvir @@ -1,5 +1,5 @@ //# print-bytecode -module 0x1.MonomorphicEnums { +module 0x6.MonomorphicEnums { enum EnumWithTwoVariants { One { }, Two { x: u64 } @@ -24,7 +24,7 @@ module 0x1.MonomorphicEnums { } //# publish -module 0x1.MonomorphicEnums { +module 0x6.MonomorphicEnums { enum EnumWithTwoVariants { One { }, Two { x: u64 } diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/enum_variant_jump_same_label.exp b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/enum_variant_jump_same_label.exp index 1130075ce5566..4fd9f3b026d1d 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/enum_variant_jump_same_label.exp +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/enum_variant_jump_same_label.exp @@ -3,7 +3,7 @@ processed 5 tasks task 0, lines 1-43: //# print-bytecode // Move bytecode v7 -module 1.Enums { +module 6.Enums { enum EnumWithThreeVariants { @@ -62,21 +62,21 @@ Jump tables: } task 3, line 101: -//# run 0x1::Enums::call_fail_1 --type-args u64 +//# run 0x6::Enums::call_fail_1 --type-args u64 Error: Function execution failed with VMError: { major_status: VARIANT_TAG_MISMATCH, sub_status: None, - location: 0x1::Enums, + location: 0x6::Enums, indices: [], offsets: [(FunctionDefinitionIndex(3), 3)], } task 4, line 103: -//# run 0x1::Enums::call_fail_3 --type-args u64 +//# run 0x6::Enums::call_fail_3 --type-args u64 Error: Function execution failed with VMError: { major_status: VARIANT_TAG_MISMATCH, sub_status: None, - location: 0x1::Enums, + location: 0x6::Enums, indices: [], offsets: [(FunctionDefinitionIndex(3), 3)], } diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/enum_variant_jump_same_label.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/enum_variant_jump_same_label.mvir index 72dbbebe69942..b008635ea5e2b 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/enum_variant_jump_same_label.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/enum_variant_jump_same_label.mvir @@ -1,5 +1,5 @@ //# 
print-bytecode -module 0x1.Enums { +module 0x6.Enums { enum EnumWithThreeVariants { One { }, Two { x: T }, @@ -43,7 +43,7 @@ module 0x1.Enums { } //# publish -module 0x1.Enums { +module 0x6.Enums { enum EnumWithThreeVariants { One { }, Two { x: T }, @@ -87,8 +87,8 @@ module 0x1.Enums { } //# run -module 0x2.m { -import 0x1.Enums; +module 0x7.m { +import 0x6.Enums; entry foo() { let x: u64; label b0: @@ -98,6 +98,6 @@ entry foo() { } } -//# run 0x1::Enums::call_fail_1 --type-args u64 +//# run 0x6::Enums::call_fail_1 --type-args u64 -//# run 0x1::Enums::call_fail_3 --type-args u64 +//# run 0x6::Enums::call_fail_3 --type-args u64 diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/enum_variant_mismatch.exp b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/enum_variant_mismatch.exp index 5da6e6f7992c6..3ec9d80f1c71f 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/enum_variant_mismatch.exp +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/enum_variant_mismatch.exp @@ -3,7 +3,7 @@ processed 3 tasks task 0, lines 1-24: //# print-bytecode // Move bytecode v7 -module 1.MonomorphicEnums { +module 6.MonomorphicEnums { enum EnumWithTwoVariants { @@ -42,7 +42,7 @@ task 2, lines 51-61: Error: Function execution failed with VMError: { major_status: VARIANT_TAG_MISMATCH, sub_status: None, - location: 0x1::MonomorphicEnums, + location: 0x6::MonomorphicEnums, indices: [], offsets: [(FunctionDefinitionIndex(0), 6)], } diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/enum_variant_mismatch.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/enum_variant_mismatch.mvir index e5d310bd0260a..fb2be7f9c90e8 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/enum_variant_mismatch.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/enum_variant_mismatch.mvir @@ -1,5 +1,5 @@ //# print-bytecode -module 0x1.MonomorphicEnums { +module 0x6.MonomorphicEnums { enum EnumWithTwoVariants { One { }, Two { x: T } @@ -24,7 +24,7 @@ module 0x1.MonomorphicEnums { } //# publish -module 0x1.MonomorphicEnums { +module 0x6.MonomorphicEnums { enum EnumWithTwoVariants { One { }, Two { x: T } @@ -49,8 +49,8 @@ module 0x1.MonomorphicEnums { } //# run -module 0x2.m { -import 0x1.MonomorphicEnums; +module 0x7.m { +import 0x6.MonomorphicEnums; entry foo() { let x: u64; label b0: diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/enum_variant_tag_unpack_invalid.exp b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/enum_variant_tag_unpack_invalid.exp index 711880ea71291..3464c885b9dcf 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/enum_variant_tag_unpack_invalid.exp +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/enum_variant_tag_unpack_invalid.exp @@ -1,121 +1,121 @@ processed 14 tasks task 2, line 222: -//# run 0x1::Enums::poly_invalid1_mut_ref +//# run 0x6::Enums::poly_invalid1_mut_ref Error: Function execution failed with VMError: { major_status: VARIANT_TAG_MISMATCH, sub_status: None, - location: 0x1::Enums, + location: 0x6::Enums, indices: [], offsets: [(FunctionDefinitionIndex(4), 1)], } task 3, line 224: -//# run 0x1::Enums::poly_invalid1_imm_ref +//# run 0x6::Enums::poly_invalid1_imm_ref Error: Function execution failed with VMError: { major_status: VARIANT_TAG_MISMATCH, sub_status: None, - location: 0x1::Enums, + 
location: 0x6::Enums, indices: [], offsets: [(FunctionDefinitionIndex(5), 1)], } task 4, line 226: -//# run 0x1::Enums::poly_invalid1 +//# run 0x6::Enums::poly_invalid1 Error: Function execution failed with VMError: { major_status: VARIANT_TAG_MISMATCH, sub_status: None, - location: 0x1::Enums, + location: 0x6::Enums, indices: [], offsets: [(FunctionDefinitionIndex(3), 1)], } task 5, line 228: -//# run 0x1::Enums::poly_invalid2_mut_ref +//# run 0x6::Enums::poly_invalid2_mut_ref Error: Function execution failed with VMError: { major_status: VARIANT_TAG_MISMATCH, sub_status: None, - location: 0x1::Enums, + location: 0x6::Enums, indices: [], offsets: [(FunctionDefinitionIndex(1), 1)], } task 6, line 230: -//# run 0x1::Enums::poly_invalid2_imm_ref +//# run 0x6::Enums::poly_invalid2_imm_ref Error: Function execution failed with VMError: { major_status: VARIANT_TAG_MISMATCH, sub_status: None, - location: 0x1::Enums, + location: 0x6::Enums, indices: [], offsets: [(FunctionDefinitionIndex(2), 1)], } task 7, line 232: -//# run 0x1::Enums::poly_invalid2 +//# run 0x6::Enums::poly_invalid2 Error: Function execution failed with VMError: { major_status: VARIANT_TAG_MISMATCH, sub_status: None, - location: 0x1::Enums, + location: 0x6::Enums, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } task 8, line 234: -//# run 0x1::Enums::mono_invalid1_mut_ref +//# run 0x6::Enums::mono_invalid1_mut_ref Error: Function execution failed with VMError: { major_status: VARIANT_TAG_MISMATCH, sub_status: None, - location: 0x1::Enums, + location: 0x6::Enums, indices: [], offsets: [(FunctionDefinitionIndex(10), 1)], } task 9, line 236: -//# run 0x1::Enums::mono_invalid1_imm_ref +//# run 0x6::Enums::mono_invalid1_imm_ref Error: Function execution failed with VMError: { major_status: VARIANT_TAG_MISMATCH, sub_status: None, - location: 0x1::Enums, + location: 0x6::Enums, indices: [], offsets: [(FunctionDefinitionIndex(11), 1)], } task 10, line 238: -//# run 0x1::Enums::mono_invalid1 +//# run 0x6::Enums::mono_invalid1 Error: Function execution failed with VMError: { major_status: VARIANT_TAG_MISMATCH, sub_status: None, - location: 0x1::Enums, + location: 0x6::Enums, indices: [], offsets: [(FunctionDefinitionIndex(9), 1)], } task 11, line 240: -//# run 0x1::Enums::mono_invalid2_mut_ref +//# run 0x6::Enums::mono_invalid2_mut_ref Error: Function execution failed with VMError: { major_status: VARIANT_TAG_MISMATCH, sub_status: None, - location: 0x1::Enums, + location: 0x6::Enums, indices: [], offsets: [(FunctionDefinitionIndex(7), 1)], } task 12, line 242: -//# run 0x1::Enums::mono_invalid2_imm_ref +//# run 0x6::Enums::mono_invalid2_imm_ref Error: Function execution failed with VMError: { major_status: VARIANT_TAG_MISMATCH, sub_status: None, - location: 0x1::Enums, + location: 0x6::Enums, indices: [], offsets: [(FunctionDefinitionIndex(8), 1)], } task 13, line 244: -//# run 0x1::Enums::mono_invalid2 +//# run 0x6::Enums::mono_invalid2 Error: Function execution failed with VMError: { major_status: VARIANT_TAG_MISMATCH, sub_status: None, - location: 0x1::Enums, + location: 0x6::Enums, indices: [], offsets: [(FunctionDefinitionIndex(6), 1)], } diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/enum_variant_tag_unpack_invalid.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/enum_variant_tag_unpack_invalid.mvir index 4ea010e4a204a..77c51f044ba4a 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/enum_variant_tag_unpack_invalid.mvir +++ 
b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/enum_variant_tag_unpack_invalid.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.Enums { +module 0x6.Enums { enum PolyEnum { One { }, Two { x: T }, @@ -217,28 +217,28 @@ module 0x1.Enums { } } -//# run 0x1::Enums::all_valid +//# run 0x6::Enums::all_valid -//# run 0x1::Enums::poly_invalid1_mut_ref +//# run 0x6::Enums::poly_invalid1_mut_ref -//# run 0x1::Enums::poly_invalid1_imm_ref +//# run 0x6::Enums::poly_invalid1_imm_ref -//# run 0x1::Enums::poly_invalid1 +//# run 0x6::Enums::poly_invalid1 -//# run 0x1::Enums::poly_invalid2_mut_ref +//# run 0x6::Enums::poly_invalid2_mut_ref -//# run 0x1::Enums::poly_invalid2_imm_ref +//# run 0x6::Enums::poly_invalid2_imm_ref -//# run 0x1::Enums::poly_invalid2 +//# run 0x6::Enums::poly_invalid2 -//# run 0x1::Enums::mono_invalid1_mut_ref +//# run 0x6::Enums::mono_invalid1_mut_ref -//# run 0x1::Enums::mono_invalid1_imm_ref +//# run 0x6::Enums::mono_invalid1_imm_ref -//# run 0x1::Enums::mono_invalid1 +//# run 0x6::Enums::mono_invalid1 -//# run 0x1::Enums::mono_invalid2_mut_ref +//# run 0x6::Enums::mono_invalid2_mut_ref -//# run 0x1::Enums::mono_invalid2_imm_ref +//# run 0x6::Enums::mono_invalid2_imm_ref -//# run 0x1::Enums::mono_invalid2 +//# run 0x6::Enums::mono_invalid2 diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/enum_decl_monomorphic.exp b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/enum_decl_monomorphic.exp index a5d7e0983a397..570a42624f4af 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/enum_decl_monomorphic.exp +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/enum_decl_monomorphic.exp @@ -3,7 +3,7 @@ processed 3 tasks task 0, lines 1-33: //# print-bytecode // Move bytecode v7 -module 1.MonomorphicEnums { +module 6.MonomorphicEnums { enum EnumWithOneVariant { diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/enum_decl_monomorphic.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/enum_decl_monomorphic.mvir index 5aaee4a507c7d..5177598f3772e 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/enum_decl_monomorphic.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/enum_decl_monomorphic.mvir @@ -1,5 +1,5 @@ //# print-bytecode -module 0x1.MonomorphicEnums { +module 0x6.MonomorphicEnums { enum EnumWithOneVariant { One { } } @@ -33,7 +33,7 @@ module 0x1.MonomorphicEnums { } //# publish -module 0x1.MonomorphicEnums { +module 0x6.MonomorphicEnums { enum EnumWithOneVariant { One { } } @@ -67,8 +67,8 @@ module 0x1.MonomorphicEnums { } //# run -module 0x2.m { -import 0x1.MonomorphicEnums; +module 0x7.m { +import 0x6.MonomorphicEnums; entry foo() { let x: u64; label b0: diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/enum_decl_monomorphic_fail_unpack.exp b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/enum_decl_monomorphic_fail_unpack.exp index a81fde32caf57..548a1975c6f16 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/enum_decl_monomorphic_fail_unpack.exp +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/enum_decl_monomorphic_fail_unpack.exp @@ -2,14 +2,14 @@ processed 2 tasks task 0, lines 1-23: //# publish -Error: Unable to publish module 
'0000000000000000000000000000000000000000000000000000000000000001::MonomorphicEnums'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000006::MonomorphicEnums'. Got VMError: { major_status: UNSAFE_RET_UNUSED_VALUES_WITHOUT_DROP, sub_status: None, - location: 0x1::MonomorphicEnums, + location: 0x6::MonomorphicEnums, indices: [(FunctionDefinition, 0)], offsets: [(FunctionDefinitionIndex(0), 6)], } task 1, lines 25-35: //# run -Error: Dependency not provided for 0000000000000000000000000000000000000000000000000000000000000001.MonomorphicEnums +Error: Dependency not provided for 0000000000000000000000000000000000000000000000000000000000000006.MonomorphicEnums diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/enum_decl_monomorphic_fail_unpack.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/enum_decl_monomorphic_fail_unpack.mvir index 5121fd928a18a..1ccda56650e20 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/enum_decl_monomorphic_fail_unpack.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/enum_decl_monomorphic_fail_unpack.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.MonomorphicEnums { +module 0x6.MonomorphicEnums { enum EnumWithTwoVariants { One { }, Two { x: u64 } @@ -23,8 +23,8 @@ module 0x1.MonomorphicEnums { } //# run -module 0x2.m { -import 0x1.MonomorphicEnums; +module 0x7.m { +import 0x6.MonomorphicEnums; entry foo() { let x: u64; label b0: diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/enum_decl_monomorphic_mismatched_variants.exp b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/enum_decl_monomorphic_mismatched_variants.exp index 424c48fe43006..bca76469d3c57 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/enum_decl_monomorphic_mismatched_variants.exp +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/enum_decl_monomorphic_mismatched_variants.exp @@ -5,7 +5,7 @@ task 1, lines 26-36: Error: Function execution failed with VMError: { major_status: VARIANT_TAG_MISMATCH, sub_status: None, - location: 0x1::MonomorphicEnums, + location: 0x6::MonomorphicEnums, indices: [], offsets: [(FunctionDefinitionIndex(0), 11)], } diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/enum_decl_monomorphic_mismatched_variants.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/enum_decl_monomorphic_mismatched_variants.mvir index 66c89f2c445cd..8ae734c21a1bb 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/enum_decl_monomorphic_mismatched_variants.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/enum_decl_monomorphic_mismatched_variants.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.MonomorphicEnums { +module 0x6.MonomorphicEnums { enum EnumWithTwoVariants { One { }, Two { x: u64 } @@ -24,8 +24,8 @@ module 0x1.MonomorphicEnums { } //# run -module 0x2.m { -import 0x1.MonomorphicEnums; +module 0x7.m { +import 0x6.MonomorphicEnums; entry foo() { let x: u64; label b0: diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/mut_ref_update.exp b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/mut_ref_update.exp index 88a0cd62b78c0..cb63034528db2 100644 --- 
a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/mut_ref_update.exp +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/mut_ref_update.exp @@ -3,7 +3,7 @@ processed 3 tasks task 0, lines 3-50: //# print-bytecode // Move bytecode v7 -module 1.MonomorphicEnums { +module 6.MonomorphicEnums { enum EnumWithTwoVariants has drop { diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/mut_ref_update.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/mut_ref_update.mvir index 5adacaa279730..7dd87477d2b3f 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/mut_ref_update.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/mut_ref_update.mvir @@ -1,7 +1,7 @@ // Tests that mutable borrows from enum variants work correctly. //# print-bytecode -module 0x1.MonomorphicEnums { +module 0x6.MonomorphicEnums { enum EnumWithTwoVariants has drop { One { x: u64 }, Two { x: u64, y: u64 } @@ -50,7 +50,7 @@ module 0x1.MonomorphicEnums { } //# publish -module 0x1.MonomorphicEnums { +module 0x6.MonomorphicEnums { enum EnumWithTwoVariants has drop { One { x: u64 }, Two { x: u64, y: u64 } @@ -99,8 +99,8 @@ module 0x1.MonomorphicEnums { } //# run -module 0x2.m { -import 0x1.MonomorphicEnums; +module 0x7.m { +import 0x6.MonomorphicEnums; entry foo() { let x: MonomorphicEnums.EnumWithTwoVariants; let y: &u64; diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/mut_ref_update_variant.exp b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/mut_ref_update_variant.exp index 7350d286796e6..2522ca6dd4891 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/mut_ref_update_variant.exp +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/mut_ref_update_variant.exp @@ -3,7 +3,7 @@ processed 3 tasks task 0, lines 3-34: //# print-bytecode // Move bytecode v7 -module 1.MonomorphicEnums { +module 6.MonomorphicEnums { enum EnumWithTwoVariants has drop { diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/mut_ref_update_variant.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/mut_ref_update_variant.mvir index 8364394dee8a0..2ab13c6ea16ef 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/mut_ref_update_variant.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/mono/mut_ref_update_variant.mvir @@ -1,7 +1,7 @@ // Tests that when taking a mutable reference to an enum, the variant inside can be mutated.
//# print-bytecode -module 0x1.MonomorphicEnums { +module 0x6.MonomorphicEnums { enum EnumWithTwoVariants has drop { One { x: u64 }, Two { x: u64, y: u64 } @@ -34,7 +34,7 @@ module 0x1.MonomorphicEnums { } //# publish -module 0x1.MonomorphicEnums { +module 0x6.MonomorphicEnums { enum EnumWithTwoVariants has drop { One { x: u64 }, Two { x: u64, y: u64 } @@ -67,8 +67,8 @@ module 0x1.MonomorphicEnums { } //# run -module 0x2.m { -import 0x1.MonomorphicEnums; +module 0x8.m { +import 0x6.MonomorphicEnums; entry foo() { let x: MonomorphicEnums.EnumWithTwoVariants; let y: bool; diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/poly/basic_poly_enum.exp b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/poly/basic_poly_enum.exp index a8432bc48ba64..630d930a87363 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/poly/basic_poly_enum.exp +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/poly/basic_poly_enum.exp @@ -3,7 +3,7 @@ processed 3 tasks task 0, lines 1-24: //# print-bytecode // Move bytecode v7 -module 1.MonomorphicEnums { +module 6.MonomorphicEnums { enum EnumWithTwoVariants { diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/poly/basic_poly_enum.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/poly/basic_poly_enum.mvir index 843c3296f08f1..d80e2e0a19626 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/poly/basic_poly_enum.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/poly/basic_poly_enum.mvir @@ -1,5 +1,5 @@ //# print-bytecode -module 0x1.MonomorphicEnums { +module 0x6.MonomorphicEnums { enum EnumWithTwoVariants { One { }, Two { x: T } @@ -24,7 +24,7 @@ module 0x1.MonomorphicEnums { } //# publish -module 0x1.MonomorphicEnums { +module 0x6.MonomorphicEnums { enum EnumWithTwoVariants { One { }, Two { x: T } @@ -49,8 +49,8 @@ module 0x1.MonomorphicEnums { } //# run -module 0x2.m { -import 0x1.MonomorphicEnums; +module 0x7.m { +import 0x6.MonomorphicEnums; entry foo() { let x: u64; label b0: diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/poly/enum_decl_poly_mismatched_variants.exp b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/poly/enum_decl_poly_mismatched_variants.exp index dff9569efa68e..9c37f257b204d 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/poly/enum_decl_poly_mismatched_variants.exp +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/poly/enum_decl_poly_mismatched_variants.exp @@ -5,7 +5,7 @@ task 1, lines 26-36: Error: Function execution failed with VMError: { major_status: VARIANT_TAG_MISMATCH, sub_status: None, - location: 0x1::PolymorphicEnums, + location: 0x6::PolymorphicEnums, indices: [], offsets: [(FunctionDefinitionIndex(0), 11)], } diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/poly/enum_decl_poly_mismatched_variants.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/poly/enum_decl_poly_mismatched_variants.mvir index 0a10ebb63b4bc..ef8fdca745fee 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/poly/enum_decl_poly_mismatched_variants.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/poly/enum_decl_poly_mismatched_variants.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.PolymorphicEnums { +module 0x6.PolymorphicEnums { enum 
EnumWithTwoVariants { One { }, Two { x: T } @@ -24,8 +24,8 @@ module 0x1.PolymorphicEnums { } //# run -module 0x2.m { - import 0x1.PolymorphicEnums; +module 0x7.m { + import 0x6.PolymorphicEnums; entry foo() { let x: u64; label b0: diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/poly/poly_mut_ref_update.exp b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/poly/poly_mut_ref_update.exp index 1609996dfc501..9899743890fcf 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/poly/poly_mut_ref_update.exp +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/poly/poly_mut_ref_update.exp @@ -3,7 +3,7 @@ processed 3 tasks task 0, lines 3-50: //# print-bytecode // Move bytecode v7 -module 1.PolymorphicEnums { +module 6.PolymorphicEnums { enum EnumWithTwoVariants has drop { diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/poly/poly_mut_ref_update.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/poly/poly_mut_ref_update.mvir index d33b90f9b8626..0b247dcc5da2d 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/poly/poly_mut_ref_update.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/poly/poly_mut_ref_update.mvir @@ -1,7 +1,7 @@ // Tests that mutable borrows from enum variants work correctly. //# print-bytecode -module 0x1.PolymorphicEnums { +module 0x6.PolymorphicEnums { enum EnumWithTwoVariants has drop { One { x: T }, Two { x: T, y: u64 } @@ -50,7 +50,7 @@ module 0x1.PolymorphicEnums { } //# publish -module 0x1.PolymorphicEnums { +module 0x6.PolymorphicEnums { enum EnumWithTwoVariants has drop { One { x: T }, Two { x: T, y: u64 } @@ -99,8 +99,8 @@ module 0x1.PolymorphicEnums { } //# run -module 0x2.m { -import 0x1.PolymorphicEnums; +module 0x7.m { +import 0x6.PolymorphicEnums; entry foo() { let x: PolymorphicEnums.EnumWithTwoVariants; let y: &u64; diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/poly/poly_mut_ref_update_variant.exp b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/poly/poly_mut_ref_update_variant.exp index 973254dceca2c..7b46f4aa1feab 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/poly/poly_mut_ref_update_variant.exp +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/poly/poly_mut_ref_update_variant.exp @@ -3,7 +3,7 @@ processed 3 tasks task 0, lines 3-34: //# print-bytecode // Move bytecode v7 -module 1.PolymorphicEnums { +module 6.PolymorphicEnums { enum EnumWithTwoVariants has drop { diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/poly/poly_mut_ref_update_variant.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/poly/poly_mut_ref_update_variant.mvir index 60c48171b0ff6..9e65f36623449 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/poly/poly_mut_ref_update_variant.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/poly/poly_mut_ref_update_variant.mvir @@ -1,7 +1,7 @@ // Tests that when taking a mutable reference to an enum, the variant inside can be mutated.
//# print-bytecode -module 0x1.PolymorphicEnums { +module 0x6.PolymorphicEnums { enum EnumWithTwoVariants has drop { One { x: T }, Two { x: T, y: T } @@ -34,7 +34,7 @@ module 0x1.PolymorphicEnums { } //# publish -module 0x1.PolymorphicEnums { +module 0x6.PolymorphicEnums { enum EnumWithTwoVariants has drop { One { x: T }, Two { x: T, y: T } @@ -67,8 +67,8 @@ module 0x1.PolymorphicEnums { } //# run -module 0x2.m { -import 0x1.PolymorphicEnums; +module 0x7.m { +import 0x6.PolymorphicEnums; entry foo() { let x: PolymorphicEnums.EnumWithTwoVariants; let y: bool; diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/unpack_mut_ref_alias.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/unpack_mut_ref_alias.mvir index a7f3191d0e69b..9abe27d3a216e 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/unpack_mut_ref_alias.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/unpack_mut_ref_alias.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.o { +module 0x6.o { enum X has drop { One { x: u64, y: u64 }, Two { x: u64, y: u64}, @@ -60,6 +60,6 @@ module 0x1.o { } -//# run 0x1::o::test1 +//# run 0x6::o::test1 -//# run 0x1::o::test2 +//# run 0x6::o::test2 diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/variant_switch_loop.exp b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/variant_switch_loop.exp index fb83a96e61d7c..c33392e2bad06 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/variant_switch_loop.exp +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/variant_switch_loop.exp @@ -3,7 +3,7 @@ processed 3 tasks task 0, lines 1-22: //# print-bytecode // Move bytecode v7 -module 1.InfiniteSwith { +module 6.InfiniteSwith { enum EnumWithOneVariant { @@ -37,7 +37,7 @@ task 2, lines 48-58: Error: Function execution failed with VMError: { major_status: OUT_OF_GAS, sub_status: None, - location: 0x1::InfiniteSwith, + location: 0x6::InfiniteSwith, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/variant_switch_loop.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/variant_switch_loop.mvir index 07ab9642c5d6f..8ceeb01b02da0 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/enums/variant_switch_loop.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/enums/variant_switch_loop.mvir @@ -1,5 +1,5 @@ //# print-bytecode -module 0x1.InfiniteSwith { +module 0x6.InfiniteSwith { enum EnumWithOneVariant { One { } } @@ -22,7 +22,7 @@ module 0x1.InfiniteSwith { } //# publish -module 0x1.InfiniteSwith { +module 0x6.InfiniteSwith { enum EnumWithOneVariant { One { } } @@ -46,8 +46,8 @@ module 0x1.InfiniteSwith { //# run --gas-budget 10000 -module 0x2.m { -import 0x1.InfiniteSwith; +module 0x7.m { +import 0x6.InfiniteSwith; entry foo() { let x: InfiniteSwith.EnumWithOneVariant; label b0: diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u128.exp b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u128.exp index 836503a7365a7..4d9e42e584656 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u128.exp +++ 
b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u128.exp @@ -5,7 +5,7 @@ task 1, lines 20-29: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x2::m, + location: 0x7::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -15,7 +15,7 @@ task 2, lines 30-39: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x3::m, + location: 0x8::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -25,7 +25,7 @@ task 4, lines 60-69: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x5::m, + location: 0xa::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -35,7 +35,7 @@ task 5, lines 70-79: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x42::m, + location: 0x47::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -45,7 +45,7 @@ task 7, lines 99-108: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x7::m, + location: 0xc::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -55,7 +55,7 @@ task 8, lines 109-118: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x8::m, + location: 0xd::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -65,7 +65,7 @@ task 10, lines 139-148: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x10::m, + location: 0x15::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -75,7 +75,7 @@ task 11, lines 149-158: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x11::m, + location: 0x16::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -85,7 +85,7 @@ task 12, lines 161-170: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x12::m, + location: 0x17::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -95,7 +95,7 @@ task 14, lines 190-199: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x14::m, + location: 0x19::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -105,7 +105,7 @@ task 15, lines 200-209: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x15::m, + location: 0x1a::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -115,7 +115,7 @@ task 16, lines 210-219: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x16::m, + location: 0x1b::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u128.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u128.mvir index 6fc272a3533a3..dd64baabfb1a2 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u128.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u128.mvir @@ -1,5 +1,5 @@ //# run -module 0x1.m { +module 0x6.m { entry foo() { 
label b0: @@ -18,7 +18,7 @@ label b0: } //# run -module 0x2.m { +module 0x7.m { entry foo() { label b0: @@ -28,7 +28,7 @@ label b0: } //# run -module 0x3.m { +module 0x8.m { entry foo() { label b0: @@ -40,7 +40,7 @@ label b0: //# run -module 0x4.m { +module 0x9.m { entry foo() { label b0: @@ -58,7 +58,7 @@ label b0: } //# run -module 0x5.m { +module 0xa.m { entry foo() { label b0: @@ -68,7 +68,7 @@ label b0: } //# run -module 0x42.m { +module 0x47.m { entry foo() { label b0: @@ -80,7 +80,7 @@ label b0: //# run -module 0x6.m { +module 0xb.m { entry foo() { label b0: @@ -97,7 +97,7 @@ label b0: } //# run -module 0x7.m { +module 0xc.m { entry foo() { label b0: @@ -107,7 +107,7 @@ label b0: } //# run -module 0x8.m { +module 0xd.m { entry foo() { label b0: @@ -119,7 +119,7 @@ label b0: //# run -module 0x9.m { +module 0xe.m { entry foo() { label b0: @@ -137,7 +137,7 @@ label b0: } //# run -module 0x10.m { +module 0x15.m { entry foo() { label b0: @@ -147,7 +147,7 @@ label b0: } //# run -module 0x11.m { +module 0x16.m { entry foo() { label b0: @@ -159,7 +159,7 @@ label b0: //# run -module 0x12.m { +module 0x17.m { entry foo() { label b0: @@ -170,7 +170,7 @@ label b0: } //# run -module 0x13.m { +module 0x18.m { entry foo() { label b0: @@ -188,7 +188,7 @@ label b0: } //# run -module 0x14.m { +module 0x19.m { entry foo() { label b0: @@ -198,7 +198,7 @@ label b0: } //# run -module 0x15.m { +module 0x1a.m { entry foo() { label b0: @@ -208,7 +208,7 @@ label b0: } //# run -module 0x16.m { +module 0x1b.m { entry foo() { label b0: diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u16.exp b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u16.exp index 8845a3099df53..e7087cab6e300 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u16.exp +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u16.exp @@ -5,7 +5,7 @@ task 1, lines 20-29: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x2::m, + location: 0x7::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -15,7 +15,7 @@ task 2, lines 30-39: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x3::m, + location: 0x8::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -25,7 +25,7 @@ task 4, lines 60-69: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x5::m, + location: 0xa::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -45,7 +45,7 @@ task 7, lines 98-107: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x7::m, + location: 0xc::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -55,7 +55,7 @@ task 8, lines 108-117: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x8::m, + location: 0xd::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -65,7 +65,7 @@ task 10, lines 138-147: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x10::m, + location: 0x15::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -75,7 +75,7 @@ task 11, lines 148-157: Error: Function execution failed with VMError: { 
major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x11::m, + location: 0x16::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -85,7 +85,7 @@ task 12, lines 158-167: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x12::m, + location: 0x17::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -95,7 +95,7 @@ task 14, lines 187-196: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x14::m, + location: 0x19::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -105,7 +105,7 @@ task 15, lines 197-206: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x15::m, + location: 0x1a::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -115,7 +115,7 @@ task 16, lines 207-216: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x16::m, + location: 0x1b::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u16.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u16.mvir index d84cdb4aa9258..aa1619b31cb1b 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u16.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u16.mvir @@ -1,5 +1,5 @@ //# run -module 0x1.m { +module 0x6.m { entry foo() { label b0: @@ -18,7 +18,7 @@ label b0: } //# run -module 0x2.m { +module 0x7.m { entry foo() { label b0: @@ -28,7 +28,7 @@ label b0: } //# run -module 0x3.m { +module 0x8.m { entry foo() { label b0: @@ -40,7 +40,7 @@ label b0: //# run -module 0x4.m { +module 0x9.m { entry foo() { label b0: @@ -58,7 +58,7 @@ label b0: } //# run -module 0x5.m { +module 0xa.m { entry foo() { label b0: @@ -79,7 +79,7 @@ label b0: } //# run -module 0x6.m { +module 0xb.m { entry foo() { label b0: @@ -96,7 +96,7 @@ label b0: } //# run -module 0x7.m { +module 0xc.m { entry foo() { label b0: @@ -106,7 +106,7 @@ label b0: } //# run -module 0x8.m { +module 0xd.m { entry foo() { label b0: @@ -118,7 +118,7 @@ label b0: //# run -module 0x9.m { +module 0xe.m { entry foo() { label b0: @@ -136,7 +136,7 @@ label b0: } //# run -module 0x10.m { +module 0x15.m { entry foo() { label b0: @@ -146,7 +146,7 @@ label b0: } //# run -module 0x11.m { +module 0x16.m { entry foo() { label b0: @@ -156,7 +156,7 @@ label b0: } //# run -module 0x12.m { +module 0x17.m { entry foo() { label b0: @@ -167,7 +167,7 @@ label b0: } //# run -module 0x13.m { +module 0x18.m { entry foo() { label b0: @@ -185,7 +185,7 @@ label b0: } //# run -module 0x14.m { +module 0x19.m { entry foo() { label b0: @@ -195,7 +195,7 @@ label b0: } //# run -module 0x15.m { +module 0x1a.m { entry foo() { label b0: @@ -205,7 +205,7 @@ label b0: } //# run -module 0x16.m { +module 0x1b.m { entry foo() { label b0: diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u256.exp b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u256.exp index 836503a7365a7..6bdad25cbcef4 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u256.exp +++ 
b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u256.exp @@ -5,7 +5,7 @@ task 1, lines 20-29: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x2::m, + location: 0x7::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -15,7 +15,7 @@ task 2, lines 30-39: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x3::m, + location: 0x8::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -25,7 +25,7 @@ task 4, lines 60-69: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x5::m, + location: 0xa::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -45,7 +45,7 @@ task 7, lines 99-108: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x7::m, + location: 0xc::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -55,7 +55,7 @@ task 8, lines 109-118: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x8::m, + location: 0xd::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -65,7 +65,7 @@ task 10, lines 139-148: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x10::m, + location: 0x15::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -75,7 +75,7 @@ task 11, lines 149-158: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x11::m, + location: 0x16::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -85,7 +85,7 @@ task 12, lines 161-170: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x12::m, + location: 0x17::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -95,7 +95,7 @@ task 14, lines 190-199: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x14::m, + location: 0x19::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -105,7 +105,7 @@ task 15, lines 200-209: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x15::m, + location: 0x1a::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -115,7 +115,7 @@ task 16, lines 210-219: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x16::m, + location: 0x1b::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u256.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u256.mvir index b555ef55084cc..777fe8456e1ed 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u256.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u256.mvir @@ -1,5 +1,5 @@ //# run -module 0x1.m { +module 0x6.m { entry foo() { label b0: @@ -18,7 +18,7 @@ label b0: } //# run -module 0x2.m { +module 0x7.m { entry foo() { label b0: @@ -28,7 +28,7 @@ label b0: } //# run -module 0x3.m { +module 0x8.m { entry foo() { label b0: @@ -40,7 +40,7 @@ label b0: //# run -module 
0x4.m { +module 0x9.m { entry foo() { label b0: @@ -58,7 +58,7 @@ label b0: } //# run -module 0x5.m { +module 0xa.m { entry foo() { label b0: @@ -80,7 +80,7 @@ label b0: //# run -module 0x6.m { +module 0xb.m { entry foo() { label b0: @@ -97,7 +97,7 @@ label b0: } //# run -module 0x7.m { +module 0xc.m { entry foo() { label b0: @@ -107,7 +107,7 @@ label b0: } //# run -module 0x8.m { +module 0xd.m { entry foo() { label b0: @@ -119,7 +119,7 @@ label b0: //# run -module 0x9.m { +module 0xe.m { entry foo() { label b0: @@ -137,7 +137,7 @@ label b0: } //# run -module 0x10.m { +module 0x15.m { entry foo() { label b0: @@ -147,7 +147,7 @@ label b0: } //# run -module 0x11.m { +module 0x16.m { entry foo() { label b0: @@ -159,7 +159,7 @@ label b0: //# run -module 0x12.m { +module 0x17.m { entry foo() { label b0: @@ -170,7 +170,7 @@ label b0: } //# run -module 0x13.m { +module 0x18.m { entry foo() { label b0: @@ -188,7 +188,7 @@ label b0: } //# run -module 0x14.m { +module 0x19.m { entry foo() { label b0: @@ -198,7 +198,7 @@ label b0: } //# run -module 0x15.m { +module 0x1a.m { entry foo() { label b0: @@ -208,7 +208,7 @@ label b0: } //# run -module 0x16.m { +module 0x1b.m { entry foo() { label b0: diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u32.exp b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u32.exp index 8845a3099df53..e7087cab6e300 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u32.exp +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u32.exp @@ -5,7 +5,7 @@ task 1, lines 20-29: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x2::m, + location: 0x7::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -15,7 +15,7 @@ task 2, lines 30-39: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x3::m, + location: 0x8::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -25,7 +25,7 @@ task 4, lines 60-69: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x5::m, + location: 0xa::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -45,7 +45,7 @@ task 7, lines 98-107: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x7::m, + location: 0xc::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -55,7 +55,7 @@ task 8, lines 108-117: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x8::m, + location: 0xd::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -65,7 +65,7 @@ task 10, lines 138-147: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x10::m, + location: 0x15::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -75,7 +75,7 @@ task 11, lines 148-157: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x11::m, + location: 0x16::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -85,7 +85,7 @@ task 12, lines 158-167: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x12::m, + location: 0x17::m, indices: [], 
offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -95,7 +95,7 @@ task 14, lines 187-196: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x14::m, + location: 0x19::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -105,7 +105,7 @@ task 15, lines 197-206: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x15::m, + location: 0x1a::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -115,7 +115,7 @@ task 16, lines 207-216: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x16::m, + location: 0x1b::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u32.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u32.mvir index 0c3aace54b01b..84c840bfe1243 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u32.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u32.mvir @@ -1,5 +1,5 @@ //# run -module 0x1.m { +module 0x6.m { entry foo() { label b0: @@ -18,7 +18,7 @@ label b0: } //# run -module 0x2.m { +module 0x7.m { entry foo() { label b0: @@ -28,7 +28,7 @@ label b0: } //# run -module 0x3.m { +module 0x8.m { entry foo() { label b0: @@ -40,7 +40,7 @@ label b0: //# run -module 0x4.m { +module 0x9.m { entry foo() { label b0: @@ -58,7 +58,7 @@ label b0: } //# run -module 0x5.m { +module 0xa.m { entry foo() { label b0: @@ -79,7 +79,7 @@ label b0: } //# run -module 0x6.m { +module 0xb.m { entry foo() { label b0: @@ -96,7 +96,7 @@ label b0: } //# run -module 0x7.m { +module 0xc.m { entry foo() { label b0: @@ -106,7 +106,7 @@ label b0: } //# run -module 0x8.m { +module 0xd.m { entry foo() { label b0: @@ -118,7 +118,7 @@ label b0: //# run -module 0x9.m { +module 0xe.m { entry foo() { label b0: @@ -136,7 +136,7 @@ label b0: } //# run -module 0x10.m { +module 0x15.m { entry foo() { label b0: @@ -146,7 +146,7 @@ label b0: } //# run -module 0x11.m { +module 0x16.m { entry foo() { label b0: @@ -156,7 +156,7 @@ label b0: } //# run -module 0x12.m { +module 0x17.m { entry foo() { label b0: @@ -167,7 +167,7 @@ label b0: } //# run -module 0x13.m { +module 0x18.m { entry foo() { label b0: @@ -185,7 +185,7 @@ label b0: } //# run -module 0x14.m { +module 0x19.m { entry foo() { label b0: @@ -195,7 +195,7 @@ label b0: } //# run -module 0x15.m { +module 0x1a.m { entry foo() { label b0: @@ -205,7 +205,7 @@ label b0: } //# run -module 0x16.m { +module 0x1b.m { entry foo() { label b0: diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u64.exp b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u64.exp index 8845a3099df53..e7087cab6e300 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u64.exp +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u64.exp @@ -5,7 +5,7 @@ task 1, lines 20-29: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x2::m, + location: 0x7::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -15,7 +15,7 @@ task 2, lines 30-39: Error: 
Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x3::m, + location: 0x8::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -25,7 +25,7 @@ task 4, lines 60-69: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x5::m, + location: 0xa::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -45,7 +45,7 @@ task 7, lines 98-107: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x7::m, + location: 0xc::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -55,7 +55,7 @@ task 8, lines 108-117: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x8::m, + location: 0xd::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -65,7 +65,7 @@ task 10, lines 138-147: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x10::m, + location: 0x15::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -75,7 +75,7 @@ task 11, lines 148-157: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x11::m, + location: 0x16::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -85,7 +85,7 @@ task 12, lines 158-167: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x12::m, + location: 0x17::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -95,7 +95,7 @@ task 14, lines 187-196: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x14::m, + location: 0x19::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -105,7 +105,7 @@ task 15, lines 197-206: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x15::m, + location: 0x1a::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -115,7 +115,7 @@ task 16, lines 207-216: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x16::m, + location: 0x1b::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u64.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u64.mvir index 3d6dcfa32ad2a..a3899be8c6a2f 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u64.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u64.mvir @@ -1,5 +1,5 @@ //# run -module 0x1.m { +module 0x6.m { entry foo() { label b0: @@ -18,7 +18,7 @@ label b0: } //# run -module 0x2.m { +module 0x7.m { entry foo() { label b0: @@ -28,7 +28,7 @@ label b0: } //# run -module 0x3.m { +module 0x8.m { entry foo() { label b0: @@ -40,7 +40,7 @@ label b0: //# run -module 0x4.m { +module 0x9.m { entry foo() { label b0: @@ -58,7 +58,7 @@ label b0: } //# run -module 0x5.m { +module 0xa.m { entry foo() { label b0: @@ -79,7 +79,7 @@ label b0: } //# run -module 0x6.m { +module 0xb.m { entry foo() { label b0: @@ -96,7 +96,7 @@ label b0: } //# run -module 0x7.m { +module 0xc.m { entry foo() { label b0: @@ -106,7 +106,7 @@ label b0: } //# run -module 0x8.m { 
+module 0xd.m { entry foo() { label b0: @@ -118,7 +118,7 @@ label b0: //# run -module 0x9.m { +module 0xe.m { entry foo() { label b0: @@ -136,7 +136,7 @@ label b0: } //# run -module 0x10.m { +module 0x15.m { entry foo() { label b0: @@ -146,7 +146,7 @@ label b0: } //# run -module 0x11.m { +module 0x16.m { entry foo() { label b0: @@ -156,7 +156,7 @@ label b0: } //# run -module 0x12.m { +module 0x17.m { entry foo() { label b0: @@ -167,7 +167,7 @@ label b0: } //# run -module 0x13.m { +module 0x18.m { entry foo() { label b0: @@ -185,7 +185,7 @@ label b0: } //# run -module 0x14.m { +module 0x19.m { entry foo() { label b0: @@ -195,7 +195,7 @@ label b0: } //# run -module 0x15.m { +module 0x1a.m { entry foo() { label b0: @@ -205,7 +205,7 @@ label b0: } //# run -module 0x16.m { +module 0x1b.m { entry foo() { label b0: diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u8.exp b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u8.exp index c54c3461396c9..37c2089c9afc1 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u8.exp +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u8.exp @@ -5,7 +5,7 @@ task 1, lines 20-29: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x2::m, + location: 0x7::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -15,7 +15,7 @@ task 2, lines 30-39: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x3::m, + location: 0x8::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -25,7 +25,7 @@ task 4, lines 60-69: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x5::m, + location: 0xa::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -45,7 +45,7 @@ task 7, lines 98-107: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x7::m, + location: 0xc::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -55,7 +55,7 @@ task 8, lines 108-117: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x8::m, + location: 0xd::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -65,7 +65,7 @@ task 10, lines 138-147: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x10::m, + location: 0x15::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -75,7 +75,7 @@ task 11, lines 148-157: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x11::m, + location: 0x16::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -85,7 +85,7 @@ task 12, lines 158-167: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x12::m, + location: 0x17::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -95,7 +95,7 @@ task 14, lines 188-197: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x14::m, + location: 0x19::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -105,7 +105,7 @@ task 15, lines 198-207: Error: Function execution failed with VMError: { 
major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x15::m, + location: 0x1a::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -115,7 +115,7 @@ task 16, lines 208-217: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x16::m, + location: 0x1b::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u8.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u8.mvir index 0111987aa11fb..afb19cb552545 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u8.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/arithmetic_operators_u8.mvir @@ -1,5 +1,5 @@ //# run -module 0x1.m { +module 0x6.m { entry foo() { label b0: @@ -18,7 +18,7 @@ label b0: } //# run -module 0x2.m { +module 0x7.m { entry foo() { label b0: @@ -28,7 +28,7 @@ label b0: } //# run -module 0x3.m { +module 0x8.m { entry foo() { label b0: @@ -40,7 +40,7 @@ label b0: //# run -module 0x4.m { +module 0x9.m { entry foo() { label b0: @@ -58,7 +58,7 @@ label b0: } //# run -module 0x5.m { +module 0xa.m { entry foo() { label b0: @@ -79,7 +79,7 @@ label b0: } //# run -module 0x6.m { +module 0xb.m { entry foo() { label b0: @@ -96,7 +96,7 @@ label b0: } //# run -module 0x7.m { +module 0xc.m { entry foo() { label b0: @@ -106,7 +106,7 @@ label b0: } //# run -module 0x8.m { +module 0xd.m { entry foo() { label b0: @@ -118,7 +118,7 @@ label b0: //# run -module 0x9.m { +module 0xe.m { entry foo() { label b0: @@ -136,7 +136,7 @@ label b0: } //# run -module 0x10.m { +module 0x15.m { entry foo() { label b0: @@ -146,7 +146,7 @@ label b0: } //# run -module 0x11.m { +module 0x16.m { entry foo() { label b0: @@ -156,7 +156,7 @@ label b0: } //# run -module 0x12.m { +module 0x17.m { entry foo() { label b0: @@ -168,7 +168,7 @@ label b0: //# run -module 0x13.m { +module 0x18.m { entry foo() { label b0: @@ -186,7 +186,7 @@ label b0: } //# run -module 0x14.m { +module 0x19.m { entry foo() { label b0: @@ -196,7 +196,7 @@ label b0: } //# run -module 0x15.m { +module 0x1a.m { entry foo() { label b0: @@ -206,7 +206,7 @@ label b0: } //# run -module 0x16.m { +module 0x1b.m { entry foo() { label b0: diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/assign_struct_field.exp b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/assign_struct_field.exp index 1dd4bd705ba93..b6c034d0be133 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/assign_struct_field.exp +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/assign_struct_field.exp @@ -2,10 +2,10 @@ processed 2 tasks task 1, lines 27-49: //# run -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000042::m'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000043::m'. 
Got VMError: { major_status: CALL_BORROWED_MUTABLE_REFERENCE_ERROR, sub_status: None, - location: 0x42::m, + location: 0x43::m, indices: [(FunctionDefinition, 0)], offsets: [(FunctionDefinitionIndex(0), 19)], } diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/assign_struct_field.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/assign_struct_field.mvir index 5f12d7f9c0703..4901f6c288f93 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/assign_struct_field.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/assign_struct_field.mvir @@ -25,7 +25,7 @@ module 0x42.Tester { } //# run -module 0x42.m { +module 0x43.m { import 0x42.Tester; entry foo() { diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/bitwise_operators.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/bitwise_operators.mvir index bfa04d89657b3..e0139eebde31f 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/bitwise_operators.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/bitwise_operators.mvir @@ -1,5 +1,5 @@ //# run -module 0x1.m { +module 0x6.m { entry foo() { label b0: @@ -106,7 +106,7 @@ label b0: } //# run -module 0x3.m { +module 0x8.m { entry foo() { label b0: diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/casting_operators.exp b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/casting_operators.exp index 1dff1e7d8229a..48b640809eb6f 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/casting_operators.exp +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/casting_operators.exp @@ -5,7 +5,7 @@ task 6, lines 193-202: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x7::m, + location: 0xc::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } @@ -15,7 +15,7 @@ task 7, lines 204-213: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x8::m, + location: 0xd::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } @@ -25,7 +25,7 @@ task 8, lines 215-224: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x9::m, + location: 0xe::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } @@ -35,7 +35,7 @@ task 9, lines 226-235: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x10::m, + location: 0x15::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } @@ -45,7 +45,7 @@ task 10, lines 237-246: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x11::m, + location: 0x16::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } @@ -55,7 +55,7 @@ task 11, lines 248-257: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x12::m, + location: 0x17::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } @@ -65,7 +65,7 @@ task 12, lines 259-268: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x13::m, + location: 0x18::m, indices: [], offsets: 
[(FunctionDefinitionIndex(0), 1)], } @@ -75,7 +75,7 @@ task 13, lines 270-279: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x14::m, + location: 0x19::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } @@ -85,7 +85,7 @@ task 14, lines 281-290: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x15::m, + location: 0x1a::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } @@ -95,7 +95,7 @@ task 15, lines 292-301: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x16::m, + location: 0x1b::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } @@ -105,7 +105,7 @@ task 16, lines 303-312: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x17::m, + location: 0x1c::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } @@ -115,7 +115,7 @@ task 17, lines 314-325: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x18::m, + location: 0x1d::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } @@ -125,7 +125,7 @@ task 18, lines 326-335: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x19::m, + location: 0x1e::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } @@ -135,7 +135,7 @@ task 19, lines 337-346: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x20::m, + location: 0x25::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } @@ -145,7 +145,7 @@ task 20, lines 348-357: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x21::m, + location: 0x26::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } @@ -155,7 +155,7 @@ task 21, lines 359-368: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x22::m, + location: 0x27::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } @@ -165,7 +165,7 @@ task 22, lines 370-379: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x23::m, + location: 0x28::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } @@ -175,7 +175,7 @@ task 23, lines 381-392: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x24::m, + location: 0x29::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } @@ -185,7 +185,7 @@ task 24, lines 393-402: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x25::m, + location: 0x2a::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } @@ -195,7 +195,7 @@ task 25, lines 404-413: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x26::m, + location: 0x2b::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } @@ -205,7 +205,7 @@ task 26, lines 415-424: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x27::m, + location: 0x2c::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } @@ -215,7 +215,7 @@ task 27, lines 426-435: Error: Function execution failed with VMError: { major_status: 
ARITHMETIC_ERROR, sub_status: None, - location: 0x28::m, + location: 0x2d::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } @@ -225,7 +225,7 @@ task 28, lines 437-446: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x29::m, + location: 0x2e::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } @@ -235,7 +235,7 @@ task 29, lines 448-457: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x30::m, + location: 0x35::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } @@ -245,7 +245,7 @@ task 30, lines 459-468: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x31::m, + location: 0x36::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } @@ -255,7 +255,7 @@ task 31, lines 470-481: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x32::m, + location: 0x37::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } @@ -265,7 +265,7 @@ task 32, lines 482-491: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x33::m, + location: 0x38::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } @@ -275,7 +275,7 @@ task 33, lines 493-502: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x34::m, + location: 0x39::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } @@ -285,7 +285,7 @@ task 34, lines 504-513: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x35::m, + location: 0x3a::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } @@ -295,7 +295,7 @@ task 35, lines 515-524: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x36::m, + location: 0x3b::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } @@ -305,7 +305,7 @@ task 36, lines 526-535: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x37::m, + location: 0x3c::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } @@ -315,7 +315,7 @@ task 37, lines 537-546: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x38::m, + location: 0x3d::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 1)], } diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/casting_operators.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/casting_operators.mvir index dfa2dab4240d9..00645aa26fdc1 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/casting_operators.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/casting_operators.mvir @@ -1,5 +1,5 @@ //# run -module 0x1.m { +module 0x6.m { // Casting to u8. entry foo() { @@ -35,7 +35,7 @@ label b0: // Casting to u16. //# run -module 0x2.m { +module 0x7.m { entry foo() { label b0: @@ -70,7 +70,7 @@ label b0: // Casting to u32. //# run -module 0x3.m { +module 0x8.m { entry foo() { label b0: @@ -105,7 +105,7 @@ label b0: // Casting to u64. //# run -module 0x4.m { +module 0x9.m { entry foo() { label b0: @@ -130,7 +130,7 @@ label b0: // Casting to u128. 
//# run -module 0x5.m { +module 0xa.m { entry foo() { label b0: @@ -156,7 +156,7 @@ label b0: // Casting to u256. //# run -module 0x6.m { +module 0xb.m { entry foo() { label b0: @@ -191,7 +191,7 @@ label b0: // Casting to u8, overflowing. //# run -module 0x7.m { +module 0xc.m { entry foo() { label b0: @@ -202,7 +202,7 @@ label b0: }// check: ARITHMETIC_ERROR //# run -module 0x8.m { +module 0xd.m { entry foo() { label b0: @@ -213,7 +213,7 @@ label b0: }// check: ARITHMETIC_ERROR //# run -module 0x9.m { +module 0xe.m { entry foo() { label b0: @@ -224,7 +224,7 @@ label b0: }// check: ARITHMETIC_ERROR //# run -module 0x10.m { +module 0x15.m { entry foo() { label b0: @@ -235,7 +235,7 @@ label b0: }// check: ARITHMETIC_ERROR //# run -module 0x11.m { +module 0x16.m { entry foo() { label b0: @@ -246,7 +246,7 @@ label b0: }// check: ARITHMETIC_ERROR //# run -module 0x12.m { +module 0x17.m { entry foo() { label b0: @@ -257,7 +257,7 @@ label b0: }// check: ARITHMETIC_ERROR //# run -module 0x13.m { +module 0x18.m { entry foo() { label b0: @@ -268,7 +268,7 @@ label b0: }// check: ARITHMETIC_ERROR //# run -module 0x14.m { +module 0x19.m { entry foo() { label b0: @@ -279,7 +279,7 @@ label b0: }// check: ARITHMETIC_ERROR //# run -module 0x15.m { +module 0x1a.m { entry foo() { label b0: @@ -290,7 +290,7 @@ label b0: }// check: ARITHMETIC_ERROR //# run -module 0x16.m { +module 0x1b.m { entry foo() { label b0: @@ -301,7 +301,7 @@ label b0: }// check: ARITHMETIC_ERROR //# run -module 0x17.m { +module 0x1c.m { entry foo() { label b0: @@ -312,7 +312,7 @@ label b0: }// check: ARITHMETIC_ERROR //# run -module 0x18.m { +module 0x1d.m { entry foo() { label b0: @@ -324,7 +324,7 @@ label b0: // Casting to u64, overflowing. //# run -module 0x19.m { +module 0x1e.m { entry foo() { label b0: @@ -335,7 +335,7 @@ label b0: }// check: ARITHMETIC_ERROR //# run -module 0x20.m { +module 0x25.m { entry foo() { label b0: @@ -346,7 +346,7 @@ label b0: }// check: ARITHMETIC_ERROR //# run -module 0x21.m { +module 0x26.m { entry foo() { label b0: @@ -357,7 +357,7 @@ label b0: }// check: ARITHMETIC_ERROR //# run -module 0x22.m { +module 0x27.m { entry foo() { label b0: @@ -368,7 +368,7 @@ label b0: }// check: ARITHMETIC_ERROR //# run -module 0x23.m { +module 0x28.m { entry foo() { label b0: @@ -379,7 +379,7 @@ label b0: }// check: ARITHMETIC_ERROR //# run -module 0x24.m { +module 0x29.m { entry foo() { label b0: @@ -391,7 +391,7 @@ label b0: // Casting to u16, overflowing. //# run -module 0x25.m { +module 0x2a.m { entry foo() { label b0: @@ -402,7 +402,7 @@ label b0: }// check: ARITHMETIC_ERROR //# run -module 0x26.m { +module 0x2b.m { entry foo() { label b0: @@ -413,7 +413,7 @@ label b0: }// check: ARITHMETIC_ERROR //# run -module 0x27.m { +module 0x2c.m { entry foo() { label b0: @@ -424,7 +424,7 @@ label b0: }// check: ARITHMETIC_ERROR //# run -module 0x28.m { +module 0x2d.m { entry foo() { label b0: @@ -435,7 +435,7 @@ label b0: }// check: ARITHMETIC_ERROR //# run -module 0x29.m { +module 0x2e.m { entry foo() { label b0: @@ -446,7 +446,7 @@ label b0: }// check: ARITHMETIC_ERROR //# run -module 0x30.m { +module 0x35.m { entry foo() { label b0: @@ -457,7 +457,7 @@ label b0: }// check: ARITHMETIC_ERROR //# run -module 0x31.m { +module 0x36.m { entry foo() { label b0: @@ -468,7 +468,7 @@ label b0: }// check: ARITHMETIC_ERROR //# run -module 0x32.m { +module 0x37.m { entry foo() { label b0: @@ -480,7 +480,7 @@ label b0: // Casting to u32, overflowing. 
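The address renumbering in these .mvir tests follows one mechanical rule: every transient //# run module is shifted up by 0x5 (0x1 becomes 0x6, 0x10 becomes 0x15, 0x38 becomes 0x3d), presumably to keep throwaway modules clear of the low framework addresses, while published fixtures at 0x42 move to the next free address (0x43, 0x44). Note that import 0x1.vector; stays untouched below, since 0x1 remains the standard library. A minimal Rust sketch of the mapping follows; shift_run_address is an illustration of the observed pattern, not a function from this repository.

fn shift_run_address(addr: u64) -> u64 {
    // Uniform +0x5 shift applied to every `//# run` module address.
    addr + 0x5
}

fn main() {
    for old in [0x1u64, 0x10, 0x16, 0x20, 0x38] {
        println!("0x{:x} -> 0x{:x}", old, shift_run_address(old));
    }
    // 0x1 -> 0x6, 0x10 -> 0x15, 0x16 -> 0x1b, 0x20 -> 0x25, 0x38 -> 0x3d
}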
//# run -module 0x33.m { +module 0x38.m { entry foo() { label b0: @@ -491,7 +491,7 @@ label b0: }// check: ARITHMETIC_ERROR //# run -module 0x34.m { +module 0x39.m { entry foo() { label b0: @@ -502,7 +502,7 @@ label b0: }// check: ARITHMETIC_ERROR //# run -module 0x35.m { +module 0x3a.m { entry foo() { label b0: @@ -513,7 +513,7 @@ label b0: }// check: ARITHMETIC_ERROR //# run -module 0x36.m { +module 0x3b.m { entry foo() { label b0: @@ -524,7 +524,7 @@ label b0: }// check: ARITHMETIC_ERROR //# run -module 0x37.m { +module 0x3c.m { entry foo() { label b0: @@ -535,7 +535,7 @@ label b0: }// check: ARITHMETIC_ERROR //# run -module 0x38.m { +module 0x3d.m { entry foo() { label b0: diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/comparison_operators.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/comparison_operators.mvir index 6a296632c7f97..bd42710996b62 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/comparison_operators.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/comparison_operators.mvir @@ -1,5 +1,5 @@ //# run -module 0x1.m { +module 0x6.m { entry foo() { label b0: @@ -19,7 +19,7 @@ label b0: } //# run -module 0x2.m { +module 0x7.m { entry foo() { label b0: @@ -39,7 +39,7 @@ label b0: } //# run -module 0x3.m { +module 0x8.m { entry foo() { label b0: @@ -59,7 +59,7 @@ label b0: } //# run -module 0x4.m { +module 0x9.m { entry foo() { label b0: @@ -79,7 +79,7 @@ label b0: } //# run -module 0x5.m { +module 0xa.m { entry foo() { label b0: @@ -99,7 +99,7 @@ label b0: } //# run -module 0x6.m { +module 0xb.m { entry foo() { label b0: diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/deref_value.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/deref_value.mvir index a0836820812b8..2018ecee9f715 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/deref_value.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/deref_value.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.A { +module 0x6.A { struct T has drop {f: u64} public new(f: u64): Self.T { @@ -21,7 +21,7 @@ module 0x1.A { //# run module 0x42.m { -import 0x1.A; +import 0x6.A; entry foo() { let x: A.T; diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/deref_value_nested.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/deref_value_nested.mvir index f2fdef33fb521..eb86bc680a19a 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/deref_value_nested.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/deref_value_nested.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.B { +module 0x6.B { struct T has drop {g: u64} public new(g: u64): Self.T { @@ -19,8 +19,8 @@ module 0x1.B { } //# publish -module 0x1.A { - import 0x1.B; +module 0x7.A { + import 0x6.B; struct T has drop {f: B.T} @@ -40,8 +40,8 @@ module 0x1.A { //# run module 0x42.m { -import 0x1.A; -import 0x1.B; +import 0x7.A; +import 0x6.B; entry foo() { let b: B.T; let x: A.T; diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/equality_reference_value.exp b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/equality_reference_value.exp index d045a82ca3aba..0efd7445c235a 100644 --- 
a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/equality_reference_value.exp +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/equality_reference_value.exp @@ -1,15 +1,15 @@ processed 3 tasks task 1, line 15: -//# run 0x1::test::test --args 0 +//# run 0x6::test::test --args 0 mutable inputs after call: local#0: 0 task 2, line 17: -//# run 0x1::test::test --args 1 +//# run 0x6::test::test --args 1 Error: Function execution failed with VMError: { major_status: ABORTED, sub_status: Some(42), - location: 0x1::test, + location: 0x6::test, indices: [], offsets: [(FunctionDefinitionIndex(0), 10)], } diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/equality_reference_value.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/equality_reference_value.mvir index 83b4bbfa1cdfe..b7e7c7aa05bbd 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/equality_reference_value.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/equality_reference_value.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.test { +module 0x6.test { public test(external: &mut u64) { let local: u64; let local_ref: &mut u64; @@ -12,6 +12,6 @@ module 0x1.test { } } -//# run 0x1::test::test --args 0 +//# run 0x6::test::test --args 0 -//# run 0x1::test::test --args 1 +//# run 0x6::test::test --args 1 diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/field_reads.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/field_reads.mvir index 066dbdfb35272..ba575c82694b5 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/field_reads.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/field_reads.mvir @@ -34,7 +34,7 @@ module 0x42.VTest { //# publish -module 0x42.RTest { +module 0x43.RTest { struct T{fint: u64, fv: bool} public new(x: u64, y: bool): Self.T { @@ -77,8 +77,8 @@ module 0x42.RTest { //# run -module 0x42.m { -import 0x42.RTest; +module 0x44.m { +import 0x43.RTest; import 0x42.VTest; entry foo() { diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/field_writes.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/field_writes.mvir index 8f024050c9229..32a2609aba8f8 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/field_writes.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/field_writes.mvir @@ -42,7 +42,7 @@ module 0x42.VTest { //# publish -module 0x42.RTest { +module 0x43.RTest { struct T{fint: u64, fr: bool} public new(x: u64, y: bool): Self.T { @@ -94,9 +94,9 @@ module 0x42.RTest { } //# run -module 0x42.m { +module 0x44.m { -import 0x42.RTest; +import 0x43.RTest; import 0x42.VTest; entry foo() { diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/inaccessible_borrowed_local.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/inaccessible_borrowed_local.mvir index 8f450d189cda1..d67840859c804 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/inaccessible_borrowed_local.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/inaccessible_borrowed_local.mvir @@ -1,5 +1,5 @@ //# run -module 0x1.m { +module 0x6.m { entry foo() { let x: 
vector; @@ -16,7 +16,7 @@ entry foo() { } //# run -module 0x2.m { +module 0x7.m { entry foo() { let x: vector; diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/pack_unpack.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/pack_unpack.mvir index bf604e969e9e1..59a655cb37330 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/pack_unpack.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/pack_unpack.mvir @@ -18,7 +18,7 @@ module 0x42.Test { } //# run -module 0x42.m { +module 0x43.m { import 0x42.Test; entry foo() { let t: Test.T; diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/shift_operators.exp b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/shift_operators.exp index e11e90d9beea6..3574f2256dea4 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/shift_operators.exp +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/shift_operators.exp @@ -5,7 +5,7 @@ task 0, lines 1-9: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x1::m, + location: 0x6::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -15,7 +15,7 @@ task 1, lines 11-20: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x2::m, + location: 0x7::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -25,7 +25,7 @@ task 2, lines 21-30: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x3::m, + location: 0x8::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -35,7 +35,7 @@ task 3, lines 31-40: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x4::m, + location: 0x9::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -45,7 +45,7 @@ task 4, lines 41-50: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x5::m, + location: 0xa::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -55,7 +55,7 @@ task 5, lines 51-60: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x6::m, + location: 0xb::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -65,7 +65,7 @@ task 6, lines 61-70: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x7::m, + location: 0xc::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -75,7 +75,7 @@ task 7, lines 71-80: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x8::m, + location: 0xd::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -85,7 +85,7 @@ task 8, lines 81-90: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x9::m, + location: 0xe::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } @@ -95,7 +95,7 @@ task 9, lines 91-102: Error: Function execution failed with VMError: { major_status: ARITHMETIC_ERROR, sub_status: None, - location: 0x10::m, + location: 0x15::m, indices: [], offsets: [(FunctionDefinitionIndex(0), 2)], } diff --git 
a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/shift_operators.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/shift_operators.mvir index f68a773376033..09246106d3449 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/shift_operators.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/shift_operators.mvir @@ -1,5 +1,5 @@ //# run -module 0x1.m { +module 0x6.m { // Number of bits shifted >= total number of bits in the number. entry foo() { label b0: @@ -9,7 +9,7 @@ label b0: } //# run -module 0x2.m { +module 0x7.m { entry foo() { label b0: @@ -19,7 +19,7 @@ label b0: } //# run -module 0x3.m { +module 0x8.m { entry foo() { label b0: @@ -29,7 +29,7 @@ label b0: } //# run -module 0x4.m { +module 0x9.m { entry foo() { label b0: @@ -39,7 +39,7 @@ label b0: } //# run -module 0x5.m { +module 0xa.m { entry foo() { label b0: @@ -49,7 +49,7 @@ label b0: } //# run -module 0x6.m { +module 0xb.m { entry foo() { label b0: @@ -59,7 +59,7 @@ label b0: } //# run -module 0x7.m { +module 0xc.m { entry foo() { label b0: @@ -69,7 +69,7 @@ label b0: } //# run -module 0x8.m { +module 0xd.m { entry foo() { label b0: @@ -79,7 +79,7 @@ label b0: } //# run -module 0x9.m { +module 0xe.m { entry foo() { label b0: @@ -89,7 +89,7 @@ label b0: } //# run -module 0x10.m { +module 0x15.m { entry foo() { label b0: @@ -101,7 +101,7 @@ label b0: // Shifting 0 results in 0. //# run -module 0x11.m { +module 0x16.m { entry foo() { label b0: @@ -126,7 +126,7 @@ label b0: // Shifting by 0 bits results in the same number. //# run -module 0x12.m { +module 0x17.m { entry foo() { label b0: @@ -151,7 +151,7 @@ label b0: // shl/shr by 1 equivalent to mul/div by 2. //# run -module 0x13.m { +module 0x18.m { entry foo() { label b0: @@ -176,7 +176,7 @@ label b0: // Underflowing results in 0. //# run -module 0x14.m { +module 0x19.m { entry foo() { label b0: @@ -194,7 +194,7 @@ label b0: // Overflowing results are truncated. //# run -module 0x15.m { +module 0x1a.m { entry foo() { label b0: @@ -212,7 +212,7 @@ label b0: // Some random tests. 
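Each renumbered .mvir source is paired with a regenerated .exp baseline because the harness prints the failing module's address into the expected output: once module 0x2.m becomes 0x7.m, every location: 0x2::m line must become location: 0x7::m, and publish errors must name the new zero-padded 32-byte address (...0042 becomes ...0043). The sketch below shows that coupling with an illustrative error type and renderer, not the harness's real API.

struct VmErrorInfo {
    major_status: &'static str,
    address: u64,
    module: &'static str,
}

fn render(e: &VmErrorInfo) -> String {
    // The .exp baseline stores this rendered text, so renumbering a module
    // invalidates the old expectation even when the error itself is unchanged.
    format!("major_status: {}, location: 0x{:x}::{}", e.major_status, e.address, e.module)
}

fn main() {
    let e = VmErrorInfo { major_status: "ARITHMETIC_ERROR", address: 0x7, module: "m" };
    assert_eq!(render(&e), "major_status: ARITHMETIC_ERROR, location: 0x7::m");
}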
//# run -module 0x16.m { +module 0x1b.m { entry foo() { label b0: diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/vec_copy_nested.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/vec_copy_nested.mvir index c207849e00035..cf03f472103c0 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/vec_copy_nested.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/instructions/vec_copy_nested.mvir @@ -98,7 +98,7 @@ module 0x42.DeepCopy { } //# run -module 0x42.m { +module 0x43.m { import 0x42.DeepCopy; entry foo() { diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/module_publishing/publish_module_and_use.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/module_publishing/publish_module_and_use.mvir index aa7c37474cf5c..ba2e172bb722e 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/module_publishing/publish_module_and_use.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/module_publishing/publish_module_and_use.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.Coin { +module 0x5.Coin { struct Coin { value: u64 } public value(c: &Self.Coin): u64 { label b0: @@ -18,8 +18,8 @@ module 0x1.Coin { } //# publish -module 0x42.MoneyHolder { - import 0x1.Coin; +module 0x43.MoneyHolder { + import 0x5.Coin; struct T { money: Coin.Coin } @@ -48,9 +48,9 @@ module 0x42.MoneyHolder { } //# run -module 0x42.m { -import 0x42.MoneyHolder; -import 0x1.Coin; +module 0x44.m { +import 0x43.MoneyHolder; +import 0x5.Coin; entry foo() { let coin: Coin.Coin; diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/module_publishing/publish_module_and_use_2.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/module_publishing/publish_module_and_use_2.mvir index d2e5c0a1624ee..6f8edc96c08dc 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/module_publishing/publish_module_and_use_2.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/module_publishing/publish_module_and_use_2.mvir @@ -18,7 +18,7 @@ module 0x42.M { } //# run -module 0x42.m { +module 0x43.m { import 0x42.M; entry foo() { diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/module_publishing/publish_module_and_use_3.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/module_publishing/publish_module_and_use_3.mvir index 6ddeb8dba1e91..d65c70d680025 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/module_publishing/publish_module_and_use_3.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/module_publishing/publish_module_and_use_3.mvir @@ -13,7 +13,7 @@ module 0x42.M { } //# run -module 0x42.m { +module 0x43.m { import 0x42.M; entry foo() { diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/module_publishing/publish_module_and_use_with_multiple_return_values.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/module_publishing/publish_module_and_use_with_multiple_return_values.mvir index d4047b58f7a96..236f32f502d0c 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/module_publishing/publish_module_and_use_with_multiple_return_values.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/module_publishing/publish_module_and_use_with_multiple_return_values.mvir @@ -7,7 +7,7 @@ module 0x42.M { } //# run 
-module 0x42.m { +module 0x43.m { import 0x42.M; entry foo() { diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/module_publishing/publish_two_modules.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/module_publishing/publish_two_modules.mvir index 8f0d9387075bd..b133d087cbfa3 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/module_publishing/publish_two_modules.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/module_publishing/publish_two_modules.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.Coin { +module 0x5.Coin { struct Coin { value: u64 } public value(c: &Self.Coin): u64 { label b0: @@ -19,7 +19,7 @@ module 0x1.Coin { //# publish module 0x42.MoneyHolder { - import 0x1.Coin; + import 0x5.Coin; struct T { money: Coin.Coin } @@ -48,7 +48,7 @@ module 0x42.MoneyHolder { } //# publish -module 0x42.Bar { +module 0x43.Bar { struct T has drop {baz: u64} public new(m: u64): Self.T { label b0: @@ -63,10 +63,10 @@ module 0x42.Bar { } //# run -module 0x42.m { +module 0x44.m { import 0x42.MoneyHolder; -import 0x42.Bar; -import 0x1.Coin; +import 0x43.Bar; +import 0x5.Coin; entry foo() { let coin: Coin.Coin; diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/module_publishing/use_modules_published.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/module_publishing/use_modules_published.mvir index dcdebd065d36e..68e3faee41d8b 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/module_publishing/use_modules_published.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/module_publishing/use_modules_published.mvir @@ -31,7 +31,7 @@ module 0x44.M { } //# run -module 0x42.m { +module 0x45.m { import 0x42.M; entry foo() { @@ -42,7 +42,7 @@ label b0: } //# publish -module 0x45.M { +module 0x46.M { public baz(): u64 { label b0: return 9; @@ -50,7 +50,7 @@ module 0x45.M { } //# run -module 0x42.m { +module 0x47.m { import 0x42.M as M1; import 0x43.M as M2; import 0x44.M as M3; diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/native_functions/vector_module.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/native_functions/vector_module.mvir index a34e162230ca6..83102440b63f1 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/native_functions/vector_module.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/native_functions/vector_module.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.Coin { +module 0x5.Coin { struct Coin has store { value: u64 } public value(c: &Self.Coin): u64 { label b0: @@ -19,7 +19,7 @@ module 0x1.Coin { //# publish module 0x42.M { - import 0x1.Coin; + import 0x5.Coin; import 0x1.vector; struct Coins has key { f: vector<Coin.Coin> } @@ -56,9 +56,9 @@ module 0x42.M { } //# publish -module 0x42.Test { +module 0x43.Test { import 0x42.M; - import 0x1.Coin; + import 0x5.Coin; test() { let coins: M.Coins; label b0: @@ -69,4 +69,4 @@ module 0x42.Test { } } -//# run 0x42::Test::test +//# run 0x43::Test::test diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/recursion/direct_recursion.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/recursion/direct_recursion.mvir index e08ad04383638..aecb629fb3c1d 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/recursion/direct_recursion.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/recursion/direct_recursion.mvir
@@ -26,7 +26,7 @@ module 0x42.Math { } //# run -module 0x42.m { +module 0x43.m { import 0x42.Math; entry foo() { diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/recursion/mutual_recursion.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/recursion/mutual_recursion.mvir index d10b4ba60731a..25e80f542b751 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/recursion/mutual_recursion.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/recursion/mutual_recursion.mvir @@ -25,7 +25,7 @@ module 0x42.Math { } //# run -module 0x42.m { +module 0x43.m { import 0x42.Math; entry foo() { diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/recursion/runtime_type_deeply_nested.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/recursion/runtime_type_deeply_nested.mvir index 66a7ce0076d3a..4acff3c9076af 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/recursion/runtime_type_deeply_nested.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/recursion/runtime_type_deeply_nested.mvir @@ -70,7 +70,7 @@ module 0x42.M { // check: "Keep(EXECUTED)" //# run -module 0x1.m { +module 0x43.m { import 0x42.M; entry foo() { @@ -81,7 +81,7 @@ label b0: } //# run -module 0x2.m { +module 0x44.m { import 0x42.M; entry foo() { diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/references/borrow_in_loop.exp b/external-crates/move/crates/move-vm-transactional-tests/tests/references/borrow_in_loop.exp index 12d7f4b3fe6bb..5fdcd3de1af67 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/references/borrow_in_loop.exp +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/references/borrow_in_loop.exp @@ -22,10 +22,10 @@ Error: Function execution failed with VMError: { task 2, lines 27-45: //# run --gas-budget 100000 -Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000042::m'. Got VMError: { +Error: Unable to publish module '0000000000000000000000000000000000000000000000000000000000000043::m'. 
Got VMError: { major_status: STLOC_UNSAFE_TO_DESTROY_ERROR, sub_status: None, - location: 0x42::m, + location: 0x43::m, indices: [(FunctionDefinition, 0)], offsets: [(FunctionDefinitionIndex(0), 5)], } diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/references/borrow_in_loop.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/references/borrow_in_loop.mvir index fd08cbacccb14..e7fef2669e1b2 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/references/borrow_in_loop.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/references/borrow_in_loop.mvir @@ -25,7 +25,7 @@ public foo() { //# run 0x42::m::foo --gas-budget 100000 //# run --gas-budget 100000 -module 0x42.m { +module 0x43.m { entry foo() { let x: u64; diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/references/deref_move_module_ok.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/references/deref_move_module_ok.mvir index b4e1654487247..70903e9a9358c 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/references/deref_move_module_ok.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/references/deref_move_module_ok.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.M { +module 0x6.M { struct T has drop {v : u64} public new(v: u64): Self.T { @@ -18,7 +18,7 @@ module 0x1.M { //# run module 0x42.m { -import 0x1.M; +import 0x6.M; entry foo() { let x: M.T; diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/references/mixed_lvalue.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/references/mixed_lvalue.mvir index 73af8a2084f50..269ae5edbd2cb 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/references/mixed_lvalue.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/references/mixed_lvalue.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.A { +module 0x6.A { struct S { f: u64 } @@ -47,7 +47,7 @@ module 0x1.A { //# run module 0x42.m { -import 0x1.A; +import 0x6.A; entry foo() { label b0: diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/references/mutate_move_ok.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/references/mutate_move_ok.mvir index e503936458f06..f1579072bc286 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/references/mutate_move_ok.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/references/mutate_move_ok.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.B { +module 0x6.B { struct T has drop {g: u64} public new(v: u64): Self.T { @@ -26,7 +26,7 @@ module 0x1.B { //# run module 0x42.m { -import 0x1.B; +import 0x6.B; entry foo() { let x: B.T; diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/add_function_calls.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/add_function_calls.mvir index 638dc3d0193f5..e2413c7a8d5fd 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/add_function_calls.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/add_function_calls.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.M { +module 0x5.M { public foo(u: u64): u64 * u64 * u64 { let twice: u64; let quadruple: u64; @@ -17,8 +17,8 @@ module 0x1.M { //# run -module 0x42.m { -import 0x1.M; +module 0x43.m { +import 0x5.M; entry 
foo() { let x: u64; diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/assign_function_call.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/assign_function_call.mvir index 23de157015730..eeb8dbf86041d 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/assign_function_call.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/assign_function_call.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.A { +module 0x5.A { public ones_tens(f: u64): u64 * u64 { let k: u64; let m: u64; @@ -12,8 +12,8 @@ module 0x1.A { //# run -module 0x42.m { -import 0x1.A; +module 0x43.m { +import 0x5.A; entry foo() { let x: u64; diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/binop_function_calls_as_args.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/binop_function_calls_as_args.mvir index a5ab28c31b8da..43e479849cdb2 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/binop_function_calls_as_args.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/binop_function_calls_as_args.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.A { +module 0x5.A { public div_by_two(v: u64): bool { label b0: jump_if (move(v) % 2 == 0) b2; @@ -27,7 +27,7 @@ module 0x1.A { //# run module 0x42.m { -import 0x1.A; +import 0x5.A; entry foo() { let x: u64; diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/function_composition.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/function_composition.mvir index 9e2d582a416ad..1fcb87dc09414 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/function_composition.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/function_composition.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.Test { +module 0x5.Test { public foo(v: u64): u64 * u64 { let one_less: u64; let one_more: u64; @@ -19,8 +19,8 @@ module 0x1.Test { //# run -module 0x42.m { -import 0x1.Test; +module 0x43.m { +import 0x5.Test; entry foo() { let x: u64; diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/many_function_calls_as_args.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/many_function_calls_as_args.mvir index 7c91b3b266b60..8cf8459454da3 100644 --- a/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/many_function_calls_as_args.mvir +++ b/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/many_function_calls_as_args.mvir @@ -1,5 +1,5 @@ //# publish -module 0x1.A { +module 0x5.A { struct T has drop {value: u64, even: bool} public new(): Self.T { @@ -40,8 +40,8 @@ module 0x1.A { //# run -module 0x42.m { -import 0x1.A; +module 0x43.m { +import 0x5.A; entry foo() { let x: A.T; diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/multiple_composite_functions.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/multiple_composite_functions.mvir index 95159be8b192e..7f05f023aeb78 100644 --- 
a/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/multiple_composite_functions.mvir
+++ b/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/multiple_composite_functions.mvir
@@ -1,5 +1,5 @@
 //# publish
-module 0x1.A {
+module 0x5.A {
     struct T has drop {value: u64}
 
     public new(v: u64): Self.T {
@@ -22,8 +22,8 @@ module 0x1.A {
 
 //# run
-module 0x42.m {
-import 0x1.A;
+module 0x43.m {
+import 0x5.A;
 
 entry foo() {
     let z: u64;
diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/pass_args_on_stack_as_expressions.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/pass_args_on_stack_as_expressions.mvir
index fc5e86cf6ef83..e5337fbeedf26 100644
--- a/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/pass_args_on_stack_as_expressions.mvir
+++ b/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/pass_args_on_stack_as_expressions.mvir
@@ -1,5 +1,5 @@
 //# publish
-module 0x1.A {
+module 0x5.A {
     public sum(o: u64, k: u64, t: u64): u64 {
     label b0:
         return move(o) + move(k) + move(t);
@@ -8,8 +8,8 @@ module 0x1.A {
 
 //# run
-module 0x42.m {
-import 0x1.A;
+module 0x43.m {
+import 0x5.A;
 
 entry foo() {
     let s: u64;
diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/pop_weird.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/pop_weird.mvir
index ae5a2d1e0ad84..c3f603b9e3c65 100644
--- a/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/pop_weird.mvir
+++ b/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/pop_weird.mvir
@@ -1,5 +1,5 @@
 //# publish
-module 0x1.A {
+module 0x5.A {
     three(): u64 * u64 * u64 {
     label b0:
         return 0, 1, 2;
@@ -16,8 +16,8 @@ module 0x1.A {
 
 //# run
-module 0x42.m {
-import 0x1.A;
+module 0x43.m {
+import 0x5.A;
 
 entry foo() {
 label b0:
diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/push_args_before_function_call.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/push_args_before_function_call.mvir
index 3e0ad21708063..6cafc621c2358 100644
--- a/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/push_args_before_function_call.mvir
+++ b/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/push_args_before_function_call.mvir
@@ -1,5 +1,5 @@
 //# publish
-module 0x1.A {
+module 0x5.A {
     public push_u64(): u64 {
     label b0:
         return 42;
@@ -22,9 +22,9 @@ module 0x1.A {
 
 //# run
-module 0x42.m {
+module 0x43.m {
 
-import 0x1.A;
+import 0x5.A;
 
 entry foo() {
     let ans: u64;
diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/push_args_before_function_composition.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/push_args_before_function_composition.mvir
index 98e21227bf0a0..8c8043bb320dc 100644
--- a/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/push_args_before_function_composition.mvir
+++ b/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/push_args_before_function_composition.mvir
@@ -1,5 +1,5 @@
 //# publish
-module 0x1.A {
+module 0x5.A {
     public push_u64(): u64 {
     label b0:
         return 42;
@@ -22,9 +22,9 @@ module 0x1.A {
 
 //# run
-module 0x42.m {
+module 0x43.m {
 
-import 0x1.A;
+import 0x5.A;
 
 entry foo() {
     let k: u64;
diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/return_expression_lists.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/return_expression_lists.mvir
index a7066128939ca..6307ef9c70d69 100644
--- a/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/return_expression_lists.mvir
+++ b/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/return_expression_lists.mvir
@@ -1,5 +1,5 @@
 //# publish
-module 0x1.A {
+module 0x5.A {
     public foo(t: u64): u64 * u64 {
         let k: u64;
     label b0:
@@ -17,9 +17,9 @@ module 0x1.A {
 
 //# run
-module 0x42.m {
+module 0x43.m {
 
-import 0x1.A;
+import 0x5.A;
 
 entry foo() {
     let y: u64;
diff --git a/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/return_function_in_if_binop_in_else.mvir b/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/return_function_in_if_binop_in_else.mvir
index 8c6f8b6da1ef7..803acdb48084e 100644
--- a/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/return_function_in_if_binop_in_else.mvir
+++ b/external-crates/move/crates/move-vm-transactional-tests/tests/stack_and_function_calls/return_function_in_if_binop_in_else.mvir
@@ -1,5 +1,5 @@
 //# publish
-module 0x1.A {
+module 0x5.A {
     public foo(t: u64): u64 * u64 {
     label b0:
         return (copy(t), 2 * move(t));
@@ -17,9 +17,9 @@ module 0x1.A {
 
 //# run
-module 0x1.m {
+module 0x6.m {
 
-import 0x1.A;
+import 0x5.A;
 
 entry foo() {
     let x: u64;
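All seven transactional tests above make the same mechanical change: the published fixture module A moves from 0x1 to 0x5, and the //# run scripts move to fresh addresses as well (0x42 → 0x43, and 0x1.m → 0x6.m in the last file). 0x1 is the address reserved for the Move standard library, so republishing the fixtures at 0x5 keeps them clear of system packages; the stack and call behavior under test is unchanged.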
diff --git a/external-crates/move/crates/move-vm-types/src/values/values_impl.rs b/external-crates/move/crates/move-vm-types/src/values/values_impl.rs
index aab6897fdc244..19e79444fcf7c 100644
--- a/external-crates/move/crates/move-vm-types/src/values/values_impl.rs
+++ b/external-crates/move/crates/move-vm-types/src/values/values_impl.rs
@@ -3306,7 +3306,7 @@ impl<'a, 'b> serde::Serialize for AnnotatedValue<'a, 'b, MoveTypeLayout, ValueIm
             (MoveTypeLayout::Struct(struct_layout), ValueImpl::Container(Container::Struct(r))) => {
                 (AnnotatedValue {
-                    layout: struct_layout,
+                    layout: &**struct_layout,
                     val: &*r.borrow(),
                 })
                 .serialize(serializer)
@@ -3314,7 +3314,7 @@
             (MoveTypeLayout::Enum(enum_layout), ValueImpl::Container(Container::Variant(r))) => {
                 (AnnotatedValue {
-                    layout: enum_layout,
+                    layout: &**enum_layout,
                     val: &*r.borrow(),
                 })
                 .serialize(serializer)
@@ -3483,12 +3483,12 @@ impl<'d> serde::de::DeserializeSeed<'d> for SeedWrapper<&MoveTypeLayout> {
             L::Signer => AccountAddress::deserialize(deserializer).map(Value::signer),
             L::Struct(struct_layout) => Ok(SeedWrapper {
-                layout: struct_layout,
+                layout: &**struct_layout,
             }
             .deserialize(deserializer)?),
             L::Enum(enum_layout) => Ok(SeedWrapper {
-                layout: enum_layout,
+                layout: &**enum_layout,
             }
             .deserialize(deserializer)?),
@@ -4138,10 +4138,11 @@ impl ValueImpl {
             (L::Bool, ValueImpl::Bool(x)) => MoveValue::Bool(*x),
             (L::Address, ValueImpl::Address(x)) => MoveValue::Address(*x),
-            (L::Enum(MoveEnumLayout(variants)), ValueImpl::Container(Container::Variant(r))) => {
+            (L::Enum(enum_layout), ValueImpl::Container(Container::Variant(r))) => {
+                let MoveEnumLayout(variants) = &**enum_layout;
                 let (tag, values) = &*r.borrow();
                 let tag = *tag;
-                let field_layouts = &variants[tag as usize];
+                let field_layouts = &variants.as_slice()[tag as usize];
                 let mut fields = vec![];
                 for (v, field_layout) in values.iter().zip(field_layouts) {
                     fields.push(v.as_move_value(field_layout));
diff --git a/external-crates/move/move-execution/v0/crates/move-vm-runtime/src/loader.rs b/external-crates/move/move-execution/v0/crates/move-vm-runtime/src/loader.rs
index 294f7e108cf8a..bb189d673c964 100644
--- a/external-crates/move/move-execution/v0/crates/move-vm-runtime/src/loader.rs
+++ b/external-crates/move/move-execution/v0/crates/move-vm-runtime/src/loader.rs
@@ -2186,17 +2186,14 @@ impl Loader {
             Type::Vector(ty) => R::MoveTypeLayout::Vector(Box::new(
                 self.type_to_type_layout_impl(ty, count, depth + 1)?,
             )),
-            Type::Datatype(gidx) => R::MoveTypeLayout::Struct(self.struct_gidx_to_type_layout(
-                *gidx,
-                &[],
-                count,
-                depth,
-            )?),
+            Type::Datatype(gidx) => R::MoveTypeLayout::Struct(Box::new(
+                self.struct_gidx_to_type_layout(*gidx, &[], count, depth)?,
+            )),
             Type::DatatypeInstantiation(struct_inst) => {
                 let (gidx, ty_args) = &**struct_inst;
-                R::MoveTypeLayout::Struct(
+                R::MoveTypeLayout::Struct(Box::new(
                     self.struct_gidx_to_type_layout(*gidx, ty_args, count, depth)?,
-                )
+                ))
             }
             Type::Reference(_) | Type::MutableReference(_) | Type::TyParam(_) => {
                 return Err(
@@ -2289,14 +2286,14 @@ impl Loader {
             Type::Vector(ty) => A::MoveTypeLayout::Vector(Box::new(
                 self.type_to_fully_annotated_layout_impl(ty, count, depth + 1)?,
             )),
-            Type::Datatype(gidx) => A::MoveTypeLayout::Struct(
+            Type::Datatype(gidx) => A::MoveTypeLayout::Struct(Box::new(
                 self.struct_gidx_to_fully_annotated_layout(*gidx, &[], count, depth)?,
-            ),
+            )),
             Type::DatatypeInstantiation(struct_inst) => {
                 let (gidx, ty_args) = &**struct_inst;
-                A::MoveTypeLayout::Struct(
+                A::MoveTypeLayout::Struct(Box::new(
                     self.struct_gidx_to_fully_annotated_layout(*gidx, ty_args, count, depth)?,
-                )
+                ))
             }
             Type::Reference(_) | Type::MutableReference(_) | Type::TyParam(_) => {
                 return Err(
diff --git a/external-crates/move/move-execution/v1/crates/move-vm-runtime/src/loader.rs b/external-crates/move/move-execution/v1/crates/move-vm-runtime/src/loader.rs
index ee65647069e9e..7744987a8a955 100644
--- a/external-crates/move/move-execution/v1/crates/move-vm-runtime/src/loader.rs
+++ b/external-crates/move/move-execution/v1/crates/move-vm-runtime/src/loader.rs
@@ -2188,17 +2188,14 @@ impl Loader {
             Type::Vector(ty) => R::MoveTypeLayout::Vector(Box::new(
                 self.type_to_type_layout_impl(ty, count, depth + 1)?,
             )),
-            Type::Datatype(gidx) => R::MoveTypeLayout::Struct(self.struct_gidx_to_type_layout(
-                *gidx,
-                &[],
-                count,
-                depth,
-            )?),
+            Type::Datatype(gidx) => R::MoveTypeLayout::Struct(Box::new(
+                self.struct_gidx_to_type_layout(*gidx, &[], count, depth)?,
+            )),
             Type::DatatypeInstantiation(struct_inst) => {
                 let (gidx, ty_args) = &**struct_inst;
-                R::MoveTypeLayout::Struct(
+                R::MoveTypeLayout::Struct(Box::new(
                     self.struct_gidx_to_type_layout(*gidx, ty_args, count, depth)?,
-                )
+                ))
             }
             Type::Reference(_) | Type::MutableReference(_) | Type::TyParam(_) => {
                 return Err(
@@ -2291,14 +2288,14 @@ impl Loader {
             Type::Vector(ty) => A::MoveTypeLayout::Vector(Box::new(
                 self.type_to_fully_annotated_layout_impl(ty, count, depth + 1)?,
             )),
-            Type::Datatype(gidx) => A::MoveTypeLayout::Struct(
+            Type::Datatype(gidx) => A::MoveTypeLayout::Struct(Box::new(
                 self.struct_gidx_to_fully_annotated_layout(*gidx, &[], count, depth)?,
-            ),
+            )),
             Type::DatatypeInstantiation(struct_inst) => {
                 let (gidx, ty_args) = &**struct_inst;
-                A::MoveTypeLayout::Struct(
+                A::MoveTypeLayout::Struct(Box::new(
                     self.struct_gidx_to_fully_annotated_layout(*gidx, ty_args, count, depth)?,
-                )
+                ))
             }
             Type::Reference(_) | Type::MutableReference(_) | Type::TyParam(_) => {
                 return Err(
diff --git a/external-crates/move/move-execution/v2/crates/move-vm-runtime/src/loader.rs b/external-crates/move/move-execution/v2/crates/move-vm-runtime/src/loader.rs
index 3d0f98b4f6247..65117a1c3c426 100644
--- a/external-crates/move/move-execution/v2/crates/move-vm-runtime/src/loader.rs
+++ b/external-crates/move/move-execution/v2/crates/move-vm-runtime/src/loader.rs
@@ -2157,17 +2157,14 @@ impl Loader {
             Type::Vector(ty) => R::MoveTypeLayout::Vector(Box::new(
                 self.type_to_type_layout_impl(ty, count, depth + 1)?,
             )),
-            Type::Datatype(gidx) => R::MoveTypeLayout::Struct(self.struct_gidx_to_type_layout(
-                *gidx,
-                &[],
-                count,
-                depth,
-            )?),
+            Type::Datatype(gidx) => R::MoveTypeLayout::Struct(Box::new(
+                self.struct_gidx_to_type_layout(*gidx, &[], count, depth)?,
+            )),
             Type::DatatypeInstantiation(struct_inst) => {
                 let (gidx, ty_args) = &**struct_inst;
-                R::MoveTypeLayout::Struct(
+                R::MoveTypeLayout::Struct(Box::new(
                     self.struct_gidx_to_type_layout(*gidx, ty_args, count, depth)?,
-                )
+                ))
             }
             Type::Reference(_) | Type::MutableReference(_) | Type::TyParam(_) => {
                 return Err(
@@ -2260,14 +2257,14 @@ impl Loader {
             Type::Vector(ty) => A::MoveTypeLayout::Vector(Box::new(
                 self.type_to_fully_annotated_layout_impl(ty, count, depth + 1)?,
             )),
-            Type::Datatype(gidx) => A::MoveTypeLayout::Struct(
+            Type::Datatype(gidx) => A::MoveTypeLayout::Struct(Box::new(
                 self.struct_gidx_to_fully_annotated_layout(*gidx, &[], count, depth)?,
-            ),
+            )),
             Type::DatatypeInstantiation(struct_inst) => {
                 let (gidx, ty_args) = &**struct_inst;
-                A::MoveTypeLayout::Struct(
+                A::MoveTypeLayout::Struct(Box::new(
                     self.struct_gidx_to_fully_annotated_layout(*gidx, ty_args, count, depth)?,
-                )
+                ))
             }
             Type::Reference(_) | Type::MutableReference(_) | Type::TyParam(_) => {
                 return Err(
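The values_impl.rs and loader.rs changes above are one mechanical refactor: MoveTypeLayout::Struct and MoveTypeLayout::Enum now carry their layout behind a Box, so producers wrap results in Box::new(..) and consumers dereference with &**. Below is a minimal, self-contained Rust sketch of why boxing the large variant pays off; WideLayout is a hypothetical stand-in for a wide layout type such as MoveStructLayout, not the actual Move type:

// Hypothetical stand-in for a wide layout (field layouts, names, type info...).
struct WideLayout {
    _fields: [u64; 8],
}

// Inline variant: every value of the enum is as large as WideLayout,
// because an enum is sized for its largest variant.
enum InlineLayout {
    U64,
    Struct(WideLayout),
}

// Boxed variant: the enum stays pointer-sized; only actual struct
// layouts pay for a heap allocation.
enum BoxedLayout {
    U64,
    Struct(Box<WideLayout>),
}

fn main() {
    // On a typical 64-bit target this prints 72 vs 8: the Box's non-null
    // niche lets the compiler encode the discriminant for free.
    println!("inline: {}", std::mem::size_of::<InlineLayout>());
    println!("boxed:  {}", std::mem::size_of::<BoxedLayout>());
}

Since layouts for nested types are built recursively (see type_to_type_layout_impl above), keeping the enum small keeps every intermediate node cheap; the &**struct_layout / &**enum_layout spellings in values_impl.rs are the matching read side, taking &Box<T> back down to &T for serde.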
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 6fe93ff432deb..3dae324991f38 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -45,7 +45,7 @@ importers:
         version: 8.8.0(eslint@8.45.0)
       eslint-config-react-app:
         specifier: ^7.0.1
-        version: 7.0.1(@babel/plugin-syntax-flow@7.24.7(@babel/core@7.24.7))(@babel/plugin-transform-react-jsx@7.24.7(@babel/core@7.24.7))(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@6.1.0(eslint@8.45.0)(typescript@5.5.3))(eslint-plugin-import@2.29.1)(eslint@8.45.0))(eslint@8.45.0)(typescript@5.5.3)
+        version: 7.0.1(@babel/plugin-syntax-flow@7.24.7(@babel/core@7.25.2))(@babel/plugin-transform-react-jsx@7.25.2(@babel/core@7.25.2))(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@6.1.0(eslint@8.45.0)(typescript@5.5.3))(eslint-plugin-import@2.29.1)(eslint@8.45.0))(eslint@8.45.0)(typescript@5.5.3)
       eslint-import-resolver-typescript:
         specifier: ^3.6.1
         version: 3.6.1(@typescript-eslint/parser@6.1.0(eslint@8.45.0)(typescript@5.5.3))(eslint-plugin-import@2.29.1)(eslint@8.45.0)
@@ -57,7 +57,7 @@ importers:
         version: 2.29.1(@typescript-eslint/parser@6.1.0(eslint@8.45.0)(typescript@5.5.3))(eslint-import-resolver-typescript@3.6.1)(eslint@8.45.0)
       eslint-plugin-prettier:
         specifier: ^5.1.3
-        version: 5.1.3(@types/eslint@8.56.10)(eslint-config-prettier@8.8.0(eslint@8.45.0))(eslint@8.45.0)(prettier@3.3.2)
+        version: 5.1.3(@types/eslint@9.6.1)(eslint-config-prettier@8.8.0(eslint@8.45.0))(eslint@8.45.0)(prettier@3.3.2)
       eslint-plugin-require-extensions:
         specifier: ^0.1.3
         version: 0.1.3(eslint@8.45.0)
@@ -139,34 +139,34 @@ importers:
     devDependencies:
       '@headlessui/tailwindcss':
         specifier: ^0.1.3
-        version: 0.1.3(tailwindcss@3.4.4(ts-node@10.9.2(@types/node@20.14.10)(typescript@5.5.3)))
+        version: 0.1.3(tailwindcss@3.4.4(ts-node@10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@22.5.4)(typescript@5.5.3)))
       '@tailwindcss/aspect-ratio':
         specifier: ^0.4.2
-        version: 0.4.2(tailwindcss@3.4.4(ts-node@10.9.2(@types/node@20.14.10)(typescript@5.5.3)))
+        version: 0.4.2(tailwindcss@3.4.4(ts-node@10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@22.5.4)(typescript@5.5.3)))
       '@tailwindcss/forms':
         specifier: ^0.5.7
-        version: 0.5.7(tailwindcss@3.4.4(ts-node@10.9.2(@types/node@20.14.10)(typescript@5.5.3)))
+        version: 0.5.7(tailwindcss@3.4.4(ts-node@10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@22.5.4)(typescript@5.5.3)))
       '@types/react':
         specifier: ^18.3.3
         version: 18.3.3
       '@vanilla-extract/vite-plugin':
         specifier: ^4.0.13
-        version: 4.0.13(@types/node@20.14.10)(babel-plugin-macros@3.1.0)(sass@1.77.6)(terser@5.31.1)(vite@5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1))
+        version: 4.0.13(@types/node@22.5.4)(babel-plugin-macros@3.1.0)(sass@1.78.0)(terser@5.32.0)(vite@5.3.3(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0))
       postcss:
         specifier: ^8.4.39
         version: 8.4.39
       tailwindcss:
         specifier: ^3.4.4
-        version: 3.4.4(ts-node@10.9.2(@swc/core@1.6.13(@swc/helpers@0.5.5))(@types/node@20.14.10)(typescript@5.5.3))
+        version: 3.4.4(ts-node@10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@22.5.4)(typescript@5.5.3))
       typescript:
         specifier: ^5.5.3
         version: 5.5.3
       vite:
         specifier: ^5.3.3
-        version: 5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1)
+        version: 5.3.3(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0)
      vitest:
         specifier: ^2.0.1
-        version: 2.0.1(@types/node@20.14.10)(happy-dom@14.12.3)(jsdom@24.1.0)(sass@1.77.6)(terser@5.31.1)
+        version: 2.0.1(@types/node@22.5.4)(happy-dom@15.7.3)(jsdom@25.0.0)(sass@1.78.0)(terser@5.32.0)
 
   apps/icons:
     devDependencies:
@@ -274,7 +274,7 @@ importers:
         version: 1.1.0(@types/react-dom@18.3.0)(@types/react@18.3.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
       '@reduxjs/toolkit':
         specifier: ^1.9.5
-        version: 1.9.5(react-redux@8.1.1(@types/react-dom@18.3.0)(@types/react@18.3.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(redux@4.2.1))(react@18.3.1)
+        version: 1.9.5(react-redux@8.1.1(@types/react-dom@18.3.0)(@types/react@18.3.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(redux@5.0.1))(react@18.3.1)
       '@scure/bip32':
         specifier: ^1.4.0
         version: 1.4.0
@@ -352,7 +352,7 @@ importers:
         version: 5.4.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
       react-redux:
         specifier: ^8.1.1
-        version: 8.1.1(@types/react-dom@18.3.0)(@types/react@18.3.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(redux@4.2.1)
+        version: 8.1.1(@types/react-dom@18.3.0)(@types/react@18.3.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(redux@5.0.1)
       react-router-dom:
         specifier: ^6.24.1
         version: 6.24.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
@@ -388,7 +388,7 @@ importers:
         version: 3.23.8
       zustand:
         specifier: ^4.5.4
-        version: 4.5.4(@types/react@18.3.3)(immer@9.0.21)(react@18.3.1)
+        version: 4.5.4(@types/react@18.3.3)(immer@10.1.1)(react@18.3.1)
       zxcvbn:
         specifier: ^4.4.2
         version: 4.4.2
@@ -443,7 +443,7 @@ importers:
         version: 7.6.20(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.5.3)
       '@storybook/react-webpack5':
         specifier: ^7.1.0
-        version: 7.6.20(@babel/core@7.24.7)(@swc/core@1.6.13(@swc/helpers@0.5.5))(@swc/helpers@0.5.5)(@types/webpack@5.28.5(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(type-fest@4.21.0)(typescript@5.5.3)(webpack-cli@5.1.4(webpack@5.92.1))(webpack-hot-middleware@2.26.1)
+        version: 7.6.20(@babel/core@7.24.7)(@swc/core@1.7.24(@swc/helpers@0.5.13))(@swc/helpers@0.5.13)(@types/webpack@5.28.5(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4(webpack@5.92.1)))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(type-fest@4.26.1)(typescript@5.5.3)(webpack-cli@5.1.4(webpack@5.92.1))(webpack-hot-middleware@2.26.1)
       '@storybook/theming':
         specifier: ^7.1.0
         version: 7.6.17(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
@@ -452,7 +452,7 @@ importers:
         version: 7.0.0(typescript@5.5.3)
       '@types/dotenv-webpack':
         specifier: ^7.0.4
-        version: 7.0.7(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1))
+        version: 7.0.7(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4(webpack@5.92.1))
       '@types/git-rev-sync':
         specifier: ^2.0.0
         version: 2.0.2
@@ -479,31 +479,31 @@ importers:
         version: 0.10.7
       '@types/webpack':
         specifier: ^5.28.1
-        version: 5.28.5(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1))
+        version: 5.28.5(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4(webpack@5.92.1))
       '@types/zxcvbn':
         specifier: ^4.4.1
         version: 4.4.4
       '@vitejs/plugin-react':
         specifier: ^4.3.1
-        version: 4.3.1(vite@5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1))
+        version: 4.3.1(vite@5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.32.0))
       copy-webpack-plugin:
         specifier: ^11.0.0
-        version: 11.0.0(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)))
+        version: 11.0.0(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4))
       cross-env:
         specifier: ^7.0.3
         version: 7.0.3
       css-loader:
         specifier: ^6.7.3
-        version: 6.11.0(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)))
+        version: 6.11.0(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4))
       dotenv-webpack:
         specifier: ^8.0.0
-        version: 8.1.0(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)))
+        version: 8.1.0(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4))
       eslint:
         specifier: ^8.45.0
         version: 8.45.0
       eslint-webpack-plugin:
         specifier: ^4.0.1
-        version: 4.2.0(eslint@8.45.0)(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)))
+        version: 4.2.0(eslint@8.45.0)(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4))
       git-rev-sync:
         specifier: ^3.0.2
         version: 3.0.2
@@ -512,10 +512,10 @@ importers:
         version: 14.12.3
       html-webpack-plugin:
         specifier: ^5.5.3
-        version: 5.6.0(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)))
+        version: 5.6.0(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4))
       mini-css-extract-plugin:
         specifier: ^2.7.6
-        version: 2.9.0(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)))
+        version: 2.9.0(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4))
       onchange:
         specifier: ^7.1.0
         version: 7.1.0
@@ -524,7 +524,7 @@ importers:
         version: 8.4.39
       postcss-loader:
         specifier: ^7.3.3
-        version: 7.3.3(postcss@8.4.39)(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)))
+        version: 7.3.3(postcss@8.4.39)(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4))
       postcss-preset-env:
         specifier: ^9.0.0
         version: 9.6.0(postcss@8.4.39)
@@ -533,22 +533,22 @@ importers:
         version: 1.77.6
       sass-loader:
         specifier: ^13.3.2
-        version: 13.3.2(sass@1.77.6)(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)))
+        version: 13.3.2(sass@1.77.6)(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4))
       storybook:
         specifier: ^7.1.0
         version: 7.1.0
       tailwindcss:
         specifier: ^3.4.4
-        version: 3.4.4(ts-node@10.9.2(@swc/core@1.6.13(@swc/helpers@0.5.5))(@types/node@20.14.10)(typescript@5.5.3))
+        version: 3.4.4(ts-node@10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@20.14.10)(typescript@5.5.3))
       tailwindcss-animate:
         specifier: ^1.0.7
-        version: 1.0.7(tailwindcss@3.4.4(ts-node@10.9.2(@types/node@20.14.10)(typescript@5.5.3)))
+        version: 1.0.7(tailwindcss@3.4.4(ts-node@10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@20.14.10)(typescript@5.5.3)))
       ts-loader:
         specifier: ^9.4.4
-        version: 9.5.1(typescript@5.5.3)(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)))
+        version: 9.5.1(typescript@5.5.3)(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4))
       ts-node:
         specifier: ^10.9.2
-        version: 10.9.2(@swc/core@1.6.13(@swc/helpers@0.5.5))(@types/node@20.14.10)(typescript@5.5.3)
+        version: 10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@20.14.10)(typescript@5.5.3)
       tsconfig-paths:
         specifier: ^4.2.0
         version: 4.2.0
@@ -557,19 +557,19 @@ importers:
         version: 5.5.3
       vite:
         specifier: ^5.3.3
-        version: 5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1)
+        version: 5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.32.0)
       vite-tsconfig-paths:
         specifier: ^4.3.2
-        version: 4.3.2(typescript@5.5.3)(vite@5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1))
+        version: 4.3.2(typescript@5.5.3)(vite@5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.32.0))
       vitest:
         specifier: ^2.0.1
-        version: 2.0.1(@types/node@20.14.10)(happy-dom@14.12.3)(jsdom@24.1.0)(sass@1.77.6)(terser@5.31.1)
+        version: 2.0.1(@types/node@20.14.10)(happy-dom@14.12.3)(jsdom@25.0.0)(sass@1.77.6)(terser@5.32.0)
       web-ext:
         specifier: ^7.6.2
-        version: 7.6.2(body-parser@1.20.2)
+        version: 7.6.2
       webpack:
         specifier: ^5.79.0
-        version: 5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1))
+        version: 5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4)
       webpack-cli:
         specifier: ^5.0.1
         version: 5.1.4(webpack@5.92.1)
@@ -612,7 +612,7 @@ importers:
     devDependencies:
       '@headlessui/tailwindcss':
         specifier: ^0.1.3
-        version: 0.1.3(tailwindcss@3.4.4(ts-node@10.9.2(@types/node@20.14.10)(typescript@5.5.3)))
+        version: 0.1.3(tailwindcss@3.4.4(ts-node@10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@22.5.4)(typescript@5.5.3)))
       '@types/react':
         specifier: ^18.3.3
         version: 18.3.3
@@ -621,7 +621,7 @@ importers:
         version: 18.3.0
       '@vitejs/plugin-react':
         specifier: ^4.3.1
-        version: 4.3.1(vite@5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1))
+        version: 4.3.1(vite@5.3.3(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0))
       autoprefixer:
         specifier: ^10.4.19
         version: 10.4.19(postcss@8.4.39)
@@ -630,13 +630,13 @@ importers:
         version: 8.4.39
       tailwindcss:
         specifier: ^3.4.4
-        version: 3.4.4(ts-node@10.9.2(@swc/core@1.6.13(@swc/helpers@0.5.5))(@types/node@20.14.10)(typescript@5.5.3))
+        version: 3.4.4(ts-node@10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@22.5.4)(typescript@5.5.3))
       typescript:
         specifier: ^5.5.3
         version: 5.5.3
       vite:
         specifier: ^5.3.3
-        version: 5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1)
+        version: 5.3.3(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0)
 
   dapps/kiosk-cli:
     dependencies:
@@ -724,7 +724,7 @@ importers:
     devDependencies:
       '@tailwindcss/forms':
         specifier: ^0.5.7
-        version: 0.5.7(tailwindcss@3.4.4(ts-node@10.9.2(@types/node@20.14.10)(typescript@5.5.3)))
+        version: 0.5.7(tailwindcss@3.4.4(ts-node@10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@22.5.4)(typescript@5.5.3)))
       '@tsconfig/docusaurus':
         specifier: ^2.0.3
         version: 2.0.3
@@ -736,7 +736,7 @@ importers:
         version: 18.3.0
       '@vitejs/plugin-react':
         specifier: ^4.3.1
-        version: 4.3.1(vite@5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1))
+        version: 4.3.1(vite@5.3.3(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0))
       autoprefixer:
         specifier: ^10.4.19
         version: 10.4.19(postcss@8.4.39)
@@ -745,19 +745,19 @@ importers:
         version: 8.4.39
       tailwindcss:
         specifier: ^3.4.4
-        version: 3.4.4(ts-node@10.9.2(@swc/core@1.6.13(@swc/helpers@0.5.5))(@types/node@20.14.10)(typescript@5.5.3))
+        version: 3.4.4(ts-node@10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@22.5.4)(typescript@5.5.3))
       tailwindcss-animate:
         specifier: ^1.0.7
-        version: 1.0.7(tailwindcss@3.4.4(ts-node@10.9.2(@types/node@20.14.10)(typescript@5.5.3)))
+        version: 1.0.7(tailwindcss@3.4.4(ts-node@10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@22.5.4)(typescript@5.5.3)))
       typescript:
         specifier: ^5.5.3
         version: 5.5.3
       vite:
         specifier: ^5.3.3
-        version: 5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1)
+        version: 5.3.3(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0)
       vite-tsconfig-paths:
         specifier: ^4.3.2
-        version: 4.3.2(typescript@5.5.3)(vite@5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1))
+        version: 4.3.2(typescript@5.5.3)(vite@5.3.3(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0))
 
   dapps/sponsored-transactions:
     dependencies:
@@ -785,7 +785,7 @@ importers:
         version: 18.3.0
       '@vitejs/plugin-react':
         specifier: ^4.3.1
-        version: 4.3.1(vite@5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1))
+        version: 4.3.1(vite@5.3.3(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0))
       autoprefixer:
         specifier: ^10.4.19
         version: 10.4.19(postcss@8.4.39)
@@ -794,13 +794,13 @@ importers:
         version: 8.4.39
       tailwindcss:
         specifier: ^3.4.4
-        version: 3.4.4(ts-node@10.9.2(@swc/core@1.6.13(@swc/helpers@0.5.5))(@types/node@20.14.10)(typescript@5.5.3))
+        version: 3.4.4(ts-node@10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@22.5.4)(typescript@5.5.3))
       typescript:
         specifier: ^5.5.3
         version: 5.5.3
       vite:
         specifier: ^5.3.3
-        version: 5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1)
+        version: 5.3.3(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0)
 
   examples/mev_bot:
     dependencies:
@@ -816,7 +816,7 @@ importers:
     devDependencies:
       ts-node:
         specifier: ^10.9.2
-        version: 10.9.2(@swc/core@1.6.13)(@types/node@20.14.10)(typescript@5.5.3)
+        version: 10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@22.5.4)(typescript@5.5.3)
       typescript:
         specifier: ^5.5.3
         version: 5.5.3
@@ -868,7 +868,7 @@ importers:
         version: 6.1.0(eslint@8.45.0)(typescript@5.5.3)
       '@vitejs/plugin-react-swc':
         specifier: ^3.7.0
-        version: 3.7.0(@swc/helpers@0.5.5)(vite@5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1))
+        version: 3.7.0(@swc/helpers@0.5.13)(vite@5.3.3(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0))
       eslint:
         specifier: ^8.45.0
         version: 8.45.0
@@ -886,10 +886,10 @@ importers:
         version: 5.5.3
       vite:
         specifier: ^5.3.3
-        version: 5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1)
+        version: 5.3.3(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0)
       vite-tsconfig-paths:
         specifier: ^4.3.2
-        version: 4.3.2(typescript@5.5.3)(vite@5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1))
+        version: 4.3.2(typescript@5.5.3)(vite@5.3.3(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0))
 
   examples/trading/api:
     dependencies:
@@ -903,8 +903,8 @@ importers:
         specifier: ^2.8.5
         version: 2.8.5
       express:
-        specifier: ^4.19.2
-        version: 4.19.2
+        specifier: ^4.20.0
+        version: 4.20.0
     devDependencies:
       '@types/cors':
         specifier: ^2.8.17
         version: 2.8.17
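The express bump just above (specifier ^4.19.2 → ^4.20.0) is the only manifest-level specifier change in this stretch of the lockfile; everything else is peer-dependency resolution churn (@swc/core, @types/node, sass, terser, happy-dom, jsdom, and friends). Express 4.20.0 corresponds to the body-parser 1.20.2 → 1.20.3 bump that appears further down in this diff, which was a security patch release of that middleware.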
@@ -923,7 +923,7 @@ importers:
         version: 5.16.2
       ts-node:
         specifier: ^10.9.2
-        version: 10.9.2(@swc/core@1.6.13)(@types/node@20.14.10)(typescript@5.5.3)
+        version: 10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@20.14.10)(typescript@5.5.3)
       typescript:
         specifier: ^5.5.3
         version: 5.5.3
@@ -975,7 +975,7 @@ importers:
         version: 6.1.0(eslint@8.45.0)(typescript@5.5.3)
       '@vitejs/plugin-react-swc':
         specifier: ^3.7.0
-        version: 3.7.0(@swc/helpers@0.5.5)(vite@5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1))
+        version: 3.7.0(@swc/helpers@0.5.13)(vite@5.3.3(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0))
       autoprefixer:
         specifier: ^10.4.19
         version: 10.4.19(postcss@8.4.39)
@@ -996,13 +996,13 @@ importers:
         version: 3.3.2
       tailwindcss:
         specifier: ^3.4.4
-        version: 3.4.4(ts-node@10.9.2(@swc/core@1.6.13(@swc/helpers@0.5.5))(@types/node@20.14.10)(typescript@5.5.3))
+        version: 3.4.4(ts-node@10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@22.5.4)(typescript@5.5.3))
       typescript:
         specifier: ^5.5.3
         version: 5.5.3
       vite:
         specifier: ^5.3.3
-        version: 5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1)
+        version: 5.3.3(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0)
 
   sdk/bcs:
     dependencies:
@@ -1024,7 +1024,7 @@ importers:
         version: 5.5.3
       vitest:
         specifier: ^2.0.1
-        version: 2.0.1(@types/node@20.14.10)(happy-dom@14.12.3)(jsdom@24.1.0)(sass@1.77.6)(terser@5.31.1)
+        version: 2.0.1(@types/node@22.5.4)(happy-dom@15.7.3)(jsdom@25.0.0)(sass@1.78.0)(terser@5.32.0)
 
   sdk/build-scripts:
     dependencies:
@@ -1037,7 +1037,7 @@ importers:
         version: 1.16.3
       '@vanilla-extract/esbuild-plugin':
         specifier: ^2.3.8
-        version: 2.3.8(@types/node@20.14.10)(babel-plugin-macros@3.1.0)(esbuild@0.23.0)(sass@1.77.6)(terser@5.31.1)
+        version: 2.3.8(@types/node@20.14.10)(babel-plugin-macros@3.1.0)(esbuild@0.23.0)(sass@1.78.0)(terser@5.32.0)
       autoprefixer:
         specifier: ^10.4.19
         version: 10.4.19(postcss@8.4.39)
@@ -1117,7 +1117,7 @@ importers:
         version: 7.16.0(eslint@9.6.0)(typescript@5.5.3)
       '@vitejs/plugin-react-swc':
         specifier: ^3.7.0
-        version: 3.7.0(@swc/helpers@0.5.5)(vite@5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1))
+        version: 3.7.0(@swc/helpers@0.5.13)(vite@5.3.3(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0))
       eslint:
         specifier: ^9.6.0
         version: 9.6.0
@@ -1135,7 +1135,7 @@ importers:
         version: 5.5.3
       vite:
         specifier: ^5.3.3
-        version: 5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1)
+        version: 5.3.3(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0)
 
   sdk/create-dapp/templates/react-e2e-counter:
     dependencies:
@@ -1178,7 +1178,7 @@ importers:
         version: 6.1.0(eslint@8.45.0)(typescript@5.5.3)
       '@vitejs/plugin-react-swc':
         specifier: ^3.7.0
-        version: 3.7.0(@swc/helpers@0.5.5)(vite@5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1))
+        version: 3.7.0(@swc/helpers@0.5.13)(vite@5.3.3(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0))
       eslint:
         specifier: ^8.45.0
         version: 8.45.0
@@ -1196,7 +1196,7 @@ importers:
         version: 5.5.3
       vite:
         specifier: ^5.3.3
-        version: 5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1)
+        version: 5.3.3(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0)
 
   sdk/dapp-kit:
     dependencies:
@@ -1232,7 +1232,7 @@ importers:
         version: 2.1.1
       zustand:
         specifier: ^4.5.4
-        version: 4.5.4(@types/react@18.3.3)(immer@9.0.21)(react@18.3.1)
+        version: 4.5.4(@types/react@18.3.3)(immer@10.1.1)(react@18.3.1)
     devDependencies:
       '@mysten/build-scripts':
         specifier: workspace:*
         version: link:../build-scripts
@@ -1248,7 +1248,7 @@ importers:
         version: 10.3.1
       '@testing-library/jest-dom':
         specifier: ^6.4.6
-        version: 6.4.6(vitest@2.0.1(@types/node@20.14.10)(happy-dom@14.12.3)(jsdom@24.1.0)(sass@1.77.6)(terser@5.31.1))
+        version: 6.4.6(vitest@2.0.1(@types/node@22.5.4)(happy-dom@15.7.3)(jsdom@24.1.0)(sass@1.78.0)(terser@5.32.0))
       '@testing-library/react':
         specifier: ^16.0.0
         version: 16.0.0(@testing-library/dom@10.3.1)(@types/react-dom@18.3.0)(@types/react@18.3.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
@@ -1260,10 +1260,10 @@ importers:
         version: 18.3.3
       '@vanilla-extract/esbuild-plugin':
         specifier: ^2.3.8
-        version: 2.3.8(@types/node@20.14.10)(babel-plugin-macros@3.1.0)(esbuild@0.23.0)(sass@1.77.6)(terser@5.31.1)
+        version: 2.3.8(@types/node@22.5.4)(babel-plugin-macros@3.1.0)(esbuild@0.23.1)(sass@1.78.0)(terser@5.32.0)
       '@vanilla-extract/vite-plugin':
         specifier: ^4.0.13
-        version: 4.0.13(@types/node@20.14.10)(babel-plugin-macros@3.1.0)(sass@1.77.6)(terser@5.31.1)(vite@5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1))
+        version: 4.0.13(@types/node@22.5.4)(babel-plugin-macros@3.1.0)(sass@1.78.0)(terser@5.32.0)(vite@5.3.3(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0))
       jsdom:
         specifier: ^24.1.0
         version: 24.1.0
@@ -1281,10 +1281,10 @@ importers:
         version: 5.5.3
       vite:
         specifier: ^5.3.3
-        version: 5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1)
+        version: 5.3.3(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0)
       vitest:
         specifier: ^2.0.1
-        version: 2.0.1(@types/node@20.14.10)(happy-dom@14.12.3)(jsdom@24.1.0)(sass@1.77.6)(terser@5.31.1)
+        version: 2.0.1(@types/node@22.5.4)(happy-dom@15.7.3)(jsdom@24.1.0)(sass@1.78.0)(terser@5.32.0)
 
   sdk/deepbook:
     dependencies:
@@ -1303,7 +1303,7 @@ importers:
         version: 0.2.3
       ts-node:
         specifier: ^10.9.2
-        version: 10.9.2(@swc/core@1.6.13)(@types/node@20.14.10)(typescript@5.5.3)
+        version: 10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@20.14.10)(typescript@5.5.3)
       ts-retry-promise:
         specifier: ^0.8.1
         version: 0.8.1
@@ -1312,10 +1312,10 @@ importers:
         version: 5.5.3
       vite:
         specifier: ^5.3.3
-        version: 5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1)
+        version: 5.3.3(@types/node@20.14.10)(sass@1.78.0)(terser@5.32.0)
       vitest:
         specifier: ^2.0.1
-        version: 2.0.1(@types/node@20.14.10)(happy-dom@14.12.3)(jsdom@24.1.0)(sass@1.77.6)(terser@5.31.1)
+        version: 2.0.1(@types/node@20.14.10)(happy-dom@15.7.3)(jsdom@25.0.0)(sass@1.78.0)(terser@5.32.0)
       wait-on:
         specifier: ^7.2.0
         version: 7.2.0
@@ -1340,7 +1340,7 @@ importers:
         version: 0.2.3
       ts-node:
         specifier: ^10.9.2
-        version: 10.9.2(@swc/core@1.6.13)(@types/node@20.14.10)(typescript@5.5.3)
+        version: 10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@20.14.10)(typescript@5.5.3)
       ts-retry-promise:
         specifier: ^0.8.1
         version: 0.8.1
@@ -1349,10 +1349,10 @@ importers:
         version: 5.5.3
       vite:
         specifier: ^5.3.3
-        version: 5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1)
+        version: 5.3.3(@types/node@20.14.10)(sass@1.78.0)(terser@5.32.0)
       vitest:
         specifier: ^2.0.1
-        version: 2.0.1(@types/node@20.14.10)(happy-dom@14.12.3)(jsdom@24.1.0)(sass@1.77.6)(terser@5.31.1)
+        version: 2.0.1(@types/node@20.14.10)(happy-dom@15.7.3)(jsdom@25.0.0)(sass@1.78.0)(terser@5.32.0)
       wait-on:
         specifier: ^7.2.0
         version: 7.2.0
@@ -1397,13 +1397,13 @@ importers:
         version: 20.14.10
       next:
         specifier: ^14.2.4
-        version: 14.2.4(@babel/core@7.24.7)(@playwright/test@1.45.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.6)
+        version: 14.2.4(@babel/core@7.25.2)(@playwright/test@1.47.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.78.0)
       nextra:
         specifier: ^2.13.4
-        version: 2.13.4(next@14.2.4(@babel/core@7.24.7)(@playwright/test@1.45.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.6))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
+        version: 2.13.4(next@14.2.4(@babel/core@7.25.2)(@playwright/test@1.47.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.78.0))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
       nextra-theme-docs:
         specifier: ^2.13.4
-        version: 2.13.4(next@14.2.4(@babel/core@7.24.7)(@playwright/test@1.45.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.6))(nextra@2.13.4(next@14.2.4(@babel/core@7.24.7)(@playwright/test@1.45.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.6))(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
+        version: 2.13.4(next@14.2.4(@babel/core@7.25.2)(@playwright/test@1.47.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.78.0))(nextra@2.13.4(next@14.2.4(@babel/core@7.25.2)(@playwright/test@1.47.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.78.0))(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
       react:
         specifier: ^18.3.1
         version: 18.3.1
@@ -1416,7 +1416,7 @@ importers:
         version: 18.3.3
       typedoc:
         specifier: ^0.26.3
-        version: 0.26.3(typescript@5.5.3)
+        version: 0.26.3(typescript@5.6.2)
 
   sdk/enoki:
     dependencies:
@@ -1450,7 +1450,7 @@ importers:
         version: 18.3.0
       '@vitejs/plugin-react-swc':
         specifier: ^3.7.0
-        version: 3.7.0(@swc/helpers@0.5.5)(vite@5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1))
+        version: 3.7.0(@swc/helpers@0.5.13)(vite@5.3.3(@types/node@20.14.10)(sass@1.78.0)(terser@5.32.0))
       react:
         specifier: ^18.3.1
         version: 18.3.1
@@ -1462,7 +1462,7 @@ importers:
         version: 5.5.3
       vite:
         specifier: ^5.3.3
-        version: 5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1)
+        version: 5.3.3(@types/node@20.14.10)(sass@1.78.0)(terser@5.32.0)
 
   sdk/graphql-transport:
     dependencies:
@@ -1514,7 +1514,7 @@ importers:
         version: 5.5.3
       vitest:
         specifier: ^2.0.1
-        version: 2.0.1(@types/node@20.14.10)(happy-dom@14.12.3)(jsdom@24.1.0)(sass@1.77.6)(terser@5.31.1)
+        version: 2.0.1(@types/node@20.14.10)(happy-dom@15.7.3)(jsdom@25.0.0)(sass@1.78.0)(terser@5.32.0)
       wait-on:
         specifier: ^7.2.0
         version: 7.2.0
@@ -1542,10 +1542,10 @@ importers:
         version: 5.5.3
       vite:
         specifier: ^5.3.3
-        version: 5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1)
+        version: 5.3.3(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0)
       vitest:
         specifier: ^2.0.1
-        version: 2.0.1(@types/node@20.14.10)(happy-dom@14.12.3)(jsdom@24.1.0)(sass@1.77.6)(terser@5.31.1)
+        version: 2.0.1(@types/node@22.5.4)(happy-dom@15.7.3)(jsdom@25.0.0)(sass@1.78.0)(terser@5.32.0)
       wait-on:
         specifier: ^7.2.0
         version: 7.2.0
@@ -1579,7 +1579,7 @@ importers:
         version: 5.5.3
       vitest:
         specifier: ^2.0.1
-        version: 2.0.1(@types/node@20.14.10)(happy-dom@14.12.3)(jsdom@24.1.0)(sass@1.77.6)(terser@5.31.1)
+        version: 2.0.1(@types/node@20.14.10)(happy-dom@15.7.3)(jsdom@25.0.0)(sass@1.78.0)(terser@5.32.0)
 
   sdk/move-bytecode-template:
     devDependencies:
@@ -1594,7 +1594,7 @@ importers:
         version: 5.5.3
       vitest:
         specifier: ^2.0.1
-        version: 2.0.1(@types/node@20.14.10)(happy-dom@14.12.3)(jsdom@24.1.0)(sass@1.77.6)(terser@5.31.1)
+        version: 2.0.1(@types/node@22.5.4)(happy-dom@15.7.3)(jsdom@25.0.0)(sass@1.78.0)(terser@5.32.0)
       wasm-pack:
         specifier: ^0.13.0
         version: 0.13.0
@@ -1613,13 +1613,13 @@ importers:
         version: link:../build-scripts
       ts-node:
         specifier: ^10.9.2
-        version: 10.9.2(@swc/core@1.6.13)(@types/node@20.14.10)(typescript@5.5.3)
+        version: 10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@22.5.4)(typescript@5.5.3)
       typescript:
         specifier: ^5.5.3
         version: 5.5.3
       vitest:
         specifier: ^2.0.1
-        version: 2.0.1(@types/node@20.14.10)(happy-dom@14.12.3)(jsdom@24.1.0)(sass@1.77.6)(terser@5.31.1)
+        version: 2.0.1(@types/node@22.5.4)(happy-dom@15.7.3)(jsdom@25.0.0)(sass@1.78.0)(terser@5.32.0)
 
   sdk/typescript:
     dependencies:
@@ -1713,10 +1713,10 @@ importers:
         version: 5.5.3
       vite:
         specifier: ^5.3.3
-        version: 5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1)
+        version: 5.3.3(@types/node@20.14.10)(sass@1.78.0)(terser@5.32.0)
       vitest:
         specifier: ^2.0.1
-        version: 2.0.1(@types/node@20.14.10)(happy-dom@14.12.3)(jsdom@24.1.0)(sass@1.77.6)(terser@5.31.1)
+        version: 2.0.1(@types/node@20.14.10)(happy-dom@15.7.3)(jsdom@25.0.0)(sass@1.78.0)(terser@5.32.0)
       wait-on:
         specifier: ^7.2.0
         version: 7.2.0
@@ -1741,7 +1741,7 @@ importers:
         version: 5.5.3
       typescript-json-schema:
         specifier: ^0.64.0
-        version: 0.64.0(@swc/core@1.6.13)
+        version: 0.64.0(@swc/core@1.7.24(@swc/helpers@0.5.13))
 
   sdk/zklogin:
     dependencies:
@@ -1772,7 +1772,7 @@ importers:
         version: 5.5.3
       vitest:
         specifier: ^2.0.1
-        version: 2.0.1(@types/node@20.14.10)(happy-dom@14.12.3)(jsdom@24.1.0)(sass@1.77.6)(terser@5.31.1)
+        version: 2.0.1(@types/node@20.14.10)(happy-dom@15.7.3)(jsdom@25.0.0)(sass@1.78.0)(terser@5.32.0)
 
   sdk/zksend:
     dependencies:
@@ -1803,7 +1803,7 @@ importers:
         version: 5.5.3
       vitest:
         specifier: ^2.0.1
-        version: 2.0.1(@types/node@20.14.10)(happy-dom@14.12.3)(jsdom@24.1.0)(sass@1.77.6)(terser@5.31.1)
+        version: 2.0.1(@types/node@20.14.10)(happy-dom@15.7.3)(jsdom@25.0.0)(sass@1.78.0)(terser@5.32.0)
 
 packages:
@@ -1907,10 +1907,18 @@ packages:
     resolution: {integrity: sha512-qJzAIcv03PyaWqxRgO4mSU3lihncDT296vnyuE2O8uA4w3UHWI4S3hgeZd1L8W1Bft40w9JxJ2b412iDUFFRhw==}
     engines: {node: '>=6.9.0'}
 
+  '@babel/compat-data@7.25.4':
+    resolution: {integrity: sha512-+LGRog6RAsCJrrrg/IO6LGmpphNe5DiK30dGjCoxxeGv49B10/3XYGxPsAwrDlMFcFEvdAUavDT8r9k/hSyQqQ==}
+    engines: {node: '>=6.9.0'}
+
   '@babel/core@7.24.7':
     resolution: {integrity: sha512-nykK+LEK86ahTkX/3TgauT0ikKoNCfKHEaZYTUVupJdTLzGNvrblu4u6fa7DhZONAltdf8e662t/abY8idrd/g==}
     engines: {node: '>=6.9.0'}
 
+  '@babel/core@7.25.2':
+    resolution: {integrity: sha512-BBt3opiCOxUr9euZ5/ro/Xv8/V7yJ5bjYMqG/C1YAo8MIKAnumZalCN+msbci3Pigy4lIQfPUpfMM27HMGaYEA==}
+    engines: {node: '>=6.9.0'}
+
   '@babel/eslint-parser@7.18.9':
     resolution: {integrity: sha512-KzSGpMBggz4fKbRbWLNyPVTuQr6cmCcBhOyXTw/fieOVaw5oYAwcAj4a7UKcDYCPxQq+CG1NCDZH9e2JTXquiQ==}
     engines: {node: ^10.13.0 || ^12.13.0 || >=14.0.0}
@@ -1926,6 +1934,10 @@ packages:
     resolution: {integrity: sha512-oipXieGC3i45Y1A41t4tAqpnEZWgB/lC6Ehh6+rOviR5XWpTtMmLN+fGjz9vOiNRt0p6RtO6DtD0pdU3vpqdSA==}
     engines: {node: '>=6.9.0'}
 
+  '@babel/generator@7.25.6':
+    resolution: {integrity: sha512-VPC82gr1seXOpkjAAKoLhP50vx4vGNlF4msF64dSFq1P8RfB+QAuJWGHPXXPc8QyfVWwwB/TNNU4+ayZmHNbZw==}
+    engines: {node: '>=6.9.0'}
+
   '@babel/helper-annotate-as-pure@7.24.7':
     resolution: {integrity: sha512-BaDeOonYvhdKw+JoMVkAixAAJzG2jVPIwWoKBPdYuY9b452e2rPuI9QPYh3KpofZ3pW2akOmwZLOiOsHMiqRAg==}
     engines: {node: '>=6.9.0'}
@@ -1938,6 +1950,10 @@ packages:
     resolution: {integrity: sha512-ctSdRHBi20qWOfy27RUb4Fhp07KSJ3sXcuSvTrXrc4aG8NSYDo1ici3Vhg9bg69y5bj0Mr1lh0aeEgTvc12rMg==}
     engines: {node: '>=6.9.0'}
 
+  '@babel/helper-compilation-targets@7.25.2':
+    resolution: {integrity: sha512-U2U5LsSaZ7TAt3cfaymQ8WHh0pxvdHoEk6HVpaexxixjyEquMh0L0YNJNM6CTGKMXV1iksi0iZkGw4AcFkPaaw==}
+    engines: {node: '>=6.9.0'}
+
   '@babel/helper-create-class-features-plugin@7.24.7':
     resolution: {integrity: sha512-kTkaDl7c9vO80zeX1rJxnuRpEsD5tA81yh11X1gQo+PhSti3JS+7qeZo9U4RHobKRiFPKaGK3svUAeb8D0Q7eg==}
     engines: {node: '>=6.9.0'}
@@ -1986,6 +2002,12 @@ packages:
     peerDependencies:
       '@babel/core': ^7.0.0
 
+  '@babel/helper-module-transforms@7.25.2':
+    resolution: {integrity: sha512-BjyRAbix6j/wv83ftcVJmBt72QtHI56C7JXZoG2xATiLpmoC7dpd8WnkikExHDVPpi/3qCmO6WY1EaXOluiecQ==}
+    engines: {node: '>=6.9.0'}
+    peerDependencies:
+      '@babel/core': ^7.0.0
+
   '@babel/helper-optimise-call-expression@7.24.7':
     resolution: {integrity: sha512-jKiTsW2xmWwxT1ixIdfXUZp+P5yURx2suzLZr5Hi64rURpDYdMW0pv+Uf17EYk2Rd428Lx4tLsnjGJzYKDM/6A==}
     engines: {node: '>=6.9.0'}
@@ -1998,6 +2020,10 @@ packages:
     resolution: {integrity: sha512-Rq76wjt7yz9AAc1KnlRKNAi/dMSVWgDRx43FHoJEbcYU6xOWaE2dVPwcdTukJrjxS65GITyfbvEYHvkirZ6uEg==}
     engines: {node: '>=6.9.0'}
 
+  '@babel/helper-plugin-utils@7.24.8':
+    resolution: {integrity: sha512-FFWx5142D8h2Mgr/iPVGH5G7w6jDn4jUSpZTyDnQO0Yn7Ks2Kuz6Pci8H6MPCoUJegd/UZQ3tAvfLCxQSnWWwg==}
+    engines: {node: '>=6.9.0'}
+
   '@babel/helper-remap-async-to-generator@7.24.7':
     resolution: {integrity: sha512-9pKLcTlZ92hNZMQfGCHImUpDOlAgkkpqalWEeftW5FBya75k8Li2ilerxkM/uBEj01iBZXcCIB/bwvDYgWyibA==}
     engines: {node: '>=6.9.0'}
@@ -2030,6 +2056,10 @@ packages:
     resolution: {integrity: sha512-7MbVt6xrwFQbunH2DNQsAP5sTGxfqQtErvBIvIMi6EQnbgUOuVYanvREcmFrOPhoXBrTtjhhP+lW+o5UfK+tDg==}
     engines: {node: '>=6.9.0'}
 
+  '@babel/helper-string-parser@7.24.8':
+    resolution: {integrity: sha512-pO9KhhRcuUyGnJWwyEgnRJTSIZHiT+vMD0kPeD+so0l7mxkMT19g3pjY9GTnHySck/hDzq+dtW/4VgnMkippsQ==}
+    engines: {node: '>=6.9.0'}
+
   '@babel/helper-validator-identifier@7.22.20':
     resolution: {integrity: sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A==}
     engines: {node: '>=6.9.0'}
@@ -2046,6 +2076,10 @@ packages:
     resolution: {integrity: sha512-yy1/KvjhV/ZCL+SM7hBrvnZJ3ZuT9OuZgIJAGpPEToANvc3iM6iDvBnRjtElWibHU6n8/LPR/EjX9EtIEYO3pw==}
     engines: {node: '>=6.9.0'}
 
+  '@babel/helper-validator-option@7.24.8':
+    resolution: {integrity: sha512-xb8t9tD1MHLungh/AIoWYN+gVHaB9kwlu8gffXGSt3FFEIT7RjS+xWbc2vUD1UTZdIpKj/ab3rdqJ7ufngyi2Q==}
+    engines: {node: '>=6.9.0'}
+
   '@babel/helper-wrap-function@7.24.7':
     resolution: {integrity: sha512-N9JIYk3TD+1vq/wn77YnJOqMtfWhNewNE+DJV4puD2X7Ew9J4JvrzrFDfTfyv5EgEXVy9/Wt8QiOErzEmv5Ifw==}
     engines: {node: '>=6.9.0'}
@@ -2054,6 +2088,10 @@ packages:
     resolution: {integrity: sha512-NlmJJtvcw72yRJRcnCmGvSi+3jDEg8qFu3z0AFoymmzLx5ERVWyzd9kVXr7Th9/8yIJi2Zc6av4Tqz3wFs8QWg==}
     engines: {node: '>=6.9.0'}
 
+  '@babel/helpers@7.25.6':
+    resolution: {integrity: sha512-Xg0tn4HcfTijTwfDwYlvVCl43V6h4KyVVX2aEm4qdO/PC6L2YvzLHFdmxhoeSA3eslcE6+ZVXHgWwopXYLNq4Q==}
+    engines: {node: '>=6.9.0'}
+
   '@babel/highlight@7.22.5':
     resolution: {integrity: sha512-BSKlD1hgnedS5XRnGOljZawtag7H1yPfQp0tdNJCHoH6AZ+Pcm9VvkrK59/Yy593Ypg0zMxH2BxD1VPYUQ7UIw==}
     engines: {node: '>=6.9.0'}
@@ -2081,6 +2119,11 @@ packages:
     engines: {node: '>=6.0.0'}
     hasBin: true
 
+  '@babel/parser@7.25.6':
+    resolution: {integrity: sha512-trGdfBdbD0l1ZPmcJ83eNxB9rbEax4ALFTF7fN386TMYbeCQbyme5cOEXQhbGXKebwGaB/J52w1mrklMcbgy6Q==}
+    engines: {node: '>=6.0.0'}
+    hasBin: true
+
   '@babel/plugin-bugfix-firefox-class-in-computed-class-key@7.24.7':
     resolution: {integrity: sha512-TiT1ss81W80eQsN+722OaeQMY/G4yTb4G9JrqeiDADs3N8lbPMGldWi9x8tyqCW5NLx1Jh2AvkE6r6QvEltMMQ==}
     engines: {node: '>=6.9.0'}
@@ -2628,6 +2671,12 @@ packages:
     peerDependencies:
       '@babel/core': ^7.0.0-0
 
+  '@babel/plugin-transform-react-jsx@7.25.2':
+    resolution: {integrity: sha512-KQsqEAVBpU82NM/B/N9j9WOdphom1SZH3R+2V7INrQUH+V9EBFwZsEJl8eBIVeQE62FxJCc70jzEZwqU7RcVqA==}
+    engines: {node: '>=6.9.0'}
+    peerDependencies:
+      '@babel/core': ^7.0.0-0
+
   '@babel/plugin-transform-react-pure-annotations@7.18.6':
     resolution: {integrity: sha512-I8VfEPg9r2TRDdvnHgPepTKvuRomzA8+u+nhY7qSI1fR2hRNebasZEETLyM5mAUr0Ku56OkXJ0I7NHJnO6cJiQ==}
     engines: {node: '>=6.9.0'}
@@ -2808,6 +2857,10 @@ packages:
     resolution: {integrity: sha512-jYqfPrU9JTF0PmPy1tLYHW4Mp4KlgxJD9l2nP9fD6yT/ICi554DmrWBAEYpIelzjHf1msDP3PxJIRt/nFNfBig==}
     engines: {node: '>=6.9.0'}
 
+  '@babel/template@7.25.0':
+    resolution: {integrity: sha512-aOOgh1/5XzKvg1jvVz7AVrx2piJ2XBi227DHmbY6y+bM9H2FlN+IfecYu4Xl0cNiiVejlsCri89LUsbj8vJD9Q==}
+    engines: {node: '>=6.9.0'}
+
   '@babel/traverse@7.23.9':
     resolution: {integrity: sha512-I/4UJ9vs90OkBtY6iiiTORVMyIhJ4kAVmsKo9KFc8UOxMeUfi2hvtIBsET5u9GizXE6/GFSuKCTNfgCswuEjRg==}
     engines: {node: '>=6.9.0'}
@@ -2816,6 +2869,10 @@ packages:
     resolution: {integrity: sha512-yb65Ed5S/QAcewNPh0nZczy9JdYXkkAbIsEo+P7BE7yO3txAY30Y/oPa3QkQ5It3xVG2kpKMg9MsdxZaO31uKA==}
     engines: {node: '>=6.9.0'}
 
+  '@babel/traverse@7.25.6':
+    resolution: {integrity: sha512-9Vrcx5ZW6UwK5tvqsj0nGpp/XzqthkT0dqIc9g1AdtygFToNtTF67XzYS//dm+SAK9cp3B9R4ZO/46p63SCjlQ==}
+    engines: {node: '>=6.9.0'}
+
   '@babel/types@7.23.9':
     resolution: {integrity: sha512-dQjSq/7HaSjRM43FFGnv5keM2HsxpmyV1PfaSVm0nzzjwwTmjOe6J4bC8e3+pTEIgHaHj+1ZlLThRJ2auc/w1Q==}
     engines: {node: '>=6.9.0'}
@@ -2824,6 +2881,10 @@ packages:
     resolution: {integrity: sha512-XEFXSlxiG5td2EJRe8vOmRbaXVgfcBlszKujvVmWIK/UpywWljQCfzAv3RQCGujWQ1RD4YYWEAqDXfuJiy8f5Q==}
     engines: {node: '>=6.9.0'}
 
+  '@babel/types@7.25.6':
+    resolution: {integrity: sha512-/l42B1qxpG6RdfYf343Uw1vmDjeNhneUXtzhojE7pDgfpEypmRhI6j1kr17XCVv4Cgl9HdAiQY2x0GwKm7rWCw==}
+    engines: {node: '>=6.9.0'}
+
   '@base2/pretty-print-object@1.0.1':
     resolution: {integrity: sha512-4iri8i1AqYHJE2DstZYkyEprg6Pq6sKx3xn5FpySk9sNhH7qN2LLlHJCfDTZRILNwQNPD7mATWM0TBui7uC1pA==}
@@ -3188,6 +3249,12 @@ packages:
     cpu: [ppc64]
     os: [aix]
 
+  '@esbuild/aix-ppc64@0.23.1':
+    resolution: {integrity: sha512-6VhYk1diRqrhBAqpJEdjASR/+WVRtfjpqKuNw11cLiaWpAT/Uu+nokB+UJnevzy/P9C/ty6AOe0dwueMrGh/iQ==}
+    engines: {node: '>=18'}
+    cpu: [ppc64]
+    os: [aix]
+
   '@esbuild/android-arm64@0.18.20':
     resolution: {integrity: sha512-Nz4rJcchGDtENV0eMKUNa6L12zz2zBDXuhj/Vjh18zGqB44Bi7MBMSXjgunJgjRhCmKOjnPuZp4Mb6OKqtMHLQ==}
     engines: {node: '>=12'}
     cpu: [arm64]
     os: [android]
@@ -3206,6 +3273,12 @@ packages:
     cpu: [arm64]
     os: [android]
 
+  '@esbuild/android-arm64@0.23.1':
+    resolution: {integrity: sha512-xw50ipykXcLstLeWH7WRdQuysJqejuAGPd30vd1i5zSyKK3WE+ijzHmLKxdiCMtH1pHz78rOg0BKSYOSB/2Khw==}
+    engines: {node: '>=18'}
+    cpu: [arm64]
+    os: [android]
+
   '@esbuild/android-arm@0.18.20':
     resolution: {integrity: sha512-fyi7TDI/ijKKNZTUJAQqiG5T7YjJXgnzkURqmGj13C6dCqckZBLdl4h7bkhHt/t0WP+zO9/zwroDvANaOqO5Sw==}
     engines: {node: '>=12'}
     cpu: [arm]
     os: [android]
@@ -3224,6 +3297,12 @@ packages:
     cpu: [arm]
     os: [android]
 
+  '@esbuild/android-arm@0.23.1':
+    resolution: {integrity: sha512-uz6/tEy2IFm9RYOyvKl88zdzZfwEfKZmnX9Cj1BHjeSGNuGLuMD1kR8y5bteYmwqKm1tj8m4cb/aKEorr6fHWQ==}
+    engines: {node: '>=18'}
+    cpu: [arm]
+    os: [android]
+
   '@esbuild/android-x64@0.18.20':
     resolution: {integrity: sha512-8GDdlePJA8D6zlZYJV/jnrRAi6rOiNaCC/JclcXpB+KIuvfBN4owLtgzY2bsxnx666XjJx2kDPUmnTtR8qKQUg==}
     engines: {node: '>=12'}
     cpu: [x64]
     os: [android]
@@ -3242,6 +3321,12 @@ packages:
     cpu: [x64]
     os: [android]
 
+  '@esbuild/android-x64@0.23.1':
+    resolution: {integrity: sha512-nlN9B69St9BwUoB+jkyU090bru8L0NA3yFvAd7k8dNsVH8bi9a8cUAUSEcEEgTp2z3dbEDGJGfP6VUnkQnlReg==}
+    engines: {node: '>=18'}
+    cpu: [x64]
+    os: [android]
+
   '@esbuild/darwin-arm64@0.18.20':
     resolution: {integrity: sha512-bxRHW5kHU38zS2lPTPOyuyTm+S+eobPUnTNkdJEfAddYgEcll4xkT8DB9d2008DtTbl7uJag2HuE5NZAZgnNEA==}
     engines: {node: '>=12'}
     cpu: [arm64]
     os: [darwin]
@@ -3260,6 +3345,12 @@ packages:
     cpu: [arm64]
     os: [darwin]
 
+  '@esbuild/darwin-arm64@0.23.1':
+    resolution: {integrity: sha512-YsS2e3Wtgnw7Wq53XXBLcV6JhRsEq8hkfg91ESVadIrzr9wO6jJDMZnCQbHm1Guc5t/CdDiFSSfWP58FNuvT3Q==}
+    engines: {node: '>=18'}
+    cpu: [arm64]
+    os: [darwin]
+
   '@esbuild/darwin-x64@0.18.20':
     resolution: {integrity: sha512-pc5gxlMDxzm513qPGbCbDukOdsGtKhfxD1zJKXjCCcU7ju50O7MeAZ8c4krSJcOIJGFR+qx21yMMVYwiQvyTyQ==}
     engines: {node: '>=12'}
     cpu: [x64]
     os: [darwin]
@@ -3278,6 +3369,12 @@ packages:
     cpu: [x64]
     os: [darwin]
 
+  '@esbuild/darwin-x64@0.23.1':
+    resolution: {integrity: sha512-aClqdgTDVPSEGgoCS8QDG37Gu8yc9lTHNAQlsztQ6ENetKEO//b8y31MMu2ZaPbn4kVsIABzVLXYLhCGekGDqw==}
+    engines: {node: '>=18'}
+    cpu: [x64]
+    os: [darwin]
+
   '@esbuild/freebsd-arm64@0.18.20':
     resolution: {integrity: sha512-yqDQHy4QHevpMAaxhhIwYPMv1NECwOvIpGCZkECn8w2WFHXjEwrBn3CeNIYsibZ/iZEUemj++M26W3cNR5h+Tw==}
     engines: {node: '>=12'}
     cpu: [arm64]
     os: [freebsd]
@@ -3296,6 +3393,12 @@ packages:
     cpu: [arm64]
     os: [freebsd]
 
+  '@esbuild/freebsd-arm64@0.23.1':
+    resolution: {integrity: sha512-h1k6yS8/pN/NHlMl5+v4XPfikhJulk4G+tKGFIOwURBSFzE8bixw1ebjluLOjfwtLqY0kewfjLSrO6tN2MgIhA==}
+    engines: {node: '>=18'}
+    cpu: [arm64]
+    os: [freebsd]
+
   '@esbuild/freebsd-x64@0.18.20':
     resolution: {integrity: sha512-tgWRPPuQsd3RmBZwarGVHZQvtzfEBOreNuxEMKFcd5DaDn2PbBxfwLcj4+aenoh7ctXcbXmOQIn8HI6mCSw5MQ==}
     engines: {node: '>=12'}
     cpu: [x64]
     os: [freebsd]
@@ -3314,6 +3417,12 @@ packages:
     cpu: [x64]
     os: [freebsd]
 
+  '@esbuild/freebsd-x64@0.23.1':
+    resolution: {integrity: sha512-lK1eJeyk1ZX8UklqFd/3A60UuZ/6UVfGT2LuGo3Wp4/z7eRTRYY+0xOu2kpClP+vMTi9wKOfXi2vjUpO1Ro76g==}
+    engines: {node: '>=18'}
+    cpu: [x64]
+    os: [freebsd]
+
   '@esbuild/linux-arm64@0.18.20':
     resolution: {integrity: sha512-2YbscF+UL7SQAVIpnWvYwM+3LskyDmPhe31pE7/aoTMFKKzIc9lLbyGUpmmb8a8AixOL61sQ/mFh3jEjHYFvdA==}
     engines: {node: '>=12'}
     cpu: [arm64]
     os: [linux]
@@ -3332,6 +3441,12 @@ packages:
     cpu: [arm64]
     os: [linux]
 
+  '@esbuild/linux-arm64@0.23.1':
+    resolution: {integrity: sha512-/93bf2yxencYDnItMYV/v116zff6UyTjo4EtEQjUBeGiVpMmffDNUyD9UN2zV+V3LRV3/on4xdZ26NKzn6754g==}
+    engines: {node: '>=18'}
+    cpu: [arm64]
+    os: [linux]
+
   '@esbuild/linux-arm@0.18.20':
     resolution: {integrity: sha512-/5bHkMWnq1EgKr1V+Ybz3s1hWXok7mDFUMQ4cG10AfW3wL02PSZi5kFpYKrptDsgb2WAJIvRcDm+qIvXf/apvg==}
     engines: {node: '>=12'}
     cpu: [arm]
     os: [linux]
@@ -3350,6 +3465,12 @@ packages:
     cpu: [arm]
     os: [linux]
 
+  '@esbuild/linux-arm@0.23.1':
+    resolution: {integrity: sha512-CXXkzgn+dXAPs3WBwE+Kvnrf4WECwBdfjfeYHpMeVxWE0EceB6vhWGShs6wi0IYEqMSIzdOF1XjQ/Mkm5d7ZdQ==}
+    engines: {node: '>=18'}
+    cpu: [arm]
+    os: [linux]
+
   '@esbuild/linux-ia32@0.18.20':
     resolution: {integrity: sha512-P4etWwq6IsReT0E1KHU40bOnzMHoH73aXp96Fs8TIT6z9Hu8G6+0SHSw9i2isWrD2nbx2qo5yUqACgdfVGx7TA==}
     engines: {node: '>=12'}
     cpu: [ia32]
     os: [linux]
@@ -3368,6 +3489,12 @@ packages:
     cpu: [ia32]
     os: [linux]
 
+  '@esbuild/linux-ia32@0.23.1':
+    resolution: {integrity: sha512-VTN4EuOHwXEkXzX5nTvVY4s7E/Krz7COC8xkftbbKRYAl96vPiUssGkeMELQMOnLOJ8k3BY1+ZY52tttZnHcXQ==}
+    engines: {node: '>=18'}
+    cpu: [ia32]
+    os: [linux]
+
   '@esbuild/linux-loong64@0.18.20':
     resolution: {integrity: sha512-nXW8nqBTrOpDLPgPY9uV+/1DjxoQ7DoB2N8eocyq8I9XuqJ7BiAMDMf9n1xZM9TgW0J8zrquIb/A7s3BJv7rjg==}
     engines: {node: '>=12'}
     cpu: [loong64]
     os: [linux]
@@ -3386,6 +3513,12 @@ packages:
     cpu: [loong64]
     os: [linux]
 
+  '@esbuild/linux-loong64@0.23.1':
+    resolution: {integrity: sha512-Vx09LzEoBa5zDnieH8LSMRToj7ir/Jeq0Gu6qJ/1GcBq9GkfoEAoXvLiW1U9J1qE/Y/Oyaq33w5p2ZWrNNHNEw==}
+    engines: {node: '>=18'}
+    cpu: [loong64]
+    os: [linux]
+
   '@esbuild/linux-mips64el@0.18.20':
     resolution: {integrity: sha512-d5NeaXZcHp8PzYy5VnXV3VSd2D328Zb+9dEq5HE6bw6+N86JVPExrA6O68OPwobntbNJ0pzCpUFZTo3w0GyetQ==}
     engines: {node: '>=12'}
     cpu: [mips64el]
     os: [linux]
@@ -3404,6 +3537,12 @@ packages:
     cpu: [mips64el]
     os: [linux]
 
+  '@esbuild/linux-mips64el@0.23.1':
+    resolution: {integrity: sha512-nrFzzMQ7W4WRLNUOU5dlWAqa6yVeI0P78WKGUo7lg2HShq/yx+UYkeNSE0SSfSure0SqgnsxPvmAUu/vu0E+3Q==}
+    engines: {node: '>=18'}
+    cpu: [mips64el]
+    os: [linux]
+
   '@esbuild/linux-ppc64@0.18.20':
     resolution: {integrity: sha512-WHPyeScRNcmANnLQkq6AfyXRFr5D6N2sKgkFo2FqguP44Nw2eyDlbTdZwd9GYk98DZG9QItIiTlFLHJHjxP3FA==}
     engines: {node: '>=12'}
     cpu: [ppc64]
     os: [linux]
@@ -3422,6 +3561,12 @@ packages:
     cpu: [ppc64]
     os: [linux]
 
+  '@esbuild/linux-ppc64@0.23.1':
+    resolution: {integrity: sha512-dKN8fgVqd0vUIjxuJI6P/9SSSe/mB9rvA98CSH2sJnlZ/OCZWO1DJvxj8jvKTfYUdGfcq2dDxoKaC6bHuTlgcw==}
+    engines: {node: '>=18'}
+    cpu: [ppc64]
+    os: [linux]
+
   '@esbuild/linux-riscv64@0.18.20':
     resolution: {integrity: sha512-WSxo6h5ecI5XH34KC7w5veNnKkju3zBRLEQNY7mv5mtBmrP/MjNBCAlsM2u5hDBlS3NGcTQpoBvRzqBcRtpq1A==}
     engines: {node: '>=12'}
     cpu: [riscv64]
     os: [linux]
@@ -3440,6 +3585,12 @@ packages:
     cpu: [riscv64]
     os: [linux]
 
+  '@esbuild/linux-riscv64@0.23.1':
+    resolution: {integrity: sha512-5AV4Pzp80fhHL83JM6LoA6pTQVWgB1HovMBsLQ9OZWLDqVY8MVobBXNSmAJi//Csh6tcY7e7Lny2Hg1tElMjIA==}
+    engines: {node: '>=18'}
+    cpu: [riscv64]
+    os: [linux]
+
   '@esbuild/linux-s390x@0.18.20':
     resolution: {integrity: sha512-+8231GMs3mAEth6Ja1iK0a1sQ3ohfcpzpRLH8uuc5/KVDFneH6jtAJLFGafpzpMRO6DzJ6AvXKze9LfFMrIHVQ==}
     engines: {node: '>=12'}
     cpu: [s390x]
     os: [linux]
@@ -3458,6 +3609,12 @@ packages:
     cpu: [s390x]
     os: [linux]
 
+  '@esbuild/linux-s390x@0.23.1':
+    resolution: {integrity: sha512-9ygs73tuFCe6f6m/Tb+9LtYxWR4c9yg7zjt2cYkjDbDpV/xVn+68cQxMXCjUpYwEkze2RcU/rMnfIXNRFmSoDw==}
+    engines: {node: '>=18'}
+    cpu: [s390x]
+    os: [linux]
+
   '@esbuild/linux-x64@0.18.20':
     resolution: {integrity: sha512-UYqiqemphJcNsFEskc73jQ7B9jgwjWrSayxawS6UVFZGWrAAtkzjxSqnoclCXxWtfwLdzU+vTpcNYhpn43uP1w==}
     engines: {node: '>=12'}
     cpu: [x64]
     os: [linux]
@@ -3476,6 +3633,12 @@ packages:
     cpu: [x64]
     os: [linux]
 
+  '@esbuild/linux-x64@0.23.1':
+    resolution: {integrity: sha512-EV6+ovTsEXCPAp58g2dD68LxoP/wK5pRvgy0J/HxPGB009omFPv3Yet0HiaqvrIrgPTBuC6wCH1LTOY91EO5hQ==}
+    engines: {node: '>=18'}
+    cpu: [x64]
+    os: [linux]
+
   '@esbuild/netbsd-x64@0.18.20':
     resolution: {integrity: sha512-iO1c++VP6xUBUmltHZoMtCUdPlnPGdBom6IrO4gyKPFFVBKioIImVooR5I83nTew5UOYrk3gIJhbZh8X44y06A==}
     engines: {node: '>=12'}
     cpu: [x64]
     os: [netbsd]
@@ -3494,12 +3657,24 @@ packages:
     cpu: [x64]
     os: [netbsd]
 
+  '@esbuild/netbsd-x64@0.23.1':
+    resolution: {integrity: sha512-aevEkCNu7KlPRpYLjwmdcuNz6bDFiE7Z8XC4CPqExjTvrHugh28QzUXVOZtiYghciKUacNktqxdpymplil1beA==}
+    engines: {node: '>=18'}
+    cpu: [x64]
+    os: [netbsd]
+
   '@esbuild/openbsd-arm64@0.23.0':
     resolution: {integrity: sha512-suXjq53gERueVWu0OKxzWqk7NxiUWSUlrxoZK7usiF50C6ipColGR5qie2496iKGYNLhDZkPxBI3erbnYkU0rQ==}
     engines: {node: '>=18'}
     cpu: [arm64]
     os: [openbsd]
+  '@esbuild/openbsd-arm64@0.23.1':
+    resolution: {integrity: sha512-3x37szhLexNA4bXhLrCC/LImN/YtWis6WXr1VESlfVtVeoFJBRINPJ3f0a/6LV8zpikqoUg4hyXw0sFBt5Cr+Q==}
+    engines: {node: '>=18'}
+    cpu: [arm64]
+    os: [openbsd]
+
   '@esbuild/openbsd-x64@0.18.20':
     resolution: {integrity: sha512-e5e4YSsuQfX4cxcygw/UCPIEP6wbIL+se3sxPdCiMbFLBWu0eiZOJ7WoD+ptCLrmjZBK1Wk7I6D/I3NglUGOxg==}
     engines: {node: '>=12'}
     cpu: [x64]
     os: [openbsd]
@@ -3518,6 +3693,12 @@ packages:
     cpu: [x64]
     os: [openbsd]
 
+  '@esbuild/openbsd-x64@0.23.1':
+    resolution: {integrity: sha512-aY2gMmKmPhxfU+0EdnN+XNtGbjfQgwZj43k8G3fyrDM/UdZww6xrWxmDkuz2eCZchqVeABjV5BpildOrUbBTqA==}
+    engines: {node: '>=18'}
+    cpu: [x64]
+    os: [openbsd]
+
   '@esbuild/sunos-x64@0.18.20':
     resolution: {integrity: sha512-kDbFRFp0YpTQVVrqUd5FTYmWo45zGaXe0X8E1G/LKFC0v8x0vWrhOWSLITcCn63lmZIxfOMXtCfti/RxN/0wnQ==}
     engines: {node: '>=12'}
     cpu: [x64]
     os: [sunos]
@@ -3536,6 +3717,12 @@ packages:
     cpu: [x64]
     os: [sunos]
 
+  '@esbuild/sunos-x64@0.23.1':
+    resolution: {integrity: sha512-RBRT2gqEl0IKQABT4XTj78tpk9v7ehp+mazn2HbUeZl1YMdaGAQqhapjGTCe7uw7y0frDi4gS0uHzhvpFuI1sA==}
+    engines: {node: '>=18'}
+    cpu: [x64]
+    os: [sunos]
+
   '@esbuild/win32-arm64@0.18.20':
     resolution: {integrity: sha512-ddYFR6ItYgoaq4v4JmQQaAI5s7npztfV4Ag6NrhiaW0RrnOXqBkgwZLofVTlq1daVTQNhtI5oieTvkRPfZrePg==}
     engines: {node: '>=12'}
     cpu: [arm64]
     os: [win32]
@@ -3554,6 +3741,12 @@ packages:
     cpu: [arm64]
     os: [win32]
 
+  '@esbuild/win32-arm64@0.23.1':
+    resolution: {integrity: sha512-4O+gPR5rEBe2FpKOVyiJ7wNDPA8nGzDuJ6gN4okSA1gEOYZ67N8JPk58tkWtdtPeLz7lBnY6I5L3jdsr3S+A6A==}
+    engines: {node: '>=18'}
+    cpu: [arm64]
+    os: [win32]
+
   '@esbuild/win32-ia32@0.18.20':
     resolution: {integrity: sha512-Wv7QBi3ID/rROT08SABTS7eV4hX26sVduqDOTe1MvGMjNd3EjOz4b7zeexIR62GTIEKrfJXKL9LFxTYgkyeu7g==}
     engines: {node: '>=12'}
     cpu: [ia32]
     os: [win32]
@@ -3572,6 +3765,12 @@ packages:
     cpu: [ia32]
     os: [win32]
 
+  '@esbuild/win32-ia32@0.23.1':
+    resolution: {integrity: sha512-BcaL0Vn6QwCwre3Y717nVHZbAa4UBEigzFm6VdsVdT/MbZ38xoj1X9HPkZhbmaBGUD1W8vxAfffbDe8bA6AKnQ==}
+    engines: {node: '>=18'}
+    cpu: [ia32]
+    os: [win32]
+
   '@esbuild/win32-x64@0.18.20':
     resolution: {integrity: sha512-kTdfRcSiDfQca/y9QIkng02avJ+NCaQvrMejlsB3RRv5sE9rRoeBPISaZpKxHELzRxZyLvNts1P27W3wV+8geQ==}
     engines: {node: '>=12'}
     cpu: [x64]
     os: [win32]
@@ -3590,6 +3789,12 @@ packages:
     cpu: [x64]
     os: [win32]
 
+  '@esbuild/win32-x64@0.23.1':
+    resolution: {integrity: sha512-BHpFFeslkWrXWyUPnbKm+xYYVYruCinGcftSBaa8zoF9hZO4BcSCFUvHVTtzpIY6YzUnYtuEhZ+C9iEXjxnasg==}
+    engines: {node: '>=18'}
+    cpu: [x64]
+    os: [win32]
+
   '@eslint-community/eslint-utils@4.4.0':
     resolution: {integrity: sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==}
     engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0}
@@ -4514,6 +4719,11 @@ packages:
     engines: {node: '>=18'}
     hasBin: true
 
+  '@playwright/test@1.47.0':
+    resolution: {integrity: sha512-SgAdlSwYVpToI4e/IH19IHHWvoijAYH5hu2MWSXptRypLSnzj51PcGD+rsOXFayde4P9ZLi+loXVwArg6IUkCA==}
+    engines: {node: '>=18'}
+    hasBin: true
+
   '@pmmmwh/react-refresh-webpack-plugin@0.5.15':
     resolution: {integrity: sha512-LFWllMA55pzB9D34w/wXUCf8+c+IYKuJDgxiZ3qMhl64KRMBHYM1I3VdGaD2BV5FNPV2/S2596bppxHbv2ZydQ==}
     engines: {node: '>= 10.13'}
@@ -6130,60 +6340,120 @@ packages:
     cpu: [arm64]
     os: [darwin]
 
+  '@swc/core-darwin-arm64@1.7.24':
+    resolution: {integrity: sha512-s0k09qAcsoa8jIncwgRRd43VApYqXu28R4OmICtDffV4S01HtsRLRarXsMuLutoZk3tbxqitep+A8MPBuqNgdg==}
+    engines: {node: '>=10'}
+    cpu: [arm64]
+    os: [darwin]
+
   '@swc/core-darwin-x64@1.6.13':
     resolution: {integrity: sha512-AW8akFSC+tmPE6YQQvK9S2A1B8pjnXEINg+gGgw0KRUUXunvu1/OEOeC5L2Co1wAwhD7bhnaefi06Qi9AiwOag==}
     engines: {node: '>=10'}
     cpu: [x64]
     os: [darwin]
 
+  '@swc/core-darwin-x64@1.7.24':
+    resolution: {integrity: sha512-1dlsulJ/fiOoJoJyQgaCewIEaZ7Sh6aJN4r5Uhl4lIZuNWa27XOb28A3K29/6HDO9JML3IJrvXPnl5o0vxDQuQ==}
+    engines: {node: '>=10'}
+    cpu: [x64]
+    os: [darwin]
+
   '@swc/core-linux-arm-gnueabihf@1.6.13':
     resolution: {integrity: sha512-f4gxxvDXVUm2HLYXRd311mSrmbpQF2MZ4Ja6XCQz1hWAxXdhRl1gpnZ+LH/xIfGSwQChrtLLVrkxdYUCVuIjFg==}
     engines: {node: '>=10'}
     cpu: [arm]
     os: [linux]
 
+  '@swc/core-linux-arm-gnueabihf@1.7.24':
+    resolution: {integrity: sha512-2ft1NmxyvHCu5CY4r2rNVybPqZtJaxpRSzvCcPlVjN/2D5Q3QgM5kBoo1t+0RCFfk4TS2V0KWJhtqKz0CNX62Q==}
+    engines: {node: '>=10'}
+    cpu: [arm]
+    os: [linux]
+
   '@swc/core-linux-arm64-gnu@1.6.13':
     resolution: {integrity: sha512-Nf/eoW2CbG8s+9JoLtjl9FByBXyQ5cjdBsA4efO7Zw4p+YSuXDgc8HRPC+E2+ns0praDpKNZtLvDtmF2lL+2Gg==}
     engines: {node: '>=10'}
     cpu: [arm64]
     os: [linux]
 
+  '@swc/core-linux-arm64-gnu@1.7.24':
+    resolution: {integrity: sha512-v/Z8I9tUUNkNHKa1Sw4r1Q7Wp66ezbRhe6xMIxvPNKVJQFaMOsRpe0t8T5qbk5sV2hJGOCKpQynSpZqQXLcJDQ==}
+    engines: {node: '>=10'}
+    cpu: [arm64]
+    os: [linux]
+
   '@swc/core-linux-arm64-musl@1.6.13':
     resolution: {integrity: sha512-2OysYSYtdw79prJYuKIiux/Gj0iaGEbpS2QZWCIY4X9sGoETJ5iMg+lY+YCrIxdkkNYd7OhIbXdYFyGs/w5LDg==}
     engines: {node: '>=10'}
     cpu: [arm64]
     os: [linux]
 
+  '@swc/core-linux-arm64-musl@1.7.24':
+    resolution: {integrity: sha512-0jJx0IcajcyOXaJsx1jXy86lYVrbupyy2VUj/OiJux/ic4oBJLjfL+WOuc8T8/hZj2p6X0X4jvfSCqWSuic4kA==}
+    engines: {node: '>=10'}
+    cpu: [arm64]
+    os: [linux]
+
   '@swc/core-linux-x64-gnu@1.6.13':
     resolution: {integrity: sha512-PkR4CZYJNk5hcd2+tMWBpnisnmYsUzazI1O5X7VkIGFcGePTqJ/bWlfUIVVExWxvAI33PQFzLbzmN5scyIUyGQ==}
     engines: {node: '>=10'}
     cpu: [x64]
     os: [linux]
 
+  '@swc/core-linux-x64-gnu@1.7.24':
+    resolution: {integrity: sha512-2+3aKQpSGjVnWKDTKUPuJzitQlTQrGorg+PVFMRkv6l+RcNCHZQNe/8VYpMhyBhxDMb3LUlbp7776FRevcruxg==}
+    engines: {node: '>=10'}
+    cpu: [x64]
+    os: [linux]
+
   '@swc/core-linux-x64-musl@1.6.13':
     resolution: {integrity: sha512-OdsY7wryTxCKwGQcwW9jwWg3cxaHBkTTHi91+5nm7hFPpmZMz1HivJrWAMwVE7iXFw+M4l6ugB/wCvpYrUAAjA==}
     engines: {node: '>=10'}
     cpu: [x64]
     os: [linux]
 
+  '@swc/core-linux-x64-musl@1.7.24':
+    resolution: {integrity: sha512-PMQ6SkCtMoj0Ks77DiishpEmIuHpYjFLDuVOzzJCzGeGoii0yRP5lKy/VeglFYLPqJzmhK9BHlpVehVf/8ZpvA==}
+    engines: {node: '>=10'}
+    cpu: [x64]
+    os: [linux]
+
   '@swc/core-win32-arm64-msvc@1.6.13':
     resolution: {integrity: sha512-ap6uNmYjwk9M/+bFEuWRNl3hq4VqgQ/Lk+ID/F5WGqczNr0L7vEf+pOsRAn0F6EV+o/nyb3ePt8rLhE/wjHpPg==}
     engines: {node: '>=10'}
     cpu: [arm64]
     os: [win32]
 
+  '@swc/core-win32-arm64-msvc@1.7.24':
+    resolution: {integrity: sha512-SNdCa4DtGXNWrPVHqctVUxgEVZVETuqERpqF50KFHO0Bvf5V/m1IJ4hFr2BxXlrzgnIW4t1Dpi6YOJbcGbEmnA==}
+    engines: {node: '>=10'}
+    cpu: [arm64]
+    os: [win32]
+
   '@swc/core-win32-ia32-msvc@1.6.13':
     resolution: {integrity: sha512-IJ8KH4yIUHTnS/U1jwQmtbfQals7zWPG0a9hbEfIr4zI0yKzjd83lmtS09lm2Q24QBWOCFGEEbuZxR4tIlvfzA==}
     engines: {node: '>=10'}
     cpu: [ia32]
     os: [win32]
 
+  '@swc/core-win32-ia32-msvc@1.7.24':
+    resolution: {integrity: sha512-5p3olHqwibMfrVFg2yVuSIPh9HArDYYlJXNZ9JKqeZk23A19J1pl9MuPmXDw+sxsiPfYJ/nUedIGeUHPF/+EDw==}
+    engines: {node: '>=10'}
+    cpu: [ia32]
+    os: [win32]
+
   '@swc/core-win32-x64-msvc@1.6.13':
     resolution: {integrity: sha512-f6/sx6LMuEnbuxtiSL/EkR0Y6qUHFw1XVrh6rwzKXptTipUdOY+nXpKoh+1UsBm/r7H0/5DtOdrn3q5ZHbFZjQ==}
     engines: {node: '>=10'}
     cpu: [x64]
     os: [win32]
 
+  '@swc/core-win32-x64-msvc@1.7.24':
+    resolution: {integrity: sha512-gRyPIxDznS8d2ClfmWbytjp2d48bij6swHnDLWhukNuOvXdQkEmaIzjEsionFG/zhcFLnz8zKfTvjEjInAMzxg==}
+    engines: {node: '>=10'}
+    cpu: [x64]
+    os: [win32]
+
   '@swc/core@1.6.13':
     resolution: {integrity: sha512-eailUYex6fkfaQTev4Oa3mwn0/e3mQU4H8y1WPuImYQESOQDtVrowwUGDSc19evpBbHpKtwM+hw8nLlhIsF+Tw==}
     engines: {node: '>=10'}
@@ -6193,12 +6463,27 @@ packages:
       '@swc/helpers':
         optional: true
 
+  '@swc/core@1.7.24':
+    resolution: {integrity: sha512-FzJaai6z6DYdICAY1UKNN5pzTn296ksK2zzEjjaXlpZtoMkGktWT0ttS7hbdBCPGhLOu5Q9TA2zdPejKUFjgig==}
+    engines: {node: '>=10'}
+    peerDependencies:
+      '@swc/helpers': '*'
+    peerDependenciesMeta:
+      '@swc/helpers':
+        optional: true
+
   '@swc/counter@0.1.3':
     resolution: {integrity: sha512-e2BR4lsJkkRlKZ/qCHPw9ZaSxc0MVUd7gtbtaB7aMvHeJVYe8sOB8DBZkP2DtISHGSku9sCK6T6cnY0CtXrOCQ==}
 
+  '@swc/helpers@0.5.13':
+    resolution: {integrity: sha512-UoKGxQ3r5kYI9dALKJapMmuK+1zWM/H17Z1+iwnNmzcJRnfFuevZs375TA5rW31pu4BS4NoSy1fRsexDXfWn5w==}
+
   '@swc/helpers@0.5.5':
     resolution: {integrity: sha512-KGYxvIOXcceOAbEk4bi/dVLEK9z8sZ0uBB3Il5b1rhfClSpcX0yfRO0KmTkqR2cnQDymwLB+25ZyMzICg/cm/A==}
 
+  '@swc/types@0.1.12':
+    resolution: {integrity: sha512-wBJA+SdtkbFhHjTMYH+dEH1y4VpfGdAc2Kw/LK09i9bXd/K6j6PkDcFCEzb6iVfZMkPRrl/q0e3toqTAJdkIVA==}
+
   '@swc/types@0.1.9':
     resolution: {integrity: sha512-qKnCno++jzcJ4lM4NTfYifm1EFSCeIfKiAHAfkENZAV5Kl9PjJIyd2yeeVv6c/2CckuLyv2NmRC5pv6pm2WQBg==}
@@ -6403,6 +6688,9 @@ packages:
   '@types/eslint@8.56.10':
     resolution: {integrity: sha512-Shavhk87gCtY2fhXDctcfS3e6FdxWkCx1iUZ9eEUbh7rTqlZT0/IzOkCOVt0fCjcFuZ9FPYfuezTBImfHCDBGQ==}
 
+  '@types/eslint@9.6.1':
+    resolution: {integrity: sha512-FXx2pKgId/WyYo2jXw63kk7/+TY7u7AziEJxJAnSFzHlqTAS3Ync6SvgYAN/k4/PQpnnVuzoMuVnByKK2qp0ag==}
+
   '@types/estree-jsx@1.0.5':
     resolution: {integrity: sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg==}
@@ -6529,6 +6817,9 @@ packages:
   '@types/node@20.14.10':
     resolution: {integrity: sha512-MdiXf+nDuMvY0gJKxyfZ7/6UFsETO7mGKF54MVD/ekJS6HdFtpZFBgrh6Pseu64XTb2MLyFPlbW6hj8HYRQNOQ==}
 
+  '@types/node@22.5.4':
+    resolution: {integrity: sha512-FDuKUJQm/ju9fT/SeX/6+gBzoPzlVCzfzmGkwKvRHQVxi4BntVbyIwf6a4Xn62mrvndLiml6z/UBXIdEVjQLXg==}
+
   '@types/normalize-package-data@2.4.1':
     resolution: {integrity: sha512-Gj7cI7z+98M282Tqmp2K5EIsoouUEzbBJhQQzDE3jSIRk6r9gsz0oUokqIUR4u1R3dMHo0pDHM7sNOHyhulypw==}
@@ -7566,8 +7857,8 @@ packages:
   bluebird@3.7.2:
     resolution: {integrity: sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==}
 
-  body-parser@1.20.2:
-    resolution: {integrity: sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==}
+  body-parser@1.20.3:
+    resolution: {integrity: sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==}
     engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16}
 
   boolbase@1.0.0:
@@ -7605,6 +7896,11 @@ packages:
     engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7}
     hasBin: true
 
+  browserslist@4.23.3:
+    resolution: {integrity: sha512-btwCFJVjI4YWDNfau8RhZ+B1Q/VLoUITrm3RlP6y1tYGWIOa+InuYiRGXUBXo8nA1qKmHMyLB/iVQg5TT4eFoA==}
+    engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7}
+    hasBin: true
+
   bs58@6.0.0:
     resolution: {integrity: sha512-PD0wEnEYg6ijszw/u8s+iI3H17cTymlrwkKhDhPZq+Sokl3AU4htyBFTjAeNAlCCmg0f53g6ih3jATyCKftTfw==}
@@ -7703,6 +7999,9 @@ packages:
   caniuse-lite@1.0.30001640:
     resolution: {integrity: sha512-lA4VMpW0PSUrFnkmVuEKBUovSWKhj7puyCg8StBChgu298N1AtuF1sKWEvfDuimSEDbhlb/KqPKC3fs1HbuQUA==}
 
+  caniuse-lite@1.0.30001660:
+    resolution: {integrity: sha512-GacvNTTuATm26qC74pt+ad1fW15mlQ/zuTzzY1ZoIzECTP8HURDfF43kNxPgf7H1jmelCBQTTbBNxdSXOA7Bqg==}
+
   capital-case@1.0.4:
     resolution: {integrity: sha512-ds37W8CytHgwnhGGTi88pcPyR15qoNkOpYwmMMfnWqqWgESapLqvDx6huFjQ5vqWSn2Z06173XNA7LtMOeUh1A==}
@@ -8206,6 +8505,10 @@ packages:
     resolution: {integrity: sha512-8ZYiJ3A/3OkDd093CBT/0UKDWry7ak4BdPTFP2+QEP7cmhouyq/Up709ASSj2cK02BbZiMgk7kYjZNS4QP5qrQ==}
     engines: {node: '>=18'}
 
+  cssstyle@4.1.0:
+    resolution: {integrity: sha512-h66W1URKpBS5YMI/V8PyXvTMFT8SupJ1IzoIV8IeBC/ji8WVmrO8dGlTi+2dh6whmdk6BiKJLD/ZBkhWbcg6nA==}
+    engines: {node: '>=18'}
+
   csstype@3.1.3:
     resolution: {integrity: sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==}
@@ -8445,6 +8748,15 @@ packages:
       supports-color:
         optional: true
 
+  debug@4.3.7:
+    resolution: {integrity: sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==}
+    engines: {node: '>=6.0'}
+    peerDependencies:
+      supports-color: '*'
+    peerDependenciesMeta:
+      supports-color:
+        optional: true
+
   decamelize@1.2.0:
     resolution: {integrity: sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==}
     engines: {node: '>=0.10.0'}
@@ -8733,6 +9045,9 @@ packages:
   electron-to-chromium@1.4.819:
     resolution: {integrity: sha512-8RwI6gKUokbHWcN3iRij/qpvf/wCbIVY5slODi85werwqUQwpFXM+dvUBND93Qh7SB0pW3Hlq3/wZsqQ3M9Jaw==}
 
+  electron-to-chromium@1.5.18:
+    resolution: {integrity: sha512-1OfuVACu+zKlmjsNdcJuVQuVE61sZOLbNM4JAQ1Rvh6EOj0/EUKhMJjRH73InPlXSh8HIJk1cVZ8pyOV/FMdUQ==}
+
   elkjs@0.9.3:
     resolution: {integrity: sha512-f/ZeWvW/BCXbhGEf1Ujp29EASo/lk1FDnETgNKwJrsVvGZhUWCZyg3xLJjAsxfOmt8KjswHmI5EwCQcPMpOYhQ==}
@@ -8750,6 +9065,10 @@ packages:
     resolution: {integrity: sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==}
     engines: {node: '>= 0.8'}
 
+  encodeurl@2.0.0:
+    resolution: {integrity: sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==}
+    engines: {node: '>= 0.8'}
+
   end-of-stream@1.4.4:
     resolution: {integrity: sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==}
@@ -8867,6 +9186,11 @@ packages:
     engines: {node: '>=18'}
     hasBin: true
 
+  esbuild@0.23.1:
+    resolution: {integrity: sha512-VVNz/9Sa0bs5SELtn3f7qhJCDPCF5oMEl5cO9/SSinpE9hbPVvxbd572HH5AKiP7WD8INO53GgfDDhRjkylHEg==}
+    engines: {node: '>=18'}
+    hasBin: true
+
   escalade@3.1.1:
     resolution: {integrity: sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==}
     engines: {node: '>=6'}
@@ -9203,8 +9527,8 @@ packages:
     resolution: {integrity: sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==}
     engines: {node: '>=16.17'}
 
-  express@4.19.2:
-    resolution: {integrity: sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q==}
+  express@4.20.0:
+    resolution: {integrity: sha512-pLdae7I6QqShF5PnNTCVn4hI91Dx0Grkn2+IAsMTgMIKuQVte2dN9PeGSSAME2FR8anOhVA62QDIUaWVfEXVLw==}
     engines: {node: '>= 0.10.0'}
 
   extend-shallow@2.0.1:
@@ -9802,6 +10126,10 @@ packages:
     resolution: {integrity: sha512-vsYlEs3E9gLwA1Hp+w3qzu+RUDFf4VTT8cyKqVICoZ2k7WM++Qyd2LwzyTi5bqMJFiIC/vNpTDYuxdreENRK/g==}
     engines: {node: '>=16.0.0'}
 
+  happy-dom@15.7.3:
+    resolution: {integrity: sha512-w3RUaYNXFJX5LiNVhOJLK4GqCB1bFj1FvELtpon3HrN8gUpS09V0Vvm4/BBRRj7mLUE1+ch8PKv1JxEp/0IHjA==}
+    engines: {node: '>=18.0.0'}
+
   har-schema@2.0.0:
resolution: {integrity: sha512-Oqluz6zhGX8cyRaTQlFMPw80bSJVG2x/cFb8ZPhUILGgHka9SsokCCOQgpveePerqidZOrT14ipqfJb7ILcW5Q==} engines: {node: '>=4'} @@ -9832,10 +10160,6 @@ packages: has-property-descriptors@1.0.2: resolution: {integrity: sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==} - has-proto@1.0.1: - resolution: {integrity: sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg==} - engines: {node: '>= 0.4'} - has-proto@1.0.3: resolution: {integrity: sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==} engines: {node: '>= 0.4'} @@ -9867,10 +10191,6 @@ packages: resolution: {integrity: sha512-FwO1BUVWkyHasWDW4S8o0ssQXjvyghLV2rfVhnN36b2bbcj45eGiuzdn9XOvOpjV3TKQD7Gm2BWNXdE9V4KKYg==} engines: {node: '>=12'} - hasown@2.0.0: - resolution: {integrity: sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA==} - engines: {node: '>= 0.4'} - hasown@2.0.2: resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==} engines: {node: '>= 0.4'} @@ -10065,6 +10385,9 @@ packages: immediate@3.0.6: resolution: {integrity: sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ==} + immer@10.1.1: + resolution: {integrity: sha512-s2MPrmjovJcoMaHtx6K11Ra7oD05NT97w1IC5zpMkT6Atjr7H8LjaDd81iIxUYpMKSRRNMJE703M1Fhr/TctHw==} + immer@9.0.21: resolution: {integrity: sha512-bc4NBHqOqSfRW7POMkHd51LvClaeMXpm8dx0e8oE2GORbq5aRK7Bxl4FyzVLdGtLmvLKL7BTDBG5ACQm4HWjTA==} @@ -10075,6 +10398,9 @@ packages: immutable@4.3.6: resolution: {integrity: sha512-Ju0+lEMyzMVZarkTn/gqRpdqd5dOPaz1mCZ0SH3JV6iFw81PldE/PEB1hWVEA288HPt4WXW8O7AWxB10M+03QQ==} + immutable@4.3.7: + resolution: {integrity: sha512-1hqclzwYwjRDFLjcFxOM5AYkkG0rpFPpr1RLPMEuGczoS7YA8gLhy8SWXYRAA/XwfEHpfo3cw5JGioS32fnMRw==} + import-fresh@3.3.0: resolution: {integrity: sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==} engines: {node: '>=6'} @@ -10573,6 +10899,15 @@ packages: canvas: optional: true + jsdom@25.0.0: + resolution: {integrity: sha512-OhoFVT59T7aEq75TVw9xxEfkXgacpqAhQaYgP9y/fDqWQCMB/b1H66RfmPm/MaeaAIU9nDwMOVTlPN51+ao6CQ==} + engines: {node: '>=18'} + peerDependencies: + canvas: ^2.11.2 + peerDependenciesMeta: + canvas: + optional: true + jsesc@0.5.0: resolution: {integrity: sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA==} hasBin: true @@ -11003,8 +11338,8 @@ packages: memoizerific@1.11.3: resolution: {integrity: sha512-/EuHYwAPdLtXwAwSZkh/Gutery6pD2KYd44oQLhAvQp/50mpyduZh8Q7PYHXTCJ+wuXxt7oij2LXyIJOOYFPog==} - merge-descriptors@1.0.1: - resolution: {integrity: sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==} + merge-descriptors@1.0.3: + resolution: {integrity: sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==} merge-stream@2.0.0: resolution: {integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==} @@ -11472,6 +11807,9 @@ packages: node-releases@2.0.14: resolution: {integrity: sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw==} + node-releases@2.0.18: + resolution: {integrity: sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g==} + non-layered-tidy-tree-layout@2.0.2: 
resolution: {integrity: sha512-gkXMxRzUH+PB0ax9dUN0yYF0S25BqeAYqhgMaLUFmpXLEk7Fcu8f4emJuOAY0V8kjDICxROIKsTAKsV/v355xw==} @@ -11527,6 +11865,9 @@ packages: nwsapi@2.2.10: resolution: {integrity: sha512-QK0sRs7MKv0tKe1+5uZIQk/C8XGza4DAnztJG8iD+TpJIORARrCxczA738awHrZoHeTjSSoHqao2teO0dC/gFQ==} + nwsapi@2.2.12: + resolution: {integrity: sha512-qXDmcVlZV4XRtKFzddidpfVP4oMSGhga+xdMc25mv8kaLUHtgzCDhUxkrN8exkGdTlLNaXj7CV3GtON7zuGZ+w==} + oauth-sign@0.9.0: resolution: {integrity: sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==} @@ -11847,8 +12188,8 @@ packages: resolution: {integrity: sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==} engines: {node: '>=16 || 14 >=14.18'} - path-to-regexp@0.1.7: - resolution: {integrity: sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==} + path-to-regexp@0.1.10: + resolution: {integrity: sha512-7lf7qcQidTku0Gu3YDPc8DJ1q7OOucfa/BSsIwjuh56VU7katFvuM8hULfkwB3Fns/rsVF7PwPKVw1sl5KQS9w==} path-to-regexp@6.2.2: resolution: {integrity: sha512-GQX3SSMokngb36+whdpRXE+3f9V8UzyAorlYvOGx87ufGHehNTn5lCxrKtLyZ4Yl/wEKnNnr98ZzOwwDZV5ogw==} @@ -11943,11 +12284,21 @@ packages: engines: {node: '>=18'} hasBin: true + playwright-core@1.47.0: + resolution: {integrity: sha512-1DyHT8OqkcfCkYUD9zzUTfg7EfTd+6a8MkD/NWOvjo0u/SCNd5YmY/lJwFvUZOxJbWNds+ei7ic2+R/cRz/PDg==} + engines: {node: '>=18'} + hasBin: true + playwright@1.45.1: resolution: {integrity: sha512-Hjrgae4kpSQBr98nhCj3IScxVeVUixqj+5oyif8TdIn2opTCPEzqAqNMeK42i3cWDCVu9MI+ZsGWw+gVR4ISBg==} engines: {node: '>=18'} hasBin: true + playwright@1.47.0: + resolution: {integrity: sha512-jOWiRq2pdNAX/mwLiwFYnPHpEZ4rM+fRSQpRHwEwZlP2PUANvL3+aJOF/bvISMhFD30rqMxUB4RJx9aQbfh4Ww==} + engines: {node: '>=18'} + hasBin: true + polished@4.3.1: resolution: {integrity: sha512-OBatVyC/N7SCW/FaDHrSd+vn0o5cS855TOmYi4OkdWUMSJCET/xip//ch8xGUvtr3i44X9LVyWwQlRMTN3pwSA==} engines: {node: '>=10'} @@ -12422,6 +12773,10 @@ packages: resolution: {integrity: sha512-AWJm14H1vVaO/iNZ4/hO+HyaTehuy9nRqVdkTqlJt0HWvBiBIEXFmb4C0DGeYo3Xes9rrEW+TxHsaigCbN5ICQ==} engines: {node: '>=0.6'} + qs@6.13.0: + resolution: {integrity: sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==} + engines: {node: '>=0.6'} + qs@6.5.3: resolution: {integrity: sha512-qxXIEh4pCGfHICj1mAJQ2/2XVZkjCDTcEgfoSQxc/fYivUZxTkk7L3bDBJSoNrEzXI17oUO5Dp07ktqE5KzczA==} engines: {node: '>=0.6'} @@ -12705,6 +13060,9 @@ packages: redux@4.2.1: resolution: {integrity: sha512-LAUYz4lc+Do8/g7aeRa8JkyDErK6ekstQaqWQrNRW//MY1TvCEpMtpTWvlQ+FPbWCx+Xixu/6SHt5N0HR+SB4w==} + redux@5.0.1: + resolution: {integrity: sha512-M9/ELqF6fy8FwmkpnF0S3YKOqMyoWJ4+CS5Efg2ct3oY9daQvd/Pc71FpGZsVsbl3Cpb+IIcjBDUnnyBdQbq4w==} + regenerate-unicode-properties@10.1.1: resolution: {integrity: sha512-X007RyZLsCJVVrjgEFVpLUTZwyOZk3oiL75ZcuYjlIWd6rNJtOjkBwQc5AsRrpbKVkxN6sklw/k/9m2jJYOf8Q==} engines: {node: '>=4'} @@ -12990,6 +13348,11 @@ packages: engines: {node: '>=14.0.0'} hasBin: true + sass@1.78.0: + resolution: {integrity: sha512-AaIqGSrjo5lA2Yg7RvFZrlXDBCp3nV4XP73GrLGvdRWWwk+8H3l0SDvq/5bA4eF+0RFPLuWUk3E+P1U/YqnpsQ==} + engines: {node: '>=14.0.0'} + hasBin: true + sax@1.2.4: resolution: {integrity: sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==} @@ -13030,10 +13393,19 @@ packages: engines: {node: '>=10'} hasBin: true + semver@7.6.3: + resolution: {integrity: 
sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==} + engines: {node: '>=10'} + hasBin: true + send@0.18.0: resolution: {integrity: sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==} engines: {node: '>= 0.8.0'} + send@0.19.0: + resolution: {integrity: sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==} + engines: {node: '>= 0.8.0'} + sentence-case@3.0.4: resolution: {integrity: sha512-8LS0JInaQMCRoQ7YUytAo/xUu5W2XnQxV2HI/6uM6U7CITS1RqPElr30V6uIqyMKM9lJGRVFy5/4CuzcixNYSg==} @@ -13044,8 +13416,8 @@ packages: resolution: {integrity: sha512-FMW2RvqNr03x+C0WxTyu6sOv21oOjkq5j8tjquWccwa6ScNyGFOGJVpuS1NmTVGBAHS07xnSKotgf2ehQmf9iA==} engines: {node: '>= 0.8.0'} - serve-static@1.15.0: - resolution: {integrity: sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==} + serve-static@1.16.0: + resolution: {integrity: sha512-pDLK8zwl2eKaYrs8mrPZBJua4hMplRWJ1tIFksVC3FtBEBnl8dxgeHtsaMS8DhS9i4fLObaon6ABoc4/hQGdPA==} engines: {node: '>= 0.8.0'} servie@4.3.3: @@ -13116,9 +13488,6 @@ packages: shiki@1.10.3: resolution: {integrity: sha512-eneCLncGuvPdTutJuLyUGS8QNPAVFO5Trvld2wgEq1e002mwctAhJKeMGWtWVXOIEzmlcLRqcgPSorR6AVzOmQ==} - side-channel@1.0.4: - resolution: {integrity: sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==} - side-channel@1.0.6: resolution: {integrity: sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==} engines: {node: '>= 0.4'} @@ -13186,6 +13555,10 @@ packages: resolution: {integrity: sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==} engines: {node: '>=0.10.0'} + source-map-js@1.2.1: + resolution: {integrity: sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==} + engines: {node: '>=0.10.0'} + source-map-support@0.5.21: resolution: {integrity: sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==} @@ -13567,6 +13940,11 @@ packages: engines: {node: '>=10'} hasBin: true + terser@5.32.0: + resolution: {integrity: sha512-v3Gtw3IzpBJ0ugkxEX8U0W6+TnPKRRCWGh1jC/iM/e3Ki5+qvO1L1EAZ56bZasc64aXHwRHNIQEzm6//i5cemQ==} + engines: {node: '>=10'} + hasBin: true + test-exclude@6.0.0: resolution: {integrity: sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==} engines: {node: '>=8'} @@ -13775,6 +14153,9 @@ packages: tslib@2.6.3: resolution: {integrity: sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==} + tslib@2.7.0: + resolution: {integrity: sha512-gLXCKdN1/j47AiHiOkJN69hJmcbGTHI0ImLmbYLHykhgeN0jVGola9yVjFgzCUklsZQMW55o+dW7IXv3RCXDzA==} + tsutils@3.21.0: resolution: {integrity: sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==} engines: {node: '>= 6'} @@ -13869,6 +14250,10 @@ packages: resolution: {integrity: sha512-ADn2w7hVPcK6w1I0uWnM//y1rLXZhzB9mr0a3OirzclKF1Wp6VzevUmzz/NRAWunOT6E8HrnpGY7xOfc6K57fA==} engines: {node: '>=16'} + type-fest@4.26.1: + resolution: {integrity: sha512-yOGpmOAL7CkKe/91I5O3gPICmJNLJ1G4zFYVAsRHg7M64biSnPtRj0WNQt++bRkjYOqjWXrhnUw1utzmVErAdg==} + engines: {node: '>=16'} + type-is@1.6.18: resolution: {integrity: sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==} engines: {node: '>= 0.6'} @@ -13924,6 +14309,11 @@ 
packages: engines: {node: '>=14.17'} hasBin: true + typescript@5.6.2: + resolution: {integrity: sha512-NW8ByodCSNCwZeghjN3o+JX5OFH0Ojg6sadjEKY4huZ52TqbJTJnDo5+Tw98lSy63NZvi4n+ez5m2u5d4PkZyw==} + engines: {node: '>=14.17'} + hasBin: true + ua-parser-js@1.0.37: resolution: {integrity: sha512-bhTyI94tZofjo+Dn8SN6Zv8nBDvyXTymAdM3LDI/0IboIUwTu1rEhW7v2TfiVsoYWgkQ4kOVqnI8APUFbIQIFQ==} @@ -13953,6 +14343,9 @@ packages: undici-types@5.26.5: resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==} + undici-types@6.19.8: + resolution: {integrity: sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==} + unicode-canonical-property-names-ecmascript@2.0.0: resolution: {integrity: sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ==} engines: {node: '>=4'} @@ -14904,6 +15297,8 @@ snapshots: '@babel/compat-data@7.24.7': {} + '@babel/compat-data@7.25.4': {} + '@babel/core@7.24.7': dependencies: '@ampproject/remapping': 2.3.0 @@ -14924,12 +15319,32 @@ snapshots: transitivePeerDependencies: - supports-color - '@babel/eslint-parser@7.18.9(@babel/core@7.24.7)(eslint@8.45.0)': + '@babel/core@7.25.2': dependencies: - '@babel/core': 7.24.7 - eslint: 8.45.0 - eslint-scope: 5.1.1 - eslint-visitor-keys: 2.1.0 + '@ampproject/remapping': 2.3.0 + '@babel/code-frame': 7.24.7 + '@babel/generator': 7.25.6 + '@babel/helper-compilation-targets': 7.25.2 + '@babel/helper-module-transforms': 7.25.2(@babel/core@7.25.2) + '@babel/helpers': 7.25.6 + '@babel/parser': 7.25.6 + '@babel/template': 7.25.0 + '@babel/traverse': 7.25.6 + '@babel/types': 7.25.6 + convert-source-map: 2.0.0 + debug: 4.3.7 + gensync: 1.0.0-beta.2 + json5: 2.2.3 + semver: 7.6.3 + transitivePeerDependencies: + - supports-color + + '@babel/eslint-parser@7.18.9(@babel/core@7.24.7)(eslint@8.45.0)': + dependencies: + '@babel/core': 7.24.7 + eslint: 8.45.0 + eslint-scope: 5.1.1 + eslint-visitor-keys: 2.1.0 semver: 7.6.2 '@babel/generator@7.23.6': @@ -14946,6 +15361,13 @@ snapshots: '@jridgewell/trace-mapping': 0.3.25 jsesc: 2.5.2 + '@babel/generator@7.25.6': + dependencies: + '@babel/types': 7.25.6 + '@jridgewell/gen-mapping': 0.3.5 + '@jridgewell/trace-mapping': 0.3.25 + jsesc: 2.5.2 + '@babel/helper-annotate-as-pure@7.24.7': dependencies: '@babel/types': 7.24.7 @@ -14965,6 +15387,14 @@ snapshots: lru-cache: 5.1.1 semver: 7.6.2 + '@babel/helper-compilation-targets@7.25.2': + dependencies: + '@babel/compat-data': 7.25.4 + '@babel/helper-validator-option': 7.24.8 + browserslist: 4.23.3 + lru-cache: 5.1.1 + semver: 7.6.3 + '@babel/helper-create-class-features-plugin@7.24.7(@babel/core@7.24.7)': dependencies: '@babel/core': 7.24.7 @@ -14992,7 +15422,7 @@ snapshots: '@babel/core': 7.24.7 '@babel/helper-compilation-targets': 7.24.7 '@babel/helper-plugin-utils': 7.24.7 - debug: 4.3.5(supports-color@8.1.1) + debug: 4.3.7 lodash.debounce: 4.0.8 resolve: 1.22.8 semver: 7.6.2 @@ -15004,7 +15434,7 @@ snapshots: '@babel/core': 7.24.7 '@babel/helper-compilation-targets': 7.24.7 '@babel/helper-plugin-utils': 7.24.7 - debug: 4.3.5(supports-color@8.1.1) + debug: 4.3.7 lodash.debounce: 4.0.8 resolve: 1.22.8 transitivePeerDependencies: @@ -15048,6 +15478,16 @@ snapshots: transitivePeerDependencies: - supports-color + '@babel/helper-module-transforms@7.25.2(@babel/core@7.25.2)': + dependencies: + '@babel/core': 7.25.2 + '@babel/helper-module-imports': 7.24.7 + '@babel/helper-simple-access': 7.24.7 + '@babel/helper-validator-identifier': 
7.24.7 + '@babel/traverse': 7.25.6 + transitivePeerDependencies: + - supports-color + '@babel/helper-optimise-call-expression@7.24.7': dependencies: '@babel/types': 7.24.7 @@ -15056,6 +15496,8 @@ snapshots: '@babel/helper-plugin-utils@7.24.7': {} + '@babel/helper-plugin-utils@7.24.8': {} + '@babel/helper-remap-async-to-generator@7.24.7(@babel/core@7.24.7)': dependencies: '@babel/core': 7.24.7 @@ -15096,6 +15538,8 @@ snapshots: '@babel/helper-string-parser@7.24.7': {} + '@babel/helper-string-parser@7.24.8': {} + '@babel/helper-validator-identifier@7.22.20': {} '@babel/helper-validator-identifier@7.22.5': {} @@ -15104,6 +15548,8 @@ snapshots: '@babel/helper-validator-option@7.24.7': {} + '@babel/helper-validator-option@7.24.8': {} + '@babel/helper-wrap-function@7.24.7': dependencies: '@babel/helper-function-name': 7.24.7 @@ -15118,6 +15564,11 @@ snapshots: '@babel/template': 7.24.7 '@babel/types': 7.24.7 + '@babel/helpers@7.25.6': + dependencies: + '@babel/template': 7.25.0 + '@babel/types': 7.25.6 + '@babel/highlight@7.22.5': dependencies: '@babel/helper-validator-identifier': 7.22.5 @@ -15149,6 +15600,10 @@ snapshots: dependencies: '@babel/types': 7.24.7 + '@babel/parser@7.25.6': + dependencies: + '@babel/types': 7.25.6 + '@babel/plugin-bugfix-firefox-class-in-computed-class-key@7.24.7(@babel/core@7.24.7)': dependencies: '@babel/core': 7.24.7 @@ -15291,6 +15746,11 @@ snapshots: '@babel/core': 7.24.7 '@babel/helper-plugin-utils': 7.24.7 + '@babel/plugin-syntax-flow@7.24.7(@babel/core@7.25.2)': + dependencies: + '@babel/core': 7.25.2 + '@babel/helper-plugin-utils': 7.24.7 + '@babel/plugin-syntax-import-assertions@7.23.3(@babel/core@7.24.7)': dependencies: '@babel/core': 7.24.7 @@ -15321,6 +15781,11 @@ snapshots: '@babel/core': 7.24.7 '@babel/helper-plugin-utils': 7.24.7 + '@babel/plugin-syntax-jsx@7.24.7(@babel/core@7.25.2)': + dependencies: + '@babel/core': 7.25.2 + '@babel/helper-plugin-utils': 7.24.7 + '@babel/plugin-syntax-logical-assignment-operators@7.10.4(@babel/core@7.24.7)': dependencies: '@babel/core': 7.24.7 @@ -15746,6 +16211,17 @@ snapshots: transitivePeerDependencies: - supports-color + '@babel/plugin-transform-react-jsx@7.25.2(@babel/core@7.25.2)': + dependencies: + '@babel/core': 7.25.2 + '@babel/helper-annotate-as-pure': 7.24.7 + '@babel/helper-module-imports': 7.24.7 + '@babel/helper-plugin-utils': 7.24.8 + '@babel/plugin-syntax-jsx': 7.24.7(@babel/core@7.25.2) + '@babel/types': 7.25.6 + transitivePeerDependencies: + - supports-color + '@babel/plugin-transform-react-pure-annotations@7.18.6(@babel/core@7.24.7)': dependencies: '@babel/core': 7.24.7 @@ -16047,6 +16523,12 @@ snapshots: '@babel/parser': 7.24.7 '@babel/types': 7.24.7 + '@babel/template@7.25.0': + dependencies: + '@babel/code-frame': 7.24.7 + '@babel/parser': 7.25.6 + '@babel/types': 7.25.6 + '@babel/traverse@7.23.9': dependencies: '@babel/code-frame': 7.24.7 @@ -16057,7 +16539,7 @@ snapshots: '@babel/helper-split-export-declaration': 7.24.7 '@babel/parser': 7.24.7 '@babel/types': 7.24.7 - debug: 4.3.5(supports-color@8.1.1) + debug: 4.3.7 globals: 11.12.0 transitivePeerDependencies: - supports-color @@ -16077,6 +16559,18 @@ snapshots: transitivePeerDependencies: - supports-color + '@babel/traverse@7.25.6': + dependencies: + '@babel/code-frame': 7.24.7 + '@babel/generator': 7.25.6 + '@babel/parser': 7.25.6 + '@babel/template': 7.25.0 + '@babel/types': 7.25.6 + debug: 4.3.7 + globals: 11.12.0 + transitivePeerDependencies: + - supports-color + '@babel/types@7.23.9': dependencies: '@babel/helper-string-parser': 7.23.4 
@@ -16089,6 +16583,12 @@ snapshots: '@babel/helper-validator-identifier': 7.24.7 to-fast-properties: 2.0.0 + '@babel/types@7.25.6': + dependencies: + '@babel/helper-string-parser': 7.24.8 + '@babel/helper-validator-identifier': 7.24.7 + to-fast-properties: 2.0.0 + '@base2/pretty-print-object@1.0.1': {} '@blakeembrey/deque@1.0.5': {} @@ -16516,7 +17016,7 @@ snapshots: '@devicefarmer/adbkit-monkey': 1.2.1 bluebird: 3.7.2 commander: 9.5.0 - debug: 4.3.5(supports-color@8.1.1) + debug: 4.3.7 node-forge: 1.3.1 split: 1.0.1 transitivePeerDependencies: @@ -16544,6 +17044,9 @@ snapshots: '@esbuild/aix-ppc64@0.23.0': optional: true + '@esbuild/aix-ppc64@0.23.1': + optional: true + '@esbuild/android-arm64@0.18.20': optional: true @@ -16553,6 +17056,9 @@ snapshots: '@esbuild/android-arm64@0.23.0': optional: true + '@esbuild/android-arm64@0.23.1': + optional: true + '@esbuild/android-arm@0.18.20': optional: true @@ -16562,6 +17068,9 @@ snapshots: '@esbuild/android-arm@0.23.0': optional: true + '@esbuild/android-arm@0.23.1': + optional: true + '@esbuild/android-x64@0.18.20': optional: true @@ -16571,6 +17080,9 @@ snapshots: '@esbuild/android-x64@0.23.0': optional: true + '@esbuild/android-x64@0.23.1': + optional: true + '@esbuild/darwin-arm64@0.18.20': optional: true @@ -16580,6 +17092,9 @@ snapshots: '@esbuild/darwin-arm64@0.23.0': optional: true + '@esbuild/darwin-arm64@0.23.1': + optional: true + '@esbuild/darwin-x64@0.18.20': optional: true @@ -16589,6 +17104,9 @@ snapshots: '@esbuild/darwin-x64@0.23.0': optional: true + '@esbuild/darwin-x64@0.23.1': + optional: true + '@esbuild/freebsd-arm64@0.18.20': optional: true @@ -16598,6 +17116,9 @@ snapshots: '@esbuild/freebsd-arm64@0.23.0': optional: true + '@esbuild/freebsd-arm64@0.23.1': + optional: true + '@esbuild/freebsd-x64@0.18.20': optional: true @@ -16607,6 +17128,9 @@ snapshots: '@esbuild/freebsd-x64@0.23.0': optional: true + '@esbuild/freebsd-x64@0.23.1': + optional: true + '@esbuild/linux-arm64@0.18.20': optional: true @@ -16616,6 +17140,9 @@ snapshots: '@esbuild/linux-arm64@0.23.0': optional: true + '@esbuild/linux-arm64@0.23.1': + optional: true + '@esbuild/linux-arm@0.18.20': optional: true @@ -16625,6 +17152,9 @@ snapshots: '@esbuild/linux-arm@0.23.0': optional: true + '@esbuild/linux-arm@0.23.1': + optional: true + '@esbuild/linux-ia32@0.18.20': optional: true @@ -16634,6 +17164,9 @@ snapshots: '@esbuild/linux-ia32@0.23.0': optional: true + '@esbuild/linux-ia32@0.23.1': + optional: true + '@esbuild/linux-loong64@0.18.20': optional: true @@ -16643,6 +17176,9 @@ snapshots: '@esbuild/linux-loong64@0.23.0': optional: true + '@esbuild/linux-loong64@0.23.1': + optional: true + '@esbuild/linux-mips64el@0.18.20': optional: true @@ -16652,6 +17188,9 @@ snapshots: '@esbuild/linux-mips64el@0.23.0': optional: true + '@esbuild/linux-mips64el@0.23.1': + optional: true + '@esbuild/linux-ppc64@0.18.20': optional: true @@ -16661,6 +17200,9 @@ snapshots: '@esbuild/linux-ppc64@0.23.0': optional: true + '@esbuild/linux-ppc64@0.23.1': + optional: true + '@esbuild/linux-riscv64@0.18.20': optional: true @@ -16670,6 +17212,9 @@ snapshots: '@esbuild/linux-riscv64@0.23.0': optional: true + '@esbuild/linux-riscv64@0.23.1': + optional: true + '@esbuild/linux-s390x@0.18.20': optional: true @@ -16679,6 +17224,9 @@ snapshots: '@esbuild/linux-s390x@0.23.0': optional: true + '@esbuild/linux-s390x@0.23.1': + optional: true + '@esbuild/linux-x64@0.18.20': optional: true @@ -16688,6 +17236,9 @@ snapshots: '@esbuild/linux-x64@0.23.0': optional: true + 
'@esbuild/linux-x64@0.23.1': + optional: true + '@esbuild/netbsd-x64@0.18.20': optional: true @@ -16697,9 +17248,15 @@ snapshots: '@esbuild/netbsd-x64@0.23.0': optional: true + '@esbuild/netbsd-x64@0.23.1': + optional: true + '@esbuild/openbsd-arm64@0.23.0': optional: true + '@esbuild/openbsd-arm64@0.23.1': + optional: true + '@esbuild/openbsd-x64@0.18.20': optional: true @@ -16709,6 +17266,9 @@ snapshots: '@esbuild/openbsd-x64@0.23.0': optional: true + '@esbuild/openbsd-x64@0.23.1': + optional: true + '@esbuild/sunos-x64@0.18.20': optional: true @@ -16718,6 +17278,9 @@ snapshots: '@esbuild/sunos-x64@0.23.0': optional: true + '@esbuild/sunos-x64@0.23.1': + optional: true + '@esbuild/win32-arm64@0.18.20': optional: true @@ -16727,6 +17290,9 @@ snapshots: '@esbuild/win32-arm64@0.23.0': optional: true + '@esbuild/win32-arm64@0.23.1': + optional: true + '@esbuild/win32-ia32@0.18.20': optional: true @@ -16736,6 +17302,9 @@ snapshots: '@esbuild/win32-ia32@0.23.0': optional: true + '@esbuild/win32-ia32@0.23.1': + optional: true + '@esbuild/win32-x64@0.18.20': optional: true @@ -16745,6 +17314,9 @@ snapshots: '@esbuild/win32-x64@0.23.0': optional: true + '@esbuild/win32-x64@0.23.1': + optional: true + '@eslint-community/eslint-utils@4.4.0(eslint@8.36.0)': dependencies: eslint: 8.36.0 @@ -17272,7 +17844,7 @@ snapshots: '@types/json-stable-stringify': 1.0.36 '@whatwg-node/fetch': 0.9.16 chalk: 4.1.2 - debug: 4.3.5(supports-color@8.1.1) + debug: 4.3.7 dotenv: 16.4.1 graphql: 16.9.0 graphql-request: 6.1.0(graphql@16.9.0) @@ -17399,9 +17971,9 @@ snapshots: react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - '@headlessui/tailwindcss@0.1.3(tailwindcss@3.4.4(ts-node@10.9.2(@types/node@20.14.10)(typescript@5.5.3)))': + '@headlessui/tailwindcss@0.1.3(tailwindcss@3.4.4(ts-node@10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@22.5.4)(typescript@5.5.3)))': dependencies: - tailwindcss: 3.4.4(ts-node@10.9.2(@swc/core@1.6.13(@swc/helpers@0.5.5))(@types/node@20.14.10)(typescript@5.5.3)) + tailwindcss: 3.4.4(ts-node@10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@22.5.4)(typescript@5.5.3)) '@hookform/resolvers@3.9.0(react-hook-form@7.52.1(react@18.3.1))': dependencies: @@ -18065,7 +18637,12 @@ snapshots: dependencies: playwright: 1.45.1 - '@pmmmwh/react-refresh-webpack-plugin@0.5.15(@types/webpack@5.28.5(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)))(react-refresh@0.14.2)(type-fest@4.21.0)(webpack-hot-middleware@2.26.1)(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)))': + '@playwright/test@1.47.0': + dependencies: + playwright: 1.47.0 + optional: true + + '@pmmmwh/react-refresh-webpack-plugin@0.5.15(@types/webpack@5.28.5(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4(webpack@5.92.1)))(react-refresh@0.14.2)(type-fest@4.26.1)(webpack-hot-middleware@2.26.1)(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4))': dependencies: ansi-html: 0.0.9 core-js-pure: 3.31.1 @@ -18075,10 +18652,10 @@ snapshots: react-refresh: 0.14.2 schema-utils: 4.2.0 source-map: 0.7.4 - webpack: 5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)) + webpack: 5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4) optionalDependencies: - '@types/webpack': 5.28.5(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)) - type-fest: 4.21.0 + '@types/webpack': 5.28.5(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4(webpack@5.92.1)) + type-fest: 4.26.1 webpack-hot-middleware: 
2.26.1 '@pnpm/config.env-replace@1.1.0': {} @@ -19132,7 +19709,7 @@ snapshots: '@types/react': 18.3.3 '@types/react-dom': 18.3.0 - '@reduxjs/toolkit@1.9.5(react-redux@8.1.1(@types/react-dom@18.3.0)(@types/react@18.3.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(redux@4.2.1))(react@18.3.1)': + '@reduxjs/toolkit@1.9.5(react-redux@8.1.1(@types/react-dom@18.3.0)(@types/react@18.3.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(redux@5.0.1))(react@18.3.1)': dependencies: immer: 9.0.21 redux: 4.2.1 @@ -19140,7 +19717,7 @@ snapshots: reselect: 4.1.8 optionalDependencies: react: 18.3.1 - react-redux: 8.1.1(@types/react-dom@18.3.0)(@types/react@18.3.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(redux@4.2.1) + react-redux: 8.1.1(@types/react-dom@18.3.0)(@types/react@18.3.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(redux@5.0.1) '@remix-run/router@1.17.1': {} @@ -19562,7 +20139,7 @@ snapshots: ejs: 3.1.9 esbuild: 0.18.20 esbuild-plugin-alias: 0.2.1 - express: 4.19.2 + express: 4.20.0 find-cache-dir: 3.3.2 fs-extra: 11.2.0 process: 0.11.10 @@ -19571,7 +20148,7 @@ snapshots: - encoding - supports-color - '@storybook/builder-webpack5@7.6.20(@swc/helpers@0.5.5)(typescript@5.5.3)(webpack-cli@5.1.4(webpack@5.92.1))': + '@storybook/builder-webpack5@7.6.20(@swc/helpers@0.5.13)(typescript@5.5.3)(webpack-cli@5.1.4(webpack@5.92.1))': dependencies: '@babel/core': 7.24.7 '@storybook/channels': 7.6.20 @@ -19582,33 +20159,33 @@ snapshots: '@storybook/node-logger': 7.6.20 '@storybook/preview': 7.6.20 '@storybook/preview-api': 7.6.20 - '@swc/core': 1.6.13(@swc/helpers@0.5.5) + '@swc/core': 1.6.13(@swc/helpers@0.5.13) '@types/node': 18.19.39 '@types/semver': 7.5.8 - babel-loader: 9.1.3(@babel/core@7.24.7)(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1))) + babel-loader: 9.1.3(@babel/core@7.24.7)(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4)) browser-assert: 1.2.1 case-sensitive-paths-webpack-plugin: 2.4.0 cjs-module-lexer: 1.3.1 constants-browserify: 1.0.0 - css-loader: 6.11.0(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1))) + css-loader: 6.11.0(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4)) es-module-lexer: 1.5.4 - express: 4.19.2 - fork-ts-checker-webpack-plugin: 8.0.0(typescript@5.5.3)(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1))) + express: 4.20.0 + fork-ts-checker-webpack-plugin: 8.0.0(typescript@5.5.3)(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4)) fs-extra: 11.2.0 - html-webpack-plugin: 5.6.0(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1))) + html-webpack-plugin: 5.6.0(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4)) magic-string: 0.30.10 path-browserify: 1.0.1 process: 0.11.10 semver: 7.6.2 - style-loader: 3.3.4(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1))) - swc-loader: 0.2.6(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1))) - terser-webpack-plugin: 5.3.10(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1))) + style-loader: 3.3.4(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4)) + swc-loader: 0.2.6(@swc/core@1.6.13(@swc/helpers@0.5.13))(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4)) + 
terser-webpack-plugin: 5.3.10(@swc/core@1.6.13(@swc/helpers@0.5.13))(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4)) ts-dedent: 2.2.0 url: 0.11.3 util: 0.12.5 util-deprecate: 1.0.2 - webpack: 5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)) - webpack-dev-middleware: 6.1.3(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1))) + webpack: 5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.13))(webpack-cli@5.1.4(webpack@5.92.1)) + webpack-dev-middleware: 6.1.3(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4)) webpack-hot-middleware: 2.26.1 webpack-virtual-modules: 0.5.0 optionalDependencies: @@ -19632,7 +20209,7 @@ snapshots: '@storybook/client-logger': 7.1.0 '@storybook/core-events': 7.1.0 '@storybook/global': 5.0.0 - qs: 6.12.3 + qs: 6.13.0 telejson: 7.2.0 tiny-invariant: 1.3.3 @@ -19641,7 +20218,7 @@ snapshots: '@storybook/client-logger': 7.6.17 '@storybook/core-events': 7.6.17 '@storybook/global': 5.0.0 - qs: 6.12.3 + qs: 6.13.0 telejson: 7.2.0 tiny-invariant: 1.3.3 @@ -19675,7 +20252,7 @@ snapshots: detect-indent: 6.1.0 envinfo: 7.10.0 execa: 5.1.1 - express: 4.19.2 + express: 4.20.0 find-up: 5.0.0 fs-extra: 11.1.1 get-npm-tarball-url: 2.0.3 @@ -19853,7 +20430,7 @@ snapshots: cli-table3: 0.6.3 compression: 1.7.4 detect-port: 1.5.1 - express: 4.19.2 + express: 4.20.0 fs-extra: 11.2.0 globby: 11.1.0 ip: 2.0.0 @@ -19994,16 +20571,16 @@ snapshots: '@storybook/postinstall@7.6.20': {} - '@storybook/preset-react-webpack@7.6.20(@babel/core@7.24.7)(@swc/core@1.6.13(@swc/helpers@0.5.5))(@types/webpack@5.28.5(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(type-fest@4.21.0)(typescript@5.5.3)(webpack-cli@5.1.4(webpack@5.92.1))(webpack-hot-middleware@2.26.1)': + '@storybook/preset-react-webpack@7.6.20(@babel/core@7.24.7)(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/webpack@5.28.5(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4(webpack@5.92.1)))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(type-fest@4.26.1)(typescript@5.5.3)(webpack-cli@5.1.4(webpack@5.92.1))(webpack-hot-middleware@2.26.1)': dependencies: '@babel/preset-flow': 7.24.7(@babel/core@7.24.7) '@babel/preset-react': 7.24.7(@babel/core@7.24.7) - '@pmmmwh/react-refresh-webpack-plugin': 0.5.15(@types/webpack@5.28.5(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)))(react-refresh@0.14.2)(type-fest@4.21.0)(webpack-hot-middleware@2.26.1)(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1))) + '@pmmmwh/react-refresh-webpack-plugin': 0.5.15(@types/webpack@5.28.5(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4(webpack@5.92.1)))(react-refresh@0.14.2)(type-fest@4.26.1)(webpack-hot-middleware@2.26.1)(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4)) '@storybook/core-webpack': 7.6.20 '@storybook/docs-tools': 7.6.20 '@storybook/node-logger': 7.6.20 '@storybook/react': 7.6.20(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.5.3) - '@storybook/react-docgen-typescript-plugin': 1.0.6--canary.9.0c3f3b7.0(typescript@5.5.3)(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1))) + '@storybook/react-docgen-typescript-plugin': 1.0.6--canary.9.0c3f3b7.0(typescript@5.5.3)(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4)) '@types/node': 18.19.39 '@types/semver': 7.5.8 babel-plugin-add-react-displayname: 0.0.5 @@ -20014,7 
+20591,7 @@ snapshots: react-dom: 18.3.1(react@18.3.1) react-refresh: 0.14.2 semver: 7.6.2 - webpack: 5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)) + webpack: 5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4) optionalDependencies: '@babel/core': 7.24.7 typescript: 5.5.3 @@ -20045,7 +20622,7 @@ snapshots: dequal: 2.0.3 lodash: 4.17.21 memoizerific: 1.11.3 - qs: 6.12.3 + qs: 6.13.0 synchronous-promise: 2.0.17 ts-dedent: 2.2.0 util-deprecate: 1.0.2 @@ -20086,9 +20663,9 @@ snapshots: '@storybook/preview@7.6.20': {} - '@storybook/react-docgen-typescript-plugin@1.0.6--canary.9.0c3f3b7.0(typescript@5.5.3)(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)))': + '@storybook/react-docgen-typescript-plugin@1.0.6--canary.9.0c3f3b7.0(typescript@5.5.3)(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4))': dependencies: - debug: 4.3.5(supports-color@8.1.1) + debug: 4.3.7 endent: 2.1.0 find-cache-dir: 3.3.2 flat-cache: 3.2.0 @@ -20096,7 +20673,7 @@ snapshots: react-docgen-typescript: 2.2.2(typescript@5.5.3) tslib: 2.6.3 typescript: 5.5.3 - webpack: 5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)) + webpack: 5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4) transitivePeerDependencies: - supports-color @@ -20105,10 +20682,10 @@ snapshots: react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - '@storybook/react-webpack5@7.6.20(@babel/core@7.24.7)(@swc/core@1.6.13(@swc/helpers@0.5.5))(@swc/helpers@0.5.5)(@types/webpack@5.28.5(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(type-fest@4.21.0)(typescript@5.5.3)(webpack-cli@5.1.4(webpack@5.92.1))(webpack-hot-middleware@2.26.1)': + '@storybook/react-webpack5@7.6.20(@babel/core@7.24.7)(@swc/core@1.7.24(@swc/helpers@0.5.13))(@swc/helpers@0.5.13)(@types/webpack@5.28.5(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4(webpack@5.92.1)))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(type-fest@4.26.1)(typescript@5.5.3)(webpack-cli@5.1.4(webpack@5.92.1))(webpack-hot-middleware@2.26.1)': dependencies: - '@storybook/builder-webpack5': 7.6.20(@swc/helpers@0.5.5)(typescript@5.5.3)(webpack-cli@5.1.4(webpack@5.92.1)) - '@storybook/preset-react-webpack': 7.6.20(@babel/core@7.24.7)(@swc/core@1.6.13(@swc/helpers@0.5.5))(@types/webpack@5.28.5(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(type-fest@4.21.0)(typescript@5.5.3)(webpack-cli@5.1.4(webpack@5.92.1))(webpack-hot-middleware@2.26.1) + '@storybook/builder-webpack5': 7.6.20(@swc/helpers@0.5.13)(typescript@5.5.3)(webpack-cli@5.1.4(webpack@5.92.1)) + '@storybook/preset-react-webpack': 7.6.20(@babel/core@7.24.7)(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/webpack@5.28.5(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4(webpack@5.92.1)))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(type-fest@4.26.1)(typescript@5.5.3)(webpack-cli@5.1.4(webpack@5.92.1))(webpack-hot-middleware@2.26.1) '@storybook/react': 7.6.20(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.5.3) '@types/node': 18.19.39 react: 18.3.1 @@ -20167,13 +20744,13 @@ snapshots: dependencies: '@storybook/client-logger': 7.6.17 memoizerific: 1.11.3 - qs: 6.12.3 + qs: 6.13.0 '@storybook/router@7.6.20': dependencies: '@storybook/client-logger': 7.6.20 memoizerific: 1.11.3 - qs: 6.12.3 + qs: 6.13.0 '@storybook/telemetry@7.1.0': dependencies: @@ -20345,34 +20922,64 @@ 
snapshots: '@swc/core-darwin-arm64@1.6.13': optional: true + '@swc/core-darwin-arm64@1.7.24': + optional: true + '@swc/core-darwin-x64@1.6.13': optional: true + '@swc/core-darwin-x64@1.7.24': + optional: true + '@swc/core-linux-arm-gnueabihf@1.6.13': optional: true + '@swc/core-linux-arm-gnueabihf@1.7.24': + optional: true + '@swc/core-linux-arm64-gnu@1.6.13': optional: true + '@swc/core-linux-arm64-gnu@1.7.24': + optional: true + '@swc/core-linux-arm64-musl@1.6.13': optional: true + '@swc/core-linux-arm64-musl@1.7.24': + optional: true + '@swc/core-linux-x64-gnu@1.6.13': optional: true + '@swc/core-linux-x64-gnu@1.7.24': + optional: true + '@swc/core-linux-x64-musl@1.6.13': optional: true + '@swc/core-linux-x64-musl@1.7.24': + optional: true + '@swc/core-win32-arm64-msvc@1.6.13': optional: true + '@swc/core-win32-arm64-msvc@1.7.24': + optional: true + '@swc/core-win32-ia32-msvc@1.6.13': optional: true + '@swc/core-win32-ia32-msvc@1.7.24': + optional: true + '@swc/core-win32-x64-msvc@1.6.13': optional: true - '@swc/core@1.6.13(@swc/helpers@0.5.5)': + '@swc/core-win32-x64-msvc@1.7.24': + optional: true + + '@swc/core@1.6.13(@swc/helpers@0.5.13)': dependencies: '@swc/counter': 0.1.3 '@swc/types': 0.1.9 @@ -20387,15 +20994,43 @@ snapshots: '@swc/core-win32-arm64-msvc': 1.6.13 '@swc/core-win32-ia32-msvc': 1.6.13 '@swc/core-win32-x64-msvc': 1.6.13 - '@swc/helpers': 0.5.5 + '@swc/helpers': 0.5.13 + + '@swc/core@1.7.24(@swc/helpers@0.5.13)': + dependencies: + '@swc/counter': 0.1.3 + '@swc/types': 0.1.12 + optionalDependencies: + '@swc/core-darwin-arm64': 1.7.24 + '@swc/core-darwin-x64': 1.7.24 + '@swc/core-linux-arm-gnueabihf': 1.7.24 + '@swc/core-linux-arm64-gnu': 1.7.24 + '@swc/core-linux-arm64-musl': 1.7.24 + '@swc/core-linux-x64-gnu': 1.7.24 + '@swc/core-linux-x64-musl': 1.7.24 + '@swc/core-win32-arm64-msvc': 1.7.24 + '@swc/core-win32-ia32-msvc': 1.7.24 + '@swc/core-win32-x64-msvc': 1.7.24 + '@swc/helpers': 0.5.13 + optional: true '@swc/counter@0.1.3': {} + '@swc/helpers@0.5.13': + dependencies: + tslib: 2.7.0 + optional: true + '@swc/helpers@0.5.5': dependencies: '@swc/counter': 0.1.3 tslib: 2.6.3 + '@swc/types@0.1.12': + dependencies: + '@swc/counter': 0.1.3 + optional: true + '@swc/types@0.1.9': dependencies: '@swc/counter': 0.1.3 @@ -20404,14 +21039,14 @@ snapshots: dependencies: defer-to-connect: 2.0.1 - '@tailwindcss/aspect-ratio@0.4.2(tailwindcss@3.4.4(ts-node@10.9.2(@types/node@20.14.10)(typescript@5.5.3)))': + '@tailwindcss/aspect-ratio@0.4.2(tailwindcss@3.4.4(ts-node@10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@22.5.4)(typescript@5.5.3)))': dependencies: - tailwindcss: 3.4.4(ts-node@10.9.2(@swc/core@1.6.13(@swc/helpers@0.5.5))(@types/node@20.14.10)(typescript@5.5.3)) + tailwindcss: 3.4.4(ts-node@10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@22.5.4)(typescript@5.5.3)) - '@tailwindcss/forms@0.5.7(tailwindcss@3.4.4(ts-node@10.9.2(@types/node@20.14.10)(typescript@5.5.3)))': + '@tailwindcss/forms@0.5.7(tailwindcss@3.4.4(ts-node@10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@22.5.4)(typescript@5.5.3)))': dependencies: mini-svg-data-uri: 1.4.4 - tailwindcss: 3.4.4(ts-node@10.9.2(@swc/core@1.6.13(@swc/helpers@0.5.5))(@types/node@20.14.10)(typescript@5.5.3)) + tailwindcss: 3.4.4(ts-node@10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@22.5.4)(typescript@5.5.3)) '@tanstack/eslint-plugin-query@5.50.1(eslint@8.45.0)(typescript@5.5.3)': dependencies: @@ -20466,7 +21101,7 @@ snapshots: lz-string: 1.5.0 pretty-format: 27.5.1 - 
'@testing-library/jest-dom@6.4.6(vitest@2.0.1(@types/node@20.14.10)(happy-dom@14.12.3)(jsdom@24.1.0)(sass@1.77.6)(terser@5.31.1))': + '@testing-library/jest-dom@6.4.6(vitest@2.0.1(@types/node@22.5.4)(happy-dom@15.7.3)(jsdom@24.1.0)(sass@1.78.0)(terser@5.32.0))': dependencies: '@adobe/css-tools': 4.4.0 '@babel/runtime': 7.24.7 @@ -20477,7 +21112,7 @@ snapshots: lodash: 4.17.21 redent: 3.0.0 optionalDependencies: - vitest: 2.0.1(@types/node@20.14.10)(happy-dom@14.12.3)(jsdom@24.1.0)(sass@1.77.6)(terser@5.31.1) + vitest: 2.0.1(@types/node@22.5.4)(happy-dom@15.7.3)(jsdom@24.1.0)(sass@1.78.0)(terser@5.32.0) '@testing-library/react@16.0.0(@testing-library/dom@10.3.1)(@types/react-dom@18.3.0)(@types/react@18.3.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': dependencies: @@ -20582,11 +21217,11 @@ snapshots: '@types/doctrine@0.0.9': {} - '@types/dotenv-webpack@7.0.7(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1))': + '@types/dotenv-webpack@7.0.7(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4(webpack@5.92.1))': dependencies: '@types/node': 20.14.10 tapable: 2.2.1 - webpack: 5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)) + webpack: 5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4) transitivePeerDependencies: - '@swc/core' - esbuild @@ -20609,6 +21244,12 @@ snapshots: '@types/estree': 1.0.5 '@types/json-schema': 7.0.15 + '@types/eslint@9.6.1': + dependencies: + '@types/estree': 1.0.5 + '@types/json-schema': 7.0.15 + optional: true + '@types/estree-jsx@1.0.5': dependencies: '@types/estree': 1.0.5 @@ -20740,6 +21381,10 @@ snapshots: dependencies: undici-types: 5.26.5 + '@types/node@22.5.4': + dependencies: + undici-types: 6.19.8 + '@types/normalize-package-data@2.4.1': {} '@types/parse-json@4.0.2': {} @@ -20806,11 +21451,11 @@ snapshots: '@types/webextension-polyfill@0.10.7': {} - '@types/webpack@5.28.5(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1))': + '@types/webpack@5.28.5(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4(webpack@5.92.1))': dependencies: '@types/node': 20.14.10 tapable: 2.2.1 - webpack: 5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)) + webpack: 5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4) transitivePeerDependencies: - '@swc/core' - esbuild @@ -20846,7 +21491,7 @@ snapshots: '@typescript-eslint/scope-manager': 5.62.0 '@typescript-eslint/type-utils': 5.62.0(eslint@8.45.0)(typescript@5.5.3) '@typescript-eslint/utils': 5.62.0(eslint@8.45.0)(typescript@5.5.3) - debug: 4.3.5(supports-color@8.1.1) + debug: 4.3.7 eslint: 8.45.0 graphemer: 1.4.0 ignore: 5.3.1 @@ -20910,7 +21555,7 @@ snapshots: '@typescript-eslint/scope-manager': 5.62.0 '@typescript-eslint/types': 5.62.0 '@typescript-eslint/typescript-estree': 5.62.0(typescript@5.5.3) - debug: 4.3.5(supports-color@8.1.1) + debug: 4.3.7 eslint: 8.45.0 optionalDependencies: typescript: 5.5.3 @@ -20972,7 +21617,7 @@ snapshots: dependencies: '@typescript-eslint/typescript-estree': 5.62.0(typescript@5.5.3) '@typescript-eslint/utils': 5.62.0(eslint@8.45.0)(typescript@5.5.3) - debug: 4.3.5(supports-color@8.1.1) + debug: 4.3.7 eslint: 8.45.0 tsutils: 3.21.0(typescript@5.5.3) optionalDependencies: @@ -20984,7 +21629,7 @@ snapshots: dependencies: '@typescript-eslint/typescript-estree': 6.1.0(typescript@5.5.3) '@typescript-eslint/utils': 6.1.0(eslint@8.45.0)(typescript@5.5.3) - debug: 4.3.5(supports-color@8.1.1) + debug: 4.3.4 eslint: 8.45.0 ts-api-utils: 1.3.0(typescript@5.5.3) 
optionalDependencies: @@ -20996,7 +21641,7 @@ snapshots: dependencies: '@typescript-eslint/typescript-estree': 7.16.0(typescript@5.5.3) '@typescript-eslint/utils': 7.16.0(eslint@9.6.0)(typescript@5.5.3) - debug: 4.3.5(supports-color@8.1.1) + debug: 4.3.7 eslint: 9.6.0 ts-api-utils: 1.3.0(typescript@5.5.3) optionalDependencies: @@ -21018,7 +21663,7 @@ snapshots: dependencies: '@typescript-eslint/types': 5.33.1 '@typescript-eslint/visitor-keys': 5.33.1 - debug: 4.3.5(supports-color@8.1.1) + debug: 4.3.7 globby: 11.1.0 is-glob: 4.0.3 semver: 7.6.2 @@ -21032,7 +21677,7 @@ snapshots: dependencies: '@typescript-eslint/types': 5.62.0 '@typescript-eslint/visitor-keys': 5.62.0 - debug: 4.3.5(supports-color@8.1.1) + debug: 4.3.7 globby: 11.1.0 is-glob: 4.0.3 semver: 7.6.2 @@ -21046,7 +21691,7 @@ snapshots: dependencies: '@typescript-eslint/types': 6.1.0 '@typescript-eslint/visitor-keys': 6.1.0 - debug: 4.3.5(supports-color@8.1.1) + debug: 4.3.4 globby: 11.1.0 is-glob: 4.0.3 semver: 7.6.2 @@ -21075,7 +21720,7 @@ snapshots: dependencies: '@typescript-eslint/types': 8.0.0-alpha.30 '@typescript-eslint/visitor-keys': 8.0.0-alpha.30 - debug: 4.3.5(supports-color@8.1.1) + debug: 4.3.7 globby: 11.1.0 is-glob: 4.0.3 minimatch: 9.0.5 @@ -21203,9 +21848,9 @@ snapshots: dependencies: '@vanilla-extract/private': 1.0.5 - '@vanilla-extract/esbuild-plugin@2.3.8(@types/node@20.14.10)(babel-plugin-macros@3.1.0)(esbuild@0.23.0)(sass@1.77.6)(terser@5.31.1)': + '@vanilla-extract/esbuild-plugin@2.3.8(@types/node@20.14.10)(babel-plugin-macros@3.1.0)(esbuild@0.23.0)(sass@1.78.0)(terser@5.32.0)': dependencies: - '@vanilla-extract/integration': 7.1.7(@types/node@20.14.10)(babel-plugin-macros@3.1.0)(sass@1.77.6)(terser@5.31.1) + '@vanilla-extract/integration': 7.1.7(@types/node@20.14.10)(babel-plugin-macros@3.1.0)(sass@1.78.0)(terser@5.32.0) optionalDependencies: esbuild: 0.23.0 transitivePeerDependencies: @@ -21219,7 +21864,48 @@ snapshots: - supports-color - terser - '@vanilla-extract/integration@7.1.7(@types/node@20.14.10)(babel-plugin-macros@3.1.0)(sass@1.77.6)(terser@5.31.1)': + '@vanilla-extract/esbuild-plugin@2.3.8(@types/node@22.5.4)(babel-plugin-macros@3.1.0)(esbuild@0.23.1)(sass@1.78.0)(terser@5.32.0)': + dependencies: + '@vanilla-extract/integration': 7.1.7(@types/node@22.5.4)(babel-plugin-macros@3.1.0)(sass@1.78.0)(terser@5.32.0) + optionalDependencies: + esbuild: 0.23.1 + transitivePeerDependencies: + - '@types/node' + - babel-plugin-macros + - less + - lightningcss + - sass + - stylus + - sugarss + - supports-color + - terser + + '@vanilla-extract/integration@7.1.7(@types/node@20.14.10)(babel-plugin-macros@3.1.0)(sass@1.78.0)(terser@5.32.0)': + dependencies: + '@babel/core': 7.24.7 + '@babel/plugin-syntax-typescript': 7.24.7(@babel/core@7.24.7) + '@vanilla-extract/babel-plugin-debug-ids': 1.0.6 + '@vanilla-extract/css': 1.15.3(babel-plugin-macros@3.1.0) + dedent: 1.5.3(babel-plugin-macros@3.1.0) + esbuild: 0.21.5 + eval: 0.1.8 + find-up: 5.0.0 + javascript-stringify: 2.1.0 + mlly: 1.7.1 + vite: 5.3.3(@types/node@20.14.10)(sass@1.78.0)(terser@5.32.0) + vite-node: 1.6.0(@types/node@20.14.10)(sass@1.78.0)(terser@5.32.0) + transitivePeerDependencies: + - '@types/node' + - babel-plugin-macros + - less + - lightningcss + - sass + - stylus + - sugarss + - supports-color + - terser + + '@vanilla-extract/integration@7.1.7(@types/node@22.5.4)(babel-plugin-macros@3.1.0)(sass@1.78.0)(terser@5.32.0)': dependencies: '@babel/core': 7.24.7 '@babel/plugin-syntax-typescript': 7.24.7(@babel/core@7.24.7) @@ -21231,8 +21917,8 @@ 
snapshots: find-up: 5.0.0 javascript-stringify: 2.1.0 mlly: 1.7.1 - vite: 5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1) - vite-node: 1.6.0(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1) + vite: 5.3.3(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0) + vite-node: 1.6.0(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0) transitivePeerDependencies: - '@types/node' - babel-plugin-macros @@ -21250,10 +21936,10 @@ snapshots: dependencies: '@vanilla-extract/css': 1.15.3(babel-plugin-macros@3.1.0) - '@vanilla-extract/vite-plugin@4.0.13(@types/node@20.14.10)(babel-plugin-macros@3.1.0)(sass@1.77.6)(terser@5.31.1)(vite@5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1))': + '@vanilla-extract/vite-plugin@4.0.13(@types/node@22.5.4)(babel-plugin-macros@3.1.0)(sass@1.78.0)(terser@5.32.0)(vite@5.3.3(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0))': dependencies: - '@vanilla-extract/integration': 7.1.7(@types/node@20.14.10)(babel-plugin-macros@3.1.0)(sass@1.77.6)(terser@5.31.1) - vite: 5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1) + '@vanilla-extract/integration': 7.1.7(@types/node@22.5.4)(babel-plugin-macros@3.1.0)(sass@1.78.0)(terser@5.32.0) + vite: 5.3.3(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0) transitivePeerDependencies: - '@types/node' - babel-plugin-macros @@ -21265,21 +21951,39 @@ snapshots: - supports-color - terser - '@vitejs/plugin-react-swc@3.7.0(@swc/helpers@0.5.5)(vite@5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1))': + '@vitejs/plugin-react-swc@3.7.0(@swc/helpers@0.5.13)(vite@5.3.3(@types/node@20.14.10)(sass@1.78.0)(terser@5.32.0))': + dependencies: + '@swc/core': 1.6.13(@swc/helpers@0.5.13) + vite: 5.3.3(@types/node@20.14.10)(sass@1.78.0)(terser@5.32.0) + transitivePeerDependencies: + - '@swc/helpers' + + '@vitejs/plugin-react-swc@3.7.0(@swc/helpers@0.5.13)(vite@5.3.3(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0))': dependencies: - '@swc/core': 1.6.13(@swc/helpers@0.5.5) - vite: 5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1) + '@swc/core': 1.6.13(@swc/helpers@0.5.13) + vite: 5.3.3(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0) transitivePeerDependencies: - '@swc/helpers' - '@vitejs/plugin-react@4.3.1(vite@5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1))': + '@vitejs/plugin-react@4.3.1(vite@5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.32.0))': + dependencies: + '@babel/core': 7.24.7 + '@babel/plugin-transform-react-jsx-self': 7.24.7(@babel/core@7.24.7) + '@babel/plugin-transform-react-jsx-source': 7.24.7(@babel/core@7.24.7) + '@types/babel__core': 7.20.5 + react-refresh: 0.14.2 + vite: 5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.32.0) + transitivePeerDependencies: + - supports-color + + '@vitejs/plugin-react@4.3.1(vite@5.3.3(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0))': dependencies: '@babel/core': 7.24.7 '@babel/plugin-transform-react-jsx-self': 7.24.7(@babel/core@7.24.7) '@babel/plugin-transform-react-jsx-source': 7.24.7(@babel/core@7.24.7) '@types/babel__core': 7.20.5 react-refresh: 0.14.2 - vite: 5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1) + vite: 5.3.3(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0) transitivePeerDependencies: - supports-color @@ -21442,19 +22146,19 @@ snapshots: '@webassemblyjs/ast': 1.12.1 '@xtuc/long': 4.2.2 - '@webpack-cli/configtest@2.1.1(webpack-cli@5.1.4(webpack@5.92.1))(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)))': + 
'@webpack-cli/configtest@2.1.1(webpack-cli@5.1.4(webpack@5.92.1))(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4))': dependencies: - webpack: 5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)) + webpack: 5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4) webpack-cli: 5.1.4(webpack@5.92.1) - '@webpack-cli/info@2.0.2(webpack-cli@5.1.4(webpack@5.92.1))(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)))': + '@webpack-cli/info@2.0.2(webpack-cli@5.1.4(webpack@5.92.1))(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4))': dependencies: - webpack: 5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)) + webpack: 5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4) webpack-cli: 5.1.4(webpack@5.92.1) - '@webpack-cli/serve@2.0.5(webpack-cli@5.1.4(webpack@5.92.1))(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)))': + '@webpack-cli/serve@2.0.5(webpack-cli@5.1.4(webpack@5.92.1))(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4))': dependencies: - webpack: 5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)) + webpack: 5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4) webpack-cli: 5.1.4(webpack@5.92.1) '@whatwg-node/events@0.0.3': {} @@ -21540,12 +22244,12 @@ snapshots: acorn@8.12.1: {} - addons-linter@5.32.0(body-parser@1.20.2)(node-fetch@3.3.1): + addons-linter@5.32.0(node-fetch@3.3.1): dependencies: '@fluent/syntax': 0.19.0 '@mdn/browser-compat-data': 5.2.42 addons-moz-compare: 1.3.0 - addons-scanner-utils: 8.5.0(body-parser@1.20.2)(node-fetch@3.3.1) + addons-scanner-utils: 8.5.0(node-fetch@3.3.1) ajv: 8.12.0 chalk: 4.1.2 cheerio: 1.0.0-rc.12 @@ -21583,7 +22287,7 @@ snapshots: addons-moz-compare@1.3.0: {} - addons-scanner-utils@8.5.0(body-parser@1.20.2)(node-fetch@3.3.1): + addons-scanner-utils@8.5.0(node-fetch@3.3.1): dependencies: '@types/yauzl': 2.10.0 common-tags: 1.8.2 @@ -21592,7 +22296,6 @@ snapshots: upath: 2.0.1 yauzl: 2.10.0 optionalDependencies: - body-parser: 1.20.2 node-fetch: 3.3.1 address@1.2.2: {} @@ -21609,13 +22312,13 @@ snapshots: agent-base@7.1.0: dependencies: - debug: 4.3.5(supports-color@8.1.1) + debug: 4.3.7 transitivePeerDependencies: - supports-color agent-base@7.1.1: dependencies: - debug: 4.3.5(supports-color@8.1.1) + debug: 4.3.7 transitivePeerDependencies: - supports-color @@ -21921,12 +22624,12 @@ snapshots: dependencies: '@babel/core': 7.24.7 - babel-loader@9.1.3(@babel/core@7.24.7)(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1))): + babel-loader@9.1.3(@babel/core@7.24.7)(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4)): dependencies: '@babel/core': 7.24.7 find-cache-dir: 4.0.0 schema-utils: 4.2.0 - webpack: 5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)) + webpack: 5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4) babel-plugin-add-react-displayname@0.0.5: {} @@ -22099,7 +22802,7 @@ snapshots: bluebird@3.7.2: {} - body-parser@1.20.2: + body-parser@1.20.3: dependencies: bytes: 3.1.2 content-type: 1.0.5 @@ -22109,7 +22812,7 @@ snapshots: http-errors: 2.0.0 iconv-lite: 0.4.24 on-finished: 2.4.1 - qs: 6.11.0 + qs: 6.13.0 raw-body: 2.5.2 type-is: 1.6.18 unpipe: 1.0.0 @@ -22161,6 +22864,13 @@ snapshots: node-releases: 2.0.14 update-browserslist-db: 1.1.0(browserslist@4.23.1) + browserslist@4.23.3: + 
dependencies: + caniuse-lite: 1.0.30001660 + electron-to-chromium: 1.5.18 + node-releases: 2.0.18 + update-browserslist-db: 1.1.0(browserslist@4.23.3) + bs58@6.0.0: dependencies: base-x: 5.0.0 @@ -22254,6 +22964,8 @@ snapshots: caniuse-lite@1.0.30001640: {} + caniuse-lite@1.0.30001660: {} + capital-case@1.0.4: dependencies: no-case: 3.0.4 @@ -22677,7 +23389,7 @@ snapshots: cookie@0.6.0: {} - copy-webpack-plugin@11.0.0(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1))): + copy-webpack-plugin@11.0.0(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4)): dependencies: fast-glob: 3.3.2 glob-parent: 6.0.2 @@ -22685,7 +23397,7 @@ snapshots: normalize-path: 3.0.0 schema-utils: 4.2.0 serialize-javascript: 6.0.2 - webpack: 5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)) + webpack: 5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4) core-js-compat@3.37.1: dependencies: @@ -22778,7 +23490,7 @@ snapshots: postcss-selector-parser: 6.1.0 postcss-value-parser: 4.2.0 - css-loader@6.11.0(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1))): + css-loader@6.11.0(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4)): dependencies: icss-utils: 5.1.0(postcss@8.4.39) postcss: 8.4.39 @@ -22789,7 +23501,7 @@ snapshots: postcss-value-parser: 4.2.0 semver: 7.6.2 optionalDependencies: - webpack: 5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)) + webpack: 5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4) css-prefers-color-scheme@9.0.1(postcss@8.4.39): dependencies: @@ -22837,6 +23549,11 @@ snapshots: dependencies: rrweb-cssom: 0.6.0 + cssstyle@4.1.0: + dependencies: + rrweb-cssom: 0.7.1 + optional: true + csstype@3.1.3: {} cytoscape-cose-bilkent@4.1.0(cytoscape@3.29.2): @@ -23085,6 +23802,10 @@ snapshots: optionalDependencies: supports-color: 8.1.1 + debug@4.3.7: + dependencies: + ms: 2.1.3 + decamelize@1.2.0: {} decamelize@6.0.0: {} @@ -23202,7 +23923,7 @@ snapshots: detect-port@1.5.1: dependencies: address: 1.2.2 - debug: 4.3.5(supports-color@8.1.1) + debug: 4.3.7 transitivePeerDependencies: - supports-color @@ -23309,10 +24030,10 @@ snapshots: dotenv-expand@10.0.0: {} - dotenv-webpack@8.1.0(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1))): + dotenv-webpack@8.1.0(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4)): dependencies: dotenv-defaults: 2.0.2 - webpack: 5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)) + webpack: 5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4) dotenv@16.4.1: {} @@ -23351,6 +24072,8 @@ snapshots: electron-to-chromium@1.4.819: {} + electron-to-chromium@1.5.18: {} + elkjs@0.9.3: {} emoji-regex@8.0.0: {} @@ -23361,6 +24084,8 @@ snapshots: encodeurl@1.0.2: {} + encodeurl@2.0.0: {} + end-of-stream@1.4.4: dependencies: once: 1.4.0 @@ -23413,7 +24138,7 @@ snapshots: gopd: 1.0.1 has: 1.0.3 has-property-descriptors: 1.0.1 - has-proto: 1.0.1 + has-proto: 1.0.3 has-symbols: 1.0.3 internal-slot: 1.0.5 is-array-buffer: 3.0.2 @@ -23531,14 +24256,14 @@ snapshots: esbuild-register@3.4.2(esbuild@0.18.20): dependencies: - debug: 4.3.5(supports-color@8.1.1) + debug: 4.3.7 esbuild: 0.18.20 transitivePeerDependencies: - supports-color esbuild-register@3.5.0(esbuild@0.18.20): dependencies: - debug: 4.3.5(supports-color@8.1.1) + debug: 4.3.7 esbuild: 0.18.20 transitivePeerDependencies: - supports-color @@ 
-23621,6 +24346,34 @@ snapshots: '@esbuild/win32-ia32': 0.23.0 '@esbuild/win32-x64': 0.23.0 + esbuild@0.23.1: + optionalDependencies: + '@esbuild/aix-ppc64': 0.23.1 + '@esbuild/android-arm': 0.23.1 + '@esbuild/android-arm64': 0.23.1 + '@esbuild/android-x64': 0.23.1 + '@esbuild/darwin-arm64': 0.23.1 + '@esbuild/darwin-x64': 0.23.1 + '@esbuild/freebsd-arm64': 0.23.1 + '@esbuild/freebsd-x64': 0.23.1 + '@esbuild/linux-arm': 0.23.1 + '@esbuild/linux-arm64': 0.23.1 + '@esbuild/linux-ia32': 0.23.1 + '@esbuild/linux-loong64': 0.23.1 + '@esbuild/linux-mips64el': 0.23.1 + '@esbuild/linux-ppc64': 0.23.1 + '@esbuild/linux-riscv64': 0.23.1 + '@esbuild/linux-s390x': 0.23.1 + '@esbuild/linux-x64': 0.23.1 + '@esbuild/netbsd-x64': 0.23.1 + '@esbuild/openbsd-arm64': 0.23.1 + '@esbuild/openbsd-x64': 0.23.1 + '@esbuild/sunos-x64': 0.23.1 + '@esbuild/win32-arm64': 0.23.1 + '@esbuild/win32-ia32': 0.23.1 + '@esbuild/win32-x64': 0.23.1 + optional: true + escalade@3.1.1: {} escalade@3.1.2: {} @@ -23647,7 +24400,7 @@ snapshots: dependencies: eslint: 8.45.0 - eslint-config-react-app@7.0.1(@babel/plugin-syntax-flow@7.24.7(@babel/core@7.24.7))(@babel/plugin-transform-react-jsx@7.24.7(@babel/core@7.24.7))(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@6.1.0(eslint@8.45.0)(typescript@5.5.3))(eslint-plugin-import@2.29.1)(eslint@8.45.0))(eslint@8.45.0)(typescript@5.5.3): + eslint-config-react-app@7.0.1(@babel/plugin-syntax-flow@7.24.7(@babel/core@7.25.2))(@babel/plugin-transform-react-jsx@7.25.2(@babel/core@7.25.2))(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@6.1.0(eslint@8.45.0)(typescript@5.5.3))(eslint-plugin-import@2.29.1)(eslint@8.45.0))(eslint@8.45.0)(typescript@5.5.3): dependencies: '@babel/core': 7.24.7 '@babel/eslint-parser': 7.18.9(@babel/core@7.24.7)(eslint@8.45.0) @@ -23657,7 +24410,7 @@ snapshots: babel-preset-react-app: 10.0.1 confusing-browser-globals: 1.0.11 eslint: 8.45.0 - eslint-plugin-flowtype: 8.0.3(@babel/plugin-syntax-flow@7.24.7(@babel/core@7.24.7))(@babel/plugin-transform-react-jsx@7.24.7(@babel/core@7.24.7))(eslint@8.45.0) + eslint-plugin-flowtype: 8.0.3(@babel/plugin-syntax-flow@7.24.7(@babel/core@7.25.2))(@babel/plugin-transform-react-jsx@7.25.2(@babel/core@7.25.2))(eslint@8.45.0) eslint-plugin-import: 2.29.1(@typescript-eslint/parser@5.62.0(eslint@8.45.0)(typescript@5.5.3))(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@6.1.0(eslint@8.45.0)(typescript@5.5.3))(eslint-plugin-import@2.29.1)(eslint@8.45.0))(eslint@8.45.0) eslint-plugin-jest: 25.7.0(@typescript-eslint/eslint-plugin@5.62.0(@typescript-eslint/parser@5.62.0(eslint@8.45.0)(typescript@5.5.3))(eslint@8.45.0)(typescript@5.5.3))(eslint@8.45.0)(typescript@5.5.3) eslint-plugin-jsx-a11y: 6.6.1(eslint@8.45.0) @@ -23721,10 +24474,10 @@ snapshots: transitivePeerDependencies: - supports-color - eslint-plugin-flowtype@8.0.3(@babel/plugin-syntax-flow@7.24.7(@babel/core@7.24.7))(@babel/plugin-transform-react-jsx@7.24.7(@babel/core@7.24.7))(eslint@8.45.0): + eslint-plugin-flowtype@8.0.3(@babel/plugin-syntax-flow@7.24.7(@babel/core@7.25.2))(@babel/plugin-transform-react-jsx@7.25.2(@babel/core@7.25.2))(eslint@8.45.0): dependencies: - '@babel/plugin-syntax-flow': 7.24.7(@babel/core@7.24.7) - '@babel/plugin-transform-react-jsx': 7.24.7(@babel/core@7.24.7) + '@babel/plugin-syntax-flow': 7.24.7(@babel/core@7.25.2) + '@babel/plugin-transform-react-jsx': 7.25.2(@babel/core@7.25.2) eslint: 8.45.0 lodash: 4.17.21 string-natural-compare: 3.0.1 @@ -23818,14 +24571,14 @@ snapshots: dependencies: eslint: 
8.36.0 - eslint-plugin-prettier@5.1.3(@types/eslint@8.56.10)(eslint-config-prettier@8.8.0(eslint@8.45.0))(eslint@8.45.0)(prettier@3.3.2): + eslint-plugin-prettier@5.1.3(@types/eslint@9.6.1)(eslint-config-prettier@8.8.0(eslint@8.45.0))(eslint@8.45.0)(prettier@3.3.2): dependencies: eslint: 8.45.0 prettier: 3.3.2 prettier-linter-helpers: 1.0.0 synckit: 0.8.8 optionalDependencies: - '@types/eslint': 8.56.10 + '@types/eslint': 9.6.1 eslint-config-prettier: 8.8.0(eslint@8.45.0) eslint-plugin-react-hooks@4.6.2(eslint@8.45.0): @@ -23917,7 +24670,7 @@ snapshots: eslint-visitor-keys@4.0.0: {} - eslint-webpack-plugin@4.2.0(eslint@8.45.0)(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1))): + eslint-webpack-plugin@4.2.0(eslint@8.45.0)(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4)): dependencies: '@types/eslint': 8.56.10 eslint: 8.45.0 @@ -23925,7 +24678,7 @@ snapshots: micromatch: 4.0.7 normalize-path: 3.0.0 schema-utils: 4.2.0 - webpack: 5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)) + webpack: 5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4) eslint@8.36.0: dependencies: @@ -23939,7 +24692,7 @@ snapshots: ajv: 6.12.6 chalk: 4.1.2 cross-spawn: 7.0.3 - debug: 4.3.5(supports-color@8.1.1) + debug: 4.3.7 doctrine: 3.0.0 escape-string-regexp: 4.0.0 eslint-scope: 7.2.2 @@ -24177,34 +24930,34 @@ snapshots: signal-exit: 4.1.0 strip-final-newline: 3.0.0 - express@4.19.2: + express@4.20.0: dependencies: accepts: 1.3.8 array-flatten: 1.1.1 - body-parser: 1.20.2 + body-parser: 1.20.3 content-disposition: 0.5.4 content-type: 1.0.5 cookie: 0.6.0 cookie-signature: 1.0.6 debug: 2.6.9 depd: 2.0.0 - encodeurl: 1.0.2 + encodeurl: 2.0.0 escape-html: 1.0.3 etag: 1.8.1 finalhandler: 1.2.0 fresh: 0.5.2 http-errors: 2.0.0 - merge-descriptors: 1.0.1 + merge-descriptors: 1.0.3 methods: 1.1.2 on-finished: 2.4.1 parseurl: 1.3.3 - path-to-regexp: 0.1.7 + path-to-regexp: 0.1.10 proxy-addr: 2.0.7 qs: 6.11.0 range-parser: 1.2.1 safe-buffer: 5.2.1 - send: 0.18.0 - serve-static: 1.15.0 + send: 0.19.0 + serve-static: 1.16.0 setprototypeof: 1.2.0 statuses: 2.0.1 type-is: 1.6.18 @@ -24435,7 +25188,7 @@ snapshots: forever-agent@0.6.1: {} - fork-ts-checker-webpack-plugin@8.0.0(typescript@5.5.3)(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1))): + fork-ts-checker-webpack-plugin@8.0.0(typescript@5.5.3)(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4)): dependencies: '@babel/code-frame': 7.24.7 chalk: 4.1.2 @@ -24450,7 +25203,7 @@ snapshots: semver: 7.6.2 tapable: 2.2.1 typescript: 5.5.3 - webpack: 5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)) + webpack: 5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4) form-data-encoder@2.1.4: {} @@ -24616,9 +25369,9 @@ snapshots: dependencies: es-errors: 1.3.0 function-bind: 1.1.2 - has-proto: 1.0.1 + has-proto: 1.0.3 has-symbols: 1.0.3 - hasown: 2.0.0 + hasown: 2.0.2 get-nonce@1.0.1: {} @@ -24942,7 +25695,14 @@ snapshots: webidl-conversions: 7.0.0 whatwg-mimetype: 3.0.0 - har-schema@2.0.0: {} + happy-dom@15.7.3: + dependencies: + entities: 4.5.0 + webidl-conversions: 7.0.0 + whatwg-mimetype: 3.0.0 + optional: true + + har-schema@2.0.0: {} har-validator@5.1.5: dependencies: @@ -24965,8 +25725,6 @@ snapshots: dependencies: es-define-property: 1.0.0 - has-proto@1.0.1: {} - has-proto@1.0.3: {} has-symbols@1.0.3: {} @@ -24993,10 +25751,6 @@ snapshots: sort-keys: 5.0.0 type-fest: 1.4.0 - 
hasown@2.0.0: - dependencies: - function-bind: 1.1.2 - hasown@2.0.2: dependencies: function-bind: 1.1.2 @@ -25140,7 +25894,7 @@ snapshots: html-void-elements@3.0.0: {} - html-webpack-plugin@5.6.0(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1))): + html-webpack-plugin@5.6.0(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4)): dependencies: '@types/html-minifier-terser': 6.1.0 html-minifier-terser: 6.1.0 @@ -25148,7 +25902,7 @@ snapshots: pretty-error: 4.0.0 tapable: 2.2.1 optionalDependencies: - webpack: 5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)) + webpack: 5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4) htmlparser2@6.1.0: dependencies: @@ -25188,14 +25942,14 @@ snapshots: http-proxy-agent@7.0.0: dependencies: agent-base: 7.1.0 - debug: 4.3.5(supports-color@8.1.1) + debug: 4.3.7 transitivePeerDependencies: - supports-color http-proxy-agent@7.0.2: dependencies: agent-base: 7.1.1 - debug: 4.3.5(supports-color@8.1.1) + debug: 4.3.7 transitivePeerDependencies: - supports-color @@ -25213,7 +25967,7 @@ snapshots: https-proxy-agent@4.0.0: dependencies: agent-base: 5.1.1 - debug: 4.3.5(supports-color@8.1.1) + debug: 4.3.7 transitivePeerDependencies: - supports-color @@ -25227,14 +25981,14 @@ snapshots: https-proxy-agent@7.0.2: dependencies: agent-base: 7.1.0 - debug: 4.3.5(supports-color@8.1.1) + debug: 4.3.7 transitivePeerDependencies: - supports-color https-proxy-agent@7.0.5: dependencies: agent-base: 7.1.1 - debug: 4.3.5(supports-color@8.1.1) + debug: 4.3.7 transitivePeerDependencies: - supports-color @@ -25278,12 +26032,18 @@ snapshots: immediate@3.0.6: {} + immer@10.1.1: + optional: true + immer@9.0.21: {} immutable@3.7.6: {} immutable@4.3.6: {} + immutable@4.3.7: + optional: true + import-fresh@3.3.0: dependencies: parent-module: 1.0.1 @@ -25786,6 +26546,35 @@ snapshots: - supports-color - utf-8-validate + jsdom@25.0.0: + dependencies: + cssstyle: 4.1.0 + data-urls: 5.0.0 + decimal.js: 10.4.3 + form-data: 4.0.0 + html-encoding-sniffer: 4.0.0 + http-proxy-agent: 7.0.2 + https-proxy-agent: 7.0.5 + is-potential-custom-element-name: 1.0.1 + nwsapi: 2.2.12 + parse5: 7.1.2 + rrweb-cssom: 0.7.1 + saxes: 6.0.0 + symbol-tree: 3.2.4 + tough-cookie: 4.1.4 + w3c-xmlserializer: 5.0.0 + webidl-conversions: 7.0.0 + whatwg-encoding: 3.1.1 + whatwg-mimetype: 4.0.0 + whatwg-url: 14.0.0 + ws: 8.18.0 + xml-name-validator: 5.0.0 + transitivePeerDependencies: + - bufferutil + - supports-color + - utf-8-validate + optional: true + jsesc@0.5.0: {} jsesc@2.5.2: {} @@ -26330,7 +27119,7 @@ snapshots: dependencies: map-or-similar: 1.5.0 - merge-descriptors@1.0.1: {} + merge-descriptors@1.0.3: {} merge-stream@2.0.0: {} @@ -26639,7 +27428,7 @@ snapshots: micromark@3.2.0: dependencies: '@types/debug': 4.1.12 - debug: 4.3.5(supports-color@8.1.1) + debug: 4.3.7 decode-named-character-reference: 1.0.2 micromark-core-commonmark: 1.1.0 micromark-factory-space: 1.1.0 @@ -26688,11 +27477,11 @@ snapshots: min-indent@1.0.1: {} - mini-css-extract-plugin@2.9.0(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1))): + mini-css-extract-plugin@2.9.0(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4)): dependencies: schema-utils: 4.2.0 tapable: 2.2.1 - webpack: 5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)) + webpack: 5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4) mini-svg-data-uri@1.4.4: {} @@ -26849,19 +27638,19 @@ 
snapshots: transitivePeerDependencies: - supports-color - next-seo@6.5.0(next@14.2.4(@babel/core@7.24.7)(@playwright/test@1.45.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.6))(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + next-seo@6.5.0(next@14.2.4(@babel/core@7.25.2)(@playwright/test@1.47.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.78.0))(react-dom@18.3.1(react@18.3.1))(react@18.3.1): dependencies: - next: 14.2.4(@babel/core@7.24.7)(@playwright/test@1.45.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.6) + next: 14.2.4(@babel/core@7.25.2)(@playwright/test@1.47.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.78.0) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - next-themes@0.2.1(next@14.2.4(@babel/core@7.24.7)(@playwright/test@1.45.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.6))(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + next-themes@0.2.1(next@14.2.4(@babel/core@7.25.2)(@playwright/test@1.47.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.78.0))(react-dom@18.3.1(react@18.3.1))(react@18.3.1): dependencies: - next: 14.2.4(@babel/core@7.24.7)(@playwright/test@1.45.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.6) + next: 14.2.4(@babel/core@7.25.2)(@playwright/test@1.47.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.78.0) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - next@14.2.4(@babel/core@7.24.7)(@playwright/test@1.45.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.6): + next@14.2.4(@babel/core@7.25.2)(@playwright/test@1.47.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.78.0): dependencies: '@next/env': 14.2.4 '@swc/helpers': 0.5.5 @@ -26871,7 +27660,7 @@ snapshots: postcss: 8.4.31 react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - styled-jsx: 5.1.1(@babel/core@7.24.7)(react@18.3.1) + styled-jsx: 5.1.1(@babel/core@7.25.2)(react@18.3.1) optionalDependencies: '@next/swc-darwin-arm64': 14.2.4 '@next/swc-darwin-x64': 14.2.4 @@ -26882,13 +27671,13 @@ snapshots: '@next/swc-win32-arm64-msvc': 14.2.4 '@next/swc-win32-ia32-msvc': 14.2.4 '@next/swc-win32-x64-msvc': 14.2.4 - '@playwright/test': 1.45.1 - sass: 1.77.6 + '@playwright/test': 1.47.0 + sass: 1.78.0 transitivePeerDependencies: - '@babel/core' - babel-plugin-macros - nextra-theme-docs@2.13.4(next@14.2.4(@babel/core@7.24.7)(@playwright/test@1.45.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.6))(nextra@2.13.4(next@14.2.4(@babel/core@7.24.7)(@playwright/test@1.45.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.6))(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + nextra-theme-docs@2.13.4(next@14.2.4(@babel/core@7.25.2)(@playwright/test@1.47.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.78.0))(nextra@2.13.4(next@14.2.4(@babel/core@7.25.2)(@playwright/test@1.47.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.78.0))(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1): dependencies: '@headlessui/react': 1.7.19(react-dom@18.3.1(react@18.3.1))(react@18.3.1) '@popperjs/core': 2.11.8 @@ -26899,16 +27688,16 @@ snapshots: git-url-parse: 13.1.1 intersection-observer: 0.12.2 match-sorter: 6.3.4 - next: 14.2.4(@babel/core@7.24.7)(@playwright/test@1.45.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.6) - next-seo: 6.5.0(next@14.2.4(@babel/core@7.24.7)(@playwright/test@1.45.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.6))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - next-themes: 
0.2.1(next@14.2.4(@babel/core@7.24.7)(@playwright/test@1.45.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.6))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - nextra: 2.13.4(next@14.2.4(@babel/core@7.24.7)(@playwright/test@1.45.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.6))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + next: 14.2.4(@babel/core@7.25.2)(@playwright/test@1.47.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.78.0) + next-seo: 6.5.0(next@14.2.4(@babel/core@7.25.2)(@playwright/test@1.47.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.78.0))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + next-themes: 0.2.1(next@14.2.4(@babel/core@7.25.2)(@playwright/test@1.47.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.78.0))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + nextra: 2.13.4(next@14.2.4(@babel/core@7.25.2)(@playwright/test@1.47.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.78.0))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) scroll-into-view-if-needed: 3.1.0 zod: 3.23.8 - nextra@2.13.4(next@14.2.4(@babel/core@7.24.7)(@playwright/test@1.45.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.6))(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + nextra@2.13.4(next@14.2.4(@babel/core@7.25.2)(@playwright/test@1.47.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.78.0))(react-dom@18.3.1(react@18.3.1))(react@18.3.1): dependencies: '@headlessui/react': 1.7.19(react-dom@18.3.1(react@18.3.1))(react@18.3.1) '@mdx-js/mdx': 2.3.0 @@ -26922,7 +27711,7 @@ snapshots: gray-matter: 4.0.3 katex: 0.16.10 lodash.get: 4.4.2 - next: 14.2.4(@babel/core@7.24.7)(@playwright/test@1.45.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.6) + next: 14.2.4(@babel/core@7.25.2)(@playwright/test@1.47.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.78.0) next-mdx-remote: 4.4.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1) p-limit: 3.1.0 react: 18.3.1 @@ -26988,6 +27777,8 @@ snapshots: node-releases@2.0.14: {} + node-releases@2.0.18: {} + non-layered-tidy-tree-layout@2.0.2: {} normalize-package-data@2.5.0: @@ -27038,6 +27829,9 @@ snapshots: nwsapi@2.2.10: {} + nwsapi@2.2.12: + optional: true + oauth-sign@0.9.0: {} object-assign@4.1.1: {} @@ -27385,7 +28179,7 @@ snapshots: lru-cache: 10.4.1 minipass: 7.1.2 - path-to-regexp@0.1.7: {} + path-to-regexp@0.1.10: {} path-to-regexp@6.2.2: {} @@ -27476,12 +28270,22 @@ snapshots: playwright-core@1.45.1: {} + playwright-core@1.47.0: + optional: true + playwright@1.45.1: dependencies: playwright-core: 1.45.1 optionalDependencies: fsevents: 2.3.2 + playwright@1.47.0: + dependencies: + playwright-core: 1.47.0 + optionalDependencies: + fsevents: 2.3.2 + optional: true + polished@4.3.1: dependencies: '@babel/runtime': 7.24.7 @@ -27641,21 +28445,29 @@ snapshots: '@csstools/utilities': 1.0.0(postcss@8.4.39) postcss: 8.4.39 - postcss-load-config@4.0.2(postcss@8.4.39)(ts-node@10.9.2(@swc/core@1.6.13(@swc/helpers@0.5.5))(@types/node@20.14.10)(typescript@5.5.3)): + postcss-load-config@4.0.2(postcss@8.4.39)(ts-node@10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@20.14.10)(typescript@5.5.3)): dependencies: lilconfig: 3.1.2 yaml: 2.4.5 optionalDependencies: postcss: 8.4.39 - ts-node: 10.9.2(@swc/core@1.6.13(@swc/helpers@0.5.5))(@types/node@20.14.10)(typescript@5.5.3) + ts-node: 10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@20.14.10)(typescript@5.5.3) - 
postcss-loader@7.3.3(postcss@8.4.39)(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1))): + postcss-load-config@4.0.2(postcss@8.4.39)(ts-node@10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@22.5.4)(typescript@5.5.3)): + dependencies: + lilconfig: 3.1.2 + yaml: 2.4.5 + optionalDependencies: + postcss: 8.4.39 + ts-node: 10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@22.5.4)(typescript@5.5.3) + + postcss-loader@7.3.3(postcss@8.4.39)(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4)): dependencies: cosmiconfig: 8.2.0 jiti: 1.19.1 postcss: 8.4.39 semver: 7.6.2 - webpack: 5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)) + webpack: 5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4) postcss-logical@7.0.1(postcss@8.4.39): dependencies: @@ -27938,7 +28750,7 @@ snapshots: puppeteer-core@2.1.1: dependencies: '@types/mime-types': 2.1.1 - debug: 4.3.5(supports-color@8.1.1) + debug: 4.3.7 extract-zip: 1.7.0 https-proxy-agent: 4.0.0 mime: 2.6.0 @@ -27966,6 +28778,10 @@ snapshots: dependencies: side-channel: 1.0.6 + qs@6.13.0: + dependencies: + side-channel: 1.0.6 + qs@6.5.3: {} querystringify@2.2.0: {} @@ -28081,7 +28897,7 @@ snapshots: react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - react-redux@8.1.1(@types/react-dom@18.3.0)(@types/react@18.3.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(redux@4.2.1): + react-redux@8.1.1(@types/react-dom@18.3.0)(@types/react@18.3.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(redux@5.0.1): dependencies: '@babel/runtime': 7.22.6 '@types/hoist-non-react-statics': 3.3.1 @@ -28094,7 +28910,7 @@ snapshots: '@types/react': 18.3.3 '@types/react-dom': 18.3.0 react-dom: 18.3.1(react@18.3.1) - redux: 4.2.1 + redux: 5.0.1 react-refresh@0.14.2: {} @@ -28274,6 +29090,9 @@ snapshots: dependencies: '@babel/runtime': 7.22.10 + redux@5.0.1: + optional: true + regenerate-unicode-properties@10.1.1: dependencies: regenerate: 1.4.2 @@ -28611,10 +29430,10 @@ snapshots: safer-buffer@2.1.2: {} - sass-loader@13.3.2(sass@1.77.6)(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1))): + sass-loader@13.3.2(sass@1.77.6)(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4)): dependencies: neo-async: 2.6.2 - webpack: 5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)) + webpack: 5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4) optionalDependencies: sass: 1.77.6 @@ -28624,6 +29443,13 @@ snapshots: immutable: 4.3.6 source-map-js: 1.2.0 + sass@1.78.0: + dependencies: + chokidar: 3.6.0 + immutable: 4.3.7 + source-map-js: 1.2.1 + optional: true + sax@1.2.4: {} saxes@6.0.0: @@ -28669,6 +29495,8 @@ snapshots: semver@7.6.2: {} + semver@7.6.3: {} + send@0.18.0: dependencies: debug: 2.6.9 @@ -28687,6 +29515,24 @@ snapshots: transitivePeerDependencies: - supports-color + send@0.19.0: + dependencies: + debug: 2.6.9 + depd: 2.0.0 + destroy: 1.2.0 + encodeurl: 1.0.2 + escape-html: 1.0.3 + etag: 1.8.1 + fresh: 0.5.2 + http-errors: 2.0.0 + mime: 1.6.0 + ms: 2.1.3 + on-finished: 2.4.1 + range-parser: 1.2.1 + statuses: 2.0.1 + transitivePeerDependencies: + - supports-color + sentence-case@3.0.4: dependencies: no-case: 3.0.4 @@ -28705,7 +29551,7 @@ snapshots: parseurl: 1.3.3 safe-buffer: 5.1.1 - serve-static@1.15.0: + serve-static@1.16.0: dependencies: encodeurl: 1.0.2 escape-html: 1.0.3 @@ -28796,12 +29642,6 @@ snapshots: '@shikijs/core': 1.10.3 '@types/hast': 3.0.4 - side-channel@1.0.4: - 
dependencies: - call-bind: 1.0.6 - get-intrinsic: 1.2.4 - object-inspect: 1.12.3 - side-channel@1.0.6: dependencies: call-bind: 1.0.7 @@ -28879,6 +29719,9 @@ snapshots: source-map-js@1.2.0: {} + source-map-js@1.2.1: + optional: true + source-map-support@0.5.21: dependencies: buffer-from: 1.1.2 @@ -29015,7 +29858,7 @@ snapshots: has-symbols: 1.0.3 internal-slot: 1.0.5 regexp.prototype.flags: 1.4.3 - side-channel: 1.0.4 + side-channel: 1.0.6 string.prototype.trim@1.2.7: dependencies: @@ -29118,20 +29961,20 @@ snapshots: strip-json-comments@5.0.0: {} - style-loader@3.3.4(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1))): + style-loader@3.3.4(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4)): dependencies: - webpack: 5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)) + webpack: 5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4) style-to-object@0.4.4: dependencies: inline-style-parser: 0.1.1 - styled-jsx@5.1.1(@babel/core@7.24.7)(react@18.3.1): + styled-jsx@5.1.1(@babel/core@7.25.2)(react@18.3.1): dependencies: client-only: 0.0.1 react: 18.3.1 optionalDependencies: - '@babel/core': 7.24.7 + '@babel/core': 7.25.2 stylis@4.3.2: {} @@ -29189,11 +30032,11 @@ snapshots: dependencies: tslib: 2.6.3 - swc-loader@0.2.6(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1))): + swc-loader@0.2.6(@swc/core@1.6.13(@swc/helpers@0.5.13))(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4)): dependencies: - '@swc/core': 1.6.13(@swc/helpers@0.5.5) + '@swc/core': 1.6.13(@swc/helpers@0.5.13) '@swc/counter': 0.1.3 - webpack: 5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)) + webpack: 5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4) symbol-tree@3.2.4: {} @@ -29208,11 +30051,42 @@ snapshots: tailwind-merge@2.4.0: {} - tailwindcss-animate@1.0.7(tailwindcss@3.4.4(ts-node@10.9.2(@types/node@20.14.10)(typescript@5.5.3))): + tailwindcss-animate@1.0.7(tailwindcss@3.4.4(ts-node@10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@20.14.10)(typescript@5.5.3))): + dependencies: + tailwindcss: 3.4.4(ts-node@10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@20.14.10)(typescript@5.5.3)) + + tailwindcss-animate@1.0.7(tailwindcss@3.4.4(ts-node@10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@22.5.4)(typescript@5.5.3))): dependencies: - tailwindcss: 3.4.4(ts-node@10.9.2(@swc/core@1.6.13(@swc/helpers@0.5.5))(@types/node@20.14.10)(typescript@5.5.3)) + tailwindcss: 3.4.4(ts-node@10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@22.5.4)(typescript@5.5.3)) + + tailwindcss@3.4.4(ts-node@10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@20.14.10)(typescript@5.5.3)): + dependencies: + '@alloc/quick-lru': 5.2.0 + arg: 5.0.2 + chokidar: 3.6.0 + didyoumean: 1.2.2 + dlv: 1.1.3 + fast-glob: 3.3.2 + glob-parent: 6.0.2 + is-glob: 4.0.3 + jiti: 1.21.6 + lilconfig: 2.1.0 + micromatch: 4.0.7 + normalize-path: 3.0.0 + object-hash: 3.0.0 + picocolors: 1.0.1 + postcss: 8.4.39 + postcss-import: 15.1.0(postcss@8.4.39) + postcss-js: 4.0.1(postcss@8.4.39) + postcss-load-config: 4.0.2(postcss@8.4.39)(ts-node@10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@20.14.10)(typescript@5.5.3)) + postcss-nested: 6.0.1(postcss@8.4.39) + postcss-selector-parser: 6.1.0 + resolve: 1.22.8 + sucrase: 3.35.0 + transitivePeerDependencies: + - ts-node - 
tailwindcss@3.4.4(ts-node@10.9.2(@swc/core@1.6.13(@swc/helpers@0.5.5))(@types/node@20.14.10)(typescript@5.5.3)): + tailwindcss@3.4.4(ts-node@10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@22.5.4)(typescript@5.5.3)): dependencies: '@alloc/quick-lru': 5.2.0 arg: 5.0.2 @@ -29231,7 +30105,7 @@ snapshots: postcss: 8.4.39 postcss-import: 15.1.0(postcss@8.4.39) postcss-js: 4.0.1(postcss@8.4.39) - postcss-load-config: 4.0.2(postcss@8.4.39)(ts-node@10.9.2(@swc/core@1.6.13(@swc/helpers@0.5.5))(@types/node@20.14.10)(typescript@5.5.3)) + postcss-load-config: 4.0.2(postcss@8.4.39)(ts-node@10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@22.5.4)(typescript@5.5.3)) postcss-nested: 6.0.1(postcss@8.4.39) postcss-selector-parser: 6.1.0 resolve: 1.22.8 @@ -29294,16 +30168,27 @@ snapshots: term-size@2.2.1: {} - terser-webpack-plugin@5.3.10(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1))): + terser-webpack-plugin@5.3.10(@swc/core@1.6.13(@swc/helpers@0.5.13))(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4)): dependencies: '@jridgewell/trace-mapping': 0.3.25 jest-worker: 27.5.1 schema-utils: 3.3.0 serialize-javascript: 6.0.2 terser: 5.31.1 - webpack: 5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)) + webpack: 5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4) optionalDependencies: - '@swc/core': 1.6.13(@swc/helpers@0.5.5) + '@swc/core': 1.6.13(@swc/helpers@0.5.13) + + terser-webpack-plugin@5.3.10(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4)): + dependencies: + '@jridgewell/trace-mapping': 0.3.25 + jest-worker: 27.5.1 + schema-utils: 3.3.0 + serialize-javascript: 6.0.2 + terser: 5.31.1 + webpack: 5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4) + optionalDependencies: + '@swc/core': 1.7.24(@swc/helpers@0.5.13) terser@5.31.1: dependencies: @@ -29312,6 +30197,14 @@ snapshots: commander: 2.20.3 source-map-support: 0.5.21 + terser@5.32.0: + dependencies: + '@jridgewell/source-map': 0.3.6 + acorn: 8.12.1 + commander: 2.20.3 + source-map-support: 0.5.21 + optional: true + test-exclude@6.0.0: dependencies: '@istanbuljs/schema': 0.1.3 @@ -29434,7 +30327,7 @@ snapshots: ts-interface-checker@0.1.13: {} - ts-loader@9.5.1(typescript@5.5.3)(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1))): + ts-loader@9.5.1(typescript@5.5.3)(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4)): dependencies: chalk: 4.1.2 enhanced-resolve: 5.17.0 @@ -29442,58 +30335,58 @@ snapshots: semver: 7.6.2 source-map: 0.7.4 typescript: 5.5.3 - webpack: 5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)) + webpack: 5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4) ts-log@2.2.5: {} - ts-node@10.9.2(@swc/core@1.6.13(@swc/helpers@0.5.5))(@types/node@20.14.10)(typescript@5.5.3): + ts-node@10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@16.18.101)(typescript@5.1.6): dependencies: '@cspotcode/source-map-support': 0.8.1 '@tsconfig/node10': 1.0.11 '@tsconfig/node12': 1.0.11 '@tsconfig/node14': 1.0.3 '@tsconfig/node16': 1.0.4 - '@types/node': 20.14.10 + '@types/node': 16.18.101 acorn: 8.12.1 acorn-walk: 8.3.3 arg: 4.1.3 create-require: 1.1.1 diff: 4.0.2 make-error: 1.3.6 - typescript: 5.5.3 + typescript: 5.1.6 v8-compile-cache-lib: 3.0.1 yn: 3.1.1 optionalDependencies: - '@swc/core': 
1.6.13(@swc/helpers@0.5.5) + '@swc/core': 1.7.24(@swc/helpers@0.5.13) - ts-node@10.9.2(@swc/core@1.6.13)(@types/node@16.18.101)(typescript@5.1.6): + ts-node@10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@20.14.10)(typescript@5.5.3): dependencies: '@cspotcode/source-map-support': 0.8.1 '@tsconfig/node10': 1.0.11 '@tsconfig/node12': 1.0.11 '@tsconfig/node14': 1.0.3 '@tsconfig/node16': 1.0.4 - '@types/node': 16.18.101 + '@types/node': 20.14.10 acorn: 8.12.1 acorn-walk: 8.3.3 arg: 4.1.3 create-require: 1.1.1 diff: 4.0.2 make-error: 1.3.6 - typescript: 5.1.6 + typescript: 5.5.3 v8-compile-cache-lib: 3.0.1 yn: 3.1.1 optionalDependencies: - '@swc/core': 1.6.13(@swc/helpers@0.5.5) + '@swc/core': 1.7.24(@swc/helpers@0.5.13) - ts-node@10.9.2(@swc/core@1.6.13)(@types/node@20.14.10)(typescript@5.5.3): + ts-node@10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@22.5.4)(typescript@5.5.3): dependencies: '@cspotcode/source-map-support': 0.8.1 '@tsconfig/node10': 1.0.11 '@tsconfig/node12': 1.0.11 '@tsconfig/node14': 1.0.3 '@tsconfig/node16': 1.0.4 - '@types/node': 20.14.10 + '@types/node': 22.5.4 acorn: 8.12.1 acorn-walk: 8.3.3 arg: 4.1.3 @@ -29504,7 +30397,7 @@ snapshots: v8-compile-cache-lib: 3.0.1 yn: 3.1.1 optionalDependencies: - '@swc/core': 1.6.13(@swc/helpers@0.5.5) + '@swc/core': 1.7.24(@swc/helpers@0.5.13) ts-retry-promise@0.8.1: {} @@ -29535,6 +30428,9 @@ snapshots: tslib@2.6.3: {} + tslib@2.7.0: + optional: true + tsutils@3.21.0(typescript@5.5.3): dependencies: tslib: 1.14.1 @@ -29604,6 +30500,9 @@ snapshots: type-fest@4.21.0: {} + type-fest@4.26.1: + optional: true + type-is@1.6.18: dependencies: media-typer: 0.3.0 @@ -29653,23 +30552,23 @@ snapshots: typedarray@0.0.6: {} - typedoc@0.26.3(typescript@5.5.3): + typedoc@0.26.3(typescript@5.6.2): dependencies: lunr: 2.3.9 markdown-it: 14.1.0 minimatch: 9.0.5 shiki: 1.10.3 - typescript: 5.5.3 + typescript: 5.6.2 yaml: 2.4.5 - typescript-json-schema@0.64.0(@swc/core@1.6.13): + typescript-json-schema@0.64.0(@swc/core@1.7.24(@swc/helpers@0.5.13)): dependencies: '@types/json-schema': 7.0.15 '@types/node': 16.18.101 glob: 7.2.3 path-equal: 1.2.5 safe-stable-stringify: 2.4.3 - ts-node: 10.9.2(@swc/core@1.6.13)(@types/node@16.18.101)(typescript@5.1.6) + ts-node: 10.9.2(@swc/core@1.7.24(@swc/helpers@0.5.13))(@types/node@16.18.101)(typescript@5.1.6) typescript: 5.1.6 yargs: 17.7.2 transitivePeerDependencies: @@ -29682,6 +30581,8 @@ snapshots: typescript@5.5.3: {} + typescript@5.6.2: {} + ua-parser-js@1.0.37: {} uc.micro@2.1.0: {} @@ -29705,6 +30606,8 @@ snapshots: undici-types@5.26.5: {} + undici-types@6.19.8: {} + unicode-canonical-property-names-ecmascript@2.0.0: {} unicode-match-property-ecmascript@2.0.0: @@ -29866,6 +30769,12 @@ snapshots: escalade: 3.1.2 picocolors: 1.0.1 + update-browserslist-db@1.1.0(browserslist@4.23.3): + dependencies: + browserslist: 4.23.3 + escalade: 3.1.2 + picocolors: 1.0.1 + update-notifier@6.0.2: dependencies: boxen: 7.1.1 @@ -29903,7 +30812,7 @@ snapshots: url@0.11.3: dependencies: punycode: 1.4.1 - qs: 6.12.3 + qs: 6.13.0 urlpattern-polyfill@10.0.0: {} @@ -30044,13 +30953,64 @@ snapshots: unist-util-stringify-position: 4.0.0 vfile-message: 4.0.2 - vite-node@1.6.0(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1): + vite-node@1.6.0(@types/node@20.14.10)(sass@1.78.0)(terser@5.32.0): + dependencies: + cac: 6.7.14 + debug: 4.3.7 + pathe: 1.1.2 + picocolors: 1.0.1 + vite: 5.3.3(@types/node@20.14.10)(sass@1.78.0)(terser@5.32.0) + transitivePeerDependencies: + - '@types/node' + - less + - lightningcss + - sass + 
- stylus + - sugarss + - supports-color + - terser + + vite-node@1.6.0(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0): + dependencies: + cac: 6.7.14 + debug: 4.3.7 + pathe: 1.1.2 + picocolors: 1.0.1 + vite: 5.3.3(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0) + transitivePeerDependencies: + - '@types/node' + - less + - lightningcss + - sass + - stylus + - sugarss + - supports-color + - terser + + vite-node@2.0.1(@types/node@20.14.10)(sass@1.77.6)(terser@5.32.0): + dependencies: + cac: 6.7.14 + debug: 4.3.5(supports-color@8.1.1) + pathe: 1.1.2 + picocolors: 1.0.1 + vite: 5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.32.0) + transitivePeerDependencies: + - '@types/node' + - less + - lightningcss + - sass + - stylus + - sugarss + - supports-color + - terser + + vite-node@2.0.1(@types/node@20.14.10)(sass@1.78.0)(terser@5.32.0): dependencies: cac: 6.7.14 debug: 4.3.5(supports-color@8.1.1) pathe: 1.1.2 picocolors: 1.0.1 - vite: 5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1) + vite: 5.3.3(@types/node@20.14.10)(sass@1.78.0)(terser@5.32.0) transitivePeerDependencies: - '@types/node' - less @@ -30061,13 +31021,13 @@ snapshots: - supports-color - terser - vite-node@2.0.1(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1): + vite-node@2.0.1(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0): dependencies: cac: 6.7.14 debug: 4.3.5(supports-color@8.1.1) pathe: 1.1.2 picocolors: 1.0.1 - vite: 5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1) + vite: 5.3.3(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0) transitivePeerDependencies: - '@types/node' - less @@ -30078,18 +31038,29 @@ snapshots: - supports-color - terser - vite-tsconfig-paths@4.3.2(typescript@5.5.3)(vite@5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1)): + vite-tsconfig-paths@4.3.2(typescript@5.5.3)(vite@5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.32.0)): dependencies: debug: 4.3.5(supports-color@8.1.1) globrex: 0.1.2 tsconfck: 3.1.1(typescript@5.5.3) optionalDependencies: - vite: 5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1) + vite: 5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.32.0) transitivePeerDependencies: - supports-color - typescript - vite@5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1): + vite-tsconfig-paths@4.3.2(typescript@5.5.3)(vite@5.3.3(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0)): + dependencies: + debug: 4.3.5(supports-color@8.1.1) + globrex: 0.1.2 + tsconfck: 3.1.1(typescript@5.5.3) + optionalDependencies: + vite: 5.3.3(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0) + transitivePeerDependencies: + - supports-color + - typescript + + vite@5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.32.0): dependencies: esbuild: 0.21.5 postcss: 8.4.39 @@ -30098,9 +31069,31 @@ snapshots: '@types/node': 20.14.10 fsevents: 2.3.3 sass: 1.77.6 - terser: 5.31.1 + terser: 5.32.0 - vitest@2.0.1(@types/node@20.14.10)(happy-dom@14.12.3)(jsdom@24.1.0)(sass@1.77.6)(terser@5.31.1): + vite@5.3.3(@types/node@20.14.10)(sass@1.78.0)(terser@5.32.0): + dependencies: + esbuild: 0.21.5 + postcss: 8.4.39 + rollup: 4.18.1 + optionalDependencies: + '@types/node': 20.14.10 + fsevents: 2.3.3 + sass: 1.78.0 + terser: 5.32.0 + + vite@5.3.3(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0): + dependencies: + esbuild: 0.21.5 + postcss: 8.4.39 + rollup: 4.18.1 + optionalDependencies: + '@types/node': 22.5.4 + fsevents: 2.3.3 + sass: 1.78.0 + terser: 5.32.0 + + vitest@2.0.1(@types/node@20.14.10)(happy-dom@14.12.3)(jsdom@25.0.0)(sass@1.77.6)(terser@5.32.0): dependencies: '@ampproject/remapping': 2.3.0 
'@vitest/expect': 2.0.1 @@ -30117,12 +31110,78 @@ snapshots: std-env: 3.7.0 tinybench: 2.8.0 tinypool: 1.0.0 - vite: 5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1) - vite-node: 2.0.1(@types/node@20.14.10)(sass@1.77.6)(terser@5.31.1) + vite: 5.3.3(@types/node@20.14.10)(sass@1.77.6)(terser@5.32.0) + vite-node: 2.0.1(@types/node@20.14.10)(sass@1.77.6)(terser@5.32.0) why-is-node-running: 2.3.0 optionalDependencies: '@types/node': 20.14.10 happy-dom: 14.12.3 + jsdom: 25.0.0 + transitivePeerDependencies: + - less + - lightningcss + - sass + - stylus + - sugarss + - supports-color + - terser + + vitest@2.0.1(@types/node@20.14.10)(happy-dom@15.7.3)(jsdom@25.0.0)(sass@1.78.0)(terser@5.32.0): + dependencies: + '@ampproject/remapping': 2.3.0 + '@vitest/expect': 2.0.1 + '@vitest/runner': 2.0.1 + '@vitest/snapshot': 2.0.1 + '@vitest/spy': 2.0.1 + '@vitest/utils': 2.0.1 + chai: 5.1.1 + debug: 4.3.5(supports-color@8.1.1) + execa: 8.0.1 + magic-string: 0.30.10 + pathe: 1.1.2 + picocolors: 1.0.1 + std-env: 3.7.0 + tinybench: 2.8.0 + tinypool: 1.0.0 + vite: 5.3.3(@types/node@20.14.10)(sass@1.78.0)(terser@5.32.0) + vite-node: 2.0.1(@types/node@20.14.10)(sass@1.78.0)(terser@5.32.0) + why-is-node-running: 2.3.0 + optionalDependencies: + '@types/node': 20.14.10 + happy-dom: 15.7.3 + jsdom: 25.0.0 + transitivePeerDependencies: + - less + - lightningcss + - sass + - stylus + - sugarss + - supports-color + - terser + + vitest@2.0.1(@types/node@22.5.4)(happy-dom@15.7.3)(jsdom@24.1.0)(sass@1.78.0)(terser@5.32.0): + dependencies: + '@ampproject/remapping': 2.3.0 + '@vitest/expect': 2.0.1 + '@vitest/runner': 2.0.1 + '@vitest/snapshot': 2.0.1 + '@vitest/spy': 2.0.1 + '@vitest/utils': 2.0.1 + chai: 5.1.1 + debug: 4.3.5(supports-color@8.1.1) + execa: 8.0.1 + magic-string: 0.30.10 + pathe: 1.1.2 + picocolors: 1.0.1 + std-env: 3.7.0 + tinybench: 2.8.0 + tinypool: 1.0.0 + vite: 5.3.3(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0) + vite-node: 2.0.1(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0) + why-is-node-running: 2.3.0 + optionalDependencies: + '@types/node': 22.5.4 + happy-dom: 15.7.3 jsdom: 24.1.0 transitivePeerDependencies: - less @@ -30133,6 +31192,39 @@ snapshots: - supports-color - terser + vitest@2.0.1(@types/node@22.5.4)(happy-dom@15.7.3)(jsdom@25.0.0)(sass@1.78.0)(terser@5.32.0): + dependencies: + '@ampproject/remapping': 2.3.0 + '@vitest/expect': 2.0.1 + '@vitest/runner': 2.0.1 + '@vitest/snapshot': 2.0.1 + '@vitest/spy': 2.0.1 + '@vitest/utils': 2.0.1 + chai: 5.1.1 + debug: 4.3.5(supports-color@8.1.1) + execa: 8.0.1 + magic-string: 0.30.10 + pathe: 1.1.2 + picocolors: 1.0.1 + std-env: 3.7.0 + tinybench: 2.8.0 + tinypool: 1.0.0 + vite: 5.3.3(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0) + vite-node: 2.0.1(@types/node@22.5.4)(sass@1.78.0)(terser@5.32.0) + why-is-node-running: 2.3.0 + optionalDependencies: + '@types/node': 22.5.4 + happy-dom: 15.7.3 + jsdom: 25.0.0 + transitivePeerDependencies: + - less + - lightningcss + - sass + - stylus + - sugarss + - supports-color + - terser + vscode-oniguruma@1.7.0: {} vscode-textmate@8.0.0: {} @@ -30184,11 +31276,11 @@ snapshots: dependencies: defaults: 1.0.4 - web-ext@7.6.2(body-parser@1.20.2): + web-ext@7.6.2: dependencies: '@babel/runtime': 7.21.0 '@devicefarmer/adbkit': 3.2.3 - addons-linter: 5.32.0(body-parser@1.20.2)(node-fetch@3.3.1) + addons-linter: 5.32.0(node-fetch@3.3.1) bunyan: 1.8.15 camelcase: 7.0.1 chrome-launcher: 0.15.1 @@ -30251,9 +31343,9 @@ snapshots: webpack-cli@5.1.4(webpack@5.92.1): dependencies: '@discoveryjs/json-ext': 0.5.7 - 
'@webpack-cli/configtest': 2.1.1(webpack-cli@5.1.4(webpack@5.92.1))(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1))) - '@webpack-cli/info': 2.0.2(webpack-cli@5.1.4(webpack@5.92.1))(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1))) - '@webpack-cli/serve': 2.0.5(webpack-cli@5.1.4(webpack@5.92.1))(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1))) + '@webpack-cli/configtest': 2.1.1(webpack-cli@5.1.4(webpack@5.92.1))(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4)) + '@webpack-cli/info': 2.0.2(webpack-cli@5.1.4(webpack@5.92.1))(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4)) + '@webpack-cli/serve': 2.0.5(webpack-cli@5.1.4(webpack@5.92.1))(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4)) colorette: 2.0.20 commander: 10.0.1 cross-spawn: 7.0.3 @@ -30262,10 +31354,10 @@ snapshots: import-local: 3.1.0 interpret: 3.1.1 rechoir: 0.8.0 - webpack: 5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)) + webpack: 5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4) webpack-merge: 5.8.0 - webpack-dev-middleware@6.1.3(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1))): + webpack-dev-middleware@6.1.3(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4)): dependencies: colorette: 2.0.20 memfs: 3.5.3 @@ -30273,7 +31365,7 @@ snapshots: range-parser: 1.2.1 schema-utils: 4.2.0 optionalDependencies: - webpack: 5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)) + webpack: 5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4) webpack-hot-middleware@2.26.1: dependencies: @@ -30292,7 +31384,7 @@ snapshots: webpack-virtual-modules@0.6.2: {} - webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1)): + webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.13))(webpack-cli@5.1.4(webpack@5.92.1)): dependencies: '@types/eslint-scope': 3.7.7 '@types/estree': 1.0.5 @@ -30315,7 +31407,40 @@ snapshots: neo-async: 2.6.2 schema-utils: 3.3.0 tapable: 2.2.1 - terser-webpack-plugin: 5.3.10(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack@5.92.1(@swc/core@1.6.13(@swc/helpers@0.5.5))(webpack-cli@5.1.4(webpack@5.92.1))) + terser-webpack-plugin: 5.3.10(@swc/core@1.6.13(@swc/helpers@0.5.13))(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4)) + watchpack: 2.4.1 + webpack-sources: 3.2.3 + optionalDependencies: + webpack-cli: 5.1.4(webpack@5.92.1) + transitivePeerDependencies: + - '@swc/core' + - esbuild + - uglify-js + + webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4): + dependencies: + '@types/eslint-scope': 3.7.7 + '@types/estree': 1.0.5 + '@webassemblyjs/ast': 1.12.1 + '@webassemblyjs/wasm-edit': 1.12.1 + '@webassemblyjs/wasm-parser': 1.12.1 + acorn: 8.12.1 + acorn-import-attributes: 1.9.5(acorn@8.12.1) + browserslist: 4.23.1 + chrome-trace-event: 1.0.4 + enhanced-resolve: 5.17.0 + es-module-lexer: 1.5.4 + eslint-scope: 5.1.1 + events: 3.3.0 + glob-to-regexp: 0.4.1 + graceful-fs: 4.2.11 + json-parse-even-better-errors: 2.3.1 + loader-runner: 4.3.0 + mime-types: 2.1.35 + neo-async: 2.6.2 + schema-utils: 3.3.0 + tapable: 2.2.1 + terser-webpack-plugin: 5.3.10(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack@5.92.1(@swc/core@1.7.24(@swc/helpers@0.5.13))(webpack-cli@5.1.4)) watchpack: 2.4.1 webpack-sources: 3.2.3 optionalDependencies: @@ -30560,12 
+31685,12 @@ snapshots: zod@3.23.8: {} - zustand@4.5.4(@types/react@18.3.3)(immer@9.0.21)(react@18.3.1): + zustand@4.5.4(@types/react@18.3.3)(immer@10.1.1)(react@18.3.1): dependencies: use-sync-external-store: 1.2.0(react@18.3.1) optionalDependencies: '@types/react': 18.3.3 - immer: 9.0.21 + immer: 10.1.1 react: 18.3.1 zwitch@2.0.4: {} diff --git a/scripts/generate_indexer_schema.sh b/scripts/generate_indexer_schema.sh new file mode 100755 index 0000000000000..f505cd65814e5 --- /dev/null +++ b/scripts/generate_indexer_schema.sh @@ -0,0 +1,78 @@ +#!/bin/bash +# Copyright (c) Mysten Labs, Inc. +# SPDX-License-Identifier: Apache-2.0 +# +# Update sui-indexer's generated src/schema.rs based on the schema after +# running all its migrations on a clean database. Expects the first argument to +# be a port to run the temporary database on (defaults to 5433). + +set -x +set -e + +if ! command -v git &> /dev/null; then + echo "Please install git: e.g. brew install git" >&2 + exit 1 +fi + +for PG in psql initdb postgres pg_isready pg_ctl; do + if ! command -v $PG &> /dev/null; then + echo "Could not find $PG. Please install postgres: e.g. brew install postgresql@15" >&2 + exit 1 + fi +done + +if ! command -v diesel &> /dev/null; then + echo "Please install diesel: e.g. cargo install diesel_cli --features postgres" >&2 + exit 1 +fi + +REPO=$(git rev-parse --show-toplevel) + +# Create a temporary directory to store the ephemeral DB. +TMP=$(mktemp -d) + +# Set-up a trap to clean everything up on EXIT (stop DB, delete temp directory) +function cleanup { + pg_ctl stop -D "$TMP" -mfast + set +x + echo "Postgres STDOUT:" + cat "$TMP/db.stdout" + echo "Postgres STDERR:" + cat "$TMP/db.stderr" + set -x + rm -rf "$TMP" +} +trap cleanup EXIT + +# Create a new database in the temporary directory +initdb -D "$TMP" --user postgres + +# Run the DB in the background, on the port provided and capture its output +PORT=${1:-5433} +postgres -D "$TMP" -p "$PORT" -c unix_socket_directories= \ + > "$TMP/db.stdout" \ + 2> "$TMP/db.stderr" & + +# Wait for postgres to report as ready +RETRIES=0 +while ! pg_isready -p "$PORT" --host "localhost" --username "postgres"; do + if [ $RETRIES -gt 5 ]; then + echo "Postgres failed to start" >&2 + exit 1 + fi + sleep 1 + RETRIES=$((RETRIES + 1)) +done + +# Run all migrations on the new database +diesel migration run \ + --database-url "postgres://postgres:postgrespw@localhost:$PORT" \ + --migration-dir "$REPO/crates/sui-indexer/migrations/pg" + +# Generate the schema.rs file, excluding partition tables and including the +# copyright notice. 
+diesel print-schema \ + --database-url "postgres://postgres:postgrespw@localhost:$PORT" \ + --patch-file "$REPO/crates/sui-indexer/src/schema.patch" \ + --except-tables "^objects_version_|_partition_" \ + > "$REPO/crates/sui-indexer/src/schema.rs" diff --git a/scripts/simtest/codecov.sh b/scripts/simtest/codecov.sh index 6df7c9ccaef05..60e9dd3ffa372 100755 --- a/scripts/simtest/codecov.sh +++ b/scripts/simtest/codecov.sh @@ -17,4 +17,4 @@ export SIMTEST_STATIC_INIT_MOVE=$root_dir"/examples/move/basics" MSIM_WATCHDOG_TIMEOUT_MS=60000 MSIM_TEST_SEED=1 cargo llvm-cov --ignore-run-fail --lcov --output-path lcov-simtest.info nextest --cargo-profile simulator # remove the patch -git checkout .cargo/config Cargo.toml Cargo.lock +git checkout .cargo/config.toml Cargo.toml Cargo.lock diff --git a/scripts/simtest/config-patch b/scripts/simtest/config-patch index 655ca6ce64adc..ed77e318c0453 100644 --- a/scripts/simtest/config-patch +++ b/scripts/simtest/config-patch @@ -1,7 +1,7 @@ -diff --git a/.cargo/config b/.cargo/config +diff --git a/.cargo/config.toml b/.cargo/config.toml index ec2c459490..55985cbe9f 100644 ---- a/.cargo/config -+++ b/.cargo/config +--- a/.cargo/config.toml ++++ b/.cargo/config.toml @@ -25,4 +25,4 @@ move-clippy = [ ] diff --git a/scripts/update_graphql_e2e_test_protocol_version.py b/scripts/update_graphql_e2e_test_protocol_version.py index 228892f305399..1892c665daa58 100644 --- a/scripts/update_graphql_e2e_test_protocol_version.py +++ b/scripts/update_graphql_e2e_test_protocol_version.py @@ -65,4 +65,4 @@ def replace_protocol_version_in_repo(repo_path, old_version, new_version, yes_to new_version = input("Enter the new protocol version (YY): ") replace_protocol_version_in_repo(repo_path, old_version, new_version, args.yes_to_all, args.dry_run) if not args.dry_run: - print(f"Next step. Running `env UB=1 cargo nextest run --features pg_integration` in `crates/sui-graphql-e2e-tests` to update all the snapshots.") + print(f"Next step. 
Running `env UB=1 cargo nextest run` in `crates/sui-graphql-e2e-tests` to update all the snapshots.") diff --git a/sdk/create-dapp/CHANGELOG.md b/sdk/create-dapp/CHANGELOG.md index bf39f88b4f7af..04b26c5faa11e 100644 --- a/sdk/create-dapp/CHANGELOG.md +++ b/sdk/create-dapp/CHANGELOG.md @@ -1,5 +1,20 @@ # @mysten/create-dapp +## 0.3.21 + +### Patch Changes + +- @mysten/dapp-kit@0.14.21 + +## 0.3.20 + +### Patch Changes + +- Updated dependencies [2c96b06] +- Updated dependencies [1fd22cc] + - @mysten/sui@1.9.0 + - @mysten/dapp-kit@0.14.20 + ## 0.3.19 ### Patch Changes diff --git a/sdk/create-dapp/package.json b/sdk/create-dapp/package.json index e61f79dfd6315..7b7ddaa056afe 100644 --- a/sdk/create-dapp/package.json +++ b/sdk/create-dapp/package.json @@ -3,7 +3,7 @@ "author": "Mysten Labs ", "description": "A CLI for creating new Sui dApps", "homepage": "https://sdk.mystenlabs.com", - "version": "0.3.19", + "version": "0.3.21", "license": "Apache-2.0", "files": [ "CHANGELOG.md", diff --git a/sdk/dapp-kit/CHANGELOG.md b/sdk/dapp-kit/CHANGELOG.md index b6d83c8acb8f0..1af73803b0d5c 100644 --- a/sdk/dapp-kit/CHANGELOG.md +++ b/sdk/dapp-kit/CHANGELOG.md @@ -1,5 +1,22 @@ # @mysten/dapp-kit +## 0.14.21 + +### Patch Changes + +- Updated dependencies [0db770a] + - @mysten/zksend@0.11.3 + +## 0.14.20 + +### Patch Changes + +- Updated dependencies [2c96b06] +- Updated dependencies [1fd22cc] + - @mysten/sui@1.9.0 + - @mysten/wallet-standard@0.13.4 + - @mysten/zksend@0.11.2 + ## 0.14.19 ### Patch Changes diff --git a/sdk/dapp-kit/package.json b/sdk/dapp-kit/package.json index ba57d0fab7043..b8d0249d00dc2 100644 --- a/sdk/dapp-kit/package.json +++ b/sdk/dapp-kit/package.json @@ -3,7 +3,7 @@ "author": "Mysten Labs ", "description": "A collection of React hooks and components for interacting with the Sui blockchain and wallets.", "homepage": "https://sdk.mystenlabs.com/typescript", - "version": "0.14.19", + "version": "0.14.21", "license": "Apache-2.0", "files": [ "CHANGELOG.md", diff --git a/sdk/deepbook-v3/CHANGELOG.md b/sdk/deepbook-v3/CHANGELOG.md index b9edceb2dd748..07e1ad500200a 100644 --- a/sdk/deepbook-v3/CHANGELOG.md +++ b/sdk/deepbook-v3/CHANGELOG.md @@ -1,5 +1,19 @@ # @mysten/deepbook-v3 +## 0.6.0 + +### Minor Changes + +- ebe2ae8: Admin function updates, package constant updates + +## 0.5.1 + +### Patch Changes + +- Updated dependencies [2c96b06] +- Updated dependencies [1fd22cc] + - @mysten/sui@1.9.0 + ## 0.5.0 ### Minor Changes diff --git a/sdk/deepbook-v3/package.json b/sdk/deepbook-v3/package.json index 9013f9cada6bd..ae442ee5ae5d7 100644 --- a/sdk/deepbook-v3/package.json +++ b/sdk/deepbook-v3/package.json @@ -2,7 +2,7 @@ "name": "@mysten/deepbook-v3", "author": "Mysten Labs ", "description": "Sui Deepbook SDK", - "version": "0.5.0", + "version": "0.6.0", "license": "Apache-2.0", "type": "commonjs", "main": "./dist/cjs/index.js", diff --git a/sdk/deepbook-v3/src/transactions/deepbookAdmin.ts b/sdk/deepbook-v3/src/transactions/deepbookAdmin.ts index 801f8f7bb9ce0..01750b0078fc8 100644 --- a/sdk/deepbook-v3/src/transactions/deepbookAdmin.ts +++ b/sdk/deepbook-v3/src/transactions/deepbookAdmin.ts @@ -1,12 +1,11 @@ // Copyright (c) Mysten Labs, Inc. 
// SPDX-License-Identifier: Apache-2.0 -import { coinWithBalance } from '@mysten/sui/transactions'; import type { Transaction } from '@mysten/sui/transactions'; import type { CreatePoolAdminParams } from '../types/index.js'; import type { DeepBookConfig } from '../utils/config.js'; -import { FLOAT_SCALAR, POOL_CREATION_FEE } from '../utils/config.js'; +import { FLOAT_SCALAR } from '../utils/config.js'; /** * DeepBookAdminContract class for managing admin actions. @@ -44,9 +43,7 @@ export class DeepBookAdminContract { params; const baseCoin = this.#config.getCoin(baseCoinKey); const quoteCoin = this.#config.getCoin(quoteCoinKey); - const deepCoinType = this.#config.getCoin('DEEP').type; - const creationFee = coinWithBalance({ type: deepCoinType, balance: POOL_CREATION_FEE }); const baseScalar = baseCoin.scalar; const quoteScalar = quoteCoin.scalar; @@ -61,7 +58,6 @@ export class DeepBookAdminContract { tx.pure.u64(adjustedTickSize), // adjusted tick_size tx.pure.u64(adjustedLotSize), // adjusted lot_size tx.pure.u64(adjustedMinSize), // adjusted min_size - creationFee, // 0x2::balance::Balance<0x2::sui::SUI> tx.pure.bool(whitelisted), tx.pure.bool(stablePool), tx.object(this.#adminCap()), @@ -81,7 +77,11 @@ export class DeepBookAdminContract { const quoteCoin = this.#config.getCoin(pool.quoteCoin); tx.moveCall({ target: `${this.#config.DEEPBOOK_PACKAGE_ID}::pool::unregister_pool_admin`, - arguments: [tx.object(this.#config.REGISTRY_ID), tx.object(this.#adminCap())], + arguments: [ + tx.object(pool.address), + tx.object(this.#config.REGISTRY_ID), + tx.object(this.#adminCap()), + ], typeArguments: [baseCoin.type, quoteCoin.type], }); }; @@ -137,4 +137,20 @@ export class DeepBookAdminContract { ], }); }; + + /** + * @description Sets the treasury address where pool creation fees will be sent + * @param {string} treasuryAddress The treasury address + * @returns A function that takes a Transaction object + */ + setTreasuryAddress = (treasuryAddress: string) => (tx: Transaction) => { + tx.moveCall({ + target: `${this.#config.DEEPBOOK_PACKAGE_ID}::registry::set_treasury_address`, + arguments: [ + tx.object(this.#config.REGISTRY_ID), + tx.pure.address(treasuryAddress), + tx.object(this.#adminCap()), + ], + }); + }; } diff --git a/sdk/deepbook-v3/src/utils/config.ts b/sdk/deepbook-v3/src/utils/config.ts index 750ef6d7e68e4..80c8dcc7f0773 100644 --- a/sdk/deepbook-v3/src/utils/config.ts +++ b/sdk/deepbook-v3/src/utils/config.ts @@ -15,7 +15,6 @@ import { } from './constants.js'; export const FLOAT_SCALAR = 1000000000; -export const POOL_CREATION_FEE = 10000 * 1000000; export const MAX_TIMESTAMP = 1844674407370955161n; export const GAS_BUDGET = 0.5 * 500000000; // Adjust based on benchmarking export const DEEP_SCALAR = 1000000; diff --git a/sdk/deepbook-v3/src/utils/constants.ts b/sdk/deepbook-v3/src/utils/constants.ts index 93a861bedd7be..a9ad88106e08e 100644 --- a/sdk/deepbook-v3/src/utils/constants.ts +++ b/sdk/deepbook-v3/src/utils/constants.ts @@ -12,8 +12,8 @@ export interface DeepbookPackageIds { } export const testnetPackageIds = { - DEEPBOOK_PACKAGE_ID: '0x103145af17de55d4e191374a2e971f4d5eef2205c7e789aed7aa6dfc0349fb3b', - REGISTRY_ID: '0x25f80b2d9ae3a2108e3ccbfafb458872261795e409949f82ab417e159d5e76e2', + DEEPBOOK_PACKAGE_ID: '0x48cc688a15bdda6017c730a3c65b30414e642d041f2931ef14e08f6b0b2a1b7f', + REGISTRY_ID: '0x60517d0cae6f1168c1ba94cb45c06191f9837ec695d0d465d594b2f6287534f0', DEEP_TREASURY_ID: '0x69fffdae0075f8f71f4fa793549c11079266910e8905169845af1f5d00e09dcb', } satisfies 
DeepbookPackageIds; @@ -58,6 +58,11 @@ export const mainnetCoins: CoinMap = { scalar: 1000000000, }, USDC: { + address: ``, + type: ``, + scalar: 0, + }, + WUSDC: { address: `0x5d4b302506645c37ff133b98c4b50a5ae14841659738d6d733d59d0d217a93bf`, type: `0x5d4b302506645c37ff133b98c4b50a5ae14841659738d6d733d59d0d217a93bf::coin::COIN`, scalar: 1000000, @@ -67,7 +72,12 @@ export const mainnetCoins: CoinMap = { type: `0xaf8cd5edc19c4512f4259f0bee101a40d41ebed738ade5874359610ef8eeced5::coin::COIN`, scalar: 100000000, }, - USDT: { + WBTC: { + address: `0x027792d9fed7f9844eb4839566001bb6f6cb4804f66aa2da6fe1ee242d896881`, + type: `0x027792d9fed7f9844eb4839566001bb6f6cb4804f66aa2da6fe1ee242d896881::coin::COIN`, + scalar: 0, + }, + WUSDT: { address: `0xc060006111016b8a020ad5b33834984a437aaa7d3c74c18e09a95d48aceab08c`, type: `0xc060006111016b8a020ad5b33834984a437aaa7d3c74c18e09a95d48aceab08c::coin::COIN`, scalar: 1000000, @@ -76,22 +86,22 @@ export const mainnetCoins: CoinMap = { export const testnetPools: PoolMap = { DEEP_SUI: { - address: `0xd69b2abe6169964d320801e3a94c9f508274085246a25c44f15ba3fa51bed055`, + address: `0x877c81b2d760a91a405dc12974742ba2401dc1da727681d2de32c861f807ec28`, baseCoin: 'DEEP', quoteCoin: 'SUI', }, SUI_DBUSDC: { - address: `0xe82f5d1a08fa7aef1a49bdc8006b82d771724b47a7d3975e69e7bc645fcf6919`, + address: `0x966c99a5ce0ce3e09dacac0a42cc2b888d9e1a0c5f39b69f556c38f38ef0b81d`, baseCoin: 'SUI', quoteCoin: 'DBUSDC', }, DEEP_DBUSDC: { - address: `0x748341bbd25a1a40d3fe7fcbf1519d8b69869b7170003b2725917982e9443389`, + address: `0x6546ae819e61770c6070962157a2868c012c121ef4e6dd93e81e98900c61b799`, baseCoin: 'DEEP', quoteCoin: 'DBUSDC', }, DBUSDT_DBUSDC: { - address: `0xe4141fee12c3c4897d8fe6e0ed25734d702a84f03642d125ac3add9b155e82bd`, + address: `0x9c0a6ef057b5f81d9f2d95a38ad753b8b5b8f535f9c10bb3e7d7256296eb4e18`, baseCoin: 'DBUSDT', quoteCoin: 'DBUSDC', }, diff --git a/sdk/deepbook/CHANGELOG.md b/sdk/deepbook/CHANGELOG.md index 21dac5b61d897..c1a1b7a41cdef 100644 --- a/sdk/deepbook/CHANGELOG.md +++ b/sdk/deepbook/CHANGELOG.md @@ -1,5 +1,13 @@ # @mysten/deepbook +## 0.8.18 + +### Patch Changes + +- Updated dependencies [2c96b06] +- Updated dependencies [1fd22cc] + - @mysten/sui@1.9.0 + ## 0.8.17 ### Patch Changes diff --git a/sdk/deepbook/package.json b/sdk/deepbook/package.json index 39c23e283a8d5..9d1175246e219 100644 --- a/sdk/deepbook/package.json +++ b/sdk/deepbook/package.json @@ -2,7 +2,7 @@ "name": "@mysten/deepbook", "author": "Mysten Labs ", "description": "Sui Deepbook SDK", - "version": "0.8.17", + "version": "0.8.18", "license": "Apache-2.0", "type": "commonjs", "main": "./dist/cjs/index.js", diff --git a/sdk/docs/pages/dapp-kit/wallet-hooks/useSignTransaction.mdx b/sdk/docs/pages/dapp-kit/wallet-hooks/useSignTransaction.mdx index 19d65d2cd19a5..546968ebb1ab1 100644 --- a/sdk/docs/pages/dapp-kit/wallet-hooks/useSignTransaction.mdx +++ b/sdk/docs/pages/dapp-kit/wallet-hooks/useSignTransaction.mdx @@ -5,6 +5,7 @@ import { UseSignTransactionExample } from '../../../examples/wallet-hooks'; Use the `useSignTransaction` hook to prompt the user to sign a transaction with their wallet. 
```ts +import { Transaction } from '@mysten/sui/transactions'; import { ConnectButton, useCurrentAccount, @@ -42,7 +43,7 @@ function MyComponent() { }); // Always report transaction effects to the wallet after execution - reportTransactionEffects(result.rawEffects!); + reportTransactionEffects(executeResult.rawEffects!); console.log(executeResult); }} diff --git a/sdk/docs/pages/typescript/executors.mdx b/sdk/docs/pages/typescript/executors.mdx index 05eeed173ea9c..bb5583b0ca965 100644 --- a/sdk/docs/pages/typescript/executors.mdx +++ b/sdk/docs/pages/typescript/executors.mdx @@ -47,8 +47,8 @@ const tx1 = new Transaction(); const [coin1] = tx1.splitCoins(tx1.gas, [1]); tx1.transferObjects([coin1], address1); const tx2 = new Transaction(); -const [coin2] = tx1.splitCoins(tx2.gas, [1]); -tx1.transferObjects([coin2], address2); +const [coin2] = tx2.splitCoins(tx2.gas, [1]); +tx2.transferObjects([coin2], address2); const [{ digest: digest1 }, { digest: digest2 }] = await Promise.all([ executor.executeTransaction(tx1), @@ -117,8 +117,8 @@ const tx1 = new Transaction(); const [coin1] = tx1.splitCoins(tx1.gas, [1]); tx1.transferObjects([coin1], address1); const tx2 = new Transaction(); -const [coin2] = tx1.splitCoins(tx2.gas, [1]); -tx1.transferObjects([coin2], address2); +const [coin2] = tx2.splitCoins(tx2.gas, [1]); +tx2.transferObjects([coin2], address2); const [{ digest: digest1 }, { digest: digest2 }] = await Promise.all([ executor.executeTransaction(tx1), diff --git a/sdk/docs/pages/typescript/sui-client.mdx b/sdk/docs/pages/typescript/sui-client.mdx index 95caae4da0407..0429aff6fd843 100644 --- a/sdk/docs/pages/typescript/sui-client.mdx +++ b/sdk/docs/pages/typescript/sui-client.mdx @@ -139,7 +139,8 @@ const result = await client.executeTransactionBlock({ - `transactionBlock` - either a Transaction or BCS serialized transaction data bytes as a Uint8Array or as a base-64 encoded string. -- `signer` - A `Keypair` instance to sign the transaction +- `signature` - A signature, or list of signatures committed to the intent message of the + transaction data, as a base-64 encoded string. - `requestType`: `WaitForEffectsCert` or `WaitForLocalExecution`. Determines when the RPC node should return the response. Default to be `WaitForLocalExecution` - `options`: @@ -170,8 +171,7 @@ const result = await client.signAndExecuteTransaction({ - `transaction` - BCS serialized transaction data bytes as a Uint8Array or as a base-64 encoded string. -- `signature` - A signature, or list of signatures committed to the intent message of the - transaction data, as a base-64 encoded string. +- `signer` - A `Keypair` instance to sign the transaction - `requestType`: `WaitForEffectsCert` or `WaitForLocalExecution`. Determines when the RPC node should return the response. Default to be `WaitForLocalExecution` - `options`: diff --git a/sdk/docs/pages/typescript/zklogin.mdx b/sdk/docs/pages/typescript/zklogin.mdx index 1a1891ec93e47..1ef30fbd48e36 100644 --- a/sdk/docs/pages/typescript/zklogin.mdx +++ b/sdk/docs/pages/typescript/zklogin.mdx @@ -12,7 +12,7 @@ signatures and compute zkLogin addresses. To parse a serialized zkLogin signature ```typescript -import { parseZkLoginSignature } '@mysten/sui/zklogin'; +import { parseZkLoginSignature } from '@mysten/sui/zklogin'; const parsedSignature = await parseZkLoginSignature('BQNNMTY4NjAxMzAyO....'); ``` @@ -20,7 +20,7 @@ const parsedSignature = await parseZkLoginSignature('BQNNMTY4NjAxMzAyO....'); Use `getZkLoginSignature` to serialize a zkLogin signature. 
```typescript -import { getZkLoginSignature } '@mysten/sui/zklogin'; +import { getZkLoginSignature } from '@mysten/sui/zklogin'; const serializedSignature = await getZkLoginSignature({ inputs, maxEpoch, userSignature }); ``` @@ -28,7 +28,7 @@ const serializedSignature = await getZkLoginSignature({ inputs, maxEpoch, userSi To compute the address for a given address seed and iss you can use `computeZkLoginAddressFromSeed` ```typescript -import { computeZkLoginAddressFromSeed } '@mysten/sui/zklogin'; +import { computeZkLoginAddressFromSeed } from '@mysten/sui/zklogin'; const address = await computeZkLoginAddressFromSeed(0n, 'https://accounts.google.com'); ``` diff --git a/sdk/enoki/CHANGELOG.md b/sdk/enoki/CHANGELOG.md index ae9f8d123fa10..d769926b3bf69 100644 --- a/sdk/enoki/CHANGELOG.md +++ b/sdk/enoki/CHANGELOG.md @@ -1,5 +1,14 @@ # @mysten/enoki +## 0.4.2 + +### Patch Changes + +- Updated dependencies [2c96b06] +- Updated dependencies [1fd22cc] + - @mysten/sui@1.9.0 + - @mysten/zklogin@0.7.19 + ## 0.4.1 ### Patch Changes diff --git a/sdk/enoki/package.json b/sdk/enoki/package.json index 299c302a53840..a7bc8770aba1e 100644 --- a/sdk/enoki/package.json +++ b/sdk/enoki/package.json @@ -1,6 +1,6 @@ { "name": "@mysten/enoki", - "version": "0.4.1", + "version": "0.4.2", "description": "TODO: Description", "license": "Apache-2.0", "author": "Mysten Labs ", diff --git a/sdk/graphql-transport/CHANGELOG.md b/sdk/graphql-transport/CHANGELOG.md index 63ba4a24502aa..fa2fd7fc4575b 100644 --- a/sdk/graphql-transport/CHANGELOG.md +++ b/sdk/graphql-transport/CHANGELOG.md @@ -1,5 +1,13 @@ # @mysten/graphql-transport +## 0.2.18 + +### Patch Changes + +- Updated dependencies [2c96b06] +- Updated dependencies [1fd22cc] + - @mysten/sui@1.9.0 + ## 0.2.17 ### Patch Changes diff --git a/sdk/graphql-transport/package.json b/sdk/graphql-transport/package.json index 53b138b4a679b..68dc3e4320960 100644 --- a/sdk/graphql-transport/package.json +++ b/sdk/graphql-transport/package.json @@ -1,6 +1,6 @@ { "name": "@mysten/graphql-transport", - "version": "0.2.17", + "version": "0.2.18", "description": "A GraphQL transport to allow SuiClient to work with RPC 2.0", "license": "Apache-2.0", "author": "Mysten Labs ", diff --git a/sdk/graphql-transport/src/methods.ts b/sdk/graphql-transport/src/methods.ts index 4525d68762217..011e965897b84 100644 --- a/sdk/graphql-transport/src/methods.ts +++ b/sdk/graphql-transport/src/methods.ts @@ -15,6 +15,7 @@ import { normalizeStructTag, normalizeSuiAddress, parseStructTag } from '@mysten import type { ObjectFilter, QueryEventsQueryVariables, + QueryTransactionBlocksQueryVariables, Rpc_Checkpoint_FieldsFragment, Rpc_Transaction_FieldsFragment, } from './generated/queries.js'; @@ -580,7 +581,7 @@ export const RPC_METHODS: { async queryTransactionBlocks(transport, [{ filter, options }, cursor, limit = 20, descending]) { - const pagination = descending + const pagination: Partial<QueryTransactionBlocksQueryVariables> = descending ?
{ last: limit, before: cursor, diff --git a/sdk/kiosk/CHANGELOG.md b/sdk/kiosk/CHANGELOG.md index 92688a390a8b3..992397fc194b7 100644 --- a/sdk/kiosk/CHANGELOG.md +++ b/sdk/kiosk/CHANGELOG.md @@ -1,5 +1,13 @@ # @mysten/kiosk +## 0.9.18 + +### Patch Changes + +- Updated dependencies [2c96b06] +- Updated dependencies [1fd22cc] + - @mysten/sui@1.9.0 + ## 0.9.17 ### Patch Changes diff --git a/sdk/kiosk/package.json b/sdk/kiosk/package.json index 5afa44994c182..57e298ee922d8 100644 --- a/sdk/kiosk/package.json +++ b/sdk/kiosk/package.json @@ -2,7 +2,7 @@ "name": "@mysten/kiosk", "author": "Mysten Labs ", "description": "Sui Kiosk library", - "version": "0.9.17", + "version": "0.9.18", "license": "Apache-2.0", "type": "commonjs", "main": "./dist/cjs/index.js", diff --git a/sdk/suins-toolkit/CHANGELOG.md b/sdk/suins-toolkit/CHANGELOG.md index 83644873967d0..09c034761eaa1 100644 --- a/sdk/suins-toolkit/CHANGELOG.md +++ b/sdk/suins-toolkit/CHANGELOG.md @@ -1,5 +1,13 @@ # @mysten/suins-toolkit +## 0.5.18 + +### Patch Changes + +- Updated dependencies [2c96b06] +- Updated dependencies [1fd22cc] + - @mysten/sui@1.9.0 + ## 0.5.17 ### Patch Changes diff --git a/sdk/suins-toolkit/package.json b/sdk/suins-toolkit/package.json index 21ba201e53c8a..260a1fd686ed5 100644 --- a/sdk/suins-toolkit/package.json +++ b/sdk/suins-toolkit/package.json @@ -2,7 +2,7 @@ "name": "@mysten/suins-toolkit", "author": "Mysten Labs ", "description": "SuiNS TypeScript SDK", - "version": "0.5.17", + "version": "0.5.18", "license": "Apache-2.0", "type": "commonjs", "main": "./dist/cjs/index.js", diff --git a/sdk/typescript/CHANGELOG.md b/sdk/typescript/CHANGELOG.md index c960c14e6123a..34ab44e579cfb 100644 --- a/sdk/typescript/CHANGELOG.md +++ b/sdk/typescript/CHANGELOG.md @@ -1,5 +1,12 @@ # @mysten/sui.js +## 1.9.0 + +### Minor Changes + +- 2c96b06: Adds experimental named packages plugin +- 1fd22cc: Require name to register global transaction plugins + ## 1.8.0 ### Minor Changes diff --git a/sdk/typescript/package.json b/sdk/typescript/package.json index d135c3d72a43d..344f45a949b17 100644 --- a/sdk/typescript/package.json +++ b/sdk/typescript/package.json @@ -3,7 +3,7 @@ "author": "Mysten Labs ", "description": "Sui TypeScript API(Work in Progress)", "homepage": "https://sdk.mystenlabs.com", - "version": "1.8.0", + "version": "1.9.0", "license": "Apache-2.0", "sideEffects": false, "files": [ diff --git a/sdk/typescript/src/transactions/Transaction.ts b/sdk/typescript/src/transactions/Transaction.ts index 30a7723beeb01..ee599f4d4b77b 100644 --- a/sdk/typescript/src/transactions/Transaction.ts +++ b/sdk/typescript/src/transactions/Transaction.ts @@ -99,19 +99,23 @@ export function isTransaction(obj: unknown): obj is Transaction { export type TransactionObjectInput = string | CallArg | TransactionObjectArgument; -const modulePluginRegistry = { - buildPlugins: [] as TransactionPlugin[], - serializationPlugins: [] as TransactionPlugin[], +interface TransactionPluginRegistry { + // eslint-disable-next-line @typescript-eslint/ban-types + buildPlugins: Map<string | Function, TransactionPlugin>; + // eslint-disable-next-line @typescript-eslint/ban-types + serializationPlugins: Map<string | Function, TransactionPlugin>; +} + +const modulePluginRegistry: TransactionPluginRegistry = { + buildPlugins: new Map(), + serializationPlugins: new Map(), }; const TRANSACTION_REGISTRY_KEY = Symbol.for('@mysten/transaction/registry'); function getGlobalPluginRegistry() { try { const target = globalThis as { - [TRANSACTION_REGISTRY_KEY]?: { - buildPlugins: TransactionPlugin[]; - serializationPlugins: TransactionPlugin[]; -
}; + [TRANSACTION_REGISTRY_KEY]?: TransactionPluginRegistry; }; if (!target[TRANSACTION_REGISTRY_KEY]) { @@ -168,12 +172,38 @@ export class Transaction { return newTransaction; } - static registerGlobalSerializationPlugin(step: TransactionPlugin) { - getGlobalPluginRegistry().serializationPlugins.push(step); + /** @deprecated global plugins should be registered with a name */ + static registerGlobalSerializationPlugin(step: TransactionPlugin): void; + static registerGlobalSerializationPlugin(name: string, step: TransactionPlugin): void; + static registerGlobalSerializationPlugin( + stepOrStep: TransactionPlugin | string, + step?: TransactionPlugin, + ) { + getGlobalPluginRegistry().serializationPlugins.set( + stepOrStep, + step ?? (stepOrStep as TransactionPlugin), + ); + } + + static unregisterGlobalSerializationPlugin(name: string) { + getGlobalPluginRegistry().serializationPlugins.delete(name); + } + + /** @deprecated global plugins should be registered with a name */ + static registerGlobalBuildPlugin(step: TransactionPlugin): void; + static registerGlobalBuildPlugin(name: string, step: TransactionPlugin): void; + static registerGlobalBuildPlugin( + stepOrStep: TransactionPlugin | string, + step?: TransactionPlugin, + ) { + getGlobalPluginRegistry().buildPlugins.set( + stepOrStep, + step ?? (stepOrStep as TransactionPlugin), + ); } - static registerGlobalBuildPlugin(step: TransactionPlugin) { - getGlobalPluginRegistry().buildPlugins.push(step); + static unregisterGlobalBuildPlugin(name: string) { + getGlobalPluginRegistry().buildPlugins.delete(name); } addSerializationPlugin(step: TransactionPlugin) { @@ -277,8 +307,8 @@ export class Transaction { constructor() { const globalPlugins = getGlobalPluginRegistry(); this.#data = new TransactionDataBuilder(); - this.#buildPlugins = [...globalPlugins.buildPlugins]; - this.#serializationPlugins = [...globalPlugins.serializationPlugins]; + this.#buildPlugins = [...globalPlugins.buildPlugins.values()]; + this.#serializationPlugins = [...globalPlugins.serializationPlugins.values()]; } /** Returns an argument for the gas coin, to be used in a transaction. */ diff --git a/sdk/typescript/src/transactions/index.ts b/sdk/typescript/src/transactions/index.ts index 92dae65381206..26699bdeee1cf 100644 --- a/sdk/typescript/src/transactions/index.ts +++ b/sdk/typescript/src/transactions/index.ts @@ -47,3 +47,10 @@ export type { } from './json-rpc-resolver.js'; export { Arguments } from './Arguments.js'; + +export { + namedPackagesPlugin, + type NamedPackagesPluginOptions, +} from './plugins/NamedPackagesPlugin.js'; + +export { type NamedPackagesPluginCache } from './plugins/utils.js'; diff --git a/sdk/typescript/src/transactions/plugins/NamedPackagesPlugin.ts b/sdk/typescript/src/transactions/plugins/NamedPackagesPlugin.ts new file mode 100644 index 0000000000000..eb6d0e2841daa --- /dev/null +++ b/sdk/typescript/src/transactions/plugins/NamedPackagesPlugin.ts @@ -0,0 +1,129 @@ +// Copyright (c) Mysten Labs, Inc. +// SPDX-License-Identifier: Apache-2.0 + +import type { SuiGraphQLClient } from '../../graphql/client.js'; +import type { BuildTransactionOptions } from '../json-rpc-resolver.js'; +import type { TransactionDataBuilder } from '../TransactionData.js'; +import type { NamedPackagesPluginCache, NameResolutionRequest } from './utils.js'; +import { findTransactionBlockNames, listToRequests, replaceNames } from './utils.js'; + +export type NamedPackagesPluginOptions = { + /** + * The SuiGraphQLClient to use for resolving names. 
+ * The endpoint should be the GraphQL endpoint of the network you are targeting. + * For non-mainnet networks, if the plugin doesn't work as expected, you need to validate that the + * RPC provider has support for the `packageByName` and `typeByName` queries (using external resolver). + */ + suiGraphQLClient: SuiGraphQLClient; + /** + * The number of names to resolve in each batch request. + * Needs to be calculated based on the GraphQL query limits. + */ + pageSize?: number; + /** + * Local overrides for the resolution plugin. Pass this to pre-populate + * the cache with known packages / types (especially useful for local or CI testing). + * + * Expected format example: + * { + * packages: { + * 'std@framework': '0x1234', + * }, + * types: { + * 'std@framework::string::String': '0x1234::string::String', + * }, + * } + * + */ + overrides?: NamedPackagesPluginCache; +}; + +/** + * @experimental This plugin is in experimental phase and there might be breaking changes in the future + * + * Adds named resolution so that you can use .move names in your transactions. + * e.g. `app@org::type::Type` will be resolved to `0x1234::type::Type`. + * This plugin will resolve all names & types in the transaction block. + * + * To install this plugin globally in your app, use: + * ``` + * Transaction.registerGlobalSerializationPlugin("namedPackagesPlugin", namedPackagesPlugin({ suiGraphQLClient })); + * ``` + * + * You can also define `overrides` to pre-populate name resolutions locally (removes the GraphQL request). + */ +export const namedPackagesPlugin = ({ + suiGraphQLClient, + pageSize = 10, + overrides = { packages: {}, types: {} }, +}: NamedPackagesPluginOptions) => { + const cache = { + packages: { ...overrides.packages }, + types: { ...overrides.types }, + }; + + return async ( + transactionData: TransactionDataBuilder, + _buildOptions: BuildTransactionOptions, + next: () => Promise<void>, + ) => { + const names = findTransactionBlockNames(transactionData); + const batches = listToRequests( + { + packages: names.packages.filter((x) => !cache.packages[x]), + types: names.types.filter((x) => !cache.types[x]), + }, + pageSize, + ); + + // now we need to bulk resolve all the names + types, and replace them in the transaction data. + (await Promise.all(batches.map((batch) => query(suiGraphQLClient, batch)))).forEach((res) => { + Object.assign(cache.types, res.types); + Object.assign(cache.packages, res.packages); + }); + + replaceNames(transactionData, cache); + + await next(); + }; + + async function query(client: SuiGraphQLClient, requests: NameResolutionRequest[]) { + const results: NamedPackagesPluginCache = { packages: {}, types: {} }; + // avoid making a request if there are no names to resolve. + if (requests.length === 0) return results; + + // Create multiple queries for each name / type we need to resolve + // TODO: Replace with bulk APIs when available. + const gqlQuery = `{ + ${requests.map((req) => { + const request = req.type === 'package' ? 'packageByName' : 'typeByName'; + const fields = req.type === 'package' ?
'address' : 'repr'; + + return `${gqlQueryKey(req.id)}: ${request}(name:"${req.name}") { + ${fields} + }`; + })} + }`; + + const result = await client.query({ + query: gqlQuery, + variables: undefined, + }); + + if (result.errors) throw new Error(JSON.stringify({ query: gqlQuery, errors: result.errors })); + + // Parse the results and create a map of `<name> -> <address>` + for (const req of requests) { + const key = gqlQueryKey(req.id); + if (!result.data || !result.data[key]) throw new Error(`No result found for: ${req.name}`); + const data = result.data[key] as { address?: string; repr?: string }; + + if (req.type === 'package') results.packages[req.name] = data.address!; + if (req.type === 'moveType') results.types[req.name] = data.repr!; + } + + return results; + } +}; + +const gqlQueryKey = (idx: number) => `key_${idx}`; diff --git a/sdk/typescript/src/transactions/plugins/utils.ts b/sdk/typescript/src/transactions/plugins/utils.ts new file mode 100644 index 0000000000000..cc08781cfb9d2 --- /dev/null +++ b/sdk/typescript/src/transactions/plugins/utils.ts @@ -0,0 +1,118 @@ +// Copyright (c) Mysten Labs, Inc. +// SPDX-License-Identifier: Apache-2.0 + +import { isValidNamedPackage, isValidNamedType } from '../../utils/move-registry.js'; +import type { TransactionDataBuilder } from '../TransactionData.js'; + +export type NamedPackagesPluginCache = { + packages: Record<string, string>; + types: Record<string, string>; +}; + +const NAME_SEPARATOR = '@'; + +export type NameResolutionRequest = { + id: number; + type: 'package' | 'moveType'; + name: string; +}; + +/** + * Looks up all `.move` names in a transaction block. + * Returns a list of all the names found. + */ +export const findTransactionBlockNames = ( + builder: TransactionDataBuilder, +): { packages: string[]; types: string[] } => { + const packages: Set<string> = new Set(); + const types: Set<string> = new Set(); + + for (const command of builder.commands) { + if (!('MoveCall' in command)) continue; + const tx = command.MoveCall; + + if (!tx) continue; + + const pkg = tx.package.split('::')[0]; + if (pkg.includes(NAME_SEPARATOR)) { + if (!isValidNamedPackage(pkg)) throw new Error(`Invalid package name: ${pkg}`); + packages.add(pkg); + } + + for (const type of tx.typeArguments ?? []) { + if (type.includes(NAME_SEPARATOR)) { + if (!isValidNamedType(type)) throw new Error(`Invalid type with names: ${type}`); + types.add(type); + } + } + } + + return { + packages: [...packages], + types: [...types], + }; +}; + +/** + * Replace all names & types in a transaction block + * with their resolved names/types.
+ */ +export const replaceNames = (builder: TransactionDataBuilder, cache: NamedPackagesPluginCache) => { + for (const command of builder.commands) { + const tx = command.MoveCall; + if (!tx) continue; + + const nameParts = tx.package.split('::'); + const name = nameParts[0]; + + if (name.includes(NAME_SEPARATOR) && !cache.packages[name]) + throw new Error(`No address found for package: ${name}`); + + nameParts[0] = cache.packages[name]; + tx.package = nameParts.join('::'); + + const types = tx.typeArguments; + if (!types) continue; + + for (let i = 0; i < types.length; i++) { + if (!types[i].includes(NAME_SEPARATOR)) continue; + + if (!cache.types[types[i]]) throw new Error(`No resolution found for type: ${types[i]}`); + types[i] = cache.types[types[i]]; + } + + tx.typeArguments = types; + } +}; + +export const listToRequests = ( + names: { packages: string[]; types: string[] }, + batchSize: number, +): NameResolutionRequest[][] => { + const results: NameResolutionRequest[] = []; + const uniqueNames = deduplicate(names.packages); + const uniqueTypes = deduplicate(names.types); + + for (const [idx, name] of uniqueNames.entries()) { + results.push({ id: idx, type: 'package', name } as NameResolutionRequest); + } + for (const [idx, type] of uniqueTypes.entries()) { + results.push({ + id: idx + uniqueNames.length, + type: 'moveType', + name: type, + } as NameResolutionRequest); + } + + return batch(results, batchSize); +}; + +const deduplicate = <T>(arr: T[]): T[] => [...new Set(arr)]; + +const batch = <T>(arr: T[], size: number): T[][] => { + const batches = []; + for (let i = 0; i < arr.length; i += size) { + batches.push(arr.slice(i, i + size)); + } + return batches; +}; diff --git a/sdk/typescript/src/utils/index.ts b/sdk/typescript/src/utils/index.ts index 8912ee344abd2..aa6bf138653fa 100644 --- a/sdk/typescript/src/utils/index.ts +++ b/sdk/typescript/src/utils/index.ts @@ -27,3 +27,5 @@ export { SUI_TYPE_ARG, SUI_SYSTEM_STATE_OBJECT_ID, } from './constants.js'; + +export { isValidNamedPackage, isValidNamedType } from './move-registry.js'; diff --git a/sdk/typescript/src/utils/move-registry.ts b/sdk/typescript/src/utils/move-registry.ts new file mode 100644 index 0000000000000..c374ef695a3a6 --- /dev/null +++ b/sdk/typescript/src/utils/move-registry.ts @@ -0,0 +1,24 @@ +// Copyright (c) Mysten Labs, Inc. +// SPDX-License-Identifier: Apache-2.0 + +/** The pattern to find an optionally versioned name */ +const NAME_PATTERN = /^([a-z0-9]+(?:-[a-z0-9]+)*)@([a-z0-9]+(?:-[a-z0-9]+)*)(?:\/v(\d+))?$/; + +export const isValidNamedPackage = (name: string): boolean => { + return NAME_PATTERN.test(name); +}; + +/** + * Checks if a type contains valid named packages. + * This DOES NOT check if the type is a valid Move type. + */ +export const isValidNamedType = (type: string): boolean => { + // split our type by all possible type delimiters. + const splitType = type.split(/::|<|>|,/); + for (const t of splitType) { + if (t.includes('@') && !isValidNamedPackage(t)) return false; + } + // TODO: Add `isValidStructTag` check once + // it's generally introduced. + return true; +}; diff --git a/sdk/typescript/src/version.ts b/sdk/typescript/src/version.ts index 1da0d1b6b7ba5..c8cd520ecb877 100644 --- a/sdk/typescript/src/version.ts +++ b/sdk/typescript/src/version.ts @@ -3,5 +3,5 @@ // This file is generated by genversion.mjs. Do not edit it directly.
-export const PACKAGE_VERSION = '1.8.0'; +export const PACKAGE_VERSION = '1.9.0'; export const TARGETED_RPC_VERSION = '1.34.0'; diff --git a/sdk/typescript/test/e2e/named-packages-plugin.test.ts b/sdk/typescript/test/e2e/named-packages-plugin.test.ts new file mode 100644 index 0000000000000..c2c18c02d4dbb --- /dev/null +++ b/sdk/typescript/test/e2e/named-packages-plugin.test.ts @@ -0,0 +1,64 @@ +// Copyright (c) Mysten Labs, Inc. +// SPDX-License-Identifier: Apache-2.0 + +import { describe, expect, it } from 'vitest'; + +import { SuiGraphQLClient } from '../../src/graphql/client'; +import { namedPackagesPlugin, Transaction } from '../../src/transactions'; +import { normalizeSuiAddress } from '../../src/utils'; + +Transaction.registerGlobalSerializationPlugin( + 'namedPackagesPlugin', + namedPackagesPlugin({ + suiGraphQLClient: new SuiGraphQLClient({ + url: 'http://127.0.0.1:9125', + }), + overrides: { + packages: { + 'std@framework': '0x1', + 'std@framework/v1': '0x1', + }, + types: { + 'std@framework::string::String': '0x1::string::String', + 'std@framework::vector::empty<std@framework::string::String>': + '0x1::vector::empty<0x1::string::String>', + }, + }, + }), +); + +describe('Name Resolution Plugin (.move)', () => { + it('Should replace names in a given PTB', async () => { + const transaction = new Transaction(); + + // replace .move names properly + transaction.moveCall({ + target: 'std@framework::string::utf8', + arguments: [transaction.pure.string('Hello, world!')], + }); + + // replace type args properly + transaction.moveCall({ + target: 'std@framework::vector::empty', + typeArguments: ['std@framework::string::String'], + }); + + // replace nested type args properly + transaction.moveCall({ + target: 'std@framework/v1::vector::empty', + typeArguments: ['std@framework::vector::empty<std@framework::string::String>'], + }); + + const json = JSON.parse(await transaction.toJSON()); + + expect(json.commands[0].MoveCall.package).toBe(normalizeSuiAddress('0x1')); + expect(json.commands[1].MoveCall.typeArguments[0]).toBe(`0x1::string::String`); + expect(json.commands[2].MoveCall.package).toBe(normalizeSuiAddress('0x1')); + expect(json.commands[2].MoveCall.typeArguments[0]).toBe( + `0x1::vector::empty<0x1::string::String>`, + ); + }); + + // TODO: Add some tests utilizing live GraphQL Queries (mainnet / testnet), + // not just overrides. +}); diff --git a/sdk/typescript/test/unit/utils/move-registry.test.ts b/sdk/typescript/test/unit/utils/move-registry.test.ts new file mode 100644 index 0000000000000..51d4ad3cbe0a8 --- /dev/null +++ b/sdk/typescript/test/unit/utils/move-registry.test.ts @@ -0,0 +1,54 @@ +// Copyright (c) Mysten Labs, Inc. +// SPDX-License-Identifier: Apache-2.0 + +import { describe, expect, test } from 'vitest'; + +import { isValidNamedPackage, isValidNamedType } from '../../../src/utils'; + +describe('isValidNamedPackage', () => { + test('Valid/Invalid .move names', () => { + expect(isValidNamedPackage('app@org')).toBe(true); + expect(isValidNamedPackage('app@org/v1')).toBe(true); + expect(isValidNamedPackage('app@org/v123')).toBe(true); + expect(isValidNamedPackage('app-test@org/v1')).toBe(true); + expect(isValidNamedPackage('1app@org/v1')).toBe(true); + expect(isValidNamedPackage('1-app@org/v1')).toBe(true); + expect(isValidNamedPackage('test@o-rg/v1')).toBe(true); + + // failed scenarios.
+ expect(isValidNamedPackage('app.test@org/v123')).toBe(false); + expect(isValidNamedPackage('app@org/v')).toBe(false); + expect(isValidNamedPackage('app@org/v1/')).toBe(false); + expect(isValidNamedPackage('app@org/v1.')).toBe(false); + expect(isValidNamedPackage('app@org/v1.2')).toBe(false); + expect(isValidNamedPackage('app@org/v1.2.3')).toBe(false); + expect(isValidNamedPackage('@org/v1')).toBe(false); + expect(isValidNamedPackage('-org/v1')).toBe(false); + expect(isValidNamedPackage('.org/v1')).toBe(false); + expect(isValidNamedPackage('org/v1')).toBe(false); + expect(isValidNamedPackage('-test@1org/v1')).toBe(false); + expect(isValidNamedPackage('test@-org/v1')).toBe(false); + expect(isValidNamedPackage('test@o--rg/v1')).toBe(false); + expect(isValidNamedPackage('test@o-rg/v1-')).toBe(false); + expect(isValidNamedPackage('test@o-rg/v1@')).toBe(false); + expect(isValidNamedPackage('tes--t@org/v1')).toBe(false); + }); + + test('Valid/Invalid .move types', () => { + expect(isValidNamedType('app@org::string::String')).toBe(true); + expect(isValidNamedType('app@org/v1::string::String')).toBe(true); + expect(isValidNamedType('app@org/v123::string::String')).toBe(true); + expect(isValidNamedType('app-test@org/v1::string::String')).toBe(true); + expect(isValidNamedType('1app@org/v1::string::String')).toBe(true); + expect(isValidNamedType('1-app@org/v1::string::String')).toBe(true); + + // failed scenarios. + expect(isValidNamedType('app.test@org/v123::string::String')).toBe(false); + expect(isValidNamedType('app@org/v::string::String')).toBe(false); + expect(isValidNamedType('app@org/v1/::string::String')).toBe(false); + expect(isValidNamedType('app@org/v1.::string::String')).toBe(false); + expect(isValidNamedType('app@org/v1.2::string::String')).toBe(false); + expect(isValidNamedType('--app@org::string::String')).toBe(false); + expect(isValidNamedType('ap--p@org::string::String')).toBe(false); + }); +}); diff --git a/sdk/wallet-standard/CHANGELOG.md b/sdk/wallet-standard/CHANGELOG.md index 8259b66bc435e..dea2f51549338 100644 --- a/sdk/wallet-standard/CHANGELOG.md +++ b/sdk/wallet-standard/CHANGELOG.md @@ -1,5 +1,13 @@ # @mysten/wallet-standard +## 0.13.4 + +### Patch Changes + +- Updated dependencies [2c96b06] +- Updated dependencies [1fd22cc] + - @mysten/sui@1.9.0 + ## 0.13.3 ### Patch Changes diff --git a/sdk/wallet-standard/package.json b/sdk/wallet-standard/package.json index 113f17ea4b65d..847a9d0dbfeed 100644 --- a/sdk/wallet-standard/package.json +++ b/sdk/wallet-standard/package.json @@ -1,6 +1,6 @@ { "name": "@mysten/wallet-standard", - "version": "0.13.3", + "version": "0.13.4", "description": "A suite of standard utilities for implementing wallets based on the Wallet Standard.", "license": "Apache-2.0", "author": "Mysten Labs ", diff --git a/sdk/zklogin/CHANGELOG.md b/sdk/zklogin/CHANGELOG.md index 6621b4e04d6e3..785be11d28536 100644 --- a/sdk/zklogin/CHANGELOG.md +++ b/sdk/zklogin/CHANGELOG.md @@ -1,5 +1,13 @@ # @mysten/zklogin +## 0.7.19 + +### Patch Changes + +- Updated dependencies [2c96b06] +- Updated dependencies [1fd22cc] + - @mysten/sui@1.9.0 + ## 0.7.18 ### Patch Changes diff --git a/sdk/zklogin/package.json b/sdk/zklogin/package.json index b39b3afaf539a..c05f010f35975 100644 --- a/sdk/zklogin/package.json +++ b/sdk/zklogin/package.json @@ -1,6 +1,6 @@ { "name": "@mysten/zklogin", - "version": "0.7.18", + "version": "0.7.19", "description": "Utilities for interacting with zkLogin in Sui", "license": "Apache-2.0", "author": "Mysten Labs ", diff --git 
a/sdk/zksend/CHANGELOG.md b/sdk/zksend/CHANGELOG.md index 511a5de452b16..d59ecfa1fecd1 100644 --- a/sdk/zksend/CHANGELOG.md +++ b/sdk/zksend/CHANGELOG.md @@ -1,5 +1,20 @@ # @mysten/zksend +## 0.11.3 + +### Patch Changes + +- 0db770a: Check transaction status when creating and claiming transactions + +## 0.11.2 + +### Patch Changes + +- Updated dependencies [2c96b06] +- Updated dependencies [1fd22cc] + - @mysten/sui@1.9.0 + - @mysten/wallet-standard@0.13.4 + ## 0.11.1 ### Patch Changes diff --git a/sdk/zksend/package.json b/sdk/zksend/package.json index 89da28fbd83d1..4879e88c5831c 100644 --- a/sdk/zksend/package.json +++ b/sdk/zksend/package.json @@ -1,6 +1,6 @@ { "name": "@mysten/zksend", - "version": "0.11.1", + "version": "0.11.3", "description": "TODO: Write Description", "license": "Apache-2.0", "author": "Mysten Labs ", diff --git a/sdk/zksend/src/links/builder.ts b/sdk/zksend/src/links/builder.ts index 39991e4c55a32..d1c10feb40e12 100644 --- a/sdk/zksend/src/links/builder.ts +++ b/sdk/zksend/src/links/builder.ts @@ -136,8 +136,15 @@ export class ZkSendLinkBuilder { const result = await this.#client.signAndExecuteTransaction({ transaction: await tx.build({ client: this.#client }), signer, + options: { + showEffects: true, + }, }); + if (result.effects?.status.status !== 'success') { + throw new Error(`Transaction failed: ${result.effects?.status.error ?? 'Unknown error'}`); + } + if (options.waitForTransaction) { await this.#client.waitForTransaction({ digest: result.digest }); } diff --git a/sdk/zksend/src/links/claim.ts b/sdk/zksend/src/links/claim.ts index e8545e8302b7d..d80d6739bcfb2 100644 --- a/sdk/zksend/src/links/claim.ts +++ b/sdk/zksend/src/links/claim.ts @@ -240,7 +240,18 @@ export class ZkSendLink { const { digest } = await this.#executeSponsoredTransaction(sponsored, signature); - return this.#client.waitForTransaction({ digest }); + const result = await this.#client.waitForTransaction({ + digest, + options: { showEffects: true }, + }); + + if (result.effects?.status.status !== 'success') { + throw new Error( + `Claim transaction failed: ${result.effects?.status.error ?? 
'Unknown error'}`, + ); + } + + return result; + } createClaimTransaction( diff --git a/sui-execution/latest/sui-adapter/src/adapter.rs b/sui-execution/latest/sui-adapter/src/adapter.rs index a752f280a7840..c1856234639c7 100644 --- a/sui-execution/latest/sui-adapter/src/adapter.rs +++ b/sui-execution/latest/sui-adapter/src/adapter.rs @@ -72,6 +72,7 @@ mod checked { binary_config: to_binary_config(protocol_config), rethrow_serialization_type_layout_errors: protocol_config .rethrow_serialization_type_layout_errors(), + max_type_to_layout_nodes: protocol_config.max_type_to_layout_nodes_as_option(), }, ) .map_err(|_| SuiError::ExecutionInvariantViolation) diff --git a/sui-execution/latest/sui-move-natives/src/crypto/groth16.rs b/sui-execution/latest/sui-move-natives/src/crypto/groth16.rs index 20948d44ae02e..f1df54dfd9085 100644 --- a/sui-execution/latest/sui-move-natives/src/crypto/groth16.rs +++ b/sui-execution/latest/sui-move-natives/src/crypto/groth16.rs @@ -169,8 +169,8 @@ pub fn verify_groth16_proof_internal( .groth16_verify_groth16_proof_internal_bls12381_cost_base, groth16_verify_groth16_proof_internal_cost_params .groth16_verify_groth16_proof_internal_bls12381_cost_per_public_input, - (public_proof_inputs.len() + fastcrypto_zkp::bls12381::conversions::SCALAR_SIZE - 1) - / fastcrypto_zkp::bls12381::conversions::SCALAR_SIZE, + (public_proof_inputs.len() + fastcrypto::groups::bls12381::SCALAR_LENGTH - 1) + / fastcrypto::groups::bls12381::SCALAR_LENGTH, ), BN254 => ( groth16_verify_groth16_proof_internal_cost_params @@ -202,7 +202,7 @@ pub fn verify_groth16_proof_internal( let result; if curve == BLS12381 { if public_proof_inputs.len() - > fastcrypto_zkp::bls12381::conversions::SCALAR_SIZE * MAX_PUBLIC_INPUTS + > fastcrypto::groups::bls12381::SCALAR_LENGTH * MAX_PUBLIC_INPUTS { return Ok(NativeResult::err(cost, TOO_MANY_PUBLIC_INPUTS)); } diff --git a/sui-execution/latest/sui-move-natives/src/test_scenario.rs b/sui-execution/latest/sui-move-natives/src/test_scenario.rs index 8ce8e2812ba88..e536c051a68b9 100644 --- a/sui-execution/latest/sui-move-natives/src/test_scenario.rs +++ b/sui-execution/latest/sui-move-natives/src/test_scenario.rs @@ -25,8 +25,9 @@ use move_vm_types::{ use smallvec::smallvec; use std::{ borrow::Borrow, + cell::RefCell, collections::{BTreeMap, BTreeSet, VecDeque}, - sync::RwLock, + thread::LocalKey, }; use sui_types::{ base_types::{ObjectID, SequenceNumber, SuiAddress}, @@ -54,7 +55,7 @@ type Set<K> = IndexSet<K>; /// allows this to be used by both the object runtime (for reading) and the test scenario (for /// writing) while hiding mutability.
#[derive(Tid)] -pub struct InMemoryTestStore(pub &'static RwLock<InMemoryStorage>); +pub struct InMemoryTestStore(pub &'static LocalKey<RefCell<InMemoryStorage>>); impl ChildObjectResolver for InMemoryTestStore { fn read_child_object( @@ -63,10 +64,8 @@ impl ChildObjectResolver for InMemoryTestStore { child: &ObjectID, child_version_upper_bound: SequenceNumber, ) -> sui_types::error::SuiResult<Option<Object>> { - self.0 - .read() - .unwrap() - .read_child_object(parent, child, child_version_upper_bound) + let l: &'static LocalKey<RefCell<InMemoryStorage>> = self.0; + l.with_borrow(|store| store.read_child_object(parent, child, child_version_upper_bound)) } fn get_object_received_at_version( @@ -76,12 +75,14 @@ impl ChildObjectResolver for InMemoryTestStore { receive_object_at_version: SequenceNumber, epoch_id: sui_types::committee::EpochId, ) -> sui_types::error::SuiResult<Option<Object>> { - self.0.read().unwrap().get_object_received_at_version( - owner, - receiving_object_id, - receive_object_at_version, - epoch_id, - ) + self.0.with_borrow(|store| { + store.get_object_received_at_version( + owner, + receiving_object_id, + receive_object_at_version, + epoch_id, + ) + }) } } @@ -109,12 +110,29 @@ pub fn end_transaction( // if true, we will "abort" let mut incorrect_shared_or_imm_handling = false; - let received = object_runtime_ref - .test_inventories - .allocated_tickets - .iter() - .map(|(k, (metadata, _))| (*k, metadata.clone())) - .collect(); + // Handle the allocated tickets: + // * Remove all allocated_tickets in the test inventories. + // * For each allocated ticket, if the ticket's object ID is loaded, move it to `received`. + // * Otherwise re-insert the allocated ticket into the objects inventory, and mark it to be + // removed from the backing storage (deferred due to needing to have access to `context` which + // has outstanding references at this point). + let allocated_tickets = + std::mem::take(&mut object_runtime_ref.test_inventories.allocated_tickets); + let mut received = BTreeMap::new(); + let mut unreceived = BTreeSet::new(); + let loaded_runtime_objects = object_runtime_ref.loaded_runtime_objects(); + for (id, (metadata, value)) in allocated_tickets { + if loaded_runtime_objects.contains_key(&id) { + received.insert(id, metadata); + } else { + unreceived.insert(id); + // This must be untouched since the allocated ticket is still live, so ok to re-insert. + object_runtime_ref + .test_inventories + .objects + .insert(id, value); + } + } let object_runtime_state = object_runtime_ref.take_state(); // Determine writes and deletes @@ -168,6 +186,7 @@ pub fn end_transaction( } inventories.taken.remove(id); } + // handle transfers, inserting transferred/written objects into their respective inventory let mut created = vec![]; let mut written = vec![]; @@ -212,6 +231,21 @@ pub fn end_transaction( } } } + + // For any unused allocated tickets, remove them from the store. + let store: &&InMemoryTestStore = context.extensions().get(); + for id in unreceived { + if store + .0 + .with_borrow_mut(|store| store.remove_object(id).is_none()) + { + return Ok(NativeResult::err( + context.gas_used(), + E_UNABLE_TO_DEALLOCATE_RECEIVING_TICKET, + )); + } + } + // deletions already handled above, but we drop the delete kind for the effects let mut deleted = vec![]; for id in deleted_object_ids { @@ -587,6 +621,7 @@ pub fn allocate_receiving_ticket_for_object( let ty = get_specified_ty(ty_args); let id = pop_id(&mut args)?; + let abilities = context.type_to_abilities(&ty)?; let Some((tag, layout, _)) = get_tag_and_layouts(context, &ty)?
else { return Ok(NativeResult::err( context.gas_used(), @@ -610,10 +645,11 @@ pub fn allocate_receiving_ticket_for_object( E_UNABLE_TO_ALLOCATE_RECEIVING_TICKET, )); }; + let has_public_transfer = abilities.has_store(); let move_object = unsafe { MoveObject::new_from_execution_with_limit( tag.into(), - false, + has_public_transfer, object_version, bytes, 250 * 1024, @@ -654,7 +690,7 @@ pub fn allocate_receiving_ticket_for_object( // NB: Must be a `&&` reference since the extension stores a static ref to the object storage. let store: &&InMemoryTestStore = context.extensions().get(); - store.0.write().unwrap().insert_object(object); + store.0.with_borrow_mut(|store| store.insert_object(object)); Ok(NativeResult::ok( legacy_test_cost(), @@ -685,7 +721,10 @@ pub fn deallocate_receiving_ticket_for_object( // Remove the object from storage. We should never hit this scenario either. let store: &&InMemoryTestStore = context.extensions().get(); - if store.0.write().unwrap().remove_object(id).is_none() { + if store + .0 + .with_borrow_mut(|store| store.remove_object(id).is_none()) + { return Ok(NativeResult::err( context.gas_used(), E_UNABLE_TO_DEALLOCATE_RECEIVING_TICKET, diff --git a/sui-execution/v0/sui-adapter/src/adapter.rs b/sui-execution/v0/sui-adapter/src/adapter.rs index 1733184068fe6..d95c13b547eb3 100644 --- a/sui-execution/v0/sui-adapter/src/adapter.rs +++ b/sui-execution/v0/sui-adapter/src/adapter.rs @@ -74,6 +74,7 @@ mod checked { binary_config: to_binary_config(protocol_config), rethrow_serialization_type_layout_errors: protocol_config .rethrow_serialization_type_layout_errors(), + max_type_to_layout_nodes: protocol_config.max_type_to_layout_nodes_as_option(), }, ) .map_err(|_| SuiError::ExecutionInvariantViolation) diff --git a/sui-execution/v0/sui-move-natives/src/crypto/groth16.rs b/sui-execution/v0/sui-move-natives/src/crypto/groth16.rs index 20948d44ae02e..f1df54dfd9085 100644 --- a/sui-execution/v0/sui-move-natives/src/crypto/groth16.rs +++ b/sui-execution/v0/sui-move-natives/src/crypto/groth16.rs @@ -169,8 +169,8 @@ pub fn verify_groth16_proof_internal( .groth16_verify_groth16_proof_internal_bls12381_cost_base, groth16_verify_groth16_proof_internal_cost_params .groth16_verify_groth16_proof_internal_bls12381_cost_per_public_input, - (public_proof_inputs.len() + fastcrypto_zkp::bls12381::conversions::SCALAR_SIZE - 1) - / fastcrypto_zkp::bls12381::conversions::SCALAR_SIZE, + (public_proof_inputs.len() + fastcrypto::groups::bls12381::SCALAR_LENGTH - 1) + / fastcrypto::groups::bls12381::SCALAR_LENGTH, ), BN254 => ( groth16_verify_groth16_proof_internal_cost_params @@ -202,7 +202,7 @@ pub fn verify_groth16_proof_internal( let result; if curve == BLS12381 { if public_proof_inputs.len() - > fastcrypto_zkp::bls12381::conversions::SCALAR_SIZE * MAX_PUBLIC_INPUTS + > fastcrypto::groups::bls12381::SCALAR_LENGTH * MAX_PUBLIC_INPUTS { return Ok(NativeResult::err(cost, TOO_MANY_PUBLIC_INPUTS)); } diff --git a/sui-execution/v1/sui-adapter/src/adapter.rs b/sui-execution/v1/sui-adapter/src/adapter.rs index dfcb584a05e70..8b72d30244853 100644 --- a/sui-execution/v1/sui-adapter/src/adapter.rs +++ b/sui-execution/v1/sui-adapter/src/adapter.rs @@ -74,6 +74,7 @@ mod checked { binary_config: to_binary_config(protocol_config), rethrow_serialization_type_layout_errors: protocol_config .rethrow_serialization_type_layout_errors(), + max_type_to_layout_nodes: protocol_config.max_type_to_layout_nodes_as_option(), }, ) .map_err(|_| SuiError::ExecutionInvariantViolation) diff --git 
a/sui-execution/v1/sui-move-natives/src/crypto/groth16.rs b/sui-execution/v1/sui-move-natives/src/crypto/groth16.rs index 20948d44ae02e..f1df54dfd9085 100644 --- a/sui-execution/v1/sui-move-natives/src/crypto/groth16.rs +++ b/sui-execution/v1/sui-move-natives/src/crypto/groth16.rs @@ -169,8 +169,8 @@ pub fn verify_groth16_proof_internal( .groth16_verify_groth16_proof_internal_bls12381_cost_base, groth16_verify_groth16_proof_internal_cost_params .groth16_verify_groth16_proof_internal_bls12381_cost_per_public_input, - (public_proof_inputs.len() + fastcrypto_zkp::bls12381::conversions::SCALAR_SIZE - 1) - / fastcrypto_zkp::bls12381::conversions::SCALAR_SIZE, + (public_proof_inputs.len() + fastcrypto::groups::bls12381::SCALAR_LENGTH - 1) + / fastcrypto::groups::bls12381::SCALAR_LENGTH, ), BN254 => ( groth16_verify_groth16_proof_internal_cost_params @@ -202,7 +202,7 @@ pub fn verify_groth16_proof_internal( let result; if curve == BLS12381 { if public_proof_inputs.len() - > fastcrypto_zkp::bls12381::conversions::SCALAR_SIZE * MAX_PUBLIC_INPUTS + > fastcrypto::groups::bls12381::SCALAR_LENGTH * MAX_PUBLIC_INPUTS { return Ok(NativeResult::err(cost, TOO_MANY_PUBLIC_INPUTS)); } diff --git a/sui-execution/v2/sui-adapter/src/adapter.rs b/sui-execution/v2/sui-adapter/src/adapter.rs index 483f0eca59928..687494b9d57af 100644 --- a/sui-execution/v2/sui-adapter/src/adapter.rs +++ b/sui-execution/v2/sui-adapter/src/adapter.rs @@ -72,6 +72,7 @@ mod checked { binary_config: to_binary_config(protocol_config), rethrow_serialization_type_layout_errors: protocol_config .rethrow_serialization_type_layout_errors(), + max_type_to_layout_nodes: protocol_config.max_type_to_layout_nodes_as_option(), }, ) .map_err(|_| SuiError::ExecutionInvariantViolation) diff --git a/sui-execution/v2/sui-move-natives/src/crypto/groth16.rs b/sui-execution/v2/sui-move-natives/src/crypto/groth16.rs index 20948d44ae02e..f1df54dfd9085 100644 --- a/sui-execution/v2/sui-move-natives/src/crypto/groth16.rs +++ b/sui-execution/v2/sui-move-natives/src/crypto/groth16.rs @@ -169,8 +169,8 @@ pub fn verify_groth16_proof_internal( .groth16_verify_groth16_proof_internal_bls12381_cost_base, groth16_verify_groth16_proof_internal_cost_params .groth16_verify_groth16_proof_internal_bls12381_cost_per_public_input, - (public_proof_inputs.len() + fastcrypto_zkp::bls12381::conversions::SCALAR_SIZE - 1) - / fastcrypto_zkp::bls12381::conversions::SCALAR_SIZE, + (public_proof_inputs.len() + fastcrypto::groups::bls12381::SCALAR_LENGTH - 1) + / fastcrypto::groups::bls12381::SCALAR_LENGTH, ), BN254 => ( groth16_verify_groth16_proof_internal_cost_params @@ -202,7 +202,7 @@ pub fn verify_groth16_proof_internal( let result; if curve == BLS12381 { if public_proof_inputs.len() - > fastcrypto_zkp::bls12381::conversions::SCALAR_SIZE * MAX_PUBLIC_INPUTS + > fastcrypto::groups::bls12381::SCALAR_LENGTH * MAX_PUBLIC_INPUTS { return Ok(NativeResult::err(cost, TOO_MANY_PUBLIC_INPUTS)); }
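To tie the SDK-side changes above together: the named-packages plugin is registered globally under a name (the now-required form introduced by `1fd22cc`), after which `.move` names can appear directly in `moveCall` targets and type arguments. A minimal usage sketch follows, based only on the APIs added in this diff; the GraphQL endpoint URL, the `app@org` name, and the `0x2` address are illustrative placeholders, and `overrides` is used so the sketch needs no network access.

```ts
import { SuiGraphQLClient } from '@mysten/sui/graphql';
import { namedPackagesPlugin, Transaction } from '@mysten/sui/transactions';

// Register once, globally, under a name. Unnamed registration still works but is
// deprecated by this change; the name is what makes unregistering possible later.
Transaction.registerGlobalSerializationPlugin(
	'namedPackagesPlugin',
	namedPackagesPlugin({
		// Assumed endpoint: any GraphQL RPC that serves the packageByName / typeByName queries.
		suiGraphQLClient: new SuiGraphQLClient({ url: 'https://example.com/graphql' }),
		// Pre-populated resolutions skip the GraphQL round-trip (useful for local or CI runs).
		overrides: {
			packages: { 'app@org': '0x2' },
			types: {},
		},
	}),
);

const tx = new Transaction();
// 'app@org' is resolved to '0x2' when the transaction is serialized or built.
tx.moveCall({ target: 'app@org::module::function' });

// Named registration allows cleanup, e.g. between test suites:
Transaction.unregisterGlobalSerializationPlugin('namedPackagesPlugin');
```

The same pattern applies to build plugins via `registerGlobalBuildPlugin` / `unregisterGlobalBuildPlugin`, since both registries were converted from arrays to keyed `Map`s in this change.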