From 090cb0b8fa5f1af1a7eaf57cf6eeebd062c78a1f Mon Sep 17 00:00:00 2001 From: David Murdoch <187813+davidmurdoch@users.noreply.github.com> Date: Fri, 2 Aug 2024 17:12:01 -0400 Subject: [PATCH] build: add alternative build process to enable faster developer builds (#22506) Closes https://github.com/MetaMask/MetaMask-planning/issues/1477 and https://github.com/MetaMask/MetaMask-planning/issues/1903 This commit adds an alternative build process that is much faster than the gulp build we have now, which is quite slow and doesn't make use of modern build system improvements. The speedup is made possible by using the SWC compiler and a more modern build system. The build system is also a bit simpler and hopefully more maintainable. This build doesn't yet support: * HMR/chromereload (requires that we get rid of our circular dependencies) * lavamoat (neither running the build system _in_ lavamoat, nor adding lavamoat protections) * production builds (because lavamoat is not yet supported) * MV3 (requires writing a webpack plugin) Co-authored-by: Howard Braham --- .circleci/config.yml | 49 + .depcheckrc.yml | 16 + .github/workflows/run-unit-tests.yml | 20 + .vscode/package.json-schema.json | 69 + ...ing-controller-npm-17.1.1-098cb41930.patch | 13 + ...aps-controllers-npm-8.4.0-574cd5a8a9.patch | 13 + ...reduxjs-toolkit-npm-1.9.7-b14925495c.patch | 26 + README.md | 3 +- app/background.html | 3 +- app/home.html | 4 +- app/loading.html | 8 +- app/manifest/v2/brave.json | 8 +- app/notification.html | 4 +- app/popup.html | 4 +- app/scripts/load/_initialize.ts | 28 + app/scripts/load/background.ts | 9 + app/scripts/load/ui.ts | 9 + app/scripts/lockdown-install.js | 6 + app/scripts/metamask-controller.js | 5 +- app/scripts/runtime-cjs.ts | 5 + app/scripts/sentry-install.js | 2 +- app/trezor-usb-permissions.html | 8 +- builds.yml | 1 + development/build/scripts.js | 17 +- development/build/static.js | 4 +- .../fitness-functions/common/constants.ts | 2 +- development/lib/run-command.js | 2 +- development/webpack/.eslintrc.js | 41 + development/webpack/README.md | 214 +++ development/webpack/build.ts | 62 + development/webpack/fork.mts | 27 + development/webpack/launch.ts | 181 +++ development/webpack/test/cli.test.ts | 83 ++ development/webpack/test/config.test.ts | 110 ++ .../complex/manifest/v2/_base.json | 4 + .../complex/manifest/v2/chrome.json | 1 + .../complex/manifest/v3/_base.json | 10 + .../complex/manifest/v3/chrome.json | 1 + .../empty/manifest/v2/_base.json | 3 + .../empty/manifest/v2/chrome.json | 1 + .../empty/manifest/v3/_base.json | 4 + .../empty/manifest/v3/chrome.json | 1 + development/webpack/test/fixtures/git/HEAD | 1 + .../4b/825dc642cb6eb9a060e54bf8d69288fbee4904 | Bin 0 -> 15 bytes .../63/4cad6dd342dc5317b6c1677dc9ead3fb72f680 | Bin 0 -> 149 bytes .../webpack/test/fixtures/git/refs/heads/main | 1 + development/webpack/test/git.test.ts | 33 + development/webpack/test/helpers.test.ts | 371 ++++++ development/webpack/test/helpers.ts | 115 ++ .../test/loaders.codeFenceLoader.test.ts | 100 ++ .../webpack/test/loaders.swcLoader.test.ts | 132 ++ .../test/plugins.ManifestPlugin.test.ts | 284 ++++ .../test/plugins.SelfInjectPlugin.test.ts | 95 ++ development/webpack/test/version.test.ts | 118 ++ .../webpack/test/webpack.config.test.ts | 307 +++++ development/webpack/types.ts | 34 + development/webpack/utils/cli.ts | 391 ++++++ development/webpack/utils/config.ts | 221 ++++ development/webpack/utils/git.ts | 60 + development/webpack/utils/helpers.ts | 234 ++++ 
.../webpack/utils/loaders/codeFenceLoader.ts | 59 + .../webpack/utils/loaders/swcLoader.ts | 208 +++ .../utils/plugins/ManifestPlugin/helpers.ts | 56 + .../utils/plugins/ManifestPlugin/index.ts | 351 +++++ .../utils/plugins/ManifestPlugin/schema.ts | 115 ++ .../utils/plugins/ManifestPlugin/types.ts | 94 ++ .../utils/plugins/SelfInjectPlugin/index.ts | 175 +++ .../utils/plugins/SelfInjectPlugin/schema.ts | 58 + .../utils/plugins/SelfInjectPlugin/types.ts | 49 + development/webpack/utils/version.ts | 70 + development/webpack/webpack.config.ts | 387 ++++++ jest.config.js | 1 + lavamoat/browserify/beta/policy.json | 717 +++++----- lavamoat/browserify/flask/policy.json | 717 +++++----- lavamoat/browserify/main/policy.json | 717 +++++----- lavamoat/browserify/mmi/policy.json | 721 +++++----- lavamoat/build-system/policy.json | 394 +++--- package.json | 57 +- tsconfig.json | 2 + yarn.lock | 1170 +++++++++++++++-- 80 files changed, 7974 insertions(+), 1722 deletions(-) create mode 100644 .yarn/patches/@metamask-keyring-controller-npm-17.1.1-098cb41930.patch create mode 100644 .yarn/patches/@metamask-snaps-controllers-npm-8.4.0-574cd5a8a9.patch create mode 100644 app/scripts/load/_initialize.ts create mode 100644 app/scripts/load/background.ts create mode 100644 app/scripts/load/ui.ts create mode 100644 app/scripts/lockdown-install.js create mode 100644 app/scripts/runtime-cjs.ts create mode 100644 development/webpack/.eslintrc.js create mode 100644 development/webpack/README.md create mode 100644 development/webpack/build.ts create mode 100644 development/webpack/fork.mts create mode 100755 development/webpack/launch.ts create mode 100644 development/webpack/test/cli.test.ts create mode 100644 development/webpack/test/config.test.ts create mode 100644 development/webpack/test/fixtures/ManifestPlugin/complex/manifest/v2/_base.json create mode 100644 development/webpack/test/fixtures/ManifestPlugin/complex/manifest/v2/chrome.json create mode 100644 development/webpack/test/fixtures/ManifestPlugin/complex/manifest/v3/_base.json create mode 100644 development/webpack/test/fixtures/ManifestPlugin/complex/manifest/v3/chrome.json create mode 100644 development/webpack/test/fixtures/ManifestPlugin/empty/manifest/v2/_base.json create mode 100644 development/webpack/test/fixtures/ManifestPlugin/empty/manifest/v2/chrome.json create mode 100644 development/webpack/test/fixtures/ManifestPlugin/empty/manifest/v3/_base.json create mode 100644 development/webpack/test/fixtures/ManifestPlugin/empty/manifest/v3/chrome.json create mode 100644 development/webpack/test/fixtures/git/HEAD create mode 100644 development/webpack/test/fixtures/git/objects/4b/825dc642cb6eb9a060e54bf8d69288fbee4904 create mode 100644 development/webpack/test/fixtures/git/objects/63/4cad6dd342dc5317b6c1677dc9ead3fb72f680 create mode 100644 development/webpack/test/fixtures/git/refs/heads/main create mode 100644 development/webpack/test/git.test.ts create mode 100644 development/webpack/test/helpers.test.ts create mode 100644 development/webpack/test/helpers.ts create mode 100644 development/webpack/test/loaders.codeFenceLoader.test.ts create mode 100644 development/webpack/test/loaders.swcLoader.test.ts create mode 100644 development/webpack/test/plugins.ManifestPlugin.test.ts create mode 100644 development/webpack/test/plugins.SelfInjectPlugin.test.ts create mode 100644 development/webpack/test/version.test.ts create mode 100644 development/webpack/test/webpack.config.test.ts create mode 100644 development/webpack/types.ts create mode 100644 
development/webpack/utils/cli.ts create mode 100644 development/webpack/utils/config.ts create mode 100644 development/webpack/utils/git.ts create mode 100644 development/webpack/utils/helpers.ts create mode 100644 development/webpack/utils/loaders/codeFenceLoader.ts create mode 100644 development/webpack/utils/loaders/swcLoader.ts create mode 100644 development/webpack/utils/plugins/ManifestPlugin/helpers.ts create mode 100644 development/webpack/utils/plugins/ManifestPlugin/index.ts create mode 100644 development/webpack/utils/plugins/ManifestPlugin/schema.ts create mode 100644 development/webpack/utils/plugins/ManifestPlugin/types.ts create mode 100644 development/webpack/utils/plugins/SelfInjectPlugin/index.ts create mode 100644 development/webpack/utils/plugins/SelfInjectPlugin/schema.ts create mode 100644 development/webpack/utils/plugins/SelfInjectPlugin/types.ts create mode 100644 development/webpack/utils/version.ts create mode 100644 development/webpack/webpack.config.ts diff --git a/.circleci/config.yml b/.circleci/config.yml index f25c1cd82288..678357183ac2 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -163,6 +163,9 @@ workflows: - prep-build-test-mv2: requires: - prep-deps + - prep-build-test-webpack: + requires: + - prep-deps - prep-build-test-flask: requires: - prep-deps @@ -193,6 +196,10 @@ workflows: - test-lint-changelog: requires: - prep-deps + - test-e2e-chrome-webpack: + requires: + - prep-build-test-webpack + - get-changed-files-with-git-diff - test-e2e-chrome: requires: - prep-build-test @@ -293,6 +300,7 @@ workflows: - test-e2e-chrome-mmi - test-e2e-chrome-rpc-mmi - test-e2e-chrome-vault-decryption + - test-e2e-chrome-webpack - test-storybook - benchmark: requires: @@ -904,6 +912,26 @@ jobs: - dist-test-mv2 - builds-test-mv2 + prep-build-test-webpack: + executor: node-linux-medium + steps: + - run: *shallow-git-clone + - attach_workspace: + at: . + - run: + name: Activate yarn + command: corepack enable + - run: + name: Build extension for testing + command: yarn build:test:webpack + - run: + name: Move test build to 'dist-test-webpack' to avoid conflict with production build + command: mv ./dist ./dist-test-webpack + - persist_to_workspace: + root: . + paths: + - dist-test-webpack + prep-build-storybook: executor: node-linux-medium steps: @@ -1045,6 +1073,27 @@ jobs: name: depcheck command: yarn depcheck + test-e2e-chrome-webpack: + executor: node-browsers-medium-plus + parallelism: 20 + steps: + - run: *shallow-git-clone + - run: sudo corepack enable + - attach_workspace: + at: . 
+ - run: + name: Move test build to dist + command: mv ./dist-test-webpack ./dist + - run: + name: test:e2e:chrome:webpack + command: timeout 20m yarn test:e2e:chrome:webpack --retries 1 + no_output_timeout: 5m + - store_artifacts: + path: test-artifacts + destination: test-artifacts + - store_test_results: + path: test/test-results/e2e + test-api-specs: executor: node-browsers-medium-plus steps: diff --git a/.depcheckrc.yml b/.depcheckrc.yml index e4169c5436f0..db1103c0e303 100644 --- a/.depcheckrc.yml +++ b/.depcheckrc.yml @@ -58,6 +58,22 @@ ignores: - 'resolve-url-loader' # jest environments - 'jest-environment-jsdom' + # webpack + - '@pmmmwh/react-refresh-webpack-plugin' # dev tool + - 'webpack-dev-server' # dev tool + - 'html-bundler-webpack-plugin' # build tool + - 'postcss-loader' # build tool + - '@swc/helpers' # build tool + - browserslist # build tool + - 'buffer' # polyfill + - 'crypto-browserify' # polyfill + - 'process' # polyfill + - 'stream-http' # polyfill + - 'rimraf' # misc: install helper + - 'json-schema-to-ts' # misc: typescript helper + - 'https-browserify' # polyfill + - 'path-browserify' # polyfill + - 'nyc' # coverage # babel - '@babel/plugin-transform-logical-assignment-operators' # trezor diff --git a/.github/workflows/run-unit-tests.yml b/.github/workflows/run-unit-tests.yml index 0f9753111173..6765352f5d7d 100644 --- a/.github/workflows/run-unit-tests.yml +++ b/.github/workflows/run-unit-tests.yml @@ -30,6 +30,26 @@ jobs: name: coverage-${{matrix.shard}} path: coverage/coverage-${{matrix.shard}}.json + test-webpack: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Setup environment + uses: metamask/github-tools/.github/actions/setup-environment@main + + - name: test:unit:webpack:coverage + run: yarn test:unit:webpack:coverage + + - name: Rename coverage + run: mv coverage/coverage-final.json coverage/coverage-webpack.json + + - uses: actions/upload-artifact@v4 + with: + name: coverage-webpack + path: coverage/coverage-webpack.json + report-coverage: runs-on: ubuntu-latest needs: diff --git a/.vscode/package.json-schema.json b/.vscode/package.json-schema.json index c377e274b3ce..458329d0741c 100644 --- a/.vscode/package.json-schema.json +++ b/.vscode/package.json-schema.json @@ -5,9 +5,22 @@ "required": ["lavamoat"], "properties": { "scripts": { + "required": ["webpack", "webpack:clearcache", "postinstall"], "properties": { "start:dev": { "description": "Runs `yarn start` with the addition of react/redux backend devtool servers enabled." + }, + "webpack": { + "type": "string", + "description": "Builds the extension in \"dev\" mode. Run `yarn webpack --help` for usage information." + }, + "webpack:clearcache": { + "type": "string", + "description": "Deletes webpack's build cache. Useful to force a rebuild (webpack not detecting changes, node_modules have changed, etc)." + }, + "postinstall": { + "type": "string", + "description": "Runs automatically after running `yarn` (`yarn install`) in order to prime the webpack dev build." } } }, @@ -16,6 +29,62 @@ "properties": { "autoprefixer": { "description": "Used by our build systems to automatically add prefixes to CSS rules based on our browserslist." + }, + "@types/chrome": { + "type": "string", + "description": "Provides type definitions for the Chrome extension manifest.json files." 
+ }, + "buffer": { + "type": "string", + "description": "Provides a global Buffer object for use in the browser (webpack)" + }, + "crypto-browserify": { + "type": "string", + "description": "Polyfill's node's crypto API for use in the browser (webpack)" + }, + "dotenv": { + "type": "string", + "description": "Loads environment variables from a .metamaskrc file (webpack)" + }, + "fflate": { + "type": "string", + "description": "Provides zip capabilities for bundling (webpack)" + }, + "postcss-loader": { + "type": "string", + "description": "Loads postcss plugins (webpack)" + }, + "process": { + "type": "string", + "description": "Provides a global process object for use in the browser (webpack)" + }, + "schema-utils": { + "type": "string", + "description": "Provides utilities for validating options objects (webpack)" + }, + "stream-http": { + "type": "string", + "description": "Polyfill's node's stream API for use in the browser (webpack)" + }, + "@swc/core": { + "type": "string", + "description": "Transpiles javascript and typescript (webpack)" + }, + "tsx": { + "type": "string", + "description": "Provides blazing fast typescript compilation (no type checking) (webpack)" + }, + "rimraf": { + "type": "string", + "description": "Provides a cross-platform way of deleting files from the command line (webpack)" + }, + "json-schema-to-ts": { + "type": "string", + "description": "Generates typescript types from json schemas (webpack)" + }, + "@pmmmwh/react-refresh-webpack-plugin": { + "type": "string", + "description": "Provides hot reloading for react components (webpack)" } } }, diff --git a/.yarn/patches/@metamask-keyring-controller-npm-17.1.1-098cb41930.patch b/.yarn/patches/@metamask-keyring-controller-npm-17.1.1-098cb41930.patch new file mode 100644 index 000000000000..439c1ddf49ae --- /dev/null +++ b/.yarn/patches/@metamask-keyring-controller-npm-17.1.1-098cb41930.patch @@ -0,0 +1,13 @@ +diff --git a/package.json b/package.json +index 5a6217eaed16fdfe7f1ad693871f85320bd6b421..69bdf1a9155497e37fc58db7bbc74597fd543535 100644 +--- a/package.json ++++ b/package.json +@@ -18,7 +18,7 @@ + "sideEffects": false, + "exports": { + ".": { +- "import": "./dist/index.mjs", ++ "import": "./dist/index.js", + "require": "./dist/index.js", + "types": "./dist/types/index.d.ts" + }, diff --git a/.yarn/patches/@metamask-snaps-controllers-npm-8.4.0-574cd5a8a9.patch b/.yarn/patches/@metamask-snaps-controllers-npm-8.4.0-574cd5a8a9.patch new file mode 100644 index 000000000000..4f07bb41d1ff --- /dev/null +++ b/.yarn/patches/@metamask-snaps-controllers-npm-8.4.0-574cd5a8a9.patch @@ -0,0 +1,13 @@ +diff --git a/package.json b/package.json +index 6dedde043d1bd5fc195e72b3e06ec37cf6532476..3986b5b0c1f3bf7ff49e023c934bed26f44735ae 100644 +--- a/package.json ++++ b/package.json +@@ -9,7 +9,7 @@ + "sideEffects": false, + "exports": { + ".": { +- "import": "./dist/index.mjs", ++ "import": "./dist/index.js", + "require": "./dist/index.js", + "types": "./dist/types/index.d.ts" + }, diff --git a/.yarn/patches/@reduxjs-toolkit-npm-1.9.7-b14925495c.patch b/.yarn/patches/@reduxjs-toolkit-npm-1.9.7-b14925495c.patch index 019aa54534e5..a3c730d3a46b 100644 --- a/.yarn/patches/@reduxjs-toolkit-npm-1.9.7-b14925495c.patch +++ b/.yarn/patches/@reduxjs-toolkit-npm-1.9.7-b14925495c.patch @@ -92,3 +92,29 @@ index bb433432ec76331e12d6b62e200f06530055cb16..9caf4051aa96bd14ee2890ef6c79bf5b return EnhancerArray; }(Array)); function freezeDraftable(val) { +diff --git a/dist/redux-toolkit.esm.js b/dist/redux-toolkit.esm.js +index 
f26a1669405b4dd92dfecd791dc536078a7e2e12..591e7495fcaf3233d26cfb9c4eae09fd7ae3eb98 100644 +--- a/dist/redux-toolkit.esm.js ++++ b/dist/redux-toolkit.esm.js +@@ -192,9 +192,6 @@ function getMessage(type) { + } + function createActionCreatorInvariantMiddleware(options) { + if (options === void 0) { options = {}; } +- if (process.env.NODE_ENV === "production") { +- return function () { return function (next) { return function (action) { return next(action); }; }; }; +- } + var _c = options.isActionCreator, isActionCreator2 = _c === void 0 ? isActionCreator : _c; + return function () { return function (next) { return function (action) { + if (isActionCreator2(action)) { +diff --git a/package.json b/package.json +index 684ea845ee663f719bff6c140001baebdaa69344..568d6215514a8625bfb3be5e49b6cbfe11231e6a 100644 +--- a/package.json ++++ b/package.json +@@ -23,7 +23,6 @@ + "access": "public" + }, + "main": "dist/index.js", +- "module": "dist/redux-toolkit.esm.js", + "unpkg": "dist/redux-toolkit.umd.min.js", + "types": "dist/index.d.ts", + "devDependencies": { diff --git a/README.md b/README.md index 5c3e8f5823c1..59c65864f1e3 100644 --- a/README.md +++ b/README.md @@ -85,6 +85,7 @@ To start a development build (e.g. with logging and file watching) run `yarn sta Alternatively, one can skip wallet onboarding and preload the vault state with a specific SRP by adding `TEST_SRP=''` and `PASSWORD=''` to the `.metamaskrc` file and running `yarn start:skip-onboarding`. +You can also start a development build using the `yarn webpack` command, or `yarn webpack --watch`. This uses an alternative build system that is much faster, but not yet production ready. See the [Webpack README](./development/webpack/README.md) for more information. #### React and Redux DevTools @@ -138,7 +139,7 @@ Note: The `yarn start:test` command (which initiates the testDev build type) has Once you have your test build ready, choose the browser for your e2e tests: - For Firefox, run `yarn test:e2e:firefox`. - - Note: If you are running Firefox as a snap package on Linux, ensure you enable the appropriate environment variable: `FIREFOX_SNAP=true yarn test:e2e:firefox` + - Note: If you are running Firefox as a snap package on Linux, ensure you enable the appropriate environment variable: `FIREFOX_SNAP=true yarn test:e2e:firefox` - For Chrome, run `yarn test:e2e:chrome`. These scripts support additional options for debugging. Use `--help`to see all available options. diff --git a/app/background.html b/app/background.html index c0068295d730..9892810f3ffa 100644 --- a/app/background.html +++ b/app/background.html @@ -4,7 +4,6 @@ - - + diff --git a/app/home.html b/app/home.html index b94800b13d78..55ab0b8c2695 100644 --- a/app/home.html +++ b/app/home.html @@ -8,7 +8,7 @@ <% } else { %> MetaMask <% } %> - +
@@ -16,6 +16,6 @@
- + diff --git a/app/loading.html b/app/loading.html index 1c85bd296cc6..6319f760fb23 100644 --- a/app/loading.html +++ b/app/loading.html @@ -2,13 +2,13 @@ - <% if (it.shouldIncludeSnow) { %> - - - <% } %> MetaMask Loading + <% if (it.shouldIncludeSnow) { %> + + + <% } %> - +
@@ -50,6 +50,6 @@ />
- + diff --git a/app/popup.html b/app/popup.html index 296b0ceae711..e7bdb356a118 100644 --- a/app/popup.html +++ b/app/popup.html @@ -4,7 +4,7 @@ MetaMask - +
@@ -12,6 +12,6 @@
- + diff --git a/app/scripts/load/_initialize.ts b/app/scripts/load/_initialize.ts new file mode 100644 index 000000000000..5e0537876b73 --- /dev/null +++ b/app/scripts/load/_initialize.ts @@ -0,0 +1,28 @@ +/* eslint-disable @typescript-eslint/no-require-imports */ +// currently only used in webpack build. + +// The root compartment will populate this with hooks +global.stateHooks = {} as typeof stateHooks; + +if (process.env.ENABLE_LAVAMOAT === 'true') { + // TODO: lavamoat support + throw new Error('LAVAMOAT not supported in webpack build yet'); +} else { + if (process.env.ENABLE_SENTRY === 'true') { + require('../sentry-install'); + } + if (process.env.ENABLE_SNOW === 'true') { + require('@lavamoat/snow/snow.prod'); + require('../use-snow'); + } + if (process.env.ENABLE_LOCKDOWN === 'true') { + require('../lockdown-install'); + require('../lockdown-run'); + require('../lockdown-more'); + } + + require('../init-globals'); + require('../runtime-cjs'); +} + +export {}; diff --git a/app/scripts/load/background.ts b/app/scripts/load/background.ts new file mode 100644 index 000000000000..1017d516e361 --- /dev/null +++ b/app/scripts/load/background.ts @@ -0,0 +1,9 @@ +// currently only used in webpack build. + +import './_initialize'; +import '../background'; + +if (process.env.IN_TEST) { + // only used for testing + document.documentElement.classList.add('metamask-loaded'); +} diff --git a/app/scripts/load/ui.ts b/app/scripts/load/ui.ts new file mode 100644 index 000000000000..471489bf948f --- /dev/null +++ b/app/scripts/load/ui.ts @@ -0,0 +1,9 @@ +// currently only used in webpack build. + +import './_initialize'; +import '../ui'; + +if (process.env.IN_TEST) { + // only used for testing + document.documentElement.classList.add('metamask-loaded'); +} diff --git a/app/scripts/lockdown-install.js b/app/scripts/lockdown-install.js new file mode 100644 index 000000000000..add418dea917 --- /dev/null +++ b/app/scripts/lockdown-install.js @@ -0,0 +1,6 @@ +// currently only used in webpack build. + +import 'ses'; +// lockdown() is called in lockdown-run.js + +export {}; diff --git a/app/scripts/metamask-controller.js b/app/scripts/metamask-controller.js index d074757d5df5..51df7891ba31 100644 --- a/app/scripts/metamask-controller.js +++ b/app/scripts/metamask-controller.js @@ -840,7 +840,10 @@ export default class MetamaskController extends EventEmitter { }), storageBackend: new IndexedDBPPOMStorage('PPOMDB', 1), provider: this.provider, - ppomProvider: { PPOM: PPOMModule.PPOM, ppomInit: PPOMModule.default }, + ppomProvider: { + PPOM: PPOMModule.PPOM, + ppomInit: () => PPOMModule.default(process.env.PPOM_URI), + }, state: initState.PPOMController, chainId: this.networkController.state.providerConfig.chainId, securityAlertsEnabled: diff --git a/app/scripts/runtime-cjs.ts b/app/scripts/runtime-cjs.ts new file mode 100644 index 000000000000..e0169d25bd2e --- /dev/null +++ b/app/scripts/runtime-cjs.ts @@ -0,0 +1,5 @@ +// currently only used in webpack build. 
+ +import '@lavamoat/lavapack/src/runtime-cjs'; + +export {}; diff --git a/app/scripts/sentry-install.js b/app/scripts/sentry-install.js index cbbc03ab10e2..9a88a77c9f7e 100644 --- a/app/scripts/sentry-install.js +++ b/app/scripts/sentry-install.js @@ -1,7 +1,7 @@ import setupSentry from './lib/setupSentry'; // The root compartment will populate this with hooks -global.stateHooks = {}; +global.stateHooks = global.stateHooks || {}; // setup sentry error reporting global.sentry = setupSentry(); diff --git a/app/trezor-usb-permissions.html b/app/trezor-usb-permissions.html index 8c92552cfa02..ef87f9a1b8aa 100644 --- a/app/trezor-usb-permissions.html +++ b/app/trezor-usb-permissions.html @@ -2,13 +2,13 @@ - <% if (it.shouldIncludeSnow) { %> - - - <% } %> TrezorConnect | Trezor + <% if (it.shouldIncludeSnow) { %> + + + <% } %> diff --git a/builds.yml b/builds.yml index 2c00cc8b38d1..9dcde670f8ff 100644 --- a/builds.yml +++ b/builds.yml @@ -151,6 +151,7 @@ env: - SUPPORT_LINK: https://support.metamask.io - SUPPORT_REQUEST_LINK: https://support.metamask.io - SKIP_BACKGROUND_INITIALIZATION: false + - PPOM_URI: ./ppom_bg.wasm # CDN for blockaid files - BLOCKAID_FILE_CDN: static.cx.metamask.io/api/v1/confirmations/ppom # Blockaid public key for verifying signatures of data files downloaded from CDN diff --git a/development/build/scripts.js b/development/build/scripts.js index 2e3661cfd21e..2e947a1e3783 100644 --- a/development/build/scripts.js +++ b/development/build/scripts.js @@ -1212,11 +1212,18 @@ function renderHtmlFile({ const htmlTemplate = readFileSync(htmlFilePath, 'utf8'); const eta = new Eta(); - const htmlOutput = eta.renderString(htmlTemplate, { - isMMI, - isTest, - shouldIncludeSnow, - }); + const htmlOutput = eta + .renderString(htmlTemplate, { isMMI, isTest, shouldIncludeSnow }) + // these replacements are added to support the webpack build's automatic + // compilation of html files, which the gulp-based process doesn't support. + .replace('./scripts/load/background.ts', './load-background.js') + .replace( + '', + '\n ', + ) + .replace('./scripts/load/ui.ts', './load-app.js') + .replace('../ui/css/index.scss', './index.css') + .replace('@lavamoat/snow/snow.prod.js', './scripts/snow.js'); browserPlatforms.forEach((platform) => { const dest = `./dist/${platform}/${htmlName}.html`; // we dont have a way of creating async events atm diff --git a/development/build/static.js b/development/build/static.js index f3d94d4cc6bc..edf6d8f618d4 100644 --- a/development/build/static.js +++ b/development/build/static.js @@ -146,9 +146,7 @@ function getCopyTargets(shouldIncludeLockdown, shouldIncludeSnow) { ...(shouldIncludeSnow ? [ { - src: shouldIncludeSnow - ? 
`./node_modules/@lavamoat/snow/snow.prod.js` - : EMPTY_JS_FILE, + src: `./node_modules/@lavamoat/snow/snow.prod.js`, dest: `scripts/snow.js`, }, { diff --git a/development/fitness-functions/common/constants.ts b/development/fitness-functions/common/constants.ts index 0514c6ed9002..5758d4e2a6e1 100644 --- a/development/fitness-functions/common/constants.ts +++ b/development/fitness-functions/common/constants.ts @@ -1,7 +1,7 @@ // include JS, TS, JSX, TSX files only excluding files in the e2e tests and // fitness functions directories const EXCLUDE_E2E_TESTS_REGEX = - '^(?!test/e2e)(?!development/fitness).*.(js|ts|jsx|tsx)$'; + '^(?!test/e2e)(?!development/fitness|development/webpack).*.(js|ts|jsx|tsx)$'; // include JS and JSX files in the shared directory only const SHARED_FOLDER_JS_REGEX = '^(shared).*.(js|jsx)$'; diff --git a/development/lib/run-command.js b/development/lib/run-command.js index fab1eca361d4..62fe2284ea6d 100644 --- a/development/lib/run-command.js +++ b/development/lib/run-command.js @@ -96,7 +96,7 @@ async function runInShell(command, args, output) { const internalError = new Error('Internal'); try { await new Promise((resolve, reject) => { - const childProcess = spawn(command, args); + const childProcess = spawn(command, args, { shell: true }); childProcess.stdout.setEncoding('utf8'); childProcess.stderr.setEncoding('utf8'); childProcess.stdout.pipe(process.stdout); diff --git a/development/webpack/.eslintrc.js b/development/webpack/.eslintrc.js new file mode 100644 index 000000000000..1b142e9a39d1 --- /dev/null +++ b/development/webpack/.eslintrc.js @@ -0,0 +1,41 @@ +// this file is named .eslintrc.js because eslint checks for that file first + +module.exports = { + rules: { + '@typescript-eslint/no-shadow': [ + 'error', + { + allow: [ + // so uh, these aren't always globals, ya know. + 'describe', + 'it', + 'test', + 'afterEach', + 'beforeEach', + ], + }, + ], + // useful for lazy `require`s (makes start up faster) + '@typescript-eslint/no-require-imports': 'off', + // useful for modifying properties of `require`d modules (something `import`ed modules don't allow) + '@typescript-eslint/no-var-requires': 'off', + // Fun fact: ESM imports _require_ extensions. So silly. + 'import/extensions': 'off', + // sometimes its nice to do things like `something = else = null;` + 'no-multi-assign': ['error', { ignoreNonDeclaration: true }], + // Why? What's next, no addition? + 'no-bitwise': 'off', + // `void` is useful to ignore return values, the option `allowAsStatement: true` is broken for lambda functions, e.g., `() => void something()`. + 'no-void': 'off', + // `if (condition) return;` is useful for early returns without adding noise. + curly: ['error', 'multi-line'], + // require is required to load dynamic modules (well, JSON, mostly) synchronously (with Node's require cache, too!). + 'import/no-dynamic-require': 'off', + // uh, they're bullet points in markdown in a JSDoc comment. Stop this nonsense. + 'jsdoc/no-multi-asterisks': ['error', { allowWhitespace: true }], + // Really? I was joking about "no addition" above, but its (almost) real! + 'no-plusplus': 'off', + // I want to increment a variable outside my loop. This prevents that. 
+ 'no-loop-func': 'off', + }, +}; diff --git a/development/webpack/README.md b/development/webpack/README.md new file mode 100644 index 000000000000..aa85fb67d444 --- /dev/null +++ b/development/webpack/README.md @@ -0,0 +1,214 @@ +# MetaMask Development Build Tool + +This tool is used to build the MetaMask extension for development purposes. It is not (yet) intended for production builds. + +## Usage + +For usage, examples, and options, run the following command: + +```bash +yarn webpack --help +``` + +To build the MetaMask extension, run the following command: + +```bash +yarn webpack +``` + +This will create a `dist/chrome` directory containing the built extension. See usage for more options. + +To watch for changes and rebuild the extension automatically, run the following command: + +```bash +yarn webpack --watch +``` + +### Set options using a `config.json` file + +You can skip using command line options and specify options using a JSON file +instead. You can use the same options as the command line, but in JSON form. For +example, to build a zip of the extension for Chrome and Firefox, create a +`config.json` file as follows (notice the use of an array for the `browser` +option): + +```json +{ + "browser": ["chrome", "firefox"], + "zip": true +} +``` + +Then you can use it as follows: + +```bash +yarn webpack --config config.json +``` + +And you can combine it with CLI options, too: + +```bash +yarn webpack --config config.json --dry-run +``` + +Run `yarn webpack --help` for the list of options. + +### Set options using environment variables + +You can use environment variables instead of command line options: + +```bash +BUNDLE_MINIFY=true yarn webpack +``` + +Run `yarn webpack --help` for the list of options. + +Note: multiple array options cannot be set this way, due to this bug in yargs: https://github.com/yargs/yargs/issues/821 + +You can also combine environment variables with `--config` and CLI options: + +```bash +BUNDLE_MINIFY=true yarn webpack --config config.json --dry-run +``` + +## Cache Invalidation + +The cache is invalidated when the build tool's code itself changes, or when the `package.json` file changes. The cache +is keyed by the effective options, so changing the options will also invalidate the cache. Not all options affect +the cache, but most do. Search for "`cacheKey`" in [./utils/cli.ts](./utils/cli.ts) to see which options affect the cache. + +## Tips + +- You can use the `--config` flag to specify your own JSON config file to use as the build configuration. This is useful + if you want to customize the defaults. +- You can specify options via environment variables by prefixing the option name with `BUNDLE_`, e.g., + `BUNDLE_BROWSER=opera yarn webpack` on \*nix. +- Don't run the build process with the Node Debugger attached; it will make things build much more slowly. + +## Development + +### Debugging the Build Process + +Webpack makes use of a cache to speed up builds. If you encounter issues with the build tool, try clearing the cache by +running the following command: + +```bash +yarn webpack:clearcache +``` + +You can also avoid using the cache by setting the `--no-cache` option. + +```bash +yarn webpack --no-cache +``` + +Please file an issue if you do encounter issues! + +### Linting + +Linting is exactly the same as the rest of the MetaMask project.
To lint the build tool, run the following command: + +```bash +yarn lint +``` + +That said, the webpack build has its [own eslint configuration](./.eslintrc.js) that overrides some restrictive rules +that either don't work well when optimizing for performance or disable JavaScript features that are useful and +generally necessary. + +### Testing + +To run the build tool's test suite, run the following command: + +```bash +yarn test:unit:webpack +``` + +This will run the test suite for the build tool. These tests are also run as part of the MetaMask test suite in CI. + +To output HTML, JSON, and text coverage reports, run the following command: + +```bash +yarn test:unit:webpack:coverage +``` + +Test coverage should be around 100% for the build tool; exceptions are made for some edge cases that are overly +difficult or complex to test, such as thrown exceptions. + +Testing uses Node's built-in `node:test` and `node:assert` modules instead of jest/mocha. + +Unit tests are organized in `development/webpack/test` and are named `*.test.ts`, where \* is the name of the file being +tested. This is a guideline and not a rule. + +When checking coverage, it is sometimes good to check whether your coverage is intentional. One way to do that is to run the +test coverage on a single test file. This can be done by running the following command: + +```bash +yarn nyc --reporter=html tsx --test development/webpack/test/your-test-file.test.ts +``` + +### Performance + +The build tool only exists to build the project quickly. Don't make it slow. If you're adding a feature that makes the +build tool slower, go for a walk and maybe don't come back until you change your mind. + +Some things that might make the build tool slower: + +- using JavaScript (this tool is only fast because it uses [SWC](https://swc.rs/) for compilation, which is written in + Rust) +- requiring/importing large libraries +- functional programming paradigms (JavaScript is not Haskell after all) + - like chaining map, filter, reduce, etc. when a single loop would do. + - try to avoid looping over the same data/file multiple times +- using async IO when sync IO would do + - non-blocking IO is great, but not when it's the only IO happening at the time and we don't care about blocking the + main process. +- launching shells, workers, or other processes without measuring the cost +- unnecessary IO +- validation, linting, or other checks that are not necessary + +If you must add something that slows it down, consider putting it behind a flag. If it must be in the default mode, try +to run it in parallel with other tasks. + +### The Cache + +The build process uses a cache to speed up successive builds. The cache is stored in the `node_modules/.cache/webpack` +directory. + +The cache is slow. Very slow. It takes about 50% of the total time just to create the cache. But you shouldn't notice +that because the caching step is pushed to a background process. + +The way this works is by running the build in a background child process, and then detaching that child process from the +parent process once the build is complete (and cache reconciliation and persistence begin). + +Launching the build in a background process does take time, but it's much less time than cache creation, so it works out. + +The child process is run with its own TTY for `stderr` and `stdout`; the child's stdio dimensions are kept in sync with +the parent's, and all TTY features of the parent are available in the child (formatting, colors, cursors, etc.).
On +Windows an IPC channel is used to communicate between the parent and child processes; on \*nix this is done via signals. +The parent process listens for the child process to signal it, and when it does, the parent disconnects from +the child and shuts down, leaving the child to run in the background so the cache can be processed and persisted. + +### To do: + +- [define and wrangle the difference between `lockdown` and `lavamoat` options.](https://github.com/MetaMask/metamask-extension/issues/26254) +- [MV3 support](https://github.com/MetaMask/metamask-extension/issues/26255) + - Service workers, used by MV3, must load dependencies via `importScripts`. + - there are existing webpack plugins that do this, but they are not yet integrated into this build tool and would + require changes to our code and existing gulp-based build process to work. +- [Make lavamoat work so we can run production builds](https://github.com/MetaMask/metamask-extension/issues/26256) +- [Make LiveReload, Hot Module Reloading, and/or React Refresh work](https://github.com/MetaMask/metamask-extension/issues/26257) + - prerequisite: https://github.com/MetaMask/metamask-extension/issues/22450 +- [Make the build tool even faster (switch to RSPack once it hits 1.0.0?)](https://github.com/MetaMask/metamask-extension/issues/26258) +- [enable `yarn webpack completion`](https://github.com/MetaMask/metamask-extension/issues/26259) + - It doesn't work with multiple-word commands (`yarn webpack ...`) and is currently disabled. +- [implement overrides for icons and manifests fields for non-main builds](https://github.com/MetaMask/metamask-extension/issues/26260) + +### Ideas + +- investigate using `DLLPlugin` for even faster builds +- make it work in Bun.js and/or Deno +- investigate adding a long-running background daemon mode for always up-to-date builds +- investigate adding linting, testing, validation, AI code review, etc.; especially in `--watch` mode +- investigate a "one CLI to rule them all" approach to MetaMask developer tooling and scripts +- allow changing some options without restarting the build process diff --git a/development/webpack/build.ts b/development/webpack/build.ts new file mode 100644 index 000000000000..12ce2c95e693 --- /dev/null +++ b/development/webpack/build.ts @@ -0,0 +1,62 @@ +import { webpack } from 'webpack'; +import type WebpackDevServerType from 'webpack-dev-server'; +import { noop, logStats, __HMR_READY__ } from './utils/helpers'; +import config from './webpack.config.js'; + +// disable browserslist stats as it needlessly traverses the filesystem multiple +// times looking for a stats file that doesn't exist. +require('browserslist/node').getStat = noop; + +/** + * Builds the extension + * + * @param onComplete - called once a one-shot (non-watch) build has completed + */ +export function build(onComplete: () => void = noop) { + const isDevelopment = config.mode === 'development'; + + const { watch, ...options } = config; + const compiler = webpack(options); + if (__HMR_READY__ && watch) { + // DISABLED BECAUSE WE AREN'T `__HMR_READY__` YET + // Use `webpack-dev-server` to enable HMR + const WebpackDevServer: typeof WebpackDevServerType = require('webpack-dev-server'); + const serverOptions = { + hot: isDevelopment, + liveReload: isDevelopment, + server: { + // TODO: is there any benefit to using https? + type: 'https', + }, + // always use loopback, as 0.0.0.0 tends to fail on some machines (WSL2?)
+ host: 'localhost', + devMiddleware: { + // browsers need actual files on disk + writeToDisk: true, + }, + // we don't need/have a "static" directory, so disable it + static: false, + allowedHosts: 'all', + } as const satisfies WebpackDevServerType.Configuration; + + const server = new WebpackDevServer(serverOptions, compiler); + server.start().then(() => console.log('🦊 Watching for changes…')); + } else { + console.error(`🦊 Running ${options.mode} build…`); + if (watch) { + // once HMR is ready (__HMR_READY__ variable), this section should be removed. + compiler.watch(options.watchOptions, (err, stats) => { + logStats(err ?? undefined, stats); + console.error('🦊 Watching for changes…'); + }); + } else { + compiler.run((err, stats) => { + logStats(err ?? undefined, stats); + // `onComplete` must be called synchronously _before_ `compiler.close` + // or the caller might observe output from the `close` command. + onComplete(); + compiler.close(noop); + }); + } + } +} diff --git a/development/webpack/fork.mts b/development/webpack/fork.mts new file mode 100644 index 000000000000..73fb7c4a7e4d --- /dev/null +++ b/development/webpack/fork.mts @@ -0,0 +1,27 @@ +/** + * @file Executes the build process in a child process environment, ensuring it + * was correctly spawned by checking for a `PPID` environment variable that + * matches the parent's process ID. This script is responsible for running the + * build logic defined in './build' and managing output streams to prevent + * unwanted output after completion. It leverages IPC for communication back to + * the parent process or falls back to sending a POSIX signal (`SIGUSR2`) to + * signal completion. + * @see {@link ./launch.ts} + */ + +const PPID = Number(process.env.PPID); +if (isNaN(PPID) || PPID !== process.ppid) { + throw new Error( + `${__filename} must be run with a \`PPID\` environment variable. See ${__dirname}/launch.ts for an example.`, + ); +} + +const { build } = await import('./build.ts'); +build(() => { + // stop writing now because the parent process is still listening to these + // streams and we don't want any more output to be shown to the user. + process.stdout.write = process.stderr.write = () => true; + + // use IPC if we have it, otherwise send a POSIX signal + process.send?.('SIGUSR2') || process.kill(PPID, 'SIGUSR2'); +}); diff --git a/development/webpack/launch.ts b/development/webpack/launch.ts new file mode 100755 index 000000000000..acf1149045fe --- /dev/null +++ b/development/webpack/launch.ts @@ -0,0 +1,181 @@ +#!/usr/bin/env -S node --require "./node_modules/tsx/dist/preflight.cjs" --import "./node_modules/tsx/dist/loader.mjs" + +/** + * @file This script optimizes build processes by conditionally forking child + * processes based on command-line arguments. It handles memory management, + * stdio stream creation, and process lifecycle to improve performance and + * maintainability. Supports cross-platform execution with specific + * considerations for Windows environments. + * + * On Linux-like systems you can skip the overhead of running `yarn` by + * executing this file directly, e.g., `./development/webpack/launch.ts`, or via + * bun or tsx. + */ + +// Note: minimize non-`type` imports to decrease load time. 
+import { join } from 'node:path'; +import { spawn, type StdioOptions } from 'node:child_process'; +import parser from 'yargs-parser'; +import type { Child, PTY, Stdio, StdName } from './types.ts'; + +const rawArgv = process.argv.slice(2); + +const alias = { cache: 'c', help: 'h', watch: 'h' }; +type Args = { [x in keyof typeof alias]?: boolean }; +const args = parser(rawArgv, { alias, boolean: Object.keys(alias) }) as Args; + +if (args.cache === false || args.help === true || args.watch === true) { + // there are no time savings to running the build in a child process if: the + // cache is disabled, we need to output "help", or we're in watch mode. + require('./build.ts').build(); +} else { + fork(process, join(__dirname, 'fork.mts'), rawArgv); +} + +/** + * Runs the `file` in a child process. This allows the parent process to + * exit as soon as the build completes, but lets the child process continue to + * serialize and persist the cache in the background. + * + * @param process - The parent process, like `globalThis.process` + * @param file - Path to the file to run, given as an argument to the command + * @param argv - Arguments to pass to the executable + */ +function fork(process: NodeJS.Process, file: string, argv: string[]) { + const env = { NODE_OPTIONS: '', ...process.env, PPID: `${process.pid}` }; + // node recommends using 75% of the available memory for `max-old-space-size` + // https://github.com/nodejs/node/blob/dd67bf08cb1ab039b4060d381cc68179ee78701a/doc/api/cli.md#--max-old-space-sizesize-in-megabytes + const maxOldSpaceMB = ~~((require('node:os').totalmem() * 0.75) / (1 << 20)); + // `--huge-max-old-generation-size` and `--max-semi-space-size=128` reduce + // garbage collection pauses; 128MB provided max benefit in perf testing. + const nodeOptions = [ + `--max-old-space-size=${maxOldSpaceMB}`, + '--max-semi-space-size=128', + '--huge-max-old-generation-size', + ]; + + // run the build in a child process so that we can exit the parent process as + // soon as the build completes, but let the cache serialization finish in the + // background (the cache can take 30% of build-time to serialize and persist). + const { connectToChild, destroy, stdio } = createOutputStreams(process); + + const node = process.execPath; + const options = { detached: true, env, stdio }; + spawn(node, [...nodeOptions, ...process.execArgv, file, ...argv], options) + .once('close', destroy) // clean up if the child crashes + .once('spawn', connectToChild); +} + +/** + * Create the stdio streams (stderr and stdout) for the child process to use and + * for the parent to control and listen to. + * + * @param process - The parent process, like `globalThis.process` + * @returns The stdio streams for the child process to use + */ +function createOutputStreams(process: NodeJS.Process) { + const { isatty } = require('node:tty'); + const isWindows = process.platform === 'win32'; + // use IPC for communication on Windows, as it doesn't support POSIX signals + const ipc = isWindows ? 'ipc' : 'ignore'; + const outs = (['stdout', 'stderr'] as const).map(function createStream(name) { + const parentStream = process[name]; + // TODO: get Windows PTY working + return !isWindows && isatty(parentStream.fd) + ? 
createTTYStream(parentStream) + : createNonTTYStream(parentStream, name); + }) as [Stdio, Stdio]; + + return { + /** + * + * @param this + * @param child + */ + connectToChild(this: Child, child = this) { + // hook up the child's stdio to the parent's & unref so we can exit later + outs.forEach((stream) => { + stream.listen(child); + stream.unref(child); + }); + + listenForShutdownSignal(process, child); + + process + // kill the child process if we didn't exit cleanly + .on('exit', (code) => code > 128 && child.kill(code - 128)) + // `SIGWINCH` means the terminal was resized + .on('SIGWINCH', function handleSigwinch(signal) { + // resize the tty's + outs.forEach((out) => out.resize()); + // then tell the child process to update its dimensions + child.kill(signal); + }); + }, + destroy: () => outs.forEach((out) => out.destroy()), + stdio: ['ignore', outs[0].pty, outs[1].pty, ipc] as StdioOptions, + }; +} + +/** + * Create a non-TTY (pipe) stream for the child process to use as its stdio. + * + * @param stream - The parent process's stdio stream + * @param name - Either `stdout` or `stderr` + * @returns The stream for the child process to use + */ +function createNonTTYStream(stream: NodeJS.WriteStream, name: StdName): Stdio { + return { + destroy: () => undefined, + listen: (child: Child) => void child[name].pipe(stream), + pty: 'pipe', // let Node create the Pipes + resize: () => undefined, + unref: (child: Child) => void child[name].unref(), + }; +} + +/** + * Create a PTY stream for the child process to use as its stdio. + * + * @param stream - The parent process's stdio stream + * @returns The PTY stream for the child process to use + */ +function createTTYStream(stream: NodeJS.WriteStream): Stdio { + // create a PTY (Pseudo TTY) so the child stream behaves like a TTY + const options = { cols: stream.columns, encoding: null, rows: stream.rows }; + const pty: PTY = require('@lydell/node-pty').open(options); + + return { + destroy: () => { + pty.master.destroy(); + pty.slave.destroy(); + }, + listen: (_child: Child) => void pty.master.pipe(stream), + pty: pty.slave, + resize: () => pty.resize(stream.columns, stream.rows), + unref: (_child: Child) => { + pty.master.unref(); + pty.slave.unref(); + }, + }; +} + +/** + * Listens for a shutdown signal either on the child's IPC channel or via the + * parent process's `SIGUSR2` event. When the signal is received, the child + * process is unref'd so that it can continue running in the background. + * + * Once the child process is unref'd, the parent process may exit on its own. 
+ * + * @param process - The parent process, like `globalThis.process` + * @param child - The child process to listen to + */ +function listenForShutdownSignal(process: NodeJS.Process, child: Child) { + // exit gracefully when the child signals the parent via `SIGUSR2` + if (child.channel === null || child.channel === undefined) { + process.on('SIGUSR2', () => child.unref()); + } else { + child.channel.unref(); + child.on('message', (signal) => signal === 'SIGUSR2' && child.unref()); + } +} diff --git a/development/webpack/test/cli.test.ts b/development/webpack/test/cli.test.ts new file mode 100644 index 000000000000..7f9d5f2fea53 --- /dev/null +++ b/development/webpack/test/cli.test.ts @@ -0,0 +1,83 @@ +import { describe, it } from 'node:test'; +import assert from 'node:assert'; +import { getDryRunMessage, parseArgv } from '../utils/cli'; +import { getBuildTypes } from '../utils/config'; +import { Browsers } from '../utils/helpers'; + +describe('./utils/cli.ts', () => { + const defaultArgs = { + env: 'development', + watch: false, + cache: true, + progress: true, + releaseVersion: 0, + devtool: 'source-map', + sentry: false, + test: false, + zip: false, + minify: false, + browser: ['chrome'], + manifest_version: 2, + type: 'main', + lavamoat: false, + lockdown: false, + snow: false, + dryRun: false, + stats: false, + }; + + it('should return defaults', () => { + const { args, cacheKey, features } = parseArgv([], getBuildTypes()); + assert.deepStrictEqual(args, defaultArgs); + assert.strictEqual( + typeof cacheKey, + 'string', + 'cacheKey should be a string', + ); + assert(cacheKey.length > 0, 'cacheKey should not be empty'); + // features come from build.yml, and change often, so let's just check the shape + assert(features, 'features should be defined'); + assert(features.all instanceof Set, 'features.all should be a Set'); + assert(features.active instanceof Set, 'features.active should be a Set'); + }); + + it('getDryRunMessage', () => { + const { args, features } = parseArgv([], getBuildTypes()); + const message = getDryRunMessage(args, features); + // testing the exact message could be nice, but verbose and maybe a bit + // brittle, so we just check that it returns a string + assert.strictEqual( + typeof message, + 'string', + 'Dry run message should be a string', + ); + assert(message.length > 0, 'Dry run message should not be empty'); + }); + + it('should allow for build types with no features', () => { + const buildTypesConfig = getBuildTypes(); + delete buildTypesConfig.buildTypes.main.features; + const { features } = parseArgv([], buildTypesConfig); + assert.strictEqual( + features.active.size, + 0, + 'features.active should be an empty Set', + ); + }); + + it('should allow for a build type with no features section', () => { + const buildTypesConfig = getBuildTypes(); + delete buildTypesConfig.buildTypes.main.features; + const { features } = parseArgv([], buildTypesConfig); + assert.strictEqual( + features.active.size, + 0, + 'features.active should be an empty Set', + ); + }); + + it('should return all browsers when `--browser all` is specified', () => { + const { args } = parseArgv(['--browser', 'all'], getBuildTypes()); + assert.deepStrictEqual(args.browser, Browsers); + }); +}); diff --git a/development/webpack/test/config.test.ts b/development/webpack/test/config.test.ts new file mode 100644 index 000000000000..c19e4871fbb8 --- /dev/null +++ b/development/webpack/test/config.test.ts @@ -0,0 +1,110 @@ +import fs from 'node:fs'; +import { describe, it, after, mock } from 
'node:test'; +import assert from 'node:assert'; +import { resolve } from 'node:path'; +import * as config from '../utils/config'; +import { parseArgv } from '../utils/cli'; +import { version } from '../../../package.json'; + +describe('./utils/config.ts', () => { + // variables logic is complex, and is "owned" mostly by the other build + // system, so we don't check for everything, just that the interface is + // behaving + describe('variables', () => { + const originalReadFileSync = fs.readFileSync; + function mockRc(env: Record = {}) { + mock.method(fs, 'readFileSync', (path: string, options: object) => { + if (path === resolve(__dirname, '../../../.metamaskrc')) { + // mock `.metamaskrc`, as users might have customized it which may + // break our tests + return ` +${Object.entries(env) + .map(([key, value]) => `${key}=${value}`) + .join('\n')} +`; + } + return originalReadFileSync(path, options); + }); + } + after(() => mock.restoreAll()); + + it('should return valid build variables for the default build', () => { + const buildTypes = config.getBuildTypes(); + const { args } = parseArgv([], buildTypes); + const { variables, safeVariables } = config.getVariables( + args, + buildTypes, + ); + + assert.strictEqual(variables.get('METAMASK_VERSION'), version); + assert.strictEqual(variables.get('IN_TEST'), args.test); + assert.strictEqual(variables.get('METAMASK_BUILD_TYPE'), args.type); + assert.strictEqual(variables.get('NODE_ENV'), args.env); + + // PPOM_URI is unique in that it is code, and has not been JSON.stringified, so we check it separately: + assert.strictEqual( + safeVariables.PPOM_URI, + `new URL('@blockaid/ppom_release/ppom_bg.wasm', import.meta.url)`, + ); + }); + + it('should prefer .metamaskrc variables over others', () => { + const buildTypes = config.getBuildTypes(); + const { args } = parseArgv([], buildTypes); + const defaultVars = config.getVariables(args, buildTypes); + + // verify the default value of the main build is false + assert.strictEqual(defaultVars.variables.get('ALLOW_LOCAL_SNAPS'), false); + + mockRc({ + ALLOW_LOCAL_SNAPS: 'true', + }); + + const overrides = config.getVariables(args, buildTypes); + + // verify the value of the main build is set to the value in .metamaskrc + assert.strictEqual(overrides.variables.get('ALLOW_LOCAL_SNAPS'), true); + }); + + it('should return valid build variables for a non-default build', () => { + mockRc({ + // required by the `beta` build type + SEGMENT_BETA_WRITE_KEY: '.', + }); + const buildTypes = config.getBuildTypes(); + const { args } = parseArgv( + ['--type', 'beta', '--test', '--env', 'production'], + buildTypes, + ); + const { variables } = config.getVariables(args, buildTypes); + assert.strictEqual( + variables.get('METAMASK_VERSION'), + `${version}-${args.type}.0`, + ); + assert.strictEqual(variables.get('IN_TEST'), args.test); + assert.strictEqual(variables.get('METAMASK_BUILD_TYPE'), args.type); + assert.strictEqual(variables.get('NODE_ENV'), args.env); + }); + + it("should handle true/false/null/'' in rc", () => { + const buildTypes = config.getBuildTypes(); + const { args } = parseArgv([], buildTypes); + + mockRc({ + TESTING_TRUE: 'true', + TESTING_FALSE: 'false', + TESTING_NULL: 'null', + TESTING_MISC: 'MISC', + TESTING_EMPTY_STRING: '', + }); + + const { variables } = config.getVariables(args, buildTypes); + + assert.strictEqual(variables.get('TESTING_TRUE'), true); + assert.strictEqual(variables.get('TESTING_FALSE'), false); + assert.strictEqual(variables.get('TESTING_NULL'), null); + 
assert.strictEqual(variables.get('TESTING_MISC'), 'MISC'); + assert.strictEqual(variables.get('TESTING_EMPTY_STRING'), null); + }); + }); +}); diff --git a/development/webpack/test/fixtures/ManifestPlugin/complex/manifest/v2/_base.json b/development/webpack/test/fixtures/ManifestPlugin/complex/manifest/v2/_base.json new file mode 100644 index 000000000000..46c463a53ef8 --- /dev/null +++ b/development/webpack/test/fixtures/ManifestPlugin/complex/manifest/v2/_base.json @@ -0,0 +1,4 @@ +{ + "description": "base description", + "web_accessible_resources": ["file.png"] +} diff --git a/development/webpack/test/fixtures/ManifestPlugin/complex/manifest/v2/chrome.json b/development/webpack/test/fixtures/ManifestPlugin/complex/manifest/v2/chrome.json new file mode 100644 index 000000000000..0967ef424bce --- /dev/null +++ b/development/webpack/test/fixtures/ManifestPlugin/complex/manifest/v2/chrome.json @@ -0,0 +1 @@ +{} diff --git a/development/webpack/test/fixtures/ManifestPlugin/complex/manifest/v3/_base.json b/development/webpack/test/fixtures/ManifestPlugin/complex/manifest/v3/_base.json new file mode 100644 index 000000000000..57cd1ac506d9 --- /dev/null +++ b/development/webpack/test/fixtures/ManifestPlugin/complex/manifest/v3/_base.json @@ -0,0 +1,10 @@ +{ + "description": "base description", + "manifest_version": 3, + "web_accessible_resources": [ + { + "matches": [""], + "resources": ["file.png"] + } + ] +} diff --git a/development/webpack/test/fixtures/ManifestPlugin/complex/manifest/v3/chrome.json b/development/webpack/test/fixtures/ManifestPlugin/complex/manifest/v3/chrome.json new file mode 100644 index 000000000000..0967ef424bce --- /dev/null +++ b/development/webpack/test/fixtures/ManifestPlugin/complex/manifest/v3/chrome.json @@ -0,0 +1 @@ +{} diff --git a/development/webpack/test/fixtures/ManifestPlugin/empty/manifest/v2/_base.json b/development/webpack/test/fixtures/ManifestPlugin/empty/manifest/v2/_base.json new file mode 100644 index 000000000000..5e74046537e7 --- /dev/null +++ b/development/webpack/test/fixtures/ManifestPlugin/empty/manifest/v2/_base.json @@ -0,0 +1,3 @@ +{ + "description": "base description" +} diff --git a/development/webpack/test/fixtures/ManifestPlugin/empty/manifest/v2/chrome.json b/development/webpack/test/fixtures/ManifestPlugin/empty/manifest/v2/chrome.json new file mode 100644 index 000000000000..0967ef424bce --- /dev/null +++ b/development/webpack/test/fixtures/ManifestPlugin/empty/manifest/v2/chrome.json @@ -0,0 +1 @@ +{} diff --git a/development/webpack/test/fixtures/ManifestPlugin/empty/manifest/v3/_base.json b/development/webpack/test/fixtures/ManifestPlugin/empty/manifest/v3/_base.json new file mode 100644 index 000000000000..4869ffc9cb61 --- /dev/null +++ b/development/webpack/test/fixtures/ManifestPlugin/empty/manifest/v3/_base.json @@ -0,0 +1,4 @@ +{ + "description": "base description", + "manifest_version": 3 +} diff --git a/development/webpack/test/fixtures/ManifestPlugin/empty/manifest/v3/chrome.json b/development/webpack/test/fixtures/ManifestPlugin/empty/manifest/v3/chrome.json new file mode 100644 index 000000000000..0967ef424bce --- /dev/null +++ b/development/webpack/test/fixtures/ManifestPlugin/empty/manifest/v3/chrome.json @@ -0,0 +1 @@ +{} diff --git a/development/webpack/test/fixtures/git/HEAD b/development/webpack/test/fixtures/git/HEAD new file mode 100644 index 000000000000..b870d82622c1 --- /dev/null +++ b/development/webpack/test/fixtures/git/HEAD @@ -0,0 +1 @@ +ref: refs/heads/main diff --git 
a/development/webpack/test/fixtures/git/objects/4b/825dc642cb6eb9a060e54bf8d69288fbee4904 b/development/webpack/test/fixtures/git/objects/4b/825dc642cb6eb9a060e54bf8d69288fbee4904 new file mode 100644 index 0000000000000000000000000000000000000000..adf64119a33d7621aeeaa505d30adb58afaa5559 GIT binary patch literal 15 WcmbQg;m&Ii}x!2hjqeM*$ Do@q*0 literal 0 HcmV?d00001 diff --git a/development/webpack/test/fixtures/git/refs/heads/main b/development/webpack/test/fixtures/git/refs/heads/main new file mode 100644 index 000000000000..38d8162043b8 --- /dev/null +++ b/development/webpack/test/fixtures/git/refs/heads/main @@ -0,0 +1 @@ +634cad6dd342dc5317b6c1677dc9ead3fb72f680 diff --git a/development/webpack/test/git.test.ts b/development/webpack/test/git.test.ts new file mode 100644 index 000000000000..ee7d9bef65d1 --- /dev/null +++ b/development/webpack/test/git.test.ts @@ -0,0 +1,33 @@ +import assert from 'node:assert'; +import { describe, it } from 'node:test'; +import { join } from 'node:path'; +import { getLatestCommit } from '../utils/git'; + +describe('getLatestCommit', () => { + const gitDir = join(__dirname, '.', 'fixtures', 'git'); + it('should return some values by default', () => { + const { hash, timestamp } = getLatestCommit(); + + assert.strictEqual(hash().length, 8, 'The hash length is wrong'); + assert.ok(typeof timestamp() === 'number', 'The timestamp type is wrong'); + }); + + it('should return the latest commit hash and timestamp', () => { + const { hash, timestamp } = getLatestCommit(gitDir); + + assert.strictEqual(hash(), '634cad6d', 'The hash is wrong'); + assert.strictEqual(timestamp(), 1711385030000, 'The timestamp is wrong'); + }); + + it('should use the cache', () => { + const firstCallCustom = getLatestCommit(gitDir); + const firstCallDefault = getLatestCommit(); + const secondCallCustom = getLatestCommit(gitDir); + const secondCallDefault = getLatestCommit(); + + assert.notStrictEqual(firstCallCustom, firstCallDefault); + assert.notStrictEqual(secondCallCustom, secondCallDefault); + assert.strictEqual(firstCallCustom, secondCallCustom); + assert.strictEqual(firstCallDefault, secondCallDefault); + }); +}); diff --git a/development/webpack/test/helpers.test.ts b/development/webpack/test/helpers.test.ts new file mode 100644 index 000000000000..fc1955eec3c6 --- /dev/null +++ b/development/webpack/test/helpers.test.ts @@ -0,0 +1,371 @@ +import fs from 'node:fs'; +import { describe, it, afterEach, beforeEach, mock } from 'node:test'; +import assert from 'node:assert'; +import { join } from 'node:path'; +import { + version, + type Chunk, + type Stats, + type Compilation, + type StatsOptions, + type StatsCompilation, +} from 'webpack'; +import * as helpers from '../utils/helpers'; +import { type Combination, generateCases } from './helpers'; + +describe('./utils/helpers.ts', () => { + afterEach(() => mock.restoreAll()); + + it('should return undefined when noop it called', () => { + const nothing = helpers.noop(); + assert.strictEqual(nothing, undefined); + }); + + it('should return all entries listed in the manifest and file system for manifest_version 2', () => { + const originalReaddirSync = fs.readdirSync; + const otherHtmlEntries = ['one.html', 'two.html']; + const appRoot = ''; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + mock.method(fs, 'readdirSync', function (path: string, options: any) { + if (path === appRoot) { + return [...otherHtmlEntries, 'three.not-html']; + } + return originalReaddirSync.call(fs, path, options); + }); + + const manifest 
= { + manifest_version: 2, + background: { + scripts: ['background.js'], + page: 'background.html', + }, + browser_action: { + // use one from `otherHtmlEntries`, to ensure we don't duplicate things + default_popup: otherHtmlEntries[0], + }, + // images/test.ing.png will be omitted from entry points + web_accessible_resources: ['images/test.ing.png', 'testing.js'], + content_scripts: [ + { + matches: ['file://*/*', 'http://*/*', 'https://*/*'], + js: ['scripts/contentscript.js', 'scripts/inpage.js'], + run_at: 'document_start', + all_frames: true, + }, + { + matches: ['*://connect.trezor.io/*/popup.html'], + js: ['vendor/trezor/content-script.js'], + }, + ], + } as helpers.ManifestV2; + const { entry, canBeChunked } = helpers.collectEntries(manifest, appRoot); + const expectedScripts = { + 'background.js': { + chunkLoading: false, + filename: 'background.js', + import: join(appRoot, `background.js`), + }, + 'scripts/contentscript.js': { + chunkLoading: false, + filename: 'scripts/contentscript.js', + import: join(appRoot, `scripts/contentscript.js`), + }, + 'scripts/inpage.js': { + chunkLoading: false, + filename: 'scripts/inpage.js', + import: join(appRoot, `/scripts/inpage.js`), + }, + 'vendor/trezor/content-script.js': { + chunkLoading: false, + filename: 'vendor/trezor/content-script.js', + import: join(appRoot, `vendor/trezor/content-script.js`), + }, + 'testing.js': { + chunkLoading: false, + filename: 'testing.js', + import: join(appRoot, `testing.js`), + }, + }; + const expectedHtml = { + background: join(appRoot, `background.html`), + one: join(appRoot, `one.html`), + two: join(appRoot, `two.html`), + // notice: three.not-html is NOT included, since it doesn't have an `.html` extension + }; + const expectedEntries = { ...expectedScripts, ...expectedHtml }; + assert.deepStrictEqual(entry, expectedEntries); + + const jsFiles = Object.keys(entry).filter((key) => key.endsWith('.js')); + assert(jsFiles.length > 0, "JS files weren't found in the manifest"); + jsFiles.forEach((name) => { + assert.strictEqual(canBeChunked({ name } as Chunk), false); + }); + + // scripts that are *not* in our manifest *can* be chunked + assert.strictEqual(canBeChunked({ name: 'anything.js' } as Chunk), true); + }); + + it('should return all entries listed in the manifest and file system for manifest_version 3', () => { + const originalReaddirSync = fs.readdirSync; + const otherHtmlEntries = ['one.html', 'two.html']; + const appRoot = ''; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + mock.method(fs, 'readdirSync', (path: string, options: any) => { + if (path === appRoot) { + return [...otherHtmlEntries, 'three.not-html']; + } + return originalReaddirSync.call(fs, path, options); + }); + + const manifest = { + name: 'MetaMask', + version: '1.0.0', + manifest_version: 3, + background: { + service_worker: 'background.js', + }, + web_accessible_resources: [ + { + matches: [''], + // images/test.ing.png will be omitted from entry points + resources: ['images/test.ing.png', 'testing.js'], + }, + ], + browser_action: { + // use one from `otherHtmlEntries`, to ensure we don't duplicate things + default_popup: otherHtmlEntries[0], + }, + content_scripts: [ + { + matches: ['file://*/*', 'http://*/*', 'https://*/*'], + js: ['scripts/contentscript.js'], + run_at: 'document_start', + all_frames: true, + }, + { + matches: ['*://connect.trezor.io/*/popup.html'], + js: ['vendor/trezor/content-script.js'], + }, + ], + } as helpers.ManifestV3; + const { entry, canBeChunked } = 
helpers.collectEntries(manifest, appRoot); + const expectedScripts = { + 'scripts/contentscript.js': { + chunkLoading: false, + filename: 'scripts/contentscript.js', + import: join(appRoot, `scripts/contentscript.js`), + }, + 'vendor/trezor/content-script.js': { + chunkLoading: false, + filename: 'vendor/trezor/content-script.js', + import: join(appRoot, `vendor/trezor/content-script.js`), + }, + 'background.js': { + chunkLoading: false, + filename: 'background.js', + import: join(appRoot, `background.js`), + }, + 'testing.js': { + chunkLoading: false, + filename: 'testing.js', + import: join(appRoot, `testing.js`), + }, + }; + const expectedHtml = { + one: join(appRoot, `one.html`), + two: join(appRoot, `two.html`), + // notice: three.not-html is NOT included, since it doesn't have an `.html` extension + }; + const expectedEntries = { + ...expectedScripts, + ...expectedHtml, + }; + assert.deepStrictEqual(entry, expectedEntries); + + const jsFiles = Object.keys(entry).filter((key) => key.endsWith('.js')); + assert(jsFiles.length > 0, "JS files weren't found in the manifest"); + jsFiles.forEach((name) => { + assert.strictEqual(canBeChunked({ name } as Chunk), false); + }); + + // scripts that are *not* in our manifest *can* be chunked + assert.strictEqual(canBeChunked({ name: 'anything.js' } as Chunk), true); + }); + + it('should handle manifest.json files with empty sections', () => { + const originalReaddirSync = fs.readdirSync; + const appRoot = ''; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + mock.method(fs, 'readdirSync', (path: string, options: any) => { + if (path === appRoot) { + return []; + } + return originalReaddirSync.call(fs, path, options); + }); + + const manifestv2 = { + manifest_version: 2, + background: {}, + } as helpers.ManifestV2; + const { entry: entryv2 } = helpers.collectEntries(manifestv2, appRoot); + assert.deepStrictEqual(entryv2, {}); + + const manifestv3 = { + name: 'MetaMask', + version: '1.0.0', + manifest_version: 3, + background: {}, + } as helpers.ManifestV3; + const { entry: entryv3 } = helpers.collectEntries(manifestv3, appRoot); + assert.deepStrictEqual(entryv3, {}); + }); + + it('should throw if an entry file starts with an underscore', () => { + const manifest = { + manifest_version: 2, + background: { + page: '_badfile.html', + }, + } as helpers.ManifestV2; + assert.throws( + () => helpers.collectEntries(manifest, ''), + /Error: Invalid Entrypoint Filename Detected/u, + ); + }); + + describe('logStats', () => { + const getStatsMock = ( + stats: 'normal' | 'none', + mode: 'development' | 'production', + hasError: boolean, + hasWarning: boolean, + ) => { + return { + hash: 'test-hash', + toJson: null as unknown as () => StatsCompilation, + endTime: 1000, + startTime: 0, + hasErrors: mock.fn(() => hasError), + hasWarnings: mock.fn(() => hasWarning), + compilation: { + options: { + mode, + stats, + }, + compiler: { + name: 'test-compiler-name', + }, + } as Compilation, + toString: mock.fn((_?: unknown) => 'test-stats'), + } as const satisfies Stats; + }; + + it('should log nothing if err and stats are both not defined', () => { + const { mock: error } = mock.method(console, 'error', helpers.noop); + helpers.logStats(undefined, undefined); + assert.strictEqual(error.callCount(), 0, 'error should not be called'); + }); + + it('should log only the error when error and stats are provided', () => { + const stats = getStatsMock('normal', 'production', false, false); + const { mock: error } = mock.method(console, 'error', helpers.noop); + 
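These suites lean on `node:test`'s built-in mocking rather than an external library: `mock.method()` swaps an implementation and exposes call counts and arguments on its `.mock` context. A standalone sketch of that pattern (the `console.log` example is illustrative and separate from the files in this diff):

import { describe, it, mock } from 'node:test';
import assert from 'node:assert';

describe('mock.method sketch', () => {
  it('records calls made to the replaced method', () => {
    // Replace console.log with a no-op and keep a handle to its mock context.
    const { mock: log } = mock.method(console, 'log', () => undefined);

    console.log('hello', 42);

    assert.strictEqual(log.callCount(), 1);
    assert.deepStrictEqual(log.calls[0].arguments, ['hello', 42]);

    // Restore the original implementation, as the suites here do in afterEach.
    mock.restoreAll();
  });
});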
const errorToLog = new Error('test error'); + + // should only log the error, and nothing else + helpers.logStats(errorToLog, stats); + + assert.strictEqual(error.callCount(), 1, 'error should be called'); + assert.deepStrictEqual( + error.calls[0].arguments, + [errorToLog], + 'error should be logged', + ); + assert.strictEqual( + stats.toString.mock.callCount(), + 0, + 'stats.toString should not be called', + ); + }); + + const matrix = { + colorDepth: [undefined, 1, 4, 8, 24] as const, + level: ['normal', 'none'] as const, + env: ['development', 'production'] as const, + hasErrors: [true, false] as const, + hasWarnings: [true, false] as const, + }; + + generateCases(matrix).forEach(runTest); + + function runTest(settings: Combination) { + const { colorDepth, level, env, hasErrors, hasWarnings } = settings; + + let testHelpers: typeof import('../utils/helpers'); + const originalGetColorDepth = process.stderr.getColorDepth; + beforeEach(() => { + // getColorDepth is undefined sometimes, so we need to mock it like this + process.stderr.getColorDepth = ( + colorDepth ? mock.fn(() => colorDepth) : colorDepth + ) as (env?: object | undefined) => number; + + // helpers caches `getColorDepth` on initialization, so we need to a new + // one after we mock `getColorDepth`. + delete require.cache[require.resolve('../utils/helpers')]; + testHelpers = require('../utils/helpers'); + }); + + afterEach(() => { + process.stderr.getColorDepth = originalGetColorDepth; + }); + + it(`should log message when stats is "${level}" and env is "${env}", with errors: \`${hasErrors}\` and warnings: \`${hasWarnings}\``, () => { + const stats = getStatsMock(level, env, hasErrors, hasWarnings); + const { mock: error } = mock.method(console, 'error', testHelpers.noop); + + testHelpers.logStats(null, stats); // <- this is what we are testing + + assert.strictEqual(error.callCount(), 1, 'error should be called once'); + + let toStringOptions: StatsOptions | undefined; + if (level === 'normal') { + toStringOptions = { colors: testHelpers.colors }; + } else if (hasErrors || hasWarnings) { + toStringOptions = { + colors: testHelpers.colors, + preset: 'errors-warnings', + }; + } + if (toStringOptions) { + assert.strictEqual( + stats.toString.mock.callCount(), + 1, + 'stats.toString should be called once', + ); + assert.deepStrictEqual( + stats.toString.mock.calls[0].arguments, + [toStringOptions], + 'stats should be called with the colors option', + ); + assert.deepStrictEqual( + error.calls[0].arguments, + [stats.toString(toStringOptions)], + 'stats should be logged', + ); + } else { + assert.strictEqual( + stats.toString.mock.callCount(), + 0, + 'stats.toString should not be called', + ); + const colorFn = + env === 'production' ? 
testHelpers.toOrange : testHelpers.toPurple; + const name = colorFn(`🦊 ${stats.compilation.compiler.name}`); + const status = testHelpers.toGreen('successfully'); + const time = stats.endTime - stats.startTime; + const expectedMessage = `${name} (webpack ${version}) compiled ${status} in ${time} ms`; + assert.deepStrictEqual(error.calls[0].arguments, [expectedMessage]); + } + }); + } + }); +}); diff --git a/development/webpack/test/helpers.ts b/development/webpack/test/helpers.ts new file mode 100644 index 000000000000..b57ec7b46a6e --- /dev/null +++ b/development/webpack/test/helpers.ts @@ -0,0 +1,115 @@ +import { mock } from 'node:test'; +import { + sources, + type Compiler, + type Chunk, + type WebpackOptionsNormalized, + type Asset, + type Compilation, +} from 'webpack'; + +const { SourceMapSource, RawSource } = sources; + +type Assets = { [k: string]: unknown }; + +export type Combination<T> = { + [P in keyof T]: T[P] extends readonly (infer U)[] ? U : never; +}; + +export function generateCases<T extends Record<string, readonly unknown[]>>(obj: T): Combination<T>[] { + return Object.entries(obj).reduce( + (acc, [key, value]) => { + return acc.flatMap((cases) => + value.map((cas: unknown) => ({ ...cases, [key]: cas })), + ); + }, + [{} as Combination<T>], + ); +} + +export function mockWebpack( + files: string[], + contents: (string | Buffer)[], + maps: (string | null)[], + devtool: 'source-map' | 'hidden-source-map' | false = 'source-map', +) { + const assets = files.reduce((acc, name, i) => { + const source = contents[i]; + const map = maps?.[i]; + const webpackSource = map + ? new SourceMapSource(source, name, map) + : new RawSource(source); + acc[name] = { + name, + info: { + size: webpackSource.size(), + }, + source: webpackSource, + }; + return acc; + }, {} as Record<string, Asset>); + let done: () => void; + const promise = new Promise((resolve) => { + done = resolve; + }); + const compilation = { + get assets() { + return Object.fromEntries( + Object.entries(assets).map(([name, asset]) => [name, asset.source]), + ); + }, + emitAsset: mock.fn((name, source, info) => { + assets[name] = { + name, + info, + source, + }; + }), + options: { + devtool, + } as unknown as WebpackOptionsNormalized, + chunks: new Set([ + { + files: new Set(Object.keys(assets)), + } as Chunk, + ]), + getAsset: mock.fn((name) => assets[name]), + updateAsset: mock.fn( + (name: string, fn: (source: sources.Source) => sources.Source) => { + return fn(assets[name].source); + }, + ), + deleteAsset: mock.fn((name: string) => { + delete assets[name]; + }), + hooks: { + processAssets: { + async tapPromise(_: unknown, fn: (assets: Assets) => Promise<void>) { + await fn(compilation.assets); + done(); + }, + tap(_: unknown, fn: (assets: Assets) => void) { + fn(compilation.assets); + done(); + }, + }, + }, + }; + const compiler = { + hooks: { + compilation: { + tap(_: unknown, fn: (compilation: Compilation) => void) { + fn(compilation as unknown as Compilation); + }, + }, + }, + webpack: { + sources: { SourceMapSource, RawSource }, + }, + } as Compiler; + return { + compiler, + compilation: compilation as Compilation & typeof compilation, + promise, + }; +} diff --git a/development/webpack/test/loaders.codeFenceLoader.test.ts b/development/webpack/test/loaders.codeFenceLoader.test.ts new file mode 100644 index 000000000000..a8a4dda59508 --- /dev/null +++ b/development/webpack/test/loaders.codeFenceLoader.test.ts @@ -0,0 +1,100 @@ +import { describe, it } from 'node:test'; +import assert from 'node:assert'; +import { LoaderContext } from 'webpack'; +import { FeatureLabels } from
'@metamask/build-utils'; +import codeFenceLoader, { + getCodeFenceLoader, + CodeFenceLoaderOptions, +} from '../utils/loaders/codeFenceLoader'; + +describe('codeFenceLoader', () => { + type CallbackArgs = Parameters< + LoaderContext<CodeFenceLoaderOptions>['callback'] + >; + + function generateData({ omitFeature }: { omitFeature: boolean }) { + const featureLabel = 'feature-label'; + const fencedSource = `///: BEGIN:ONLY_INCLUDE_IF(${featureLabel}) +console.log('I am Groot.'); +///: END:ONLY_INCLUDE_IF`; + const source = ` +console.log('I am Groot.'); +${fencedSource} +console.log('I am Groot.'); +`; + const expected = omitFeature + ? source.replace(`${fencedSource}\n`, '') + : source; + + let resolveCallback: (value: CallbackArgs) => void; + const mockContext = { + getOptions: () => { + return { + features: { + active: new Set(omitFeature ? [] : [featureLabel]), + all: new Set([featureLabel]), + }, + }; + }, + resourcePath: '', + callback: (...args: CallbackArgs) => resolveCallback(args), + } as unknown as LoaderContext<CodeFenceLoaderOptions>; + const deferredPromise = new Promise<CallbackArgs>((resolve) => { + resolveCallback = resolve; + }); + mockContext.callback = mockContext.callback.bind(mockContext); + return { context: mockContext, source, expected, deferredPromise }; + } + + [false, true].forEach((omitFeature) => { + it(`should ${omitFeature ? '' : 'not '}remove source when feature is ${ + omitFeature ? 'not ' : '' + }active`, async () => { + const data = generateData({ omitFeature }); + const returnValue = codeFenceLoader.call(data.context, data.source); + + assert.strictEqual(returnValue, undefined, 'should return undefined'); + const [err, content] = await data.deferredPromise; + assert.strictEqual(err, null); + assert.strictEqual(content, data.expected); + }); + }); + + it('should throw an error when options are invalid', () => { + const data = generateData({ omitFeature: false }); + data.context.getOptions = () => { + // invalid options + return {} as unknown as CodeFenceLoaderOptions; + }; + assert.throws( + () => codeFenceLoader.call(data.context, data.source), + /Invalid configuration object/u, + ); + }); + + it('should return an error when code fences are invalid', async () => { + const data = generateData({ omitFeature: false }); + data.source = '///: BEGIN:ONLY_INCLUDE_IF\nconsole.log("I am Groot.");\n'; // invalid because there is no end comment + const returnValue = codeFenceLoader.call(data.context, data.source); + assert.strictEqual(returnValue, undefined, 'should return undefined'); + const [err, content] = await data.deferredPromise; + assert(err); + assert.deepStrictEqual( + err.message, + 'Invalid code fence parameters in file "":\nNo parameters specified.', + ); + assert.strictEqual(content, undefined); + }); + + describe('getCodeFenceLoader', () => { + it('should return a loader with correct properties', () => { + const features: FeatureLabels = { active: new Set(), all: new Set() }; + const result = getCodeFenceLoader(features); + + assert.deepStrictEqual(result, { + loader: require.resolve('../utils/loaders/codeFenceLoader'), + options: { features }, + }); + }); + }); +}); diff --git a/development/webpack/test/loaders.swcLoader.test.ts b/development/webpack/test/loaders.swcLoader.test.ts new file mode 100644 index 000000000000..4e2e787c0e73 --- /dev/null +++ b/development/webpack/test/loaders.swcLoader.test.ts @@ -0,0 +1,132 @@ +import { describe, it, afterEach } from 'node:test'; +import assert from 'node:assert'; +import { LoaderContext } from 'webpack'; +import swcLoader, { + type SwcLoaderOptions, + type
SwcConfig, +} from '../utils/loaders/swcLoader'; +import { Combination, generateCases } from './helpers'; + +describe('swcLoader', () => { + type CallbackArgs = Parameters<LoaderContext<SwcLoaderOptions>['callback']>; + + function generateData() { + const source = ` export function hello(message: string) { + console.log(message) +}; `; + const expected = `export function hello(message) { + console.log(message); +} +`; + + // swc doesn't use node's fs module, so we can't mock + const resourcePath = 'test.ts'; + + let resolveCallback: (value: CallbackArgs) => void; + const mockContext = { + mode: 'production', + sourceMap: true, + getOptions: () => { + return {}; + }, + resourcePath, + async: () => { + return (...args: CallbackArgs) => { + resolveCallback(args); + }; + }, + } as unknown as LoaderContext<SwcLoaderOptions>; + const deferredPromise = new Promise<CallbackArgs>((resolve) => { + resolveCallback = resolve; + }); + mockContext.async = mockContext.async.bind(mockContext); + return { context: mockContext, source, expected, deferredPromise }; + } + + it('should transform code', async () => { + const { context, source, deferredPromise, expected } = generateData(); + const returnValue = swcLoader.call(context, source); + + assert.strictEqual(returnValue, undefined, 'should return undefined'); + const [err, content, map] = await deferredPromise; + assert.strictEqual(err, null); + assert.strictEqual(content, expected); + const mapObj = JSON.parse(map as string); + assert.deepStrictEqual(mapObj.sources, [context.resourcePath]); + }); + + it('should throw an error when options are invalid', () => { + const { context, source } = generateData(); + context.getOptions = () => { + return { + invalid: true, + } as unknown as SwcLoaderOptions; + }; + assert.throws( + () => swcLoader.call(context, source), + /[ValidationError]: Invalid configuration object/u, + ); + }); + + it('should return an error when code is invalid', async () => { + const { context, deferredPromise } = generateData(); + const brokenSource = 'this is not real code;'; + swcLoader.call(context, brokenSource); + const [err, content, map] = await deferredPromise; + assert(err); + assert.match(err.message, /Syntax Error/u); + assert.strictEqual(content, undefined); + assert.strictEqual(map, undefined); + }); + + describe('getSwcLoader', () => { + const matrix = { + syntax: ['typescript', 'ecmascript'] as const, + enableJsx: [true, false] as const, + watch: [true, false] as const, + isDevelopment: [true, false] as const, + }; + generateCases(matrix).forEach(runTest); + + type TestCase = Combination<typeof matrix>; + + afterEach(() => { + delete process.env.__HMR_READY__; + }); + function runTest({ syntax, enableJsx, watch, isDevelopment }: TestCase) { + it(`should return a loader with correct properties when syntax is ${syntax}, jsx is ${enableJsx}, watch is ${watch}, and isDevelopment is ${isDevelopment}`, () => { + process.env.__HMR_READY__ = 'true'; + // helpers caches `__HMR_READY__` on initialization, so we need to load a new + // one after we mock `process.env.__HMR_READY__`. + delete require.cache[require.resolve('../utils/helpers')]; + delete require.cache[require.resolve('../utils/loaders/swcLoader')]; + const { + getSwcLoader, + }: typeof import('../utils/loaders/swcLoader') = require('../utils/loaders/swcLoader'); + + // note: this test isn't exhaustive of all possible `swcConfig` + // properties; it is mostly intended as a sanity check.
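As an aside, the `matrix`/`generateCases` pattern used here (and in the other suites) expands an options object into the cartesian product of its values, one test case per combination. A minimal sketch of the expansion, assuming the `Combination` and `generateCases` exports from `./helpers` shown earlier in this patch:

import { generateCases, type Combination } from './helpers';

// A two-axis matrix expands to 2 × 2 = 4 combinations, one test case each.
const exampleMatrix = {
  syntax: ['typescript', 'ecmascript'] as const,
  enableJsx: [true, false] as const,
};

const cases: Combination<typeof exampleMatrix>[] = generateCases(exampleMatrix);
// e.g. [{ syntax: 'typescript', enableJsx: true }, { syntax: 'typescript', enableJsx: false }, ...]
cases.forEach(({ syntax, enableJsx }) => {
  console.log(`case: syntax=${syntax}, jsx=${enableJsx}`);
});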
+ const swcConfig: SwcConfig = { + args: { watch }, + safeVariables: {}, + browsersListQuery: '', + isDevelopment, + }; + + const loader = getSwcLoader(syntax, enableJsx, swcConfig); + assert.strictEqual( + loader.loader, + require.resolve('../utils/loaders/swcLoader'), + ); + assert.deepStrictEqual(loader.options.jsc.parser, { + syntax, + [syntax === 'typescript' ? 'tsx' : 'jsx']: enableJsx, + }); + assert.deepStrictEqual(loader.options.jsc.transform.react, { + development: isDevelopment, + refresh: isDevelopment && watch, + }); + }); + } + }); +}); diff --git a/development/webpack/test/plugins.ManifestPlugin.test.ts b/development/webpack/test/plugins.ManifestPlugin.test.ts new file mode 100644 index 000000000000..ff14904bb8d6 --- /dev/null +++ b/development/webpack/test/plugins.ManifestPlugin.test.ts @@ -0,0 +1,284 @@ +import { describe, it } from 'node:test'; +import assert from 'node:assert'; +import { join } from 'node:path'; +import { type Compilation } from 'webpack'; +import { ManifestPlugin } from '../utils/plugins/ManifestPlugin'; +import { ZipOptions } from '../utils/plugins/ManifestPlugin/types'; +import { Manifest } from '../utils/helpers'; +import { transformManifest } from '../utils/plugins/ManifestPlugin/helpers'; +import { generateCases, type Combination, mockWebpack } from './helpers'; + +describe('ManifestPlugin', () => { + describe('Plugin', () => { + const matrix = { + zip: [true, false], + files: [ + [ + { + // will be compressed + name: 'filename.js', + source: Buffer.from('console.log(1 + 2);', 'utf8'), + }, + ], + [ + { + // will be compressed + name: 'filename.js', + source: Buffer.from('console.log(1 + 2);', 'utf8'), + }, + { + // will be omitted + name: 'filename.js.map', + source: Buffer.alloc(0), + }, + { + // will not be compressed + name: 'pixel.png', + source: Buffer.from([ + 137, 80, 78, 71, 13, 10, 26, 10, 0, 0, 0, 13, 73, 72, 68, 82, 0, + 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 55, 110, 249, 36, 0, 0, 0, 10, + 73, 68, 65, 84, 120, 1, 99, 96, 0, 0, 0, 2, 0, 1, 115, 117, 1, 24, + 0, 0, 0, 0, 73, 69, 78, 68, 174, 66, 96, 130, + ]), + }, + ], + ], + browsers: [['chrome', 'firefox'], ['chrome']] as const, + fixture: ['empty', 'complex'], + description: [null, 'description'], + manifestVersion: [2, 3] as const, + webAccessibleResources: [undefined, ['filename.map.js']], + }; + generateCases(matrix).forEach(runTest); + + type TestCase = Combination; + + function runTest(testCase: TestCase) { + const { + browsers, + fixture, + files, + description, + manifestVersion, + webAccessibleResources, + zip, + } = testCase; + const context = join(__dirname, `fixtures/ManifestPlugin/${fixture}`); + const baseManifest = require(join( + context, + `manifest/v${manifestVersion}`, + '_base.json', + )); + const expectedAssets = getExpectedAssets(zip, browsers, files); + const validateManifest = getValidateManifest(testCase, baseManifest); + + it(`should produce a ${ + zip ? 'zip file' : 'folder' + } for browsers [${browsers.join( + ', ', + )}] using the v${manifestVersion} "${fixture}" manifest, including files [${files + .map((file) => file.name) + .join(', ')}], ${ + description ? 'a description' : 'no description' + }, and ${ + webAccessibleResources ? 
webAccessibleResources.length : 0 + } web_accessible_resources`, async () => { + const { compiler, compilation, promise } = mockWebpack( + files.map(({ name }) => name), + files.map(({ source }) => source), + files.map(() => null), + ); + compilation.options.context = context; + const manifestPlugin = new ManifestPlugin({ + browsers, + manifest_version: manifestVersion, + version: '1.0.0.0', + versionName: '1.0.0', + description, + web_accessible_resources: webAccessibleResources, + ...getZipOptions(zip), + }); + manifestPlugin.apply(compiler); + await promise; + + assert.deepStrictEqual(Object.keys(compilation.assets), expectedAssets); + validateManifest(compilation as unknown as Compilation); + }); + } + + function getZipOptions( + zip: boolean, + ): ({ zip: true } & ZipOptions) | { zip: false } { + if (zip) { + return { + zip: true, + zipOptions: { + level: 0, + mtime: 1711141205825, + excludeExtensions: ['.map'], + outFilePath: '[browser]/extension.zip', + }, + }; + } + return { + zip: false, + }; + } + + function getExpectedAssets( + zip: boolean, + browsers: readonly string[], + files: { name: string }[], + ) { + const assets: string[] = []; + if (zip) { + browsers.forEach((browser) => { + assets.push(`${browser}/extension.zip`); + }); + } + browsers.forEach((browser) => { + assets.push(`${browser}/manifest.json`); + assets.push(...files.map(({ name }) => `${browser}/${name}`)); + }); + return [...new Set(assets)]; // unique + } + function getValidateManifest(testCase: TestCase, baseManifest: Manifest) { + // Handle case when the output is a zip file + if (testCase.zip) { + return () => { + // Assume the validation is successful, as unzipping and checking contents is skipped + assert.ok(true, 'Zip file creation assumed successful.'); + }; + } + + // Common validation for non-zip outputs, applicable to both manifest versions 2 and 3 + return (compilation: Compilation) => { + testCase.browsers.forEach((browser) => { + const manifest = compilation.assets[`${browser}/manifest.json`]; + const json = JSON.parse(manifest.source().toString()) as Manifest; + + // Validate description, if applicable + if (testCase.description) { + assert( + json.description, + "should have a 'description' in the manifest", + ); + const descMessage = `should have the correct description in ${browser} manifest`; + assert( + json.description.endsWith(testCase.description), + descMessage, + ); + } + + // Validate web accessible resources + let expectedWar: Manifest['web_accessible_resources']; + if (testCase.webAccessibleResources) { + if (baseManifest.manifest_version === 3) { + // Extend expected resources for manifest version 3 + expectedWar = baseManifest.web_accessible_resources || []; + expectedWar = [ + { + // the manifest plugin only supports `` for manifest version 3 + // so we don't test other `matches`. 
+ matches: ['<all_urls>'], + resources: [ + ...(expectedWar[0]?.resources || []), + ...testCase.webAccessibleResources, + ], + }, + ]; + } else { + expectedWar = baseManifest.web_accessible_resources || []; + // Keep or extend expected resources for manifest version 2 + expectedWar = [ + ...expectedWar, + ...testCase.webAccessibleResources, + ]; + } + } else { + expectedWar = baseManifest.web_accessible_resources || []; + } + + assert.deepStrictEqual( + json.web_accessible_resources || [], + expectedWar, + "should have the correct 'web_accessible_resources' in the manifest", + ); + }); + }; + } + }); + + describe('should transform the manifest object', () => { + const keep = ['scripts/contentscript.js', 'scripts/inpage.js']; + const argsMatrix = { + lockdown: [true, false], + test: [true, false], + }; + const manifestMatrix = { + content_scripts: [ + undefined, + [], + [{ js: [...keep] }], + [{ js: ['lockdown.js', ...keep] }], + ], + permissions: [undefined, [], ['tabs'], ['something']], + }; + generateCases(argsMatrix).forEach(setupTests); + + function setupTests(args: Combination<typeof argsMatrix>) { + generateCases(manifestMatrix).forEach(runTest); + + function runTest(baseManifest: Combination<typeof manifestMatrix>) { + const manifest = baseManifest as unknown as chrome.runtime.Manifest; + const hasTabsPermission = (manifest.permissions || []).includes('tabs'); + const transform = transformManifest(args); + + if (args.test && hasTabsPermission) { + it("throws in test mode when manifest already contains 'tabs' permission", () => { + assert(transform, 'transform should be truthy'); + const p = () => { + transform(manifest, 'chrome'); + }; + assert.throws( + p, + /manifest contains 'tabs' already; this transform should be removed./u, + 'should throw when manifest contains tabs already', + ); + }); + } else if (!args.lockdown || args.test) { + it(`works for args.test of ${args.test} and args.lockdown of ${ + args.lockdown + }.
Manifest: ${JSON.stringify(manifest)}`, () => { + assert(transform, 'transform should be truthy'); + const transformed = transform(manifest, 'chrome'); + if (args.lockdown) { + assert.deepStrictEqual( + transformed.content_scripts, + manifest.content_scripts, + 'nothing should change in lockdown mode', + ); + } else { + const stripped = manifest.content_scripts?.[0]?.js?.filter( + (js) => js !== 'lockdown.js', + ); + assert.deepStrictEqual( + transformed.content_scripts?.[0]?.js, + stripped, + 'lockdown.js should be removed when not in lockdown mode.', + ); + } + + if (args.test) { + assert.deepStrictEqual( + transformed.permissions, + [...(manifest.permissions || []), 'tabs'], + "manifest should have 'tabs' permission", + ); + } + }); + } + } + } + }); +}); diff --git a/development/webpack/test/plugins.SelfInjectPlugin.test.ts b/development/webpack/test/plugins.SelfInjectPlugin.test.ts new file mode 100644 index 000000000000..3a3ef729eacf --- /dev/null +++ b/development/webpack/test/plugins.SelfInjectPlugin.test.ts @@ -0,0 +1,95 @@ +import { describe, it } from 'node:test'; +import assert from 'node:assert'; +import { SelfInjectPlugin } from '../utils/plugins/SelfInjectPlugin'; +import { generateCases, type Combination, mockWebpack } from './helpers'; + +describe('SelfInjectPlugin', () => { + const matrix = { + test: [/\.js$/u, /\.ts$/u] as const, + filename: ['file.js', 'file.ts'], + source: ['console.log(3);'], + // sourceMap generated via https://www.digitalocean.com/community/tools/minify + map: [ + null, + '{"version":3,"file":"file.js","names":["console","log"],"sources":["0"],"mappings":"AAAAA,QAAQC,IAAI"}', + ], + devtool: ['source-map', 'hidden-source-map', false] as const, + }; + + generateCases(matrix).forEach(runTest); + + function runTest({ + test, + filename, + source, + map, + devtool, + }: Combination) { + it(`should produce valid output when test is ${test}, filename is ${filename}, map is ${ + map ? 'available' : 'missing' + }, and devtool is ${devtool}`, () => { + const { compiler, compilation } = mockWebpack( + [filename], + [source], + [map], + devtool, + ); + + const plugin = new SelfInjectPlugin({ test }); + plugin.apply(compiler); + + if (filename.match(test)) { + // we should have matched our file so it should have been updated: + + assert.strictEqual(compilation.updateAsset.mock.callCount(), 1); + const newAsset = compilation.updateAsset.mock.calls[0].result; + assert(newAsset, 'newAsset should be defined'); + const { source: newSource, map: newMap } = newAsset.sourceAndMap(); + + // `newMap` should be `null` here, because the file has been transformed + // to be self-injecting, so there is no way to map it anymore. + assert.strictEqual(newMap, null); + + if (map !== null && devtool === 'source-map') { + // if we have a map and devtool is `source-map` the newSource should + // reference the `sourceMappingURL` + assert.strictEqual( + newSource, + `{let d=document,s=d.createElement('script');s.textContent="${source}\\n//# sourceMappingURL=${filename}.map"+\`\\n//# sourceURL=\${(globalThis.browser||chrome).runtime.getURL("${filename}")};\`;d.documentElement.appendChild(s).remove()}`, + ); + } else { + // the new source should NOT reference the new sourcemap, since it's + // "hidden" (or we aren't generating source maps at all). Notice that + // we DO still include `sourceURL`, as this aids in debugging + // (and development) gives the injected source a name that will show + // in the console if the source throws an exception or logs to the + // console. 
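To make the expected one-liners above easier to read, here is an unminified sketch of the behaviour they encode at runtime; it is illustrative only, and the exact string the plugin emits is what the assertions check:

// Illustrative expansion of the self-injecting wrapper asserted above.
declare const chrome: { runtime: { getURL: (path: string) => string } };

function selfInject(bundledSource: string, filename: string): void {
  const d = document;
  const s = d.createElement('script');
  // `sourceURL` names the injected code in devtools so stack traces and
  // console output point back at the original file.
  const api = (globalThis as { browser?: typeof chrome }).browser ?? chrome;
  s.textContent = `${bundledSource}\n//# sourceURL=${api.runtime.getURL(filename)};`;
  // The inline script executes synchronously on append; remove it right away.
  d.documentElement.appendChild(s).remove();
}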
+ assert.strictEqual( + newSource, + `{let d=document,s=d.createElement('script');s.textContent="console.log(3);"+\`\\n//# sourceURL=\${(globalThis.browser||chrome).runtime.getURL("${filename}")};\`;d.documentElement.appendChild(s).remove()}`, + ); + } + + if (map) { + // If we provided a `map` the source map should have been emitted as + // a separate asset. Note that this happens even when devtool is set + // to `false`, as this means the map file already existed and we + // should not remove it (we don't care how it got there). The devtool + // directive is about whether to generate a new map, not whether to + // emit an existing one. + assert.strictEqual(compilation.emitAsset.mock.callCount(), 1); + const [sourceMapFilename, sourceMapSource] = + compilation.emitAsset.mock.calls[0].arguments; + assert.strictEqual(sourceMapFilename, `${filename}.map`); + assert.strictEqual(sourceMapSource.source(), map); + } + } else { + // we should not have matched our file so there should be no changes + assert.strictEqual(compilation.updateAsset.mock.callCount(), 0); + + // and no new assets should have been emitted + assert.strictEqual(compilation.emitAsset.mock.callCount(), 0); + } + }); + } +}); diff --git a/development/webpack/test/version.test.ts b/development/webpack/test/version.test.ts new file mode 100644 index 000000000000..14a98939df98 --- /dev/null +++ b/development/webpack/test/version.test.ts @@ -0,0 +1,118 @@ +import { describe, it, before } from 'node:test'; +import assert from 'node:assert'; +import { getExtensionVersion } from '../utils/version'; + +describe('getMetaMaskVersion', () => { + const MIN_ID = 10; + const MAX_ID = 64; + const MIN_RELEASE = 0; + const MAX_RELEASE = 999; + + describe('exceptions', () => { + it(`should throw for build with negative id (-1)`, () => { + const test = () => getExtensionVersion('main', { id: -1 }, 0); + assert.throws(test); + }); + + it('should throw for build with an invalid id (0)', () => { + const test = () => getExtensionVersion('main', { id: 0 }, 0); + assert.throws(test); + }); + + it(`should throw for build with an invalid id (${MIN_ID - 1})`, () => { + const test = () => getExtensionVersion('main', { id: MIN_ID - 1 }, 0); + assert.throws(test); + }); + + it(`should throw for build with invalid id (${MAX_ID + 1})`, () => { + const test = () => getExtensionVersion('main', { id: MAX_ID + 1 }, 0); + assert.throws(test); + }); + + it('should throw when computing the version for build with prerelease implicitly disallowed, release version: 1', () => { + const test = () => getExtensionVersion('main', { id: 10 }, 1); + assert.throws(test); + }); + + it('should throw when computing the version for build with prerelease explicitly disallowed, release version: 1', () => { + const test = () => + getExtensionVersion('main', { id: 10, isPrerelease: false }, 1); + assert.throws(test); + }); + + it(`should throw when computing the version for build with prerelease disallowed, release version: ${ + MAX_RELEASE + 1 + }`, () => { + const test = () => + getExtensionVersion('main', { id: 10 }, MAX_RELEASE + 1); + assert.throws(test); + }); + + it(`should throw for allowed prerelease, bad release version: ${ + MIN_RELEASE - 1 + }`, () => { + const test = () => + getExtensionVersion( + 'beta', + { id: 11, isPrerelease: true }, + MIN_RELEASE - 1, + ); + assert.throws(test); + }); + + it(`should throw when computing the version for allowed prerelease, bad release version: ${ + MAX_RELEASE + 1 + }`, () => { + const test = () => + getExtensionVersion( + 'beta', + { 
id: 11, isPrerelease: true }, + MAX_RELEASE + 1, + ); + assert.throws(test); + }); + }); + + describe('success', () => { + let pVersion: string; + before(() => { + pVersion = require('../../../package.json').version; + }); + + it(`for build with prerelease disallowed, id: ${MIN_ID}, release version: ${MIN_RELEASE}`, () => { + const mmVersion = getExtensionVersion( + 'main', + { id: MIN_ID }, + MIN_RELEASE, + ); + assert.deepStrictEqual(mmVersion, { + version: `${pVersion}.0`, + versionName: pVersion, + }); + }); + + it(`should return the computed version for allowed prerelease, id: ${MIN_ID}, release version: ${MIN_RELEASE}`, () => { + const mmVersion = getExtensionVersion( + 'beta', + { id: MIN_ID, isPrerelease: true }, + MIN_RELEASE, + ); + assert.deepStrictEqual(mmVersion, { + version: `${pVersion}.${MIN_ID}${MIN_RELEASE}`, + versionName: `${pVersion}-beta.${MIN_RELEASE}`, + }); + }); + + it(`should return the computed version for allowed prerelease, id: ${MAX_ID}, release version: ${MAX_RELEASE}`, () => { + const mmVersion = getExtensionVersion( + 'beta', + { id: MAX_ID, isPrerelease: true }, + MAX_RELEASE, + ); + assert.deepStrictEqual(mmVersion, { + version: `${pVersion}.${MAX_ID}${MAX_RELEASE}`, + versionName: `${pVersion}-beta.${MAX_RELEASE}`, + }); + }); + }); +}); diff --git a/development/webpack/test/webpack.config.test.ts b/development/webpack/test/webpack.config.test.ts new file mode 100644 index 000000000000..e1d11f953829 --- /dev/null +++ b/development/webpack/test/webpack.config.test.ts @@ -0,0 +1,307 @@ +import fs from 'node:fs'; +import { describe, it, afterEach, before, after, mock } from 'node:test'; +import assert from 'node:assert'; +import process from 'node:process'; +import { resolve } from 'node:path'; +import { + type Configuration, + webpack, + Compiler, + WebpackPluginInstance, +} from 'webpack'; +import { noop } from '../utils/helpers'; +import { ManifestPlugin } from '../utils/plugins/ManifestPlugin'; +import { getLatestCommit } from '../utils/git'; +import { ManifestPluginOptions } from '../utils/plugins/ManifestPlugin/types'; + +function getWebpackInstance(config: Configuration) { + // webpack logs a warning if we pass config.watch to it without a callback + // we don't want a callback because that will cause the build to run + // so we just delete the watch property. + delete config.watch; + return webpack(config); +} + +/** + * These tests are aimed at testing conditional branches in webpack.config.ts. + * These tests do *not* test the actual webpack build process itself, or that + * the parsed command line args are even valid. Instead, these tests ensure the + * branches of configuration options are reached and applied correctly. + */ + +describe('webpack.config.test.ts', () => { + let originalArgv: string[]; + let originalEnv: NodeJS.ProcessEnv; + const originalReadFileSync = fs.readFileSync; + before(() => { + // cache originals before we start messing with them + originalArgv = process.argv; + originalEnv = process.env; + }); + after(() => { + // restore originals for other tests + process.argv = originalArgv; + process.env = originalEnv; + }); + afterEach(() => { + // reset argv to avoid affecting other tests + process.argv = [process.argv0, process.argv[1]]; + // each test needs to load a fresh config, so we need to clear webpack's cache + // TODO: can we use `await import` instead to get a fresh copy each time? 
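The cache clearing below is the standard CommonJS way to re-evaluate a module whose top-level code reads `process.argv` and `process.env`. A generic sketch of the idea (the `freshRequire` helper is illustrative and not something this patch adds):

// Illustrative helper: force a module's top-level code to run again with the
// current process.argv / process.env by dropping its cache entry first.
function freshRequire<ModuleType = unknown>(modulePath: string): ModuleType {
  const resolved = require.resolve(modulePath);
  delete require.cache[resolved];
  // eslint-disable-next-line @typescript-eslint/no-var-requires
  return require(resolved) as ModuleType;
}

// e.g. process.argv = [process.argv0, process.argv[1], '--watch'];
//      const config = freshRequire<{ default: object }>('../webpack.config.ts').default;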
+ const cliPath = require.resolve('../utils/cli.ts'); + const helpersPath = require.resolve('../utils/helpers.ts'); + const webpackConfigPath = require.resolve('../webpack.config.ts'); + delete require.cache[cliPath]; + delete require.cache[helpersPath]; + delete require.cache[webpackConfigPath]; + mock.restoreAll(); + }); + + function getWebpackConfig(args: string[] = [], env: NodeJS.ProcessEnv = {}) { + // argv is automatically read when webpack.config is required/imported. + // first two args are always ignored. + process.argv = [...process.argv.slice(0, 2), ...args]; + process.env = { ...env }; + mock.method(fs, 'readFileSync', (path: string, options?: null) => { + if (path === resolve(__dirname, '../../../.metamaskrc')) { + // mock `.metamaskrc`, as users might have customized it which may + // break our tests + return ` +${Object.entries(env) + .map(([key, value]) => `${key}=${value}`) + .join('\n')} +`; + } + return originalReadFileSync.call(fs, path, options); + }); + return require('../webpack.config.ts').default; + } + + it('should have the correct defaults', () => { + const config: Configuration = getWebpackConfig(); + // check that options are valid + const { options } = webpack(config); + assert.strictEqual(options.name, 'MetaMask – development'); + assert.strictEqual(options.mode, 'development'); + assert(options.cache); + assert.strictEqual(options.cache.type, 'filesystem'); + assert.strictEqual(options.devtool, 'source-map'); + const stats = options.stats as { preset: string }; + assert.strictEqual(stats.preset, 'none'); + const fallback = options.resolve.fallback as Record; + assert.strictEqual(typeof fallback['react-devtools'], 'string'); + assert.strictEqual(typeof fallback['remote-redux-devtools'], 'string'); + assert.strictEqual(options.optimization.minimize, false); + assert.strictEqual(options.optimization.sideEffects, false); + assert.strictEqual(options.optimization.providedExports, false); + assert.strictEqual(options.optimization.removeAvailableModules, false); + assert.strictEqual(options.optimization.usedExports, false); + assert.strictEqual(options.watch, false); + + const runtimeChunk = options.optimization.runtimeChunk as + | { + name?: (chunk: { name?: string }) => string | false; + } + | undefined; + assert(runtimeChunk); + assert(runtimeChunk.name); + assert(typeof runtimeChunk.name, 'function'); + assert.strictEqual( + runtimeChunk.name({ name: 'snow.prod' }), + false, + 'snow.prod should not be chunked', + ); + assert.strictEqual( + runtimeChunk.name({ name: 'use-snow' }), + false, + 'use-snow should not be chunked', + ); + assert.strictEqual( + runtimeChunk.name({ name: '< random >' }), + 'runtime', + 'other names should be chunked', + ); + assert.strictEqual( + runtimeChunk.name({}), + 'runtime', + 'chunks without a name name should be chunked', + ); + + const manifestPlugin = options.plugins.find( + (plugin) => plugin && plugin.constructor.name === 'ManifestPlugin', + ) as ManifestPlugin; + assert(manifestPlugin, 'Manifest plugin should be present'); + assert.deepStrictEqual(manifestPlugin.options.web_accessible_resources, [ + 'scripts/inpage.js.map', + 'scripts/contentscript.js.map', + ]); + assert.deepStrictEqual( + manifestPlugin.options.description, + `development build from git id: ${getLatestCommit().hash()}`, + ); + assert(manifestPlugin.options.transform); + assert.deepStrictEqual( + manifestPlugin.options.transform( + { + manifest_version: 3, + name: 'name', + version: '1.2.3', + content_scripts: [ + { + js: [ + 'ignored', + 
'scripts/contentscript.js', + 'scripts/inpage.js', + 'ignored', + ], + }, + ], + }, + 'brave', + ), + { + manifest_version: 3, + name: 'name', + version: '1.2.3', + content_scripts: [ + { + js: ['scripts/contentscript.js', 'scripts/inpage.js'], + }, + ], + }, + ); + assert.strictEqual(manifestPlugin.options.zip, false); + const manifestOpts = manifestPlugin.options as ManifestPluginOptions; + assert.strictEqual(manifestOpts.zipOptions, undefined); + + const progressPlugin = options.plugins.find( + (plugin) => plugin && plugin.constructor.name === 'ProgressPlugin', + ); + assert(progressPlugin, 'Progress plugin should present'); + }); + + it('should apply non-default options', () => { + const removeUnsupportedFeatures = ['--no-lavamoat']; + const config: Configuration = getWebpackConfig( + [ + '--env', + 'production', + '--watch', + '--stats', + '--no-progress', + '--no-cache', + '--zip', + ...removeUnsupportedFeatures, + ], + { + INFURA_PROD_PROJECT_ID: '00000000000000000000000000000000', + SEGMENT_WRITE_KEY: '-', + SEGMENT_PROD_WRITE_KEY: '-', + }, + ); + // webpack logs a warning if we specify `watch: true`, `getWebpackInstance` + // removes the property, so we test it here instead + assert.strictEqual(config.watch, true); + + // check that options are valid + const instance: Compiler = getWebpackInstance(config); + assert.strictEqual(instance.options.name, 'MetaMask – production'); + assert.strictEqual(instance.options.mode, 'production'); + assert.ok(instance.options.cache); + assert.strictEqual(instance.options.cache.type, 'memory'); + assert.strictEqual(instance.options.devtool, 'hidden-source-map'); + const stats = instance.options.stats as { preset: string }; + assert.strictEqual(stats.preset, 'normal'); + const fallback = instance.options.resolve.fallback as Record; + assert.strictEqual(fallback['react-devtools'], false); + assert.strictEqual(fallback['remote-redux-devtools'], false); + assert.strictEqual(instance.options.optimization.minimize, true); + assert.strictEqual(instance.options.optimization.sideEffects, true); + assert.strictEqual(instance.options.optimization.providedExports, true); + assert.strictEqual( + instance.options.optimization.removeAvailableModules, + true, + ); + assert.strictEqual(instance.options.optimization.usedExports, true); + + const manifestPlugin = instance.options.plugins.find( + (plugin) => plugin && plugin.constructor.name === 'ManifestPlugin', + ) as WebpackPluginInstance; + assert.deepStrictEqual(manifestPlugin.options.web_accessible_resources, []); + assert.deepStrictEqual(manifestPlugin.options.description, null); + assert.deepStrictEqual(manifestPlugin.options.zip, true); + assert(manifestPlugin.options.zipOptions, 'Zip options should be present'); + assert.strictEqual(manifestPlugin.options.transform, undefined); + + const progressPlugin = instance.options.plugins.find( + (plugin) => plugin && plugin.constructor.name === 'ProgressPlugin', + ); + assert.strictEqual( + progressPlugin, + undefined, + 'Progress plugin should be absent', + ); + }); + + it('should allow disabling source maps', () => { + const config: Configuration = getWebpackConfig(['--devtool', 'none']); + // check that options are valid + const instance = getWebpackInstance(config); + assert.strictEqual(instance.options.devtool, false); + }); + + it('should write the `dry-run` message then call exit(0)', () => { + const exit = mock.method(process, 'exit', noop, { times: 1 }); + const error = mock.method(console, 'error', noop, { times: 1 }); + + // we don't care about the 
return value, just that it logs and calls `exit` + getWebpackConfig(['--dry-run']); + assert.strictEqual(error.mock.calls.length, 1); + assert.strictEqual(error.mock.calls[0].arguments.length, 1); + // we don't care about the message, just that it is logged + assert.strictEqual(typeof error.mock.calls[0].arguments[0], 'string'); + + assert.strictEqual(exit.mock.calls.length, 1); + assert.strictEqual(exit.mock.calls[0].arguments.length, 1); + assert.strictEqual(exit.mock.calls[0].arguments[0], 0); + }); + + it('should write the `dryRun` message then call exit(0)', () => { + const exit = mock.method(process, 'exit', noop, { times: 1 }); + const error = mock.method(console, 'error', noop, { times: 1 }); + + // we don't care about the return value, just that it logs and calls `exit` + getWebpackConfig(['--dryRun']); + assert.strictEqual(error.mock.calls.length, 1); + assert.strictEqual(error.mock.calls[0].arguments.length, 1); + // we don't care about the message, just that it is logged + assert.strictEqual(typeof error.mock.calls[0].arguments[0], 'string'); + + assert.strictEqual(exit.mock.calls.length, 1); + assert.strictEqual(exit.mock.calls[0].arguments.length, 1); + assert.strictEqual(exit.mock.calls[0].arguments[0], 0); + }); + + it('should enable ReactRefreshPlugin in a development env when `--watch` is specified', () => { + const config: Configuration = getWebpackConfig(['--watch'], { + __HMR_READY__: 'true', + }); + delete config.watch; + const instance = webpack(config); + const reactRefreshPlugin = instance.options.plugins.find( + (plugin) => plugin && plugin.constructor.name === 'ReactRefreshPlugin', + ); + assert(reactRefreshPlugin, 'ReactRefreshPlugin should be present'); + }); + + // these tests should be temporary until the below options are supported + const unsupportedOptions = [['--lavamoat'], ['--manifest_version', '3']]; + for (const args of unsupportedOptions) { + it(`should throw on unsupported option \`${args.join('=')}\``, () => { + assert.throws( + () => getWebpackConfig(args), + `Unsupported option: ${args.join(' ')}`, + ); + }); + } +}); diff --git a/development/webpack/types.ts b/development/webpack/types.ts new file mode 100644 index 000000000000..f2739861246b --- /dev/null +++ b/development/webpack/types.ts @@ -0,0 +1,34 @@ +import type { ChildProcess } from 'node:child_process'; +import { type Readable } from 'node:stream'; +import { type Socket } from 'node:net'; +import { type IPty } from '@lydell/node-pty'; + +/** + * A more complete type for the `node-pty` module's `IPty` interface + */ +export type PTY = IPty & { + master: Socket; + slave: Socket; +}; + +/** + * Node's ChildProcess type extended with `stderr` and `stdout`'s `unref` + * method, which is missing from the standard Node.js types. + */ +export type Child = ChildProcess & { + stderr: Readable & { unref: () => Readable }; + stdout: Readable & { unref: () => Readable }; +}; + +export type StdName = 'stdout' | 'stderr'; + +/** + * The control interface for a child process's stdio streams. + */ +export type Stdio = { + destroy: () => void; + listen: (child: Child) => void; + pty: Socket | 'pipe'; + resize: () => void; + unref: (child: Child) => void; +}; diff --git a/development/webpack/utils/cli.ts b/development/webpack/utils/cli.ts new file mode 100644 index 000000000000..05e18f8cf3b8 --- /dev/null +++ b/development/webpack/utils/cli.ts @@ -0,0 +1,391 @@ +/** + * @file This file contains the CLI parser for the webpack build script. 
* It is responsible for parsing the command line arguments and returning a + * structured object representing the parsed arguments. + */ + +import type { Options as YargsOptions } from 'yargs'; +import yargs from 'yargs/yargs'; +import parser from 'yargs-parser'; +import { + Browsers, + type Manifest, + type Browser, + uniqueSort, + toOrange, +} from './helpers'; +import { type BuildConfig } from './config'; + +const ENV_PREFIX = 'BUNDLE'; +const addFeat = 'addFeature' as const; +const omitFeat = 'omitFeature' as const; +type YargsOptionsMap = { [key: string]: YargsOptions }; +type OptionsKeys = keyof Omit<Options, typeof addFeat | typeof omitFeat>; + +/** + * Some options affect the default values of other options. + */ +const prerequisites = { + env: { + alias: 'e', + array: false, + default: 'development' as const, + description: 'Enables/disables production optimizations/development hints', + choices: ['development', 'production'] as const, + group: toOrange('Build options:'), + type: 'string', + }, + // `as const` makes it easier for developers to see the values of the type + // when hovering over it in their IDE. `satisfies Options` enables type + // checking, without losing the `const` property of the values, which is + // necessary for yargs to infer the final types +} as const satisfies YargsOptionsMap; + +/** + * Parses the given args from `argv` and determines whether the requested + * build is a production build. + * + * @param argv + * @param opts + * @returns the parsed `env` value, either 'production' or 'development' + */ +function preParse( + argv: string[], + opts: typeof prerequisites, +): { env: 'production' | 'development' } { + const options: { [k: string]: { [k: string]: unknown } } = { + configuration: { + envPrefix: ENV_PREFIX, + }, + }; + // convert the `opts` object into a format that `yargs-parser` can understand + for (const [arg, val] of Object.entries(opts)) { + for (const [key, valEntry] of Object.entries(val)) { + if (!options[key]) { + options[key] = {}; + } + options[key][arg] = valEntry; + } + } + + const { env } = parser(argv, options); + return { env }; +} + +/** + * Type representing the parsed arguments + */ +export type Args = ReturnType<typeof parseArgv>['args']; +export type Features = ReturnType<typeof parseArgv>['features']; + +/** + * Parses an array of command line arguments into a structured format. + * + * @param argv - An array of command line arguments, excluding the program + * executable and file name. Typically used as + * `parseArgv(process.argv.slice(2))`. + * @param buildConfig - The build config. + * @param buildConfig.buildTypes - The build types. + * @param buildConfig.features - The features. + * @returns An object representing the parsed arguments. + */ +export function parseArgv( + argv: string[], + { buildTypes, features }: BuildConfig, +) { + const allBuildTypeNames = Object.keys(buildTypes); + const allFeatureNames = Object.keys(features); + + // args like `production` may change our CLI defaults, so we pre-parse them + const preconditions = preParse(argv, prerequisites); + const options = getOptions(preconditions, allBuildTypeNames, allFeatureNames); + const args = getCli(options, 'yarn webpack').parseSync(argv); + // the properties `$0` and `_` are added by yargs, but we don't need them. We + // transform `add` and `omit`, so we also remove them from the config object. + const { $0, _, addFeature: add, omitFeature: omit, ...config } = args; + + // set up feature flags + const active = new Set<string>(); + const defaultFeaturesForBuildType = buildTypes[config.type].features ??
[]; + const setActive = (f: string) => omit.includes(f) || active.add(f); + [defaultFeaturesForBuildType, add].forEach((feat) => feat.forEach(setActive)); + + const ignore = new Set(['$0', 'conf', 'progress', 'stats', 'watch']); + const cacheKey = Object.entries(args) + .filter(([key]) => key.length > 1 && !ignore.has(key) && !key.includes('-')) + .sort(([x], [y]) => x.localeCompare(y)); + return { + // narrow the `config` type to only the options we're returning + args: config as { [key in OptionsKeys]: (typeof config)[key] }, + cacheKey: JSON.stringify(cacheKey), + features: { + active, + all: new Set(allFeatureNames), + }, + }; +} + +/** + * Gets a yargs instance for parsing CLI arguments. + * + * @param options + * @param name + */ +function getCli<T extends YargsOptionsMap>(options: T, name: string) { + const cli = yargs() + // Ensure unrecognized commands/options are reported as errors. + .strict() + // disable yargs's version, as we use it ourselves + .version(false) + // use the scriptName in `--help` output + .scriptName(name) + // wrap output at a maximum of 120 characters or `process.stdout.columns` + .wrap(Math.min(120, process.stdout.columns)) + // enable the `--config` command, which allows the user to specify a custom + // config file containing webpack options + .config() + .parserConfiguration({ + 'strip-aliased': true, + 'strip-dashed': true, + }) + // enable ENV parsing, which allows the user to specify webpack options via + // environment variables prefixed with `BUNDLE_` + // TODO: choose a better name than `BUNDLE` (it looks like `MM` is already being used in CI for ✨something✨) + .env(ENV_PREFIX) + // TODO: enable completion once https://github.com/yargs/yargs/pull/2422 is released. + // enable the `completion` command, which outputs a bash completion script + // .completion( + // 'completion', + // 'Enable bash/zsh completions; concat the script generated by running this command to your .bashrc or .bash_profile', + // ) + .example( + '$0 --env development --browser brave --browser chrome --zip', + 'Builds the extension for development for Chrome & Brave; generate zip files for both', + ) + // TODO: enable completion once https://github.com/yargs/yargs/pull/2422 is released. + // .example( + // '$0 completion', + // `Generates a bash completion script for the \`${name}\` command`, + // ) + .updateStrings({ + 'Options:': toOrange('Options:'), + 'Examples:': toOrange('Examples:'), + }) + .options(options); + return cli; +} + +type Options = ReturnType<typeof getOptions>; + +function getOptions( + { env }: ReturnType<typeof preParse>, + buildTypes: string[], + allFeatures: string[], +) { + const isProduction = env === 'production'; + const prodDefaultDesc = "If `env` is 'production', `true`, otherwise `false`"; + return { + watch: { + alias: 'w', + array: false, + default: false, + description: 'Build then watch for file changes', + group: toOrange('Developer assistance:'), + type: 'boolean', + }, + cache: { + alias: 'c', + array: false, + default: true, + description: 'Cache build for faster rebuilds', + group: toOrange('Developer assistance:'), + type: 'boolean', + }, + progress: { + alias: 'p', + array: false, + default: true, + description: 'Show build progress', + group: toOrange('Developer assistance:'), + type: 'boolean', + }, + devtool: { + alias: 'd', + array: false, + default: isProduction ?
'hidden-source-map' : 'source-map', + defaultDescription: + "If `env` is 'production', 'hidden-source-map', otherwise 'source-map'", + description: 'Sourcemap type to generate', + choices: ['none', 'source-map', 'hidden-source-map'] as const, + group: toOrange('Developer assistance:'), + type: 'string', + }, + sentry: { + array: false, + default: isProduction, + defaultDescription: prodDefaultDesc, + description: 'Enables/disables Sentry Application Monitoring', + group: toOrange('Developer assistance:'), + type: 'boolean', + }, + test: { + array: false, + default: false, + description: 'Enables/disables testing mode', + group: toOrange('Developer assistance:'), + type: 'boolean', + }, + + ...prerequisites, + zip: { + alias: 'z', + array: false, + default: false, + description: 'Generate a zip file of the build', + group: toOrange('Build options:'), + type: 'boolean', + }, + minify: { + alias: 'm', + array: false, + default: isProduction, + defaultDescription: "If `env` is 'production', `true`, otherwise `false`", + description: 'Minify the output', + group: toOrange('Build options:'), + type: 'boolean', + }, + browser: { + alias: 'b', + array: true, + choices: ['all', ...Browsers], + coerce: (browsers: (Browser | 'all')[]) => { + type OneOrMoreBrowsers = [Browser, ...Browser[]]; + // sort browser for determinism (important for caching) + const set = new Set(browsers.sort()); + return (set.has('all') ? [...Browsers] : [...set]) as OneOrMoreBrowsers; + }, + default: 'chrome', + description: 'Browsers to build for', + group: toOrange('Build options:'), + type: 'string', + }, + manifest_version: { + alias: 'v', + array: false, + choices: [2, 3] as Manifest['manifest_version'][], + default: 2 as Manifest['manifest_version'], + description: "Changes manifest.json format to the given version's schema", + group: toOrange('Build options:'), + type: 'number', + }, + releaseVersion: { + alias: 'r', + array: false, + default: 0, + description: + 'The (pre)release version of the extension, e.g., the `6` in `18.7.25-flask.6`.', + group: toOrange('Build options:'), + type: 'number', + }, + type: { + alias: 't', + array: false, + choices: ['none', ...buildTypes], + default: 'main' as const, + description: 'Configure features for the build (main, beta, etc)', + group: toOrange('Build options:'), + type: 'string', + }, + [addFeat]: { + alias: 'a', + array: true, + choices: allFeatures, + coerce: uniqueSort, + default: [] as typeof allFeatures, + description: 'Add features not be included in the selected build `type`', + group: toOrange('Build options:'), + type: 'string', + }, + [omitFeat]: { + alias: 'o', + array: true, + choices: allFeatures, + coerce: uniqueSort, + default: [] as typeof allFeatures, + description: 'Omit features included in the selected build `type`', + group: toOrange('Build options:'), + type: 'string', + }, + + lavamoat: { + alias: 'l', + array: false, + default: isProduction, + defaultDescription: prodDefaultDesc, + description: 'Apply LavaMoat to the build assets', + group: toOrange('Security:'), + type: 'boolean', + }, + lockdown: { + alias: 'k', + array: false, + default: isProduction, + defaultDescription: prodDefaultDesc, + description: 'Enable/disable runtime hardening (also see --snow)', + group: toOrange('Security:'), + type: 'boolean', + }, + snow: { + alias: 's', + array: false, + default: isProduction, + defaultDescription: prodDefaultDesc, + description: 'Apply Snow to the build assets', + group: toOrange('Security:'), + type: 'boolean', + }, + + dryRun: { + array: 
false, + default: false, + description: 'Outputs the config without building', + group: toOrange('Options:'), + type: 'boolean', + }, + stats: { + array: false, + default: false, + description: 'Display build stats after building', + group: toOrange('Options:'), + type: 'boolean', + }, + } as const satisfies YargsOptionsMap; +} + +/** + * Returns a string representation of the given arguments and features. + * + * @param args + * @param features + */ +export function getDryRunMessage(args: Args, features: Features) { + return `🦊 Build Config 🦊 + +Environment: ${args.env} +Minify: ${args.minify} +Watch: ${args.watch} +Cache: ${args.cache} +Progress: ${args.progress} +Zip: ${args.zip} +Snow: ${args.snow} +LavaMoat: ${args.lavamoat} +Lockdown: ${args.lockdown} +Manifest version: ${args.manifest_version} +Release version: ${args.releaseVersion} +Browsers: ${args.browser.join(', ')} +Devtool: ${args.devtool} +Build type: ${args.type} +Features: ${[...features.active].join(', ')} +Test: ${args.test} +`; +} diff --git a/development/webpack/utils/config.ts b/development/webpack/utils/config.ts new file mode 100644 index 000000000000..49f4f9e97ba4 --- /dev/null +++ b/development/webpack/utils/config.ts @@ -0,0 +1,221 @@ +import { join } from 'node:path'; +import { readFileSync } from 'node:fs'; +import { parse as parseYaml } from 'yaml'; +import { parse } from 'dotenv'; +import { setEnvironmentVariables } from '../../build/set-environment-variables'; +import type { Variables } from '../../lib/variables'; +import { type Args } from './cli'; +import { getExtensionVersion } from './version'; + +const BUILDS_YML_PATH = join(__dirname, '../../../builds.yml'); + +/** + * Coerce `"true"`, `"false"`, and `"null"` to their respective JavaScript + * values. Coerce the empty string (`""`) to `undefined`; + * + * @param value + * @returns + */ +function coerce(value: string) { + if (value === 'true') return true; + if (value === 'false') return false; + if (value === 'null') return null; + if (value === '') return null; + return value; +} + +/** + * @returns The definitions loaded from process.env. + */ +function loadEnv(): Map { + const definitions = new Map(); + Object.entries(process.env).forEach(([key, value]) => { + if (typeof value === 'undefined') return; + definitions.set(key, coerce(value)); + }); + return definitions; +} + +/** + * @param definitions + * @param rcFilePath - The path to the rc file. + */ +function addRc(definitions: Map, rcFilePath: string): void { + try { + const rc = parse(readFileSync(rcFilePath, 'utf8')); + Object.entries(rc).forEach(([key, value]) => { + if (definitions.has(key)) return; + definitions.set(key, coerce(value)); + }); + } catch { + // ignore + } +} + +/** + * Get the name for the current build. + * + * @param type + * @param build + * @param isDev + * @param args + */ +export function getBuildName( + type: string, + build: BuildType, + isDev: boolean, + args: Pick, +) { + const buildName = + build.buildNameOverride || + `MetaMask ${type.slice(0, 1).toUpperCase()}${type.slice(1)}`; + if (isDev) { + const mv3Str = args.manifest_version === 3 ? ' MV3' : ''; + const lavamoatStr = args.lavamoat ? ' lavamoat' : ''; + const snowStr = args.snow ? ' snow' : ''; + const lockdownStr = args.lockdown ? ' lockdown' : ''; + return `${buildName}${mv3Str}${lavamoatStr}${snowStr}${lockdownStr}`; + } + return buildName; +} + +/** + * Computes the `variables` (extension runtime's `process.env.*`). 
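+ *
+ * @example
+ * A usage sketch that mirrors the call site in webpack.config.ts (the CLI
+ * flags shown are illustrative):
+ * ```ts
+ * const buildConfig = getBuildTypes();
+ * const { args } = parseArgv(['--env', 'development'], buildConfig);
+ * const { variables, safeVariables, version } = getVariables(args, buildConfig);
+ * // `variables` holds the raw values; `safeVariables` holds the same values
+ * // JSON-stringified so SWC can inline them at compile time.
+ * ```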
+ * + * @param args + * @param args.type + * @param args.test + * @param args.env + * @param buildConfig + */ +export function getVariables( + { type, env, ...args }: Args, + buildConfig: BuildConfig, +) { + const activeBuild = buildConfig.buildTypes[type]; + const variables = loadConfigVars(activeBuild, buildConfig); + const version = getExtensionVersion(type, activeBuild, args.releaseVersion); + const isDevBuild = env === 'development'; + + function set(key: string, value: unknown): void; + function set(key: Record): void; + function set(key: string | Record, value?: unknown): void { + if (typeof key === 'object') { + Object.entries(key).forEach(([k, v]) => variables.set(k, v)); + } else { + variables.set(key, value); + } + } + + // use the gulp-build's function to set the environment variables + setEnvironmentVariables({ + buildName: getBuildName(type, activeBuild, isDevBuild, args), + buildType: type, + environment: env, + isDevBuild, + isTestBuild: args.test, + version: version.versionName, + variables: { + set, + isDefined(key: string): boolean { + return variables.has(key); + }, + get(key: string): unknown { + return variables.get(key); + }, + getMaybe(key: string): unknown { + return variables.get(key); + }, + } as Variables, + }); + + // variables that are used in the webpack build's entry points. Our runtime + // code checks for the _string_ `"true"`, so we cast to string here. + variables.set('ENABLE_SENTRY', args.sentry.toString()); + variables.set('ENABLE_SNOW', args.snow.toString()); + variables.set('ENABLE_LAVAMOAT', args.lavamoat.toString()); + variables.set('ENABLE_LOCKDOWN', args.lockdown.toString()); + + // convert the variables to a format that can be used by SWC, which expects + // values be JSON stringified, as it JSON.parses them internally. + const safeVariables: Record = {}; + variables.forEach((value, key) => { + if (value === null || value === undefined) return; + safeVariables[key] = JSON.stringify(value); + }); + + // special location for the PPOM_URI, as we don't want to copy the wasm file + // to the build directory like the gulp build does + variables.set( + 'PPOM_URI', + `new URL('@blockaid/ppom_release/ppom_bg.wasm', import.meta.url)`, + ); + // the `PPOM_URI` shouldn't be JSON stringified, as it's actually code + safeVariables.PPOM_URI = variables.get('PPOM_URI') as string; + + return { variables, safeVariables, version }; +} + +export type BuildType = { + id: number; + features?: string[]; + env?: (string | { [k: string]: unknown })[]; + isPrerelease?: boolean; + buildNameOverride?: string; +}; + +export type BuildConfig = { + buildTypes: Record; + env: (string | Record)[]; + features: Record< + string, + null | { env?: (string | { [k: string]: unknown })[] } + >; +}; + +/** + * + */ +export function getBuildTypes(): BuildConfig { + return parseYaml(readFileSync(BUILDS_YML_PATH, 'utf8')); +} + +/** + * Loads configuration variables from process.env, .metamaskrc, and build.yml. + * + * The order of precedence is: + * 1. process.env + * 2. .metamaskrc + * 3. build.yml + * + * i.e., if a variable is defined in `process.env`, it will take precedence over + * the same variable defined in `.metamaskrc` or `build.yml`. 
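+ *
+ * @example
+ * A precedence sketch (the key name and values are illustrative):
+ * ```ts
+ * // process.env:  SUPPORT_LINK=https://env.example
+ * // .metamaskrc:  SUPPORT_LINK=https://rc.example
+ * // builds.yml:   SUPPORT_LINK=https://yml.example
+ * loadConfigVars(activeBuild, buildConfig).get('SUPPORT_LINK');
+ * // => 'https://env.example', because process.env wins
+ * ```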
+ * + * @param activeBuild + * @param build + * @param build.env + * @param build.features + * @returns + */ +function loadConfigVars( + activeBuild: Pick, + { env, features }: BuildConfig, +) { + const definitions = loadEnv(); + addRc(definitions, join(__dirname, '../../../.metamaskrc')); + addVars(activeBuild.env); + activeBuild.features?.forEach((feature) => addVars(features[feature]?.env)); + addVars(env); + + function addVars(pairs?: (string | Record)[]): void { + pairs?.forEach((pair) => { + if (typeof pair === 'string') return; + Object.entries(pair).forEach(([key, value]) => { + if (definitions.has(key)) return; + definitions.set(key, value); + }); + }); + } + + return definitions; +} diff --git a/development/webpack/utils/git.ts b/development/webpack/utils/git.ts new file mode 100644 index 000000000000..887db184ea01 --- /dev/null +++ b/development/webpack/utils/git.ts @@ -0,0 +1,60 @@ +import { spawnSync } from 'node:child_process'; +import { join, normalize } from 'node:path'; + +type Commit = { + hash: () => string; + timestamp: () => number; +}; + +/** + * Cache variable to store the most recent commit information. This is used to + * avoid repeated calls to the git command for performance optimization. + */ +const cache: Map = new Map(); + +/** + * Retrieves the most recent Git commit information, including its short hash + * and timestamp. If the information is cached, the cached value is returned to + * improve performance. Otherwise, it executes a git command to retrieve the + * latest commit's hash and timestamp, caches it, and then returns the + * information. + * + * @param gitDir + * @returns The latest commit's hash and timestamp. + */ +export function getLatestCommit( + gitDir: string = join(__dirname, '../../../.git'), +): Commit { + const cached = cache.get(gitDir); + if (cached) return cached; + + // execute the `git` command to get the latest commit's 8 character hash + // (`%h` and `--abbrev=8`) and authorship timestamp (seconds since the Unix + // epoch) + const hashLength = 8; + const args = [ + `--git-dir`, + normalize(gitDir), + 'log', + '-1', + '--format=%h%at', + `--abbrev=${hashLength}`, + ] as const; + const { stdout } = spawnSync('git', args, { + encoding: 'buffer', + env: process.env, + maxBuffer: 256, // we really only need like 19 bytes + stdio: ['ignore', 'pipe', 'ignore'], + }); + const response = { + hash() { + return stdout.toString('utf8', 0, hashLength); + }, + timestamp() { + // convert to milliseconds + return Number(stdout.toString('utf8', hashLength)) * 1000; + }, + }; + cache.set(gitDir, response); + return response; +} diff --git a/development/webpack/utils/helpers.ts b/development/webpack/utils/helpers.ts new file mode 100644 index 000000000000..2e7dc25b6da3 --- /dev/null +++ b/development/webpack/utils/helpers.ts @@ -0,0 +1,234 @@ +import { readdirSync } from 'node:fs'; +import { parse, join, relative, sep } from 'node:path'; +import type { Chunk, EntryObject, Stats } from 'webpack'; +import type TerserPluginType from 'terser-webpack-plugin'; + +export type Manifest = chrome.runtime.Manifest; +export type ManifestV2 = chrome.runtime.ManifestV2; +export type ManifestV3 = chrome.runtime.ManifestV3; + +// HMR (Hot Module Reloading) can't be used until all circular dependencies in +// the codebase are removed +// See: https://github.com/MetaMask/metamask-extension/issues/22450 +// TODO: remove this variable when HMR is ready. The env var is for tests and +// must also be removed everywhere. 
+export const __HMR_READY__ = Boolean(process.env.__HMR_READY__) || false; + +/** + * Target browsers + */ +export const Browsers = ['brave', 'chrome', 'firefox'] as const; +export type Browser = (typeof Browsers)[number]; + +const slash = `(?:\\${sep})?`; +/** + * Regular expression to match files in any `node_modules` directory + * Uses a platform-specific path separator: `/` on Unix-like systems and `\` on + * Windows. + */ +export const NODE_MODULES_RE = new RegExp(`${slash}node_modules${slash}`, 'u'); + +/** + * No Operation. A function that does nothing and returns nothing. + * + * @returns `undefined` + */ +export const noop = () => undefined; + +/** + * Collects all entry files for use with webpack. + * + * TODO: move this logic into the ManifestPlugin + * + * @param manifest - Base manifest file + * @param appRoot - Absolute directory to search for entry files listed in the + * base manifest + * @returns an `entry` object containing html and JS entry points for use with + * webpack, and an array, `manifestScripts`, list of filepaths of all scripts + * that were added to it. + */ +export function collectEntries(manifest: Manifest, appRoot: string) { + const entry: EntryObject = {}; + /** + * Scripts that must be self-contained and not split into chunks. + */ + const selfContainedScripts: Set = new Set([ + // Snow shouldn't be chunked + 'snow.prod', + 'use-snow', + ]); + + function addManifestScript(filename?: string) { + if (filename) { + selfContainedScripts.add(filename); + entry[filename] = { + chunkLoading: false, + filename, // output filename + import: join(appRoot, filename), // the path to the file to use as an entry + }; + } + } + + function addHtml(filename?: string) { + if (filename) { + assertValidEntryFileName(filename, appRoot); + entry[parse(filename).name] = join(appRoot, filename); + } + } + + // add content_scripts to entries + manifest.content_scripts?.forEach((s) => s.js?.forEach(addManifestScript)); + + if (manifest.manifest_version === 3) { + addManifestScript(manifest.background?.service_worker); + manifest.web_accessible_resources?.forEach(({ resources }) => + resources.forEach((filename) => { + filename.endsWith('.js') && addManifestScript(filename); + }), + ); + } else { + manifest.web_accessible_resources?.forEach((filename) => { + filename.endsWith('.js') && addManifestScript(filename); + }); + manifest.background?.scripts?.forEach(addManifestScript); + addHtml(manifest.background?.page); + } + + for (const filename of readdirSync(appRoot)) { + // ignore non-htm/html files + if (/\.html?$/iu.test(filename)) { + addHtml(filename); + } + } + + /** + * Ignore scripts that were found in the manifest, as these are only loaded by + * the browser extension platform itself. 
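+   *
+   * @example
+   * A sketch of the intent (the chunks here are hypothetical):
+   * ```ts
+   * declare const contentScript: Chunk; // name: 'scripts/contentscript.js' (listed in the manifest)
+   * declare const uiChunk: Chunk;       // name: 'ui' (an HTML-page entry)
+   * canBeChunked(contentScript); // => false, it must stay self-contained
+   * canBeChunked(uiChunk);       // => true, webpack may split it
+   * ```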
+ * + * @param chunk + * @param chunk.name + * @returns + */ + function canBeChunked({ name }: Chunk): boolean { + return !name || !selfContainedScripts.has(name); + } + return { entry, canBeChunked }; +} + +/** + * @param filename + * @param appRoot + * @throws Throws an `Error` if the file is an invalid entrypoint filename + * (a file starting with "_") + */ +function assertValidEntryFileName(filename: string, appRoot: string) { + if (!filename.startsWith('_')) { + return; + } + + const relativeFile = relative(process.cwd(), join(appRoot, filename)); + const error = `Invalid Entrypoint Filename Detected\nPath: ${relativeFile}`; + const reason = `Filenames at the root of the extension directory starting with "_" are reserved for use by the browser.`; + const newFile = filename.slice(1); + const solutions = [ + `Rename this file to remove the underscore, e.g., '${filename}' to '${newFile}'`, + `Move this file to a subdirectory and, if necessary, add it manually to the build 😱`, + ]; + const context = `This file was included in the build automatically by our build script, which adds all HTML files at the root of '${appRoot}'.`; + + const message = `${error} + Reason: ${reason} + + Suggested Actions: + ${solutions.map((solution) => ` • ${solution}`).join('\n')} + ${`\n ${context}`} + `; + + throw new Error(message); +} + +/** + * It gets minimizers for the webpack build. + */ +export function getMinimizers() { + const TerserPlugin: typeof TerserPluginType = require('terser-webpack-plugin'); + return [ + new TerserPlugin({ + // use SWC to minify (about 7x faster than Terser) + minify: TerserPlugin.swcMinify, + // do not minify snow. + exclude: /snow\.prod/u, + }), + ]; +} + +/** + * Helpers for logging to the console with color. + */ +export const { colors, toGreen, toOrange, toPurple } = ((depth, esc) => { + if (depth === 1) { + const echo = (message: string): string => message; + return { colors: false, toGreen: echo, toOrange: echo, toPurple: echo }; + } + // 24: metamask green, 8: close to metamask green, 4: green + const green = { 24: '38;2;186;242;74', 8: '38;5;191', 4: '33' }[depth]; + // 24: metamask orange, 8: close to metamask orange, 4: red :-( + const orange = { 24: '38;2;247;85;25', 8: '38;5;208', 4: '31' }[depth]; + // 24: metamask purple, 8: close to metamask purple, 4: purple + const purple = { 24: '38;2;208;117;255', 8: '38;5;177', 4: '35' }[depth]; + return { + colors: { green: `${esc}[1;${green}m`, orange: `${esc}[1;${orange}m` }, + toGreen: (message: string) => `${esc}[1;${green}m${message}${esc}[0m`, + toOrange: (message: string) => `${esc}[1;${orange}m${message}${esc}[0m`, + toPurple: (message: string) => `${esc}[1;${purple}m${message}${esc}[0m`, + }; +})((process.stderr.getColorDepth?.() as 1 | 4 | 8 | 24) || 1, '\u001b'); + +/** + * Logs a summary of build information to `process.stderr` (webpack logs to + * stderr). + * + * Note: `err` and stats.hasErrors() are different. `err` prevents compilation + * from starting, while `stats.hasErrors()` is true if there were errors during + * compilation itself. + * + * @param err - If not `undefined`, logs the error to `process.stderr`. + * @param stats - If not `undefined`, logs the stats to `process.stderr`. + */ +export function logStats(err?: Error | null, stats?: Stats) { + if (err) { + console.error(err); + return; + } + + if (!stats) { + // technically this shouldn't happen, but webpack's TypeScript interface + // doesn't enforce that `err` and `stats` are mutually exclusive. 
+ return; + } + + const { options } = stats.compilation; + // orange for production builds, purple for development + const colorFn = options.mode === 'production' ? toOrange : toPurple; + stats.compilation.name = colorFn(`🦊 ${stats.compilation.compiler.name}`); + if (options.stats === 'normal') { + // log everything (computing stats is slow, so we only do it if asked). + console.error(stats.toString({ colors })); + } else if (stats.hasErrors() || stats.hasWarnings()) { + // always log errors and warnings, if we have them. + console.error(stats.toString({ colors, preset: 'errors-warnings' })); + } else { + // otherwise, just log a simple update + const { name } = stats.compilation; + const status = toGreen('successfully'); + const time = `${stats.endTime - stats.startTime} ms`; + const { version } = require('webpack'); + console.error(`${name} (webpack ${version}) compiled ${status} in ${time}`); + } +} + +/** + * @param array + * @returns a new array with duplicate values removed and sorted + */ +export const uniqueSort = (array: string[]) => [...new Set(array)].sort(); diff --git a/development/webpack/utils/loaders/codeFenceLoader.ts b/development/webpack/utils/loaders/codeFenceLoader.ts new file mode 100644 index 000000000000..5088cb161f61 --- /dev/null +++ b/development/webpack/utils/loaders/codeFenceLoader.ts @@ -0,0 +1,59 @@ +import type { LoaderContext, RuleSetRule } from 'webpack'; +import type { JSONSchema7 } from 'schema-utils/declarations/validate'; +import { validate } from 'schema-utils'; +import { removeFencedCode, type FeatureLabels } from '@metamask/build-utils'; + +const schema: JSONSchema7 = { + $schema: 'http://json-schema.org/draft-07/schema#', + type: 'object', + required: ['features'], + properties: { + features: { + type: 'object', + description: + 'Configuration for code fence removal, specifying active and all possible features.', + required: ['active', 'all'], + properties: { + active: { + description: 'Features that should be included in the output.', + type: 'object', + }, + all: { + description: 'All features that can be toggled.', + type: 'object', + }, + }, + additionalProperties: false, + }, + }, + additionalProperties: false, +}; + +export type CodeFenceLoaderOptions = { features: FeatureLabels }; + +type Context = LoaderContext; +function codeFenceLoader(this: Context, content: string, map?: string) { + const options = this.getOptions(); + validate(schema, options, { name: 'codeFenceLoader' }); + try { + const result = removeFencedCode( + this.resourcePath, + content, + options.features, + ); + this.callback(null, result[0], map); + } catch (error: unknown) { + this.callback(error as Error); + } +} + +export default codeFenceLoader; + +export type Loader = RuleSetRule & { options: CodeFenceLoaderOptions }; + +export function getCodeFenceLoader(features: FeatureLabels): Loader { + return { + loader: __filename, + options: { features }, + }; +} diff --git a/development/webpack/utils/loaders/swcLoader.ts b/development/webpack/utils/loaders/swcLoader.ts new file mode 100644 index 000000000000..b6976ff8d66d --- /dev/null +++ b/development/webpack/utils/loaders/swcLoader.ts @@ -0,0 +1,208 @@ +import type { LoaderContext } from 'webpack'; +import type { JSONSchema7 } from 'schema-utils/declarations/validate'; +import type { FromSchema } from 'json-schema-to-ts'; +import { validate } from 'schema-utils'; +import { transform, type Options } from '@swc/core'; +import { type Args } from '../cli'; +import { __HMR_READY__ } from '../helpers'; + +// the schema here is 
limited to only the options we actually use +// there are loads more options available to SWC we could add. +const schema = { + $schema: 'http://json-schema.org/draft-07/schema#', + type: 'object', + properties: { + env: { + type: 'object', + properties: { + targets: { + description: 'The browsers to target (browserslist format).', + type: 'string', + }, + }, + additionalProperties: false, + }, + jsc: { + type: 'object', + properties: { + externalHelpers: { + type: 'boolean', + default: false, + }, + transform: { + type: 'object', + properties: { + optimizer: { + type: 'object', + properties: { + globals: { + description: '', + type: 'object', + properties: { + envs: { + description: + 'Replaces environment variables (`if (process.env.DEBUG) `) with specified values/expressions at compile time.', + anyOf: [ + { + type: 'array', + items: { + type: 'string', + }, + }, + { + type: 'object', + additionalProperties: { + type: 'string', + }, + }, + ], + }, + vars: { + description: + 'Replaces variables `if(__DEBUG__){}` with specified values/expressions at compile time.', + type: 'object', + additionalProperties: { + type: 'string', + }, + }, + }, + additionalProperties: false, + }, + }, + additionalProperties: false, + }, + react: { + description: 'Effective only if `syntax` supports ƒ.', + type: 'object', + properties: { + development: { + description: + 'Toggles plugins that aid in development, such as @swc/plugin-transform-react-jsx-self and @swc/plugin-transform-react-jsx-source. Defaults to `false`.', + type: 'boolean', + }, + refresh: { + description: 'Enable fast refresh feature for React app', + type: 'boolean', + }, + }, + additionalProperties: false, + }, + }, + additionalProperties: false, + }, + parser: { + description: 'Defaults to EsParserConfig (syntax: ecmascript)', + type: 'object', + properties: { + syntax: { + type: 'string', + default: 'ecmascript', + enum: ['ecmascript', 'typescript'], + }, + }, + oneOf: [ + { + properties: { + syntax: { + const: 'typescript', + }, + tsx: { + default: false, + type: 'boolean', + }, + }, + additionalProperties: false, + required: ['syntax'], + }, + { + properties: { + syntax: { + const: 'ecmascript', + }, + jsx: { + default: false, + type: 'boolean', + }, + }, + additionalProperties: false, + required: ['syntax'], + }, + ], + }, + }, + additionalProperties: false, + }, + }, + additionalProperties: false, +} as const satisfies JSONSchema7; + +type SchemaOptions = { keepDefaultedPropertiesOptional: true }; +export type SwcLoaderOptions = FromSchema; + +type Context = LoaderContext; +export default function swcLoader(this: Context, src: string, srcMap?: string) { + const pluginOptions = this.getOptions(); + validate(schema, pluginOptions, { name: 'swcLoader' }); + + const options: Options = { + ...pluginOptions, + envName: this.mode, + filename: this.resourcePath, + inputSourceMap: srcMap, + sourceFileName: this.resourcePath, + sourceMaps: this.sourceMap, + swcrc: false, + }; + + const cb = this.async(); + transform(src, options).then(({ code, map }) => cb(null, code, map), cb); +} + +export type SwcConfig = { + args: Pick; + safeVariables: Record; + browsersListQuery: string; + isDevelopment: boolean; +}; + +/** + * Gets the Speedy Web Compiler (SWC) loader for the given syntax. 
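+ *
+ * @example
+ * A wiring sketch that mirrors webpack.config.ts (the rule shape below is
+ * illustrative, not the extension's full rule set):
+ * ```ts
+ * const swcConfig = { args, safeVariables, browsersListQuery, isDevelopment };
+ * const tsxLoader = getSwcLoader('typescript', true, swcConfig);
+ * const rule = { test: /\.tsx?$/u, use: [tsxLoader] };
+ * ```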
+ * + * @param syntax + * @param enableJsx + * @param swcConfig + * @returns + */ +export function getSwcLoader( + syntax: 'typescript' | 'ecmascript', + enableJsx: boolean, + swcConfig: SwcConfig, +) { + return { + loader: __filename, + options: { + env: { + targets: swcConfig.browsersListQuery, + }, + jsc: { + externalHelpers: true, + transform: { + react: { + development: swcConfig.isDevelopment, + refresh: + __HMR_READY__ && swcConfig.isDevelopment && swcConfig.args.watch, + }, + optimizer: { + globals: { + envs: swcConfig.safeVariables, + }, + }, + }, + parser: { + syntax, + [syntax === 'typescript' ? 'tsx' : 'jsx']: enableJsx, + }, + }, + } as const satisfies SwcLoaderOptions, + }; +} diff --git a/development/webpack/utils/plugins/ManifestPlugin/helpers.ts b/development/webpack/utils/plugins/ManifestPlugin/helpers.ts new file mode 100644 index 000000000000..82efa9acf253 --- /dev/null +++ b/development/webpack/utils/plugins/ManifestPlugin/helpers.ts @@ -0,0 +1,56 @@ +/** + * Returns a function that will transform a manifest JSON object based on the + * given build args. + * + * Applies the following transformations: + * - If `lockdown` is `false`, removes lockdown scripts from content_scripts + * - If `test` is `true`, adds the "tabs" permission to the manifest + * + * @param args + * @param args.lockdown + * @param args.test + * @returns a function that will transform the manifest JSON object + * @throws an error if the manifest already contains the "tabs" permission and + * `test` is `true` + */ +export function transformManifest(args: { lockdown: boolean; test: boolean }) { + const transforms: ((manifest: chrome.runtime.Manifest) => void)[] = []; + + function removeLockdown(browserManifest: chrome.runtime.Manifest) { + const mainScripts = browserManifest.content_scripts?.[0]; + if (mainScripts) { + const keep = ['scripts/contentscript.js', 'scripts/inpage.js']; + mainScripts.js = mainScripts.js?.filter((js) => keep.includes(js)); + } + } + + if (!args.lockdown) { + // remove lockdown scripts from content_scripts + transforms.push(removeLockdown); + } + + function addTabsPermission(browserManifest: chrome.runtime.Manifest) { + if (browserManifest.permissions) { + if (browserManifest.permissions.includes('tabs')) { + throw new Error( + "manifest contains 'tabs' already; this transform should be removed.", + ); + } + browserManifest.permissions.push('tabs'); + } else { + browserManifest.permissions = ['tabs']; + } + } + if (args.test) { + // test builds need "tabs" permission for switchToWindowWithTitle + transforms.push(addTabsPermission); + } + + return transforms.length + ? 
(browserManifest: chrome.runtime.Manifest, _browser: string) => { + const clone = structuredClone(browserManifest); + transforms.forEach((transform) => transform(clone)); + return clone; + } + : undefined; +} diff --git a/development/webpack/utils/plugins/ManifestPlugin/index.ts b/development/webpack/utils/plugins/ManifestPlugin/index.ts new file mode 100644 index 000000000000..c08cfd7ba6e6 --- /dev/null +++ b/development/webpack/utils/plugins/ManifestPlugin/index.ts @@ -0,0 +1,351 @@ +import { extname, join } from 'node:path/posix'; +import { readFileSync } from 'node:fs'; +import { + sources, + ProgressPlugin, + type Compilation, + type Compiler, + type Asset, +} from 'webpack'; +import { validate } from 'schema-utils'; +import { + type DeflateOptions, + Zip, + AsyncZipDeflate, + ZipPassThrough, +} from 'fflate'; +import { noop, type Manifest, Browser } from '../../helpers'; +import { schema } from './schema'; +import type { ManifestPluginOptions } from './types'; + +const { RawSource, ConcatSource } = sources; + +type Assets = Compilation['assets']; + +const NAME = 'ManifestPlugin'; +const BROWSER_TEMPLATE_RE = /\[browser\]/gu; + +/** + * Clones a Buffer or Uint8Array and returns it + * + * @param data + * @returns + */ +function clone(data: Buffer | Uint8Array): Buffer { + return Buffer.from(data); +} + +/** + * Adds the given asset to the zip file + * + * @param asset - The asset to add + * @param assetName - The name of the asset + * @param compress - Whether to compress the asset + * @param compressionOptions - The options to use for compression + * @param mtime - The modification time of the asset + * @param zip - The zip file to add the asset to + */ +function addAssetToZip( + asset: Buffer, + assetName: string, + compress: boolean, + compressionOptions: DeflateOptions | undefined, + mtime: number, + zip: Zip, +): void { + const zipFile = compress + ? new AsyncZipDeflate(assetName, compressionOptions) + : new ZipPassThrough(assetName); + zipFile.mtime = mtime; + zip.add(zipFile); + // use a copy of the Buffer, as Zip will consume it + zipFile.push(asset, true); +} + +/** + * A webpack plugin that generates extension manifests for browsers and organizes + * assets into browser-specific directories and optionally zips them. + * + * TODO: it'd be great if the logic to find entry points was also in this plugin + * instead of in helpers.ts. Moving that here would allow us to utilize the + * this.options.transform function to modify the manifest before collecting the + * entry points. + */ +export class ManifestPlugin { + /** + * File types that can be compressed well using DEFLATE compression, used when + * zipping assets. + */ + static compressibleFileTypes = new Set([ + '.bmp', + '.cjs', + '.css', + '.csv', + '.eot', + '.html', + '.js', + '.json', + '.log', + '.map', + '.md', + '.mjs', + '.svg', + '.txt', + '.wasm', + '.vtt', // very slow to process? + // ttf is disabled as some were getting corrupted during compression. You + // can test this by uncommenting it, running with --zip, and then unzipping + // the resulting zip file. If it is still broken the unzip operation will + // show an error. 
+ // '.ttf', + '.wav', + '.xml', + ]); + + options: ManifestPluginOptions; + + manifests: Map = new Map(); + + constructor(options: ManifestPluginOptions) { + validate(schema, options, { name: NAME }); + this.options = options; + this.manifests = new Map(); + } + + apply(compiler: Compiler) { + compiler.hooks.compilation.tap(NAME, this.hookIntoPipelines.bind(this)); + } + + private async zipAssets( + compilation: Compilation, + assets: Assets, // an object of asset names to assets + options: ManifestPluginOptions, + ): Promise { + // TODO(perf): this zips (and compresses) every file individually for each + // browser. Can we share the compression and crc steps to save time? + const { browsers, zipOptions } = options; + const { excludeExtensions, level, outFilePath, mtime } = zipOptions; + const compressionOptions: DeflateOptions = { level }; + const assetsArray = Object.entries(assets); + + let filesProcessed = 0; + const numAssetsPerBrowser = assetsArray.length + 1; + const totalWork = numAssetsPerBrowser * browsers.length; // +1 for each browser's manifest.json + const reportProgress = + ProgressPlugin.getReporter(compilation.compiler) || noop; + // TODO(perf): run this in parallel. If you try without carefully optimizing the + // process will run out of memory pretty quickly, and crash. Fun! + for (const browser of browsers) { + const manifest = this.manifests.get(browser) as sources.Source; + const source = await new Promise((resolve, reject) => { + // since Zipping is async, a past chunk could cause an error after we've + // started processing additional chunks. We'll use this errored flag to + // short-circuit the rest of the processing if that happens. + let errored = false; + const zipSource = new ConcatSource(); + const zip = new Zip((error, data, final) => { + if (errored) return; // ignore additional errors + if (error) { + // set error flag to prevent additional processing + errored = true; + reject(error); + } else { + zipSource.add(new RawSource(clone(data))); + // we've received our final bit of data, return the zipSource + if (final) resolve(zipSource); + } + }); + + // add the browser's manifest.json file to the zip + addAssetToZip( + manifest.buffer(), + 'manifest.json', + true, + compressionOptions, + mtime, + zip, + ); + + const message = `${++filesProcessed}/${totalWork} assets zipped for ${browser}`; + reportProgress(0, message, 'manifest.json'); + + for (const [assetName, asset] of assetsArray) { + if (errored) return; + + const extName = extname(assetName); + if (excludeExtensions.includes(extName)) continue; + + addAssetToZip( + // make a copy of the asset Buffer as Zipping will *consume* it, + // which breaks things if we are compiling for multiple browsers. + clone(asset.buffer()), + assetName, + ManifestPlugin.compressibleFileTypes.has(extName), + compressionOptions, + mtime, + zip, + ); + reportProgress( + 0, + `${++filesProcessed}/${totalWork} assets zipped for ${browser}`, + assetName, + ); + } + + zip.end(); + }); + + // add the zip file to webpack's assets. + const zipFilePath = outFilePath.replace(BROWSER_TEMPLATE_RE, browser); + compilation.emitAsset(zipFilePath, source, { + javascriptModule: false, + compressed: true, + contentType: 'application/zip', + development: true, + }); + } + } + + /** + * Moves the assets to the correct browser locations and adds each browser's + * extension manifest.json file to the list of assets. 
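+   *
+   * @example
+   * A sketch of the resulting layout (asset names are illustrative):
+   * ```ts
+   * // browsers: ['chrome', 'firefox'], original asset: 'scripts/ui.js'
+   * // emitted:  'chrome/manifest.json',  'chrome/scripts/ui.js',
+   * //           'firefox/manifest.json', 'firefox/scripts/ui.js'
+   * // the original 'scripts/ui.js' is then deleted from the compilation
+   * ```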
+ * + * @param compilation + * @param assets + * @param options + */ + private moveAssets( + compilation: Compilation, + assets: Assets, + options: ManifestPluginOptions, + ): void { + // we need to wait to delete assets until after we've zipped them all + const assetDeletions = new Set(); + const { browsers } = options; + const assetEntries = Object.entries(assets); + browsers.forEach((browser) => { + const manifest = this.manifests.get(browser) as sources.Source; + compilation.emitAsset(join(browser, 'manifest.json'), manifest, { + javascriptModule: false, + contentType: 'application/json', + }); + for (const [name, asset] of assetEntries) { + // move the assets to their final browser-relative locations + const assetDetails = compilation.getAsset(name) as Readonly; + compilation.emitAsset(join(browser, name), asset, assetDetails.info); + assetDeletions.add(name); + } + }); + // delete the assets after we've zipped them all + assetDeletions.forEach((assetName) => compilation.deleteAsset(assetName)); + } + + private prepareManifests(compilation: Compilation): void { + const context = compilation.options.context as string; + const manifestPath = join( + context, + `manifest/v${this.options.manifest_version}`, + ); + // Load the base manifest + const basePath = join(manifestPath, `_base.json`); + const baseManifest: Manifest = JSON.parse(readFileSync(basePath, 'utf8')); + + const { transform } = this.options; + const resources = this.options.web_accessible_resources; + const description = this.options.description + ? `${baseManifest.description} – ${this.options.description}` + : baseManifest.description; + const { version } = this.options; + + this.options.browsers.forEach((browser) => { + let manifest: Manifest = { ...baseManifest, description, version }; + + if (browser !== 'firefox') { + // version_name isn't used by FireFox, but is by Chrome, et al. + manifest.version_name = this.options.versionName; + } + + try { + const browserManifestPath = join(manifestPath, `${browser}.json`); + // merge browser-specific overrides into the browser manifest + manifest = { + ...manifest, + ...require(browserManifestPath), + }; + } catch { + // ignore if the file doesn't exist, as some browsers might not need overrides + } + + // merge provided `web_accessible_resources` + if (resources && resources.length > 0) { + if (manifest.manifest_version === 3) { + manifest.web_accessible_resources = + manifest.web_accessible_resources || []; + const war = manifest.web_accessible_resources.find((resource) => + resource.matches.includes(''), + ); + if (war) { + // merge the resources into the existing resource, ensure uniqueness using `Set` + war.resources = [...new Set([...war.resources, ...resources])]; + } else { + // add a new resource + manifest.web_accessible_resources.push({ + matches: [''], + resources: [...resources], + }); + } + } else { + manifest.web_accessible_resources = [ + ...(manifest.web_accessible_resources || []), + ...resources, + ]; + } + } + + // allow the user to `transform` the manifest. Use a copy of the manifest + // so modifications for one browser don't affect other browsers. 
+ if (transform) { + manifest = transform?.(JSON.parse(JSON.stringify(manifest)), browser); + } + + // Add the manifest file to the assets + const source = new RawSource(JSON.stringify(manifest, null, 2)); + this.manifests.set(browser, source); + }); + } + + private hookIntoPipelines(compilation: Compilation): void { + // prepare manifests early so we can catch errors early instead of waiting + // until the end of the compilation. + this.prepareManifests(compilation); + + // TODO: MV3 needs to be handled differently. Specifically, it needs to + // load the files it needs via a function call to `importScripts`, plus some + // other shenanigans. + + // hook into the processAssets hook to move/zip assets + const tapOptions = { + name: NAME, + stage: Infinity, + }; + if (this.options.zip) { + const options = this.options as ManifestPluginOptions; + compilation.hooks.processAssets.tapPromise( + tapOptions, + async (assets: Assets) => { + await this.zipAssets(compilation, assets, options); + this.moveAssets( + compilation, + assets, + this.options as ManifestPluginOptions, + ); + }, + ); + } else { + const options = this.options as ManifestPluginOptions; + compilation.hooks.processAssets.tap(tapOptions, (assets: Assets) => { + this.moveAssets(compilation, assets, options); + }); + } + } +} diff --git a/development/webpack/utils/plugins/ManifestPlugin/schema.ts b/development/webpack/utils/plugins/ManifestPlugin/schema.ts new file mode 100644 index 000000000000..e6d5efac7ff6 --- /dev/null +++ b/development/webpack/utils/plugins/ManifestPlugin/schema.ts @@ -0,0 +1,115 @@ +import { ExtendedJSONSchema } from 'json-schema-to-ts'; +import { Browsers } from '../../helpers'; + +type Writeable = { -readonly [P in keyof T]: T[P] }; + +export const schema = { + $schema: 'http://json-schema.org/draft-07/schema#', + type: 'object', + required: ['browsers', 'description', 'manifest_version', 'version', 'zip'], + properties: { + browsers: { + description: 'The browsers to build for.', + type: 'array', + items: { + type: 'string', + enum: Browsers as Writeable, + }, + minItems: 1, + maxItems: Browsers.length, + uniqueItems: true, + }, + version: { + description: + 'One to four dot-separated integers identifying the version of this extension.', + type: 'string', + }, + versionName: { + description: + 'A Semantic Versioning-compliant version number for the extension.', + type: 'string', + }, + description: { + description: 'A plain text string that describes the extension.', + type: ['string', 'null'], + maxLength: 132, + }, + manifest_version: { + description: + 'An integer specifying the version of the manifest file format your package requires.', + type: 'number', + enum: [2, 3], + }, + web_accessible_resources: { + description: + 'An array of strings specifying the paths of additional web-accessible resources.', + type: 'array', + items: { + type: 'string', + }, + }, + transform: { + description: 'Function to transform the manifest file.', + instanceof: 'Function', + tsType: '((manifest: Manifest, browser: Browser) => Manifest)', + }, + zip: { + description: 'Whether or not to zip the individual browser builds.', + type: 'boolean', + }, + zipOptions: { + required: ['outFilePath'], + properties: { + level: { + description: + 'Compression level for compressible assets. 0 is no compression, 9 is maximum compression. 
6 is default.', + type: 'number', + default: 6, + minimum: 0, + maximum: 9, + }, + mtime: { + description: + 'Modification time for all files in the zip, specified as a UNIX timestamp (milliseconds since 1 January 1970 UTC). This property sets a uniform modification time for the contents of the zip file. Note: Zip files use FAT file timestamps, which have a limited range. Therefore, datetimes before 1980-01-01 (timestamp value of 315532800000) are invalid in standard Zip files, and datetimes on or after 2100-01-01 (timestamp value of 4102444800000) are also invalid. Values must fall within this range.', + type: 'number', + // Zip files use FAT file timestamps, which have a limited range. + // Datetimes before 1980-01-01 are invalid in standard Zip files. + minimum: Date.UTC(1980, 0, 1), + // datetimes after 2099-12-31 are invalid in zip files + exclusiveMaximum: Date.UTC(2100, 0, 1), + get default() { + return Date.now(); + }, + }, + excludeExtensions: { + description: 'File extensions to exclude from zip.', + type: 'array', + uniqueItems: true, + items: { + type: 'string', + pattern: '^\\.[a-zA-Z0-9]+$', + }, + default: [], + }, + outFilePath: { + description: + 'File path template for zip file relative to webpack output directory. You must include `[browser]` in the file path template, which will be replaced with the browser name. For example, `builds/[browser].zip`.', + type: 'string', + pattern: '.*\\[browser\\].*', + }, + }, + additionalProperties: false, + }, + }, + additionalProperties: false, + if: { + properties: { + zip: { + const: true, + }, + }, + }, + then: { + required: ['zipOptions'], + }, +} satisfies ExtendedJSONSchema>; diff --git a/development/webpack/utils/plugins/ManifestPlugin/types.ts b/development/webpack/utils/plugins/ManifestPlugin/types.ts new file mode 100644 index 000000000000..174d09d4f6b6 --- /dev/null +++ b/development/webpack/utils/plugins/ManifestPlugin/types.ts @@ -0,0 +1,94 @@ +import type { Browser, Manifest } from '../../helpers'; + +export type BaseManifestPluginOptions = { + /** + * The browsers to build for. + */ + browsers: readonly [Browser, ...Browser[]]; + + /** + * An array of strings specifying the paths of additional web-accessible resources. + */ + web_accessible_resources?: readonly string[]; + + /** + * An integer specifying the version of the manifest file format your package requires + */ + manifest_version: 2 | 3; + + /** + * One to four dot-separated integers identifying the version of this extension. A couple of rules apply to the integers: + * + * * The integers must be between 0 and 65535, inclusive. + * * Non-zero integers can't start with 0. For example, 032 is invalid because it begins with a zero. + * * They must not be all zero. For example, 0 and 0.0.0.0 are invalid while 0.1.0.0 is valid. + * + * Here are some examples of valid versions: + * + * * "version": "1" + * * "version": "1.0" + * * "version": "2.10.2" + * * "version": "3.1.2.4567" + * + * If the published extension has a newer version string than the installed extension, then the extension is automatically updated. + * + * The comparison starts with the leftmost integers. Then, if those integers are equal, the integers to the right are compared, and so on. For example, 1.2.0 is a newer version than 1.1.9.9999. + * + * A missing integer is equal to zero. For example, 1.1.9.9999 is newer than 1.1, and 1.1.9.9999 is older than 1.2. + */ + version: string; + + /** + * A Semantic Versioning-compliant version number for the extension. 
Not used in Firefox builds since Firefox doesn't currently support it. + */ + versionName: string; + + /** + * A plain text string (no HTML or other formatting; no more than 132 characters) that describes the extension. + * + * The description should be suitable for both the browser's Extensions page, e.g., chrome://extensions, and extension web stores. You can specify locale-specific strings for this field. + */ + description: string | null; + + /** + * Function to transform the manifest file. + * + * @param manifest + * @param browser + * @returns + */ + transform?: (manifest: Manifest, browser: Browser) => Manifest; + + /** + * Whether or not to zip the individual browser builds. + */ + zip: Zip; +}; + +export type ZipOptions = { + /** + * Options for the zip. + */ + zipOptions: { + /** + * Compression level for compressible assets. 0 is no compression, 9 is maximum compression. 6 is default. + */ + level: 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9; + /** + * Modification time for all files in the zip, specified as a UNIX timestamp (milliseconds since 1 January 1970 UTC). This property sets a uniform modification time for the contents of the zip file. Note: Zip files use FAT file timestamps, which have a limited range. Therefore, datetimes before 1980-01-01 (timestamp value of 315532800000) are invalid in standard Zip files, and datetimes on or after 2100-01-01 (timestamp value of 4102444800000) are also invalid. Values must fall within this range. + */ + mtime: number; + /** + * File extensions to exclude from zip; should include the `.`, e.g., [`.map`]. + */ + excludeExtensions: string[]; + + /** + * File path template for zip file relative to webpack output directory. You must include `[browser]` in the file path template, which will be replaced with the browser name. For example, `builds/[browser].zip`. + */ + outFilePath: string; + }; +}; + +export type ManifestPluginOptions = + BaseManifestPluginOptions & (Zip extends true ? ZipOptions : object); diff --git a/development/webpack/utils/plugins/SelfInjectPlugin/index.ts b/development/webpack/utils/plugins/SelfInjectPlugin/index.ts new file mode 100644 index 000000000000..b80f6102ab75 --- /dev/null +++ b/development/webpack/utils/plugins/SelfInjectPlugin/index.ts @@ -0,0 +1,175 @@ +import { dirname, relative } from 'node:path'; +import { ModuleFilenameHelpers, Compilation, sources } from 'webpack'; +import { validate } from 'schema-utils'; +import { schema } from './schema'; +import type { SelfInjectPluginOptions, Source, Compiler } from './types'; + +export { type SelfInjectPluginOptions } from './types'; + +/** + * Default options for the SelfInjectPlugin. + */ +const defaultOptions = { + // The default `sourceUrlExpression` is configured for browser extensions. + // It generates the absolute url of the given file as an extension url. + // e.g., `chrome-extension:///scripts/inpage.js` + sourceUrlExpression: (filename: string) => + `(globalThis.browser||chrome).runtime.getURL(${JSON.stringify(filename)})`, +} satisfies SelfInjectPluginOptions; + +/** + * Modifies processed assets to inject a script tag that will execute the asset + * as an inline script. Primarily used in Chromium extensions that need to + * access a tab's `window` object from a `content_script`. 
+ * + * @example + * Input: + * ```js + * // webpack.config.js + * module.exports = {plugins: [new SelfInjectPlugin({ test: /\.js$/ })]}; + * ``` + * + * ```js + * // src/index.js + * console.log("hello world"); + * ``` + * Output: + * ```js + * // dist/main.js + * {let d=document,s=d.createElement('script');s.textContent="console.log(\"hello world\");\n//# sourceMappingURL=main.js.map"+`\n//# sourceURL=${(globalThis.browser||chrome).runtime.getURL("main.js")};`;d.documentElement.appendChild(s).remove()} + * ``` + * ```json + * // dist/main.js.map (example) + * {"version":3,"file":"x","mappings":"AAAAA,QAAQC,IAAI","sources":["webpack://./src/index.js"],"sourcesContent":["console.log(\"hello world\");"],"names":["console","log"]} + * ``` + */ +export class SelfInjectPlugin { + private options: SelfInjectPluginOptions & typeof defaultOptions; + + constructor(options: SelfInjectPluginOptions) { + validate(schema, options, { name: SelfInjectPlugin.name }); + + this.options = { ...defaultOptions, ...options }; + } + + apply(compiler: Compiler): void { + compiler.hooks.compilation.tap(SelfInjectPlugin.name, (compilation) => { + this.processAssets(compilation); + }); + } + + /** + * Hooks into the compilation process to modify assets. + * + * @param compilation + */ + processAssets(compilation: Compilation): void { + const opts = { + name: SelfInjectPlugin.name, + stage: Compilation.PROCESS_ASSETS_STAGE_DEV_TOOLING, + }; + compilation.hooks.processAssets.tap(opts, () => this.process(compilation)); + } + + /** + * Processes compilation assets to inject a script tag that will execute the + * asset as an inline script. + * + * @param compilation + */ + process(compilation: Compilation): void { + const { test } = this.options; + const match = ModuleFilenameHelpers.matchObject.bind(null, { test }); + + for (const chunk of compilation.chunks) { + for (const file of chunk.files) { + if (match(file)) { + compilation.updateAsset(file, (asset: Source) => { + return this.updateAsset(compilation, file, asset); + }); + } + } + } + } + + /** + * Updates the given asset to inject a script tag that will execute the asset + * as an inline script. + * + * @param compilation + * @param file + * @param asset + */ + updateAsset(compilation: Compilation, file: string, asset: Source): Source { + const { ConcatSource, RawSource } = sources; + const { map, source } = asset.sourceAndMap(); + + let sourceMappingURLComment = ''; + // emit a separate source map file (if this asset already has one) + if (map /* `map` can be `null`; webpack's types are wrong */) { + const { devtool } = compilation.options; + const sourceMapPath = `${file}.map`; + + // we're removing the source map from the original webpack asset, since + // it's now a different file that isn't mappable, so we need to re-add it + // as a new asset: + const mapSource = new RawSource(JSON.stringify(map)); + compilation.emitAsset(sourceMapPath, mapSource); + + // we must "hide" the `sourceMappingURL` from the file when `hidden` + // source maps are requested by omitting the reference from the source + if (devtool && !devtool.startsWith('hidden-')) { + // `sourceMappingURL` needs to be relative to the file so that the + // browser's dev tools can find it. 
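+        // e.g., file = 'scripts/inpage.js' -> sourceMapPath = 'scripts/inpage.js.map',
+        // so sourceMappingURL = 'inpage.js.map' (relative to the script's own directory)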
+ const sourceMappingURL = relative(dirname(file), sourceMapPath); + sourceMappingURLComment = `\n//# sourceMappingURL=${sourceMappingURL}`; + } + } + + // generate the new self-injecting source code: + const newSource = new ConcatSource(); + // wrapped in a new lexical scope so we don't pollute the global namespace + newSource.add(`{`); + newSource.add(`let d=document,s=d.createElement('script');`); + newSource.add(`s.textContent=`); + newSource.add(this.escapeJs(source + sourceMappingURLComment)); + newSource.add(`+`); + // The browser's dev tools can't map our inline javascript back to its + // source. We add a sourceURL directive to help with that. It also helps + // organize the Sources panel in browser dev tools by separating the inline + // script into its own origin. + newSource.add( + `\`\\n//# sourceURL=\${${this.options.sourceUrlExpression(file)}};\``, + ); + newSource.add(`;`); + // add and immediately remove the script to avoid modifying the DOM. + newSource.add(`d.documentElement.appendChild(s).remove()`); + newSource.add(`}`); + + return newSource; + } + + /** + * Escapes the given JavaScript source as a JavaScript string. + * + * Replaces line separators and paragraph separators with their unicode escape + * sequences. + * + * @example + * ```js + * escapeJs(`console.log('hello world');`); + * // => "\"console.log('hello world');\"" + * ``` + * @param source + * @returns + */ + private escapeJs(source: string): string { + return ( + JSON.stringify(source) + // replace line separators + .replace(/\u2028/gu, '\\u2028') + // and paragraph separators + .replace(/\u2029/gu, '\\u2029') + ); + } +} diff --git a/development/webpack/utils/plugins/SelfInjectPlugin/schema.ts b/development/webpack/utils/plugins/SelfInjectPlugin/schema.ts new file mode 100644 index 000000000000..2b674ba93f31 --- /dev/null +++ b/development/webpack/utils/plugins/SelfInjectPlugin/schema.ts @@ -0,0 +1,58 @@ +import { ExtendedJSONSchema } from 'json-schema-to-ts'; + +export const schema = { + $schema: 'http://json-schema.org/draft-07/schema#', + type: 'object', + properties: { + exclude: { + oneOf: [ + { type: 'string' }, + { instanceof: 'RegExp', tsType: 'RegExp' }, + { + type: 'array', + items: { + oneOf: [ + { type: 'string' }, + { instanceof: 'RegExp', tsType: 'RegExp' }, + ], + }, + }, + ], + }, + include: { + oneOf: [ + { type: 'string' }, + { instanceof: 'RegExp' }, + { + type: 'array', + items: { + oneOf: [ + { type: 'string' }, + { instanceof: 'RegExp', tsType: 'RegExp' }, + ], + }, + }, + ], + }, + test: { + oneOf: [ + { type: 'string' }, + { instanceof: 'RegExp', tsType: 'RegExp' }, + { + type: 'array', + items: { + oneOf: [ + { type: 'string' }, + { instanceof: 'RegExp', tsType: 'RegExp' }, + ], + }, + }, + ], + }, + sourceUrlExpression: { + instanceof: 'Function', + tsType: '((filename: string) => string)', + }, + }, + additionalProperties: false, +} satisfies ExtendedJSONSchema>; diff --git a/development/webpack/utils/plugins/SelfInjectPlugin/types.ts b/development/webpack/utils/plugins/SelfInjectPlugin/types.ts new file mode 100644 index 000000000000..2240227bd76a --- /dev/null +++ b/development/webpack/utils/plugins/SelfInjectPlugin/types.ts @@ -0,0 +1,49 @@ +import type { Asset } from 'webpack'; + +export type { Compiler } from 'webpack'; +export type Source = Asset['source']; + +/** + * Options for the SelfInjectPlugin. + */ +export type SelfInjectPluginOptions = { + /** + * Specify which chunks to apply the transformation to. 
+ * + * @example + * ```js + * { + * test: /inpage/, + * } + * ``` + */ + test?: string | RegExp | (string | RegExp)[]; + /** + * A function that returns a JavaScript expression escaped as a string which + * will be injected into matched file to provide a sourceURL for the self + * injected script. + * + * Defaults to `(filename: string) => (globalThis.browser||globalThis.chrome).runtime.getURL("${filename}")` + * + * @example Custom + * ```js + * Appends a runtime URL for a website, e.g., + * `// //# sourceURL=https://google.com/scripts/myfile.js` + * { + * sourceUrlExpression: (filename) => `document.location.origin/${filename}` + * } + * ``` + * @example Default + * Appends a runtime URL for a browser extension, e.g., + * `//# sourceURL=chrome-extension:///scripts/inpage.js` + * + * ```js + * { + * sourceUrlExpression: (filename) => `(globalThis.browser||globalThis.chrome).runtime.getURL("${filename}")` + * } + * ``` + * @param filename - the chunk's relative filename as it will exist in the output directory + * @returns + */ + sourceUrlExpression?: (filename: string) => string; +}; diff --git a/development/webpack/utils/version.ts b/development/webpack/utils/version.ts new file mode 100644 index 000000000000..ea9cbe7b6e24 --- /dev/null +++ b/development/webpack/utils/version.ts @@ -0,0 +1,70 @@ +import type { BuildType } from './config'; + +/** + * Computes the version number for use in the extension manifest. Uses the + * `version` field in the project's `package.json`. + * + * @param type + * @param options + * @param options.id + * @param options.isPrerelease + * @param releaseVersion + * @returns Returns the version and version_name values for the extension. + */ +export const getExtensionVersion = ( + type: string, + { id, isPrerelease }: Pick, + releaseVersion: number, +): { version: string; versionName: string } => { + const { version } = require('../../../package.json') as { version: string }; + + if (id < 10 || id > 64 || releaseVersion < 0 || releaseVersion > 999) { + throw new Error( + `Build id must be 10-64 and release version must be 0-999 +(inclusive). Received an id of '${id}' and a release version of +'${releaseVersion}'. + +Wait, but that seems so arbitrary? +================================== + +We encode the build id and the release version into the extension version by +concatenating the two numbers together. The maximum value for the concatenated +number is 65535 (a Chromium limitation). The value cannot start with a '0'. We +utilize 2 digits for the build id and 3 for the release version. This affords us +55 release types and 1000 releases per 'version' + build type. + +Okay, so how do I fix it? +========================= + +You'll need to adjust the build 'id' (in builds.yml) or the release version to +fit within these limits or bump the version number in package.json and start the +release version number over from 0. If you can't do that you'll need to come up +with a new way of encoding this information, or re-evaluate the need for this +metadata. + +Good luck on your endeavors.`, + ); + } + + if (!isPrerelease) { + if (releaseVersion !== 0) { + throw new Error( + `A '${type}' build's release version must always be '0'. Got '${releaseVersion}' instead.`, + ); + } + // main build (non-prerelease) version_name is just a plain version number + // the version field needs the `.0` because some runtime code freaks out + // if it's missing. 
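+    // e.g., a package.json version of '12.7.3' yields
+    // { version: '12.7.3.0', versionName: '12.7.3' }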
+ return { + version: `${version}.0`, + versionName: version, + }; + } + return { + // if version=18.7.25, id=10, releaseVersion=12 we get 18.7.25.1012 + version: `${version}.${id}${releaseVersion}`, + // The manifest.json's `version_name` field can be anything we want, so we + // make it human readable, e.g., `18.7.25-beta.123`. + versionName: `${version}-${type}.${releaseVersion}`, + }; +}; diff --git a/development/webpack/webpack.config.ts b/development/webpack/webpack.config.ts new file mode 100644 index 000000000000..3cef5fc3fc53 --- /dev/null +++ b/development/webpack/webpack.config.ts @@ -0,0 +1,387 @@ +/** + * @file The main webpack configuration file for the browser extension. + */ + +import { readFileSync } from 'node:fs'; +import { join } from 'node:path'; +import { argv, exit } from 'node:process'; +import { + ProvidePlugin, + type Configuration, + type WebpackPluginInstance, + type Chunk, + type MemoryCacheOptions, + type FileCacheOptions, +} from 'webpack'; +import CopyPlugin from 'copy-webpack-plugin'; +import HtmlBundlerPlugin from 'html-bundler-webpack-plugin'; +import rtlCss from 'postcss-rtlcss'; +import autoprefixer from 'autoprefixer'; +import type ReactRefreshPluginType from '@pmmmwh/react-refresh-webpack-plugin'; +import { SelfInjectPlugin } from './utils/plugins/SelfInjectPlugin'; +import { + type Manifest, + collectEntries, + getMinimizers, + NODE_MODULES_RE, + __HMR_READY__, +} from './utils/helpers'; +import { transformManifest } from './utils/plugins/ManifestPlugin/helpers'; +import { parseArgv, getDryRunMessage } from './utils/cli'; +import { getCodeFenceLoader } from './utils/loaders/codeFenceLoader'; +import { getSwcLoader } from './utils/loaders/swcLoader'; +import { getBuildTypes, getVariables } from './utils/config'; +import { ManifestPlugin } from './utils/plugins/ManifestPlugin'; +import { getLatestCommit } from './utils/git'; + +const buildTypes = getBuildTypes(); +const { args, cacheKey, features } = parseArgv(argv.slice(2), buildTypes); +if (args.dryRun) { + console.error(getDryRunMessage(args, features)); + exit(0); +} + +// #region short circuit for unsupported build configurations +if (args.lavamoat) { + throw new Error("The webpack build doesn't support LavaMoat yet. So sorry."); +} +if (args.manifest_version === 3) { + throw new Error( + "The webpack build doesn't support manifest_version version 3 yet. So sorry.", + ); +} +// #endregion short circuit for unsupported build configurations + +const context = join(__dirname, '../../app'); +const isDevelopment = args.env === 'development'; +const MANIFEST_VERSION = args.manifest_version; +const manifestPath = join(context, `manifest/v${MANIFEST_VERSION}/_base.json`); +const manifest: Manifest = require(manifestPath); +const { entry, canBeChunked } = collectEntries(manifest, context); +const codeFenceLoader = getCodeFenceLoader(features); +const browsersListPath = join(context, '../.browserslistrc'); +// read .browserslist now to stop it from searching for the file over and over +const browsersListQuery = readFileSync(browsersListPath, 'utf8'); +const { variables, safeVariables, version } = getVariables(args, buildTypes); +const webAccessibleResources = + args.devtool === 'source-map' + ? ['scripts/inpage.js.map', 'scripts/contentscript.js.map'] + : []; + +// #region cache +const cache = args.cache + ? 
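+    // Persistent filesystem cache: `version: cacheKey` ties the cache to the
+    // parsed build options, and `buildDependencies` (below) invalidates it when
+    // the listed config files change.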
({ + type: 'filesystem', + name: `MetaMask—${args.env}`, + version: cacheKey, + idleTimeout: 0, + idleTimeoutForInitialStore: 0, + idleTimeoutAfterLargeChanges: 0, + // small performance gain by increase memory generations + maxMemoryGenerations: Infinity, + // Disable allowCollectingMemory because it can slow the build by 10%! + allowCollectingMemory: false, + buildDependencies: { + defaultConfig: [__filename], + // Invalidates the build cache when the listed files change. + // `__filename` makes all `require`d dependencies of *this* file + // `buildDependencies` + config: [ + __filename, + join(context, '../.metamaskrc'), + join(context, '../builds.yml'), + browsersListPath, + ], + }, + } as const satisfies FileCacheOptions) + : ({ type: 'memory' } as const satisfies MemoryCacheOptions); +// #endregion cache + +// #region plugins +const commitHash = isDevelopment ? getLatestCommit().hash() : null; +const plugins: WebpackPluginInstance[] = [ + new SelfInjectPlugin({ test: /^scripts\/inpage\.js$/u }), + // HtmlBundlerPlugin treats HTML files as entry points + new HtmlBundlerPlugin({ + preprocessorOptions: { useWith: false }, + minify: args.minify, + integrity: 'auto', + test: /\.html$/u, // default is eta/html, we only want html + data: { + isMMI: args.type === 'mmi', + isTest: args.test, + shouldIncludeSnow: args.snow, + }, + }), + new ManifestPlugin({ + web_accessible_resources: webAccessibleResources, + manifest_version: MANIFEST_VERSION, + description: commitHash + ? `${args.env} build from git id: ${commitHash.substring(0, 8)}` + : null, + version: version.version, + versionName: version.versionName, + browsers: args.browser, + transform: transformManifest(args), + zip: args.zip, + ...(args.zip + ? { + zipOptions: { + outFilePath: `../../builds/metamask-[browser]-${version.versionName}.zip`, // relative to output.path + mtime: getLatestCommit().timestamp(), + excludeExtensions: ['.map'], + // `level: 9` is the highest; it may increase build time by ~5% over level 1 + level: 9, + }, + } + : {}), + }), + // use ProvidePlugin to polyfill *global* node variables + new ProvidePlugin({ + // Make a global `Buffer` variable that points to the `buffer` package. + Buffer: ['buffer', 'Buffer'], + // Make a global `process` variable that points to the `process` package. + process: 'process/browser', + }), + new CopyPlugin({ + patterns: [ + { from: join(context, '_locales'), to: '_locales' }, // translations + // misc images + // TODO: fix overlap between this folder and automatically bundled assets + { from: join(context, 'images'), to: 'images' }, + ], + }), +]; +// enable React Refresh in 'development' mode when `watch` is enabled +if (__HMR_READY__ && isDevelopment && args.watch) { + const ReactRefreshWebpackPlugin: typeof ReactRefreshPluginType = require('@pmmmwh/react-refresh-webpack-plugin'); + plugins.push(new ReactRefreshWebpackPlugin()); +} +if (args.progress) { + const { ProgressPlugin } = require('webpack'); + plugins.push(new ProgressPlugin()); +} +// #endregion plugins + +const swcConfig = { args, safeVariables, browsersListQuery, isDevelopment }; +const tsxLoader = getSwcLoader('typescript', true, swcConfig); +const jsxLoader = getSwcLoader('ecmascript', true, swcConfig); +const ecmaLoader = getSwcLoader('ecmascript', false, swcConfig); + +const config = { + entry, + cache, + plugins, + context, + mode: args.env, + stats: args.stats ? 
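+  // 'normal' prints webpack's standard build summary; 'none' silences it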
'normal' : 'none',
+  name: `MetaMask – ${args.env}`,
+  // use the `.browserslistrc` file directly to avoid browserslist searching
+  target: `browserslist:${browsersListPath}:defaults`,
+  // TODO: look into using SourceMapDevToolPlugin and its exclude option to speed up the build
+  // TODO: put source maps in an upper level directory (like the gulp build does now)
+  // see: https://webpack.js.org/plugins/source-map-dev-tool-plugin/#host-source-maps-externally
+  devtool: args.devtool === 'none' ? false : args.devtool,
+  output: {
+    wasmLoading: 'fetch',
+    // required for `integrity` to work in the browser
+    crossOriginLoading: 'anonymous',
+    // filenames for *initial* files (essentially JS entry points)
+    filename: '[name].[contenthash].js',
+    path: join(context, '..', 'dist'),
+    // Clean the output directory before emit, so that only the latest build
+    // files remain. Nearly 0 performance penalty for this clean up step.
+    clean: true,
+    // relative to HTML page. This value is essentially prepended to asset URLs
+    // in the output HTML, i.e., `